diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f220c8d77eb..c1a32c07ded 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -17,7 +17,6 @@ name: Build CI env: CI_DEPLOY_USERNAME: ${{ secrets.CI_DEPLOY_USERNAME }} CI_DEPLOY_PASSWORD: ${{ secrets.CI_DEPLOY_PASSWORD }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} MAVEN_OPTS: "-Xmx6g" on: [push, pull_request] @@ -40,7 +39,19 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 + + - name: Cache Maven dependencies + uses: actions/cache@v3 + env: + cache-name: cache-mvn-deps + with: + path: ~/.m2/repository + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- - name: Set up JDK uses: actions/setup-java@v1 @@ -68,13 +79,12 @@ jobs: with: theme: dark - - name: Build + Test + - name: Build (PR) if: github.ref != 'refs/heads/master' run: | mvn -B -e -DskipTests=true install - mvn -B -e surefire:test -DargLine="-XX:MaxRAMPercentage=70.0" -Dsurefire.reports.directory=${GITHUB_WORKSPACE}/surefire-reports-aggregate - - name: Build + Test + Maven Deploy + Sonar + Docker Snapshot + - name: Build (with Maven Deploy + Docker Snapshot) if: github.ref == 'refs/heads/master' env: DOCKER_USERNAME: finos @@ -86,7 +96,10 @@ jobs: # and can cause problem with some code generators # See https://github.com/finos/legend-engine/pull/924 run: | - mvn -B -e -DskipTests=true deploy -P docker-snapshot,sonar + mvn -B -e -DskipTests=true deploy -P docker-snapshot + + - name: Test + run: | mvn -B -e surefire:test -DargLine="-XX:MaxRAMPercentage=70.0" -Dsurefire.reports.directory=${GITHUB_WORKSPACE}/surefire-reports-aggregate - name: Upload Test Results diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml new file mode 100644 index 00000000000..d7eab549541 --- /dev/null +++ b/.github/workflows/code-quality.yml @@ -0,0 +1,74 @@ +# Copyright 2022 Goldman Sachs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name: Code Quality Check + +on: [workflow_dispatch] + # NOTE: currently, this is failing, we need further investigation to fix this build, for now, we will + # disable this code quality check to save resources + # push: + # branches: + # - master + +env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + MAVEN_OPTS: "-Xmx6g" + +# Cancel running jobs from previous pipelines of the same workflow on PR to save resources when commits are pushed quickly +# NOTE: we don't want this behavior on the default branch +# See https://stackoverflow.com/a/68422069 +concurrency: + group: ${{ github.ref == 'refs/heads/master' && format('ci-default-branch-{0}-{1}', github.sha, github.workflow) || format('ci-pr-{0}-{1}', github.ref, github.workflow) }} + cancel-in-progress: true + +jobs: + sonar-code-check: + name: Sonar Code Quality Check + # NOTE: we cannot run this action in PR anyway because secrets are not accessible from forks + # See https://portal.productboard.com/sonarsource/1-sonarcloud/c/50-sonarcloud-analyzes-external-pull-request + # See https://community.sonarsource.com/t/github-action-ci-build-fail-with-set-the-sonar-token-env-variable/38997 + if: github.repository == 'finos/legend-engine' + # NOTE: larger runner is required to run this build + runs-on: ubuntu-latest-4-cores + + steps: + - name: Checkout repo + uses: actions/checkout@v4 + + - name: Cache Maven dependencies + uses: actions/cache@v3 + env: + cache-name: cache-mvn-deps + with: + path: ~/.m2/repository + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- + + - name: Set up JDK + uses: actions/setup-java@v1 + with: + java-version: 11 + + - name: Check Java version + run: java -version + + - name: Download deps and plugins + run: mvn de.qaware.maven:go-offline-maven-plugin:resolve-dependencies + + - name: Check Code Quality + run: | + mvn -B -e -DskipTests=true install -P sonar diff --git a/.github/workflows/release-large-runner.yml b/.github/workflows/release-large-runner.yml index 39d6abd5077..920fbb1a565 100644 --- a/.github/workflows/release-large-runner.yml +++ b/.github/workflows/release-large-runner.yml @@ -20,7 +20,7 @@ env: CI_GPG_PASSPHRASE: ${{ secrets.CI_GPG_PASSPHRASE }} DOCKER_USERNAME: finos DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - MAVEN_OPTS: -Xmx20g + MAVEN_OPTS: -Xmx10g on: workflow_dispatch: @@ -72,4 +72,4 @@ jobs: run: mvn -B -e release:prepare -Darguments='-B -e' -DpreparationGoals=clean -DreleaseVersion=${{ github.event.inputs.releaseVersion }} -DdevelopmentVersion=${{ env.DEVELOPMENT_VERSION }} -P release - name: Perform release - run: mvn -B -e release:perform -Darguments='-B -e -T 2 -DargLine="-Xmx20g"' -P release,docker + run: mvn -B -e release:perform -Darguments='-B -e -T 4 -DargLine="-Xmx12g"' -P release,docker diff --git a/CODEOWNERS b/CODEOWNERS index feb7e32fc2b..a246f0d3e3f 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -8,6 +8,7 @@ /legend-engine-xts-analytics/** @finos/legend-engine-maintainers /legend-engine-xts-authentication/** @finos/legend-engine-maintainers /legend-engine-xts-avro/** @finos/legend-engine-maintainers +/legend-engine-xts-bigqueryFunction/** @finos/legend-engine-maintainers /legend-engine-xts-changetoken/** @finos/legend-engine-maintainers /legend-engine-xts-daml/** @finos/legend-engine-maintainers /legend-engine-xts-data-push/** @finos/legend-engine-maintainers @@ -33,6 +34,7 @@ /legend-engine-xts-service/** @finos/legend-engine-maintainers
/legend-engine-xts-serviceStore/** @finos/legend-engine-maintainers /legend-engine-xts-snowflakeApp/** @finos/legend-engine-maintainers +/legend-engine-xts-hostedService/** @finos/legend-engine-maintainers /legend-engine-xts-sql/** @finos/legend-engine-maintainers /legend-engine-xts-text/** @finos/legend-engine-maintainers /legend-engine-xts-xml/** @finos/legend-engine-maintainers diff --git a/docs/connection/new-connection-framework-metamodel.png b/docs/connection/new-connection-framework-metamodel.png new file mode 100644 index 00000000000..151efcc4de2 Binary files /dev/null and b/docs/connection/new-connection-framework-metamodel.png differ diff --git a/docs/connection/new-connection-framework.md b/docs/connection/new-connection-framework.md new file mode 100644 index 00000000000..54c5edf9fca --- /dev/null +++ b/docs/connection/new-connection-framework.md @@ -0,0 +1,166 @@ +# New Connection Framework (PoC) + +This is _an attempt_ to clean up the existing connection acquisition framework to make it (1) more extensible, (2) better integrated with the work that has been done on [identity/credential management](https://github.com/finos/legend/wiki/Legend-Authentication) by @epsstan as well as with feedback from @pierredebelen, and (3) less complex. The core mission centers on: **_"How do we make it easy for people (both platform developers and users) to acquire a database connection given their identity?"_** + +> This is still a work in progress; we haven't enabled this in production. To use/test, set the environment variable `org.finos.legend.engine.enableNewConnectionFramework=true` when starting the server. To execute, since at the time of writing we haven't included the new connection models in the latest production protocol, choose `clientVersion=vX_X_X` + +## Overview + +Fundamentally, the new connection framework improves the developer experience of integrating a new type of connection by making it consistent across different types of databases (not limited to only relational databases); in other words, adding support for `MongoDB` or `ElasticSearch` should require a similar set of changes to adding support for `Snowflake` or `BigQuery`, for example. + +The new API is also designed to be more declarative about the connection acquisition flows that it supports; hence, it's a continuation of the work that has been done with [DatabaseAuthenticationFlow](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/src/main/java/org/finos/legend/engine/authentication/DatabaseAuthenticationFlow.java#L42) by @epsstan. In the new framework, when we set up the connection manager/factory, we make it very explicit which flows are supported (which types of credentials can be resolved given an identity, and with those credentials, which types of connections can be acquired). + +Last but not least, we want to clean up the existing connection pooling code built on [HikariCP](https://github.com/brettwooldridge/HikariCP), which has become fairly complex after many iterations. + +For more details on design and implementation, see the following sub-sections: + +- [Connection Factory](#connection-factory) +- [Connection Pooling](#connection-pooling) +- [Migration Strategy](#migration-strategy) +- [Future Work / TODO](#future-work--todo) + +## Connection Factory + +`ConnectionFactory` is the entry point of the new connection framework.
It is simplified to specifically address the main objective: **given an identity, obtain a connection to a database**. A new set of (meta)models has been created to serve this new API. + +These models are heavily inspired by [RelationalDatabaseConnection](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/RelationalDatabaseConnection.java#L25); a `Connection` in the new framework comes with `AuthenticationConfiguration` corresponding to [AuthenticationStrategy](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/authentication/AuthenticationStrategy.java#L21) and `ConnectionSpecification` corresponding to [DatasourceSpecification](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/specification/DatasourceSpecification.java#L21). The former is responsible for providing the extra metadata needed to obtain the necessary credentials from the user's identity, while the latter provides the metadata to obtain the actual connection to the database given the obtained credentials. + +![framework-metamodel](./new-connection-framework-metamodel.png) + +> We believe in this abstraction since concepts like `authentication` and `connection specification` are certainly applicable to NoSQL databases like `MongoDB` and `ElasticSearch` + +Each connection targets a particular type of database (similar to [RelationalDatabaseConnection.databaseType](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/RelationalDatabaseConnection.java#L29)), which comes with a set of database support defined in the platform configuration. As of now, this database support only includes information about the various authentication mechanisms that a database supports in Legend. For example, for `Snowflake`, we only support `OAuth` and `KeyPair`; for `Postgres`, we only support `UsernamePassword`. Since `AuthenticationConfiguration` provides extra metadata to obtain credentials, each type of configuration is therefore bound to an `AuthenticationMechanism` -- this binding is also something defined explicitly at the top level.
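+ +For illustration, here is roughly how a database support and its authentication mechanisms are declared when setting up the environment -- a sketch adapted from the integration tests in this PR (`AbstractConnectionFactoryTest`); since this is still a PoC, the exact builder APIs may change: + +```java +import org.finos.legend.connection.AuthenticationMechanism; +import org.finos.legend.connection.DatabaseSupport; +import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; +import org.finos.legend.connection.impl.RelationalDatabaseType; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; + +// declare that Postgres is supported, and that username/password is the only +// authentication mechanism allowed for it, configured via UserPasswordAuthenticationConfiguration +LegendEnvironment environment = LegendEnvironment.builder() + .databaseSupports( + DatabaseSupport.builder() + .type(RelationalDatabaseType.POSTGRES) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) + .build() + ) + .build() + ) + .build(); +```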
+ +> Unlike [RelationalDatabaseConnection.databaseType](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/RelationalDatabaseConnection.java#L29), which is practically an enum -- not modularizable and hardcoded to relational types -- `database support` is more extensible and a useful concept on its own; in the future, we can add more database capabilities here as needed + +As for the `ConnectionFactory`, it's configured with two types of components: `CredentialBuilder` - takes the identity and builds a credential (these can be chained), and `ConnectionBuilder` - takes a credential and acquires a connection. + +```java +// illustrative shape only -- the generic parameters are indicative, see the actual classes in the codebase +class ConnectionBuilder<CONNECTION, CRED extends Credential, SPEC extends ConnectionSpecification> +... + +class CredentialBuilder<CONFIG extends AuthenticationConfiguration, INPUT_CRED extends Credential, OUTPUT_CRED extends Credential> +... +``` + +> Notice how the credential builder is bound to the authentication configuration, as some credential builders need extra metadata from the configuration to obtain the credential; for example, `UserPasswordCredentialBuilder` needs the name of the user and the reference to the password in the secret vault. @epsstan mentioned here that we should also tie `CredentialBuilder` to the `AuthenticationMechanism`, which is a point worth exploring. + +The factory exposes two methods: `getAuthenticator(Identity, ConnectionInfo): Authenticator` and `getConnection(Identity, Authenticator): Connection`. Given the connection information (i.e. the `Connection` metamodel) and the user's identity, the factory will resolve a chain of credential builders that can sequentially _authenticate_ the user and obtain the necessary credentials to acquire a connection to the specified database. The second method is self-explanatory: the factory uses the authenticator to establish the connection given the user's identity. + +> The resolution of the credential-builder chain is implemented in a `breadth-first search` manner, and the fact that we configure the factory with small components makes it convenient to reuse these pieces in different flows, easier to debug, and flexible enough to support fairly complex flows: e.g. given a `kerberos` identity, obtain a password from the vault which can be used to obtain a `single-sign-on` token, and so on... + +```mermaid +sequenceDiagram + participant USER + participant factory as ConnectionFactory + participant connBuilder as ConnectionBuilder + participant pool as ConnectionPool + participant credBuilder as CredentialBuilder + participant database as Database + + Note over USER: (comes with an Identity) + USER ->>+ factory: getAuthenticator(identity, connectionInfo) + factory --)- USER: authenticator + USER ->>+ factory: getConnection(identity, authenticator) + factory ->>+ connBuilder: getConnection(connectionInfo, authenticator, identity) + connBuilder ->>+ pool: getConnectionFromPool(identity, authenticator, connectionInfo) + alt get available connection in pool + pool --)- connBuilder: connection + else establish new connection + connBuilder ->>+ pool: getConnectionFromPool(identity, authenticator, ...)
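+ %% each CredentialBuilder in the resolved chain transforms the credential produced by the previous step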
+ loop each credential builder from authenticator + pool ->>+ credBuilder: getCredential(identity, credentials) + credBuilder --)- pool: credentials + end + pool ->>+ database: getConnection(credentials, connectionInfo) + database --)- pool: connection + pool --)- connBuilder: connection + end + connBuilder --)- factory: connection + factory --)- USER: connection +``` + +## Connection Pooling + +The current implementation of the connection pool is fairly simple: we maintain a hashmap that indexes connections by database and user identity. This hasn't been able to address @pierredebelen's biggest concern: when a connection from the pool dies (e.g. it could time out or be closed by the database), we don't have a way to re-establish that connection with the user's identity since `HikariCP` creates connections asynchronously; caching the identity across threads is a potential security risk. + +> Right now, we cache the identity in the connection pool for a brief period of time and have a cleanup process that runs on a separate thread to remove connections/pools with expired/invalid credentials. + +## Migration Strategy + +The bulk of the work is to migrate the existing relational database connection framework to the new framework. To do that, we opt for a gradual migration strategy, converting one [DatabaseAuthenticationFlow](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/src/main/java/org/finos/legend/engine/authentication/DatabaseAuthenticationFlow.java#L42) [flow at a time](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/src/main/java/org/finos/legend/engine/authentication/LegendDefaultDatabaseAuthenticationFlowProvider.java#L50). We create new subtypes of `DatasourceSpecification` and `AuthenticationStrategy` that wrap the new models `ConnectionSpecification` and `AuthenticationConfiguration` respectively. + +```java +public class ConnectionSpecificationWrapper extends DatasourceSpecification +{ + public ConnectionSpecification value; +} + +public class AuthenticationConfigurationWrapper extends AuthenticationStrategy +{ + public AuthenticationConfiguration value; +} +``` + +In `RelationalExecutor`'s `ConnectionManagerSelector.getDatabaseConnectionImpl(...)`, we check for the presence of the wrapper types to determine if we can opt in to the new connection framework to acquire connections. + +At the top level, we configure a custom deserializer for [RelationalDatabaseConnection](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/RelationalDatabaseConnection.java#L25): if the connection matches a particular combination, we convert its datasource specification and authentication strategy to the wrapper types.
For example, if the connection database type is [Snowflake](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/DatabaseType.java#L20) with [SnowflakeDatasourceSpecification](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/specification/SnowflakeDatasourceSpecification.java#L17) and [SnowflakePublicAuthenticationStrategy](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/store/relational/connection/authentication/SnowflakePublicAuthenticationStrategy.java#L17) (which corresponds to [SnowflakeWithKeyPairFlow](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/main/java/org/finos/legend/engine/authentication/flows/SnowflakeWithKeyPairFlow.java#L49)), we can convert it to use the wrapper types, such as: + +``` +RelationalDatabaseConnection connection::snowflake +{ + store: store::MyDatabase; + type: Snowflake; + specification: ConnectionSpecification // wrapper type + { + rawValue: #{ + { + "_type" : "snowflake", + "accountName" : "ki79827", + "region" : "us-east-2", + "warehouseName" : "SUMMIT_DEV", + "databaseName" : "SUMMIT_DEV", + "cloudType" : "aws", + "role" : "SUMMIT_DEV" + } + }#; + }; + auth: AuthenticationConfiguration // wrapper type + { + rawValue: #{ + { + "_type" : "KeyPair", + "userName" : "SUMMIT_DEV1", + "privateKey" : { + "_type" : "properties", + "propertyName" : "TEST_SNOWFLAKE_PK" + }, + "passphrase" : { + "_type" : "properties", + "propertyName" : "TEST_SNOWFLAKE_PK_PASSPHRASE" + } + } + }#; + }; +} +``` + +> Note: since this is still a PoC, we _hack_ a fair bit to make things just work while keeping things loose enough to adapt to new feedback; therefore, we haven't settled on a particular grammar/Pure protocol/Pure metamodel -- that's something we have to come back and change + +> When migrating to the new models, we also use the new `CredentialVault` mechanism created by @epsstan, which requires specifying the type of [CredentialVaultSecret](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/authentication/vault/CredentialVaultSecret.java#L23), e.g.
[PropertiesFileSecret](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/authentication/vault/PropertiesFileSecret.java#L21C14-L21C34), [EnvironmentCredentialVaultSecret](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/authentication/vault/EnvironmentCredentialVaultSecret.java#L21C14-L21C46), etc. We need to be aware of this when we configure the custom deserializer; for legacy infrastructure, we will most likely use its proprietary secret vault implementation. + +Last but not least, since this PoC currently supports only one flow, `SnowflakeWithKeyPair`, to test it out we need to set the environment variable `org.finos.legend.engine.enableNewConnectionFramework=true` when starting the server. +For the execution flow, since we need to transform the Pure metamodel into protocol as part of building the execution plan, we need to make changes to the `Pure protocol`; right now those changes are in `vX_X_X`, so when executing, we need to set `clientVersion=vX_X_X`. + +## Future Work / TODO + +- [ ] Settle on the metamodels and implement grammar/compiler/Pure protocol/metamodel code for these models (search for the marker **`@HACKY: new-connection-framework`** in the codebase) +- [ ] Improve the connection pooling recovery mechanism +- [ ] Improve connection acquisition flow coverage, see [LegendDefaultDatabaseAuthenticationFlowProvider](https://github.com/finos/legend-engine/blob/07e7129ec2b68d7c1606dd157c23e65d7cd6857b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/src/main/java/org/finos/legend/engine/authentication/LegendDefaultDatabaseAuthenticationFlowProvider.java#L50) +- [ ] Make use of this new connection framework in the data-push server to battle-test the implementation +- [ ] Design parameterized tests for the new connection framework flows. These tests should be parameterized on (potentially) `database type`, `credential vault type`, `connection specification`, `authentication mechanism`, `authentication configuration` and should have a simple `READ` operation to ensure the connection is working.
+- [ ] Consistently support `aws-sdk S3`-styled builder patterns +- [ ] Cleanups: + - [ ] Move `GCPApplicatonDefaultsCredential` to `bigquery` db extension + - [ ] Clean up logic for `IdentityFactory` + - [ ] Potentially unify the work done on `xt-authentication` with the new connection framework diff --git a/legend-engine-application-query/pom.xml b/legend-engine-application-query/pom.xml index a5644d49a5d..e892e4cf443 100644 --- a/legend-engine-application-query/pom.xml +++ b/legend-engine-application-query/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-application-query diff --git a/legend-engine-config/legend-engine-configuration/pom.xml b/legend-engine-config/legend-engine-configuration/pom.xml index 19b085a060c..dbc53d392c2 100644 --- a/legend-engine-config/legend-engine-configuration/pom.xml +++ b/legend-engine-config/legend-engine-configuration/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-config - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-configuration diff --git a/legend-engine-config/legend-engine-connection-integration-tests/pom.xml b/legend-engine-config/legend-engine-connection-integration-tests/pom.xml index e8b2a4156e3..82f6fb194d7 100644 --- a/legend-engine-config/legend-engine-connection-integration-tests/pom.xml +++ b/legend-engine-config/legend-engine-connection-integration-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-config - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-connection-integration-tests @@ -46,7 +46,12 @@ org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory + test + + + org.finos.legend.engine + legend-engine-xt-connection-protocol + test + @@ -54,11 +59,21 @@ legend-engine-xt-relationalStore-connection test + + org.finos.legend.engine + legend-engine-xt-relationalStore-protocol + test + org.finos.legend.engine legend-engine-xt-relationalStore-postgres-test-support test + + org.finos.legend.engine + legend-engine-xt-relationalStore-snowflake-protocol + test + diff --git a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/AbstractConnectionFactoryTest.java b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/AbstractConnectionFactoryTest.java index e5b6435ece9..27b8e42a8c6 100644 --- a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/AbstractConnectionFactoryTest.java +++ b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/AbstractConnectionFactoryTest.java @@ -17,25 +17,26 @@ import org.finos.legend.authentication.vault.CredentialVault; import org.finos.legend.authentication.vault.impl.EnvironmentCredentialVault; import org.finos.legend.authentication.vault.impl.SystemPropertiesCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; +import org.finos.legend.connection.AuthenticationMechanism; import org.finos.legend.connection.Authenticator; +import org.finos.legend.connection.Connection; import org.finos.legend.connection.ConnectionFactory; +import org.finos.legend.connection.DatabaseSupport; import org.finos.legend.connection.DatabaseType; import org.finos.legend.connection.IdentityFactory; import org.finos.legend.connection.IdentitySpecification; -import
org.finos.legend.connection.impl.InstrumentedStoreInstanceProvider; import org.finos.legend.connection.LegendEnvironment; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.impl.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; import org.finos.legend.connection.impl.KerberosCredentialExtractor; import org.finos.legend.connection.impl.KeyPairCredentialBuilder; +import org.finos.legend.connection.impl.RelationalDatabaseType; import org.finos.legend.connection.impl.SnowflakeConnectionBuilder; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; -import org.finos.legend.connection.impl.UserPasswordCredentialBuilder; import org.finos.legend.connection.impl.StaticJDBCConnectionBuilder; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; +import org.finos.legend.connection.impl.UserPasswordCredentialBuilder; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -43,11 +44,8 @@ public abstract class AbstractConnectionFactoryTest { - protected static final String TEST_STORE_INSTANCE_NAME = "test-store"; - protected LegendEnvironment environment; protected IdentityFactory identityFactory; - protected InstrumentedStoreInstanceProvider storeInstanceProvider; protected ConnectionFactory connectionFactory; @BeforeEach @@ -55,26 +53,30 @@ public void initialize() { this.setup(); - LegendEnvironment.Builder environmentBuilder = new LegendEnvironment.Builder() - .withVaults( + LegendEnvironment.Builder environmentBuilder = LegendEnvironment.builder() + .vaults( new SystemPropertiesCredentialVault(), new EnvironmentCredentialVault() ) - .withStoreSupports( - new RelationalDatabaseStoreSupport.Builder(DatabaseType.POSTGRES) - .withIdentifier("Postgres") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).withAuthenticationConfigurationTypes( - UserPasswordAuthenticationConfiguration.class - ).build() + .databaseSupports( + DatabaseSupport.builder() + .type(RelationalDatabaseType.POSTGRES) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes( + UserPasswordAuthenticationConfiguration.class + ).build() ) .build(), - new RelationalDatabaseStoreSupport.Builder(DatabaseType.SNOWFLAKE) - .withIdentifier("Snowflake") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KEY_PAIR).withAuthenticationConfigurationTypes( - EncryptedPrivateKeyPairAuthenticationConfiguration.class - ).build() + DatabaseSupport.builder() + .type(RelationalDatabaseType.SNOWFLAKE) + .authenticationMechanisms( + 
AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.KEY_PAIR) + .authenticationConfigurationTypes( + EncryptedPrivateKeyPairAuthenticationConfiguration.class + ).build() ) .build() ); @@ -82,22 +84,23 @@ public void initialize() CredentialVault credentialVault = this.getCredentialVault(); if (credentialVault != null) { - environmentBuilder.withVault(credentialVault); + environmentBuilder.vault(credentialVault); } this.environment = environmentBuilder.build(); - this.identityFactory = new IdentityFactory.Builder(this.environment) + this.identityFactory = IdentityFactory.builder() + .environment(this.environment) .build(); - this.storeInstanceProvider = new InstrumentedStoreInstanceProvider(); - this.connectionFactory = new ConnectionFactory.Builder(this.environment, this.storeInstanceProvider) - .withCredentialBuilders( + this.connectionFactory = ConnectionFactory.builder() + .environment(this.environment) + .credentialBuilders( new KerberosCredentialExtractor(), new UserPasswordCredentialBuilder(), new KeyPairCredentialBuilder() ) - .withConnectionBuilders( + .connectionBuilders( new StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword(), new SnowflakeConnectionBuilder.WithKeyPair() ) @@ -119,10 +122,12 @@ public CredentialVault getCredentialVault() return null; } - public abstract StoreInstance getStoreInstance(); - public abstract Identity getIdentity(); + public abstract DatabaseType getDatabaseType(); + + public abstract ConnectionSpecification getConnectionSpecification(); + public abstract AuthenticationConfiguration getAuthenticationConfiguration(); public abstract void runTestWithConnection(T connection) throws Exception; @@ -130,11 +135,19 @@ public CredentialVault getCredentialVault() @Test public void runTest() throws Exception { - this.storeInstanceProvider.injectStoreInstance(this.getStoreInstance()); Identity identity = this.getIdentity(); + DatabaseType databaseType = this.getDatabaseType(); + ConnectionSpecification connectionSpecification = this.getConnectionSpecification(); AuthenticationConfiguration authenticationConfiguration = this.getAuthenticationConfiguration(); - Authenticator authenticator = this.connectionFactory.getAuthenticator(identity, TEST_STORE_INSTANCE_NAME, authenticationConfiguration); + Connection databaseConnection = Connection.builder() + .databaseSupport(this.environment.getDatabaseSupport(databaseType)) + .identifier("test::connection") + .connectionSpecification(connectionSpecification) + .authenticationConfiguration(authenticationConfiguration) + .build(); + + Authenticator authenticator = this.connectionFactory.getAuthenticator(identity, databaseConnection); T connection = this.connectionFactory.getConnection(identity, authenticator); this.runTestWithConnection(connection); @@ -146,8 +159,8 @@ public void runTest() throws Exception protected static Identity getAnonymousIdentity(IdentityFactory identityFactory) { return identityFactory.createIdentity( - new IdentitySpecification.Builder() - .withName("test-user") + IdentitySpecification.builder() + .name("test-user") .build() ); } diff --git a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestJDBCConnectionManager.java b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestJDBCConnectionManager.java index 7abf243d608..dc08b4dc741 100644 --- 
a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestJDBCConnectionManager.java +++ b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestJDBCConnectionManager.java @@ -16,47 +16,43 @@ import net.bytebuddy.asm.Advice; import org.finos.legend.authentication.vault.impl.PropertiesFileCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; +import org.finos.legend.connection.AuthenticationMechanism; import org.finos.legend.connection.Authenticator; +import org.finos.legend.connection.Connection; import org.finos.legend.connection.ConnectionFactory; -import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.DatabaseSupport; import org.finos.legend.connection.IdentityFactory; import org.finos.legend.connection.IdentitySpecification; -import org.finos.legend.connection.JDBCConnectionBuilder; import org.finos.legend.connection.LegendEnvironment; import org.finos.legend.connection.PostgresTestContainerWrapper; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.impl.InstrumentedStoreInstanceProvider; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; +import org.finos.legend.connection.impl.JDBCConnectionBuilder; import org.finos.legend.connection.impl.JDBCConnectionManager; +import org.finos.legend.connection.impl.RelationalDatabaseType; import org.finos.legend.connection.impl.StaticJDBCConnectionBuilder; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; import org.finos.legend.connection.impl.UserPasswordCredentialBuilder; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; -import org.finos.legend.connection.protocol.ConnectionSpecification; -import org.finos.legend.connection.protocol.StaticJDBCConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.model.connection.StaticJDBCConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import java.sql.Connection; import java.sql.SQLTransientConnectionException; import java.util.Properties; public class TestJDBCConnectionManager { PostgresTestContainerWrapper postgresContainer; - private static final String TEST_STORE_INSTANCE_NAME = "test-store"; private LegendEnvironment environment; private IdentityFactory identityFactory; - private InstrumentedStoreInstanceProvider storeInstanceProvider; private ConnectionFactory connectionFactory; - private StoreInstance storeInstance; + private Connection connection; @BeforeEach public void setup() @@ -67,32 +63,37 @@ public void setup() Properties properties = new Properties(); properties.put("passwordRef", 
this.postgresContainer.getPassword()); - LegendEnvironment.Builder environmentBuilder = new LegendEnvironment.Builder() - .withVaults(new PropertiesFileCredentialVault(properties)) - .withStoreSupports( - new RelationalDatabaseStoreSupport.Builder(DatabaseType.POSTGRES) - .withIdentifier("Postgres") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).withAuthenticationConfigurationTypes( - UserPasswordAuthenticationConfiguration.class - ).build() + LegendEnvironment.Builder environmentBuilder = LegendEnvironment.builder() + .vaults(new PropertiesFileCredentialVault(properties)) + .databaseSupports( + DatabaseSupport.builder() + .type(RelationalDatabaseType.POSTGRES) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD).authenticationConfigurationTypes( + UserPasswordAuthenticationConfiguration.class + ).build() ) .build() ); this.environment = environmentBuilder.build(); - this.identityFactory = new IdentityFactory.Builder(this.environment) + this.identityFactory = IdentityFactory.builder() + .environment(this.environment) .build(); - this.storeInstanceProvider = new InstrumentedStoreInstanceProvider(); ConnectionSpecification connectionSpecification = new StaticJDBCConnectionSpecification( this.postgresContainer.getHost(), this.postgresContainer.getPort(), this.postgresContainer.getDatabaseName() ); - this.storeInstance = new StoreInstance.Builder(this.environment) - .withIdentifier(TEST_STORE_INSTANCE_NAME) - .withStoreSupportIdentifier("Postgres") - .withConnectionSpecification(connectionSpecification) + this.connection = Connection.builder() + .databaseSupport(this.environment.getDatabaseSupport(RelationalDatabaseType.POSTGRES)) + .identifier("test::connection") + .connectionSpecification(connectionSpecification) + .authenticationConfiguration(new UserPasswordAuthenticationConfiguration( + postgresContainer.getUser(), + new PropertiesFileSecret("passwordRef") + )) .build(); } @@ -114,39 +115,37 @@ public void testBasicConnectionPooling() throws Exception .withConnectionTimeout(1000L) .build() ); - this.connectionFactory = new ConnectionFactory.Builder(this.environment, this.storeInstanceProvider) - .withCredentialBuilders( + this.connectionFactory = ConnectionFactory.builder() + .environment(this.environment) + .credentialBuilders( new UserPasswordCredentialBuilder() ) - .withConnectionBuilders( + .connectionBuilders( customizedJDBCConnectionBuilder ) .build(); - this.storeInstanceProvider.injectStoreInstance(this.storeInstance); Identity identity = identityFactory.createIdentity( - new IdentitySpecification.Builder() - .withName("test-user") + IdentitySpecification.builder() + .name("test-user") .build() ); - ConnectionSpecification connectionSpecification = this.storeInstance.getConnectionSpecification(); - AuthenticationConfiguration authenticationConfiguration = new UserPasswordAuthenticationConfiguration( - postgresContainer.getUser(), - new PropertiesFileSecret("passwordRef") - ); - Authenticator authenticator = this.connectionFactory.getAuthenticator(identity, TEST_STORE_INSTANCE_NAME, authenticationConfiguration); + ConnectionSpecification connectionSpecification = this.connection.getConnectionSpecification(); + AuthenticationConfiguration authenticationConfiguration = this.connection.getAuthenticationConfiguration(); + + Authenticator authenticator = this.connectionFactory.getAuthenticator(identity, this.connection); 
JDBCConnectionManager connectionManager = JDBCConnectionManager.getInstance(); Assertions.assertEquals(0, connectionManager.getPoolSize()); // 1. Get a connection, this should initialize the pool as well as create a new connection in the empty pool // this connection should be active - Connection connection0 = this.connectionFactory.getConnection(identity, authenticator); + java.sql.Connection connection0 = this.connectionFactory.getConnection(identity, authenticator); String poolName = JDBCConnectionManager.getPoolName(identity, connectionSpecification, authenticationConfiguration); JDBCConnectionManager.ConnectionPool connectionPool = connectionManager.getPool(poolName); // 2. Close the connection, verify that the pool keeps this connection around in idle state - Connection underlyingConnection0 = connection0.unwrap(Connection.class); + java.sql.Connection underlyingConnection0 = connection0.unwrap(java.sql.Connection.class); connection0.close(); Assertions.assertEquals(1, connectionPool.getTotalConnections()); @@ -154,9 +153,9 @@ public void testBasicConnectionPooling() throws Exception Assertions.assertEquals(1, connectionPool.getIdleConnections()); // 3. Get a new connection, the pool should return the idle connection and create no new connection - Connection connection1 = this.connectionFactory.getConnection(identity, authenticator); + java.sql.Connection connection1 = this.connectionFactory.getConnection(identity, authenticator); - Assertions.assertEquals(underlyingConnection0, connection1.unwrap(Connection.class)); + Assertions.assertEquals(underlyingConnection0, connection1.unwrap(java.sql.Connection.class)); Assertions.assertEquals(1, connectionPool.getTotalConnections()); Assertions.assertEquals(1, connectionPool.getActiveConnections()); Assertions.assertEquals(0, connectionPool.getIdleConnections()); @@ -180,36 +179,33 @@ public void testBasicConnectionPooling() throws Exception @Test public void testConnectionPoolingForDifferentIdentities() throws Exception { - this.connectionFactory = new ConnectionFactory.Builder(this.environment, this.storeInstanceProvider) - .withCredentialBuilders( + this.connectionFactory = ConnectionFactory.builder() + .environment(this.environment) + .credentialBuilders( new UserPasswordCredentialBuilder() ) - .withConnectionBuilders( + .connectionBuilders( new StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword() ) .build(); - this.storeInstanceProvider.injectStoreInstance(this.storeInstance); Identity identity1 = identityFactory.createIdentity( - new IdentitySpecification.Builder() - .withName("testUser1") + IdentitySpecification.builder() + .name("testUser1") .build() ); Identity identity2 = identityFactory.createIdentity( - new IdentitySpecification.Builder() - .withName("testUser2") + IdentitySpecification.builder() + .name("testUser2") .build() ); - ConnectionSpecification connectionSpecification = this.storeInstance.getConnectionSpecification(); - AuthenticationConfiguration authenticationConfiguration = new UserPasswordAuthenticationConfiguration( - postgresContainer.getUser(), - new PropertiesFileSecret("passwordRef") - ); + ConnectionSpecification connectionSpecification = this.connection.getConnectionSpecification(); + AuthenticationConfiguration authenticationConfiguration = this.connection.getAuthenticationConfiguration(); JDBCConnectionManager connectionManager = JDBCConnectionManager.getInstance(); Assertions.assertEquals(0, connectionManager.getPoolSize()); // 1. 
Get a new connection for identity1, which should initialize a pool - this.connectionFactory.getConnection(identity1, this.connectionFactory.getAuthenticator(identity1, TEST_STORE_INSTANCE_NAME, authenticationConfiguration)); + this.connectionFactory.getConnection(identity1, this.connectionFactory.getAuthenticator(identity1, this.connection)); String poolName1 = JDBCConnectionManager.getPoolName(identity1, connectionSpecification, authenticationConfiguration); JDBCConnectionManager.ConnectionPool connectionPool1 = connectionManager.getPool(poolName1); @@ -220,7 +216,7 @@ public void testConnectionPoolingForDifferentIdentities() throws Exception Assertions.assertEquals(0, connectionPool1.getIdleConnections()); // 2. Get a new connection for identity2, which should initialize another pool - this.connectionFactory.getConnection(identity2, this.connectionFactory.getAuthenticator(identity2, TEST_STORE_INSTANCE_NAME, authenticationConfiguration)); + this.connectionFactory.getConnection(identity2, this.connectionFactory.getAuthenticator(identity2, this.connection)); String poolName2 = JDBCConnectionManager.getPoolName(identity2, connectionSpecification, authenticationConfiguration); JDBCConnectionManager.ConnectionPool connectionPool2 = connectionManager.getPool(poolName2); @@ -234,7 +230,7 @@ public void testConnectionPoolingForDifferentIdentities() throws Exception @Test public void testRetryOnBrokenConnection() { - // + // TODO } public static class CustomAdvice @@ -246,50 +242,4 @@ public static void intercept(@Advice.Return(readOnly = false) String value) value = "hi: " + value; } } - -// public static class MyWay -// { -// } -// -// private static class InstrumentedStaticJDBCConnectionBuilder -// { -// static class WithPlaintextUsernamePassword extends StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword -// { -// WithPlaintextUsernamePassword(Function hikariConfigHandler) -// { -// this.connectionManager = new InstrumentedJDBCConnectionManager(hikariConfigHandler); -// } -// -// @Override -// public JDBCConnectionManager getConnectionManager() -// { -// return this.connectionManager; -// } -// -// @Override -// protected Type[] actualTypeArguments() -// { -// Type genericSuperClass = this.getClass().getSuperclass().getGenericSuperclass(); -// ParameterizedType parameterizedType = (ParameterizedType) genericSuperClass; -// return parameterizedType.getActualTypeArguments(); -// } -// } -// } -// -// private static class InstrumentedJDBCConnectionManager extends JDBCConnectionManager -// { -// private final Function hikariConfigHandler; -// -// InstrumentedJDBCConnectionManager(Function hikariConfigHandler) -// { -// this.hikariConfigHandler = hikariConfigHandler; -// } -// -//// @Override -//// protected void handleHikariConfig(HikariConfig config) -//// { -//// config.setRegisterMbeans(true); -//// this.hikariConfigHandler.apply(config); -//// } -// } } diff --git a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestPostgresConnection.java b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestPostgresConnection.java index 9e311f05fa8..6c9addedd6b 100644 --- a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestPostgresConnection.java +++ 
b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestPostgresConnection.java @@ -16,18 +16,16 @@ import org.finos.legend.authentication.vault.CredentialVault; import org.finos.legend.authentication.vault.impl.PropertiesFileCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; +import org.finos.legend.connection.DatabaseType; import org.finos.legend.connection.PostgresTestContainerWrapper; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; -import org.finos.legend.connection.protocol.StaticJDBCConnectionSpecification; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.connection.impl.RelationalDatabaseType; +import org.finos.legend.engine.protocol.pure.v1.model.connection.StaticJDBCConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; -import java.sql.Connection; import java.sql.Statement; import java.util.Properties; @@ -35,7 +33,7 @@ public class TestPostgresConnection { - public static class WithUserPassword extends AbstractConnectionFactoryTest + public static class WithUserPassword extends AbstractConnectionFactoryTest { private PostgresTestContainerWrapper postgresContainer; @@ -71,27 +69,25 @@ public CredentialVault getCredentialVault() } @Override - public StoreInstance getStoreInstance() + public Identity getIdentity() { - ConnectionSpecification connectionSpecification = new StaticJDBCConnectionSpecification( - this.postgresContainer.getHost(), - this.postgresContainer.getPort(), - this.postgresContainer.getDatabaseName() - ); - return new StoreInstance.Builder(this.environment) - .withIdentifier(TEST_STORE_INSTANCE_NAME) - .withStoreSupportIdentifier("Postgres") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).build() - ) - .withConnectionSpecification(connectionSpecification) - .build(); + return getAnonymousIdentity(this.identityFactory); } @Override - public Identity getIdentity() + public DatabaseType getDatabaseType() { - return getAnonymousIdentity(this.identityFactory); + return RelationalDatabaseType.POSTGRES; + } + + @Override + public ConnectionSpecification getConnectionSpecification() + { + return new StaticJDBCConnectionSpecification( + this.postgresContainer.getHost(), + this.postgresContainer.getPort(), + this.postgresContainer.getDatabaseName() + ); } @Override @@ -104,7 +100,7 @@ public AuthenticationConfiguration getAuthenticationConfiguration() } @Override - public void runTestWithConnection(Connection connection) throws Exception + public void runTestWithConnection(java.sql.Connection connection) throws Exception { Statement statement = connection.createStatement(); statement.setMaxRows(10); diff --git 
a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestSnowflakeConnection.java b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestSnowflakeConnection.java index 825725a5eea..b0874a64e54 100644 --- a/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestSnowflakeConnection.java +++ b/legend-engine-config/legend-engine-connection-integration-tests/src/test/java/org/finos/legend/engine/connection/test/TestSnowflakeConnection.java @@ -16,17 +16,16 @@ import org.finos.legend.authentication.vault.CredentialVault; import org.finos.legend.authentication.vault.impl.PropertiesFileCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.impl.EncryptedPrivateKeyPairAuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; -import org.finos.legend.connection.protocol.SnowflakeConnectionSpecification; +import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.impl.RelationalDatabaseType; +import org.finos.legend.engine.protocol.pure.v1.connection.SnowflakeConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.EnvironmentCredentialVaultSecret; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; -import java.sql.Connection; import java.sql.Statement; import java.util.Properties; @@ -34,7 +33,7 @@ public class TestSnowflakeConnection { - public static class WithKeyPair extends AbstractConnectionFactoryTest + public static class ForSnowflakeWithKeyPairFlow extends AbstractConnectionFactoryTest { private static final String CONNECTION_INTEGRATION_TEST__SNOWFLAKE_PK = "CONNECTION_INTEGRATION_TEST__SNOWFLAKE_PK"; private static final String CONNECTION_INTEGRATION_TEST__SNOWFLAKE_PK_PASSPHRASE = "CONNECTION_INTEGRATION_TEST__SNOWFLAKE_PK_PASSPHRASE"; @@ -71,7 +70,19 @@ public CredentialVault getCredentialVault() } @Override - public StoreInstance getStoreInstance() + public Identity getIdentity() + { + return getAnonymousIdentity(this.identityFactory); + } + + @Override + public DatabaseType getDatabaseType() + { + return RelationalDatabaseType.SNOWFLAKE; + } + + @Override + public ConnectionSpecification getConnectionSpecification() { SnowflakeConnectionSpecification connectionSpecification = new SnowflakeConnectionSpecification(); connectionSpecification.databaseName = "SUMMIT_DEV"; @@ -80,20 +91,7 @@ public StoreInstance getStoreInstance() connectionSpecification.region = "us-east-2"; connectionSpecification.cloudType = "aws"; connectionSpecification.role = "SUMMIT_DEV"; - return new StoreInstance.Builder(this.environment) - .withIdentifier(TEST_STORE_INSTANCE_NAME) - .withStoreSupportIdentifier("Snowflake") - 
.withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KEY_PAIR).build() - ) - .withConnectionSpecification(connectionSpecification) - .build(); - } - - @Override - public Identity getIdentity() - { - return getAnonymousIdentity(this.identityFactory); + return connectionSpecification; } @Override @@ -107,7 +105,7 @@ public AuthenticationConfiguration getAuthenticationConfiguration() } @Override - public void runTestWithConnection(Connection connection) throws Exception + public void runTestWithConnection(java.sql.Connection connection) throws Exception { Statement statement = connection.createStatement(); statement.setMaxRows(10); diff --git a/legend-engine-config/legend-engine-extensions-collection-execution/pom.xml b/legend-engine-config/legend-engine-extensions-collection-execution/pom.xml index b13cd3058fb..febeb4fe0ab 100644 --- a/legend-engine-config/legend-engine-extensions-collection-execution/pom.xml +++ b/legend-engine-config/legend-engine-extensions-collection-execution/pom.xml @@ -19,7 +19,7 @@ legend-engine-config org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-config/legend-engine-extensions-collection-generation/pom.xml b/legend-engine-config/legend-engine-extensions-collection-generation/pom.xml index 3067f7c74f8..e702972ddc2 100644 --- a/legend-engine-config/legend-engine-extensions-collection-generation/pom.xml +++ b/legend-engine-config/legend-engine-extensions-collection-generation/pom.xml @@ -19,7 +19,7 @@ legend-engine-config org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -250,6 +250,21 @@ + + + org.finos.legend.engine + legend-engine-xt-connection-grammar + + + org.finos.legend.engine + legend-engine-xt-connection-compiler + + + org.finos.legend.engine + legend-engine-xt-connection-protocol + + + org.finos.legend.engine @@ -478,8 +493,27 @@ org.finos.legend.engine legend-engine-xt-snowflakeApp-protocol + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-generator + + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-compiler + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-grammar + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-protocol + + + org.finos.legend.engine diff --git a/legend-engine-config/legend-engine-extensions-collection-generation/src/test/java/org/finos/legend/engine/extensions/collection/generation/TestExtensions.java b/legend-engine-config/legend-engine-extensions-collection-generation/src/test/java/org/finos/legend/engine/extensions/collection/generation/TestExtensions.java index 2d51c4927c9..84d174859f4 100644 --- a/legend-engine-config/legend-engine-extensions-collection-generation/src/test/java/org/finos/legend/engine/extensions/collection/generation/TestExtensions.java +++ b/legend-engine-config/legend-engine-extensions-collection-generation/src/test/java/org/finos/legend/engine/extensions/collection/generation/TestExtensions.java @@ -28,8 +28,12 @@ import org.finos.legend.engine.generation.DataSpaceAnalyticsArtifactGenerationExtension; import org.finos.legend.engine.generation.OpenApiArtifactGenerationExtension; import org.finos.legend.engine.generation.SearchDocumentArtifactGenerationExtension; +import org.finos.legend.engine.language.bigqueryFunction.compiler.toPureGraph.BigQueryFunctionCompilerExtension; +import org.finos.legend.engine.language.bigqueryFunction.grammar.from.BigQueryFunctionGrammarParserExtension; +import 
org.finos.legend.engine.language.bigqueryFunction.grammar.to.BigQueryFunctionGrammarComposer; import org.finos.legend.engine.language.graphQL.grammar.integration.GraphQLGrammarParserExtension; import org.finos.legend.engine.language.graphQL.grammar.integration.GraphQLPureGrammarComposerExtension; +import org.finos.legend.engine.language.pure.compiler.toPureGraph.ConnectionCompilerExtension; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension; import org.finos.legend.engine.language.pure.dsl.authentication.grammar.from.AuthenticationGrammarParserExtension; @@ -48,6 +52,7 @@ import org.finos.legend.engine.language.pure.dsl.persistence.relational.grammar.to.PersistenceRelationalComposerExtension; import org.finos.legend.engine.language.pure.dsl.service.grammar.from.ServiceParserExtension; import org.finos.legend.engine.language.pure.dsl.service.grammar.to.ServiceGrammarComposerExtension; +import org.finos.legend.engine.language.pure.grammar.from.ConnectionParserExtension; import org.finos.legend.engine.language.pure.grammar.from.CorePureGrammarParser; import org.finos.legend.engine.language.pure.grammar.from.DataSpaceParserExtension; import org.finos.legend.engine.language.pure.grammar.from.DiagramParserExtension; @@ -57,6 +62,7 @@ import org.finos.legend.engine.language.pure.grammar.from.TextParserExtension; import org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension; import org.finos.legend.engine.language.pure.grammar.to.BigQueryGrammarComposerExtension; +import org.finos.legend.engine.language.pure.grammar.to.ConnectionGrammarComposerExtension; import org.finos.legend.engine.language.pure.grammar.to.CorePureGrammarComposer; import org.finos.legend.engine.language.pure.grammar.to.DataSpaceGrammarComposerExtension; import org.finos.legend.engine.language.pure.grammar.to.DiagramGrammarComposerExtension; @@ -71,12 +77,13 @@ import org.finos.legend.engine.language.pure.grammar.to.DatabricksGrammarComposerExtension; import org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension; import org.finos.legend.engine.language.snowflakeApp.compiler.toPureGraph.SnowflakeAppCompilerExtension; +import org.finos.legend.engine.language.snowflakeApp.generator.SnowflakeAppArtifactGenerationExtension; import org.finos.legend.engine.language.snowflakeApp.grammar.from.SnowflakeAppGrammarParserExtension; import org.finos.legend.engine.language.snowflakeApp.grammar.to.SnowflakeAppGrammarComposer; import org.finos.legend.engine.language.sql.grammar.integration.SQLGrammarParserExtension; import org.finos.legend.engine.language.sql.grammar.integration.SQLPureGrammarComposerExtension; import org.finos.legend.engine.language.stores.elasticsearch.v7.from.ElasticsearchGrammarParserExtension; -import org.finos.legend.engine.protocol.mongodb.schema.metamodel.MongoDBPureProtocolExtension; +import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionProtocolExtension; import org.finos.legend.pure.code.core.ElasticsearchPureCoreExtension; import org.finos.legend.engine.language.stores.elasticsearch.v7.to.ElasticsearchGrammarComposerExtension; import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension; @@ -265,7 +272,9 @@ protected Iterable> getExpected return Lists.mutable.>empty() .with(org.finos.legend.engine.protocol.pure.v1.CorePureProtocolExtension.class) 
.with(org.finos.legend.engine.protocol.pure.v1.DataSpaceProtocolExtension.class) + .with(org.finos.legend.engine.protocol.pure.v1.ConnectionProtocolExtension.class) .with(SnowflakeAppProtocolExtension.class) + .with(BigQueryFunctionProtocolExtension.class) .with(org.finos.legend.engine.protocol.pure.v1.DiagramProtocolExtension.class) .with(org.finos.legend.engine.protocol.pure.v1.GenerationProtocolExtension.class) .with(org.finos.legend.engine.protocol.pure.v1.PersistenceProtocolExtension.class) @@ -309,7 +318,9 @@ protected Iterable> getExp return Lists.mutable.>empty() .with(CorePureGrammarParser.class) .with(DataSpaceParserExtension.class) + .with(ConnectionParserExtension.class) .with(SnowflakeAppGrammarParserExtension.class) + .with(BigQueryFunctionGrammarParserExtension.class) .with(DiagramParserExtension.class) .with(ExternalFormatGrammarParserExtension.class) .with(GenerationParserExtension.class) @@ -335,7 +346,9 @@ protected Iterable> getE return Lists.mutable.>empty() .with(CorePureGrammarComposer.class) .with(DataSpaceGrammarComposerExtension.class) + .with(ConnectionGrammarComposerExtension.class) .with(SnowflakeAppGrammarComposer.class) + .with(BigQueryFunctionGrammarComposer.class) .with(DiagramGrammarComposerExtension.class) .with(ExternalFormatGrammarComposerExtension.class) .with(GenerationGrammarComposerExtension.class) @@ -367,7 +380,9 @@ protected Iterable> getExpectedComp return Lists.mutable.>empty() .with(org.finos.legend.engine.language.pure.compiler.toPureGraph.DiagramCompilerExtension.class) .with(SnowflakeAppCompilerExtension.class) + .with(BigQueryFunctionCompilerExtension.class) .with(org.finos.legend.engine.language.pure.compiler.toPureGraph.DataSpaceCompilerExtension.class) + .with(ConnectionCompilerExtension.class) .with(org.finos.legend.engine.language.pure.compiler.toPureGraph.TextCompilerExtension.class) .with(org.finos.legend.engine.language.pure.compiler.toPureGraph.CoreCompilerExtension.class) .with(org.finos.legend.engine.language.pure.dsl.generation.compiler.toPureGraph.GenerationCompilerExtensionImpl.class) @@ -452,7 +467,8 @@ protected Iterable> getEx return Lists.mutable.>empty() .with(DataSpaceAnalyticsArtifactGenerationExtension.class) .with(SearchDocumentArtifactGenerationExtension.class) - .with(OpenApiArtifactGenerationExtension.class); + .with(OpenApiArtifactGenerationExtension.class) + .with(SnowflakeAppArtifactGenerationExtension.class); } protected Iterable> getExpectedEntitlementServiceExtensions() @@ -476,6 +492,7 @@ protected Iterable getExpectedCodeRepositories() .with("core_analytics_lineage") .with("core_analytics_mapping") .with("core_analytics_search") + .with("core_connection_metamodel") .with("core_data_space") .with("core_data_space_metamodel") .with("core_diagram") @@ -517,6 +534,7 @@ protected Iterable getExpectedCodeRepositories() .with("core_servicestore") .with("core_authentication") .with("core_snowflakeapp") + .with("core_bigqueryfunction") .with("core_text_metamodel") .with("core_external_language_java") .with("core_java_platform_binding") diff --git a/legend-engine-config/legend-engine-pure-code-compiled-core-configuration/pom.xml b/legend-engine-config/legend-engine-pure-code-compiled-core-configuration/pom.xml index f860a58d691..a4bf5ea6603 100644 --- a/legend-engine-config/legend-engine-pure-code-compiled-core-configuration/pom.xml +++ b/legend-engine-config/legend-engine-pure-code-compiled-core-configuration/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-config - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 
4.0.0 diff --git a/legend-engine-config/legend-engine-server-integration-tests/pom.xml b/legend-engine-config/legend-engine-server-integration-tests/pom.xml index b92093ed2fb..ba650774c0b 100644 --- a/legend-engine-config/legend-engine-server-integration-tests/pom.xml +++ b/legend-engine-config/legend-engine-server-integration-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-config - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-server-integration-tests diff --git a/legend-engine-config/legend-engine-server-support-core/pom.xml b/legend-engine-config/legend-engine-server-support-core/pom.xml index 29959d07831..a99e1cfb149 100644 --- a/legend-engine-config/legend-engine-server-support-core/pom.xml +++ b/legend-engine-config/legend-engine-server-support-core/pom.xml @@ -3,7 +3,7 @@ legend-engine-config org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-config/legend-engine-server/pom.xml b/legend-engine-config/legend-engine-server/pom.xml index 5b97f0ebd47..37ed8f3b41e 100644 --- a/legend-engine-config/legend-engine-server/pom.xml +++ b/legend-engine-config/legend-engine-server/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-config - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-server @@ -78,6 +78,10 @@ + + org.finos.legend.engine + legend-engine-server-support-core + org.finos.legend.engine legend-engine-pure-code-compiled-core @@ -192,11 +196,7 @@ org.finos.legend.engine - legend-engine-xt-snowflakeApp-protocol - - - org.finos.legend.engine - legend-engine-xt-hostedService-protocol + legend-engine-xt-bigqueryFunction-protocol org.finos.legend.engine @@ -263,7 +263,16 @@ runtime - + + org.finos.legend.engine + legend-engine-xt-connection-compiler + runtime + + + org.finos.legend.engine + legend-engine-xt-connection-grammar + runtime + org.finos.legend.engine legend-engine-xt-persistence-grammar @@ -340,6 +349,24 @@ org.finos.legend.engine legend-engine-xt-snowflakeApp-api + + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-protocol + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-compiler + runtime + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-grammar + runtime + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-api runtime @@ -355,7 +382,10 @@ org.finos.legend.engine legend-engine-xt-hostedService-api - runtime + + + org.finos.legend.engine + legend-engine-xt-hostedService-protocol org.finos.legend.engine @@ -426,10 +456,6 @@ org.finos.legend.engine legend-engine-xt-analytics-binding-api - - org.finos.legend.engine - legend-engine-xt-analytics-binding-api - org.finos.legend.engine legend-engine-xt-relationalStore-grammar @@ -620,6 +646,22 @@ org.finos.legend.engine legend-engine-xt-sql-query + + org.finos.legend.engine + legend-engine-xt-sql-providers-relationalStore + + + org.finos.legend.engine + legend-engine-xt-sql-providers-service + + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + org.finos.legend.engine legend-engine-test-data-generation @@ -711,7 +753,11 @@ org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory + + + org.finos.legend.engine + legend-engine-xt-connection-protocol org.finos.legend.engine diff --git a/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/Server.java 
b/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/Server.java index c0933bc6f60..035ec9fd3e1 100644 --- a/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/Server.java +++ b/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/Server.java @@ -31,6 +31,7 @@ import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.impl.list.mutable.FastList; import org.eclipse.collections.impl.utility.Iterate; +import org.eclipse.collections.impl.utility.LazyIterate; import org.eclipse.collections.impl.utility.ListIterate; import org.eclipse.jetty.server.session.SessionHandler; import org.eclipse.jetty.servlets.CrossOriginFilter; @@ -42,22 +43,18 @@ import org.finos.legend.authentication.vault.impl.EnvironmentCredentialVault; import org.finos.legend.authentication.vault.impl.PropertiesFileCredentialVault; import org.finos.legend.authentication.vault.impl.SystemPropertiesCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; +import org.finos.legend.connection.AuthenticationMechanism; import org.finos.legend.connection.ConnectionFactory; -import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.DatabaseSupport; import org.finos.legend.connection.LegendEnvironment; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstanceProvider; -import org.finos.legend.connection.impl.DefaultStoreInstanceProvider; -import org.finos.legend.connection.impl.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; import org.finos.legend.connection.impl.HACKY__SnowflakeConnectionAdapter; import org.finos.legend.connection.impl.KerberosCredentialExtractor; import org.finos.legend.connection.impl.KeyPairCredentialBuilder; +import org.finos.legend.connection.impl.RelationalDatabaseType; import org.finos.legend.connection.impl.SnowflakeConnectionBuilder; import org.finos.legend.connection.impl.StaticJDBCConnectionBuilder; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; import org.finos.legend.connection.impl.UserPasswordCredentialBuilder; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; import org.finos.legend.engine.api.analytics.BindingAnalytics; import org.finos.legend.engine.api.analytics.ClassAnalytics; import org.finos.legend.engine.api.analytics.DataSpaceAnalytics; @@ -82,6 +79,8 @@ import org.finos.legend.engine.external.shared.format.model.api.ExternalFormats; import org.finos.legend.engine.functionActivator.api.FunctionActivatorAPI; import org.finos.legend.engine.generation.artifact.api.ArtifactGenerationExtensionApi; +import org.finos.legend.engine.language.hostedService.api.HostedServiceService; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentConfiguration; import org.finos.legend.engine.language.pure.compiler.api.Compile; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.language.pure.grammar.api.grammarToJson.GrammarToJson; @@ -95,13 +94,13 @@ import org.finos.legend.engine.language.pure.modelManager.ModelManager; import org.finos.legend.engine.language.pure.modelManager.sdlc.SDLCLoader; import org.finos.legend.engine.language.pure.relational.api.relationalElement.RelationalElementAPI; +import 
org.finos.legend.engine.language.snowflakeApp.api.SnowflakeAppService; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration; import org.finos.legend.engine.plan.execution.PlanExecutor; import org.finos.legend.engine.plan.execution.api.ExecutePlanLegacy; import org.finos.legend.engine.plan.execution.api.ExecutePlanStrategic; import org.finos.legend.engine.plan.execution.api.concurrent.ConcurrentExecutionNodeExecutorPoolInfo; import org.finos.legend.engine.plan.execution.api.concurrent.ParallelGraphFetchExecutionExecutorPoolInfo; -import org.finos.legend.engine.plan.execution.api.request.RequestContextHelper; -import org.finos.legend.engine.plan.execution.api.result.ResultManager; import org.finos.legend.engine.plan.execution.concurrent.ParallelGraphFetchExecutionExecutorPool; import org.finos.legend.engine.plan.execution.graphFetch.GraphFetchExecutionConfiguration; import org.finos.legend.engine.plan.execution.service.api.ServiceModelingApi; @@ -120,17 +119,23 @@ import org.finos.legend.engine.plan.execution.stores.service.plugin.ServiceStoreExecutor; import org.finos.legend.engine.plan.execution.stores.service.plugin.ServiceStoreExecutorBuilder; import org.finos.legend.engine.plan.generation.extension.PlanGeneratorExtension; -import org.finos.legend.engine.protocol.hostedService.metamodel.HostedServiceDeploymentConfiguration; +import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionDeploymentConfiguration; import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory; import org.finos.legend.engine.protocol.pure.v1.model.PureProtocol; -import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeDeploymentConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.pure.code.core.PureCoreExtensionLoader; import org.finos.legend.engine.query.graphQL.api.debug.GraphQLDebug; import org.finos.legend.engine.query.graphQL.api.execute.GraphQLExecute; import org.finos.legend.engine.query.graphQL.api.grammar.GraphQLGrammar; import org.finos.legend.engine.query.pure.api.Execute; +import org.finos.legend.engine.query.sql.api.SQLExecutor; import org.finos.legend.engine.query.sql.api.execute.SqlExecute; import org.finos.legend.engine.query.sql.api.grammar.SqlGrammar; +import org.finos.legend.engine.query.sql.providers.LegendServiceSQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.RelationalStoreSQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.shared.FunctionSQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; import org.finos.legend.engine.server.core.ServerShared; import org.finos.legend.engine.server.core.api.CurrentUser; import org.finos.legend.engine.server.core.api.Info; @@ -212,7 +217,8 @@ protected SwaggerBundleConfiguration getSwaggerBundleConfiguration( bootstrap.getObjectMapper().registerSubtypes(new NamedType(LegendDefaultDatabaseAuthenticationFlowProviderConfiguration.class, "legendDefault")); bootstrap.getObjectMapper().registerSubtypes(new NamedType(HostedServiceDeploymentConfiguration.class, "hostedServiceConfig")); - bootstrap.getObjectMapper().registerSubtypes(new NamedType(SnowflakeDeploymentConfiguration.class, "snowflakeAppConfig")); + 
bootstrap.getObjectMapper().registerSubtypes(new NamedType(SnowflakeAppDeploymentConfiguration.class, "snowflakeAppConfig"));
+        bootstrap.getObjectMapper().registerSubtypes(new NamedType(BigQueryFunctionDeploymentConfiguration.class, "bigQueryFunctionConfig"));
     }

     public CredentialProviderProvider configureCredentialProviders(List vaultConfigurations)
@@ -378,7 +384,7 @@ public void run(T serverConfiguration, Environment environment)
         environment.jersey().register(new ExecutePlanLegacy(planExecutor));

         // Function Activator
-        environment.jersey().register(new FunctionActivatorAPI(modelManager, Lists.mutable.empty(), routerExtensions));
+        environment.jersey().register(new FunctionActivatorAPI(modelManager, Lists.mutable.empty(), Lists.mutable.with(new SnowflakeAppService(planExecutor), new HostedServiceService()), routerExtensions));

         // GraphQL
         environment.jersey().register(new GraphQLGrammar());
@@ -386,7 +392,12 @@ public void run(T serverConfiguration, Environment environment)
         environment.jersey().register(new GraphQLDebug(modelManager, serverConfiguration.metadataserver, routerExtensions));

         // SQL
-        environment.jersey().register(new SqlExecute(modelManager, planExecutor, routerExtensions, FastList.newListWith(), generatorExtensions.flatCollect(PlanGeneratorExtension::getExtraPlanTransformers)));
+        ProjectCoordinateLoader projectCoordinateLoader = new ProjectCoordinateLoader(modelManager, serverConfiguration.metadataserver.getSdlc());
+        environment.jersey().register(new SqlExecute(new SQLExecutor(modelManager, planExecutor, routerExtensions, FastList.newListWith(
+                new RelationalStoreSQLSourceProvider(projectCoordinateLoader),
+                new FunctionSQLSourceProvider(projectCoordinateLoader),
+                new LegendServiceSQLSourceProvider(projectCoordinateLoader)),
+                generatorExtensions.flatCollect(PlanGeneratorExtension::getExtraPlanTransformers)))));
         environment.jersey().register(new SqlGrammar());

         // Service
@@ -420,45 +431,54 @@ public void run(T serverConfiguration, Environment environment)
         //TestData Generation
         environment.jersey().register(new TestDataGeneration(modelManager));

-        enableCors(environment);
+        enableCors(environment, serverConfiguration);
     }

     // TODO: @akphi - this is temporary, rework when we find a better way to handle the initialization of connection factory from config or some external source.
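    // A minimal sketch (hypothetical helper, for illustration only; it is not referenced anywhere):
    // this condenses the wiring performed by setupConnectionFactory below to the Snowflake-only case,
    // to show how the new fluent builders compose. A LegendEnvironment (credential vaults plus
    // database supports) is assembled first, then fed to the ConnectionFactory together with
    // credential and connection builders. All types and builder methods used here appear in this change.
    private ConnectionFactory buildSnowflakeOnlyConnectionFactoryExample()
    {
        // environment: which vaults credentials can be resolved from, and which database types
        // and authentication mechanisms are supported
        LegendEnvironment environment = LegendEnvironment
                .builder()
                .vaults(new SystemPropertiesCredentialVault())
                .databaseSupports(
                        DatabaseSupport
                                .builder()
                                .type(RelationalDatabaseType.SNOWFLAKE)
                                .authenticationMechanisms(
                                        AuthenticationMechanism
                                                .builder()
                                                .type(CoreAuthenticationMechanismType.KEY_PAIR)
                                                .authenticationConfigurationTypes(EncryptedPrivateKeyPairAuthenticationConfiguration.class)
                                                .build()
                                )
                                .build()
                )
                .build();
        // factory: resolves credentials via the registered credential builders, then hands them
        // to a matching connection builder (here, Snowflake with key-pair authentication)
        return ConnectionFactory
                .builder()
                .environment(environment)
                .credentialBuilders(new KeyPairCredentialBuilder())
                .connectionBuilders(new SnowflakeConnectionBuilder.WithKeyPair())
                .build();
    }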
private ConnectionFactory setupConnectionFactory(List vaultConfigurations) { - LegendEnvironment environment = new LegendEnvironment.Builder() - .withVaults( + LegendEnvironment environment = LegendEnvironment + .builder() + .vaults( new SystemPropertiesCredentialVault(), new EnvironmentCredentialVault(), new PropertiesFileCredentialVault(this.buildVaultProperties(vaultConfigurations)) ) - .withStoreSupports( - new RelationalDatabaseStoreSupport.Builder(DatabaseType.POSTGRES) - .withIdentifier("Postgres") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).withAuthenticationConfigurationTypes( - UserPasswordAuthenticationConfiguration.class - ).build() + .databaseSupports( + DatabaseSupport + .builder() + .type(RelationalDatabaseType.POSTGRES) + .authenticationMechanisms( + AuthenticationMechanism + .builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes( + UserPasswordAuthenticationConfiguration.class + ).build() ) .build(), - new RelationalDatabaseStoreSupport.Builder(DatabaseType.SNOWFLAKE) - .withIdentifier("Snowflake") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KEY_PAIR).withAuthenticationConfigurationTypes( - EncryptedPrivateKeyPairAuthenticationConfiguration.class - ).build() + DatabaseSupport + .builder() + .type(RelationalDatabaseType.SNOWFLAKE) + .authenticationMechanisms( + AuthenticationMechanism + .builder() + .type(CoreAuthenticationMechanismType.KEY_PAIR) + .authenticationConfigurationTypes( + EncryptedPrivateKeyPairAuthenticationConfiguration.class + ).build() ) .build() ).build(); - StoreInstanceProvider storeInstanceProvider = new DefaultStoreInstanceProvider.Builder().build(); - return new ConnectionFactory.Builder(environment, storeInstanceProvider) - .withCredentialBuilders( + return ConnectionFactory.builder() + .environment(environment) + .credentialBuilders( new KerberosCredentialExtractor(), new UserPasswordCredentialBuilder(), new KeyPairCredentialBuilder() ) - .withConnectionBuilders( + .connectionBuilders( new StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword(), new SnowflakeConnectionBuilder.WithKeyPair() ) @@ -479,13 +499,24 @@ public void shutDown() throws Exception CollectorRegistry.defaultRegistry.clear(); } - private void enableCors(Environment environment) + private void enableCors(Environment environment, ServerConfiguration configuration) { + // Enable CORS FilterRegistration.Dynamic corsFilter = environment.servlets().addFilter("CORS", CrossOriginFilter.class); corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, "GET,PUT,POST,DELETE,OPTIONS"); corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, "*"); corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_TIMING_ORIGINS_PARAM, "*"); - corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_HEADERS_PARAM, "X-Requested-With,Content-Type,Accept,Origin,Access-Control-Allow-Credentials,x-b3-parentspanid,x-b3-sampled,x-b3-spanid,x-b3-traceid," + RequestContextHelper.LEGEND_REQUEST_ID + "," + RequestContextHelper.LEGEND_USE_PLAN_CACHE + "," + ResultManager.LEGEND_RESPONSE_FORMAT); + + if (configuration.cors != null && configuration.cors.getAllowedHeaders() != null) + { + corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_HEADERS_PARAM, LazyIterate.adapt(configuration.cors.getAllowedHeaders()).makeString(",")); + } + else + { + // NOTE: this set of headers 
is kept as the default for backward compatibility; the headers starting with the prefix `x-` are meant for the Zipkin
+            // client used by the SDLC server. We should consider using the CORS configuration and remove those from this default list.
+            corsFilter.setInitParameter(CrossOriginFilter.ALLOWED_HEADERS_PARAM, "X-Requested-With,Content-Type,Accept,Origin,Access-Control-Allow-Credentials,x-b3-parentspanid,x-b3-sampled,x-b3-spanid,x-b3-traceid");
+        }
         corsFilter.setInitParameter(CrossOriginFilter.CHAIN_PREFLIGHT_PARAM, "false");
         corsFilter.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), false, "*");
     }
diff --git a/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/ServerConfiguration.java b/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/ServerConfiguration.java
index 470cfb23f50..05c5b69f965 100644
--- a/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/ServerConfiguration.java
+++ b/legend-engine-config/legend-engine-server/src/main/java/org/finos/legend/engine/server/ServerConfiguration.java
@@ -23,6 +23,7 @@
 import org.finos.legend.engine.server.core.configuration.DeploymentConfiguration;
 import org.finos.legend.engine.server.core.configuration.ErrorHandlingConfiguration;
 import org.finos.legend.engine.server.core.configuration.OpenTracingConfiguration;
+import org.finos.legend.engine.server.support.server.config.CORSConfiguration;
 import org.finos.legend.engine.shared.core.vault.VaultConfiguration;
 import org.finos.legend.server.pac4j.LegendPac4jConfiguration;
@@ -45,6 +46,7 @@ public class ServerConfiguration extends Configuration
     public GraphFetchExecutionConfiguration graphFetchExecutionConfiguration;
     public ErrorHandlingConfiguration errorhandlingconfiguration = new ErrorHandlingConfiguration();
     public List activatorConfiguration;
+    public CORSConfiguration cors;

     /*
         This configuration has been deprecated in favor of the 'temporarytestdb' in RelationalExecutionConfiguration
diff --git a/legend-engine-config/pom.xml b/legend-engine-config/pom.xml
index f3cdfd4a74c..6601f367d97 100644
--- a/legend-engine-config/pom.xml
+++ b/legend-engine-config/pom.xml
@@ -18,7 +18,7 @@
         org.finos.legend.engine
         legend-engine
-        4.32.1-SNAPSHOT
+        4.35.4-SNAPSHOT
     4.0.0
diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-dependencies/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-dependencies/pom.xml
index 6c91f276510..8dd18ff4551 100644
--- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-dependencies/pom.xml
+++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-dependencies/pom.xml
@@ -19,7 +19,7 @@
         org.finos.legend.engine
         legend-engine-core-executionPlan-execution
-        4.32.1-SNAPSHOT
+        4.35.4-SNAPSHOT
     4.0.0
     legend-engine-executionPlan-dependencies
diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-api/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-api/pom.xml
index 0c2a2cee499..20755af5f9a 100644
--- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-api/pom.xml
+++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-api/pom.xml
@@ -19,7 +19,7 @@
         org.finos.legend.engine
legend-engine-core-executionPlan-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-executionPlan-execution-api diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-authorizer/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-authorizer/pom.xml index 1210baadae3..4c35141a25a 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-authorizer/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-authorizer/pom.xml @@ -3,7 +3,7 @@ legend-engine-core-executionPlan-execution org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-store-inMemory/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-store-inMemory/pom.xml index ec312791f98..6ca535cad77 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-store-inMemory/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution-store-inMemory/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-executionPlan-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-executionPlan-execution-store-inMemory diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution/pom.xml index 2850f59dfd2..89769027a29 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-executionPlan-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-executionPlan-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-executionPlan-execution diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-external-shared-format-runtime/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-external-shared-format-runtime/pom.xml index a5102224d6c..be7ee9c7ce9 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-external-shared-format-runtime/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-execution/legend-engine-external-shared-format-runtime/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-executionPlan-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-executionPlan-execution/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-execution/pom.xml index 5cae8208813..ba0fff434fc 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-execution/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-execution/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-core - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-executionPlan-generation/legend-engine-executionPlan-generation/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-generation/legend-engine-executionPlan-generation/pom.xml index 
945bffaefe6..d89976c25cb 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-generation/legend-engine-executionPlan-generation/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-generation/legend-engine-executionPlan-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-executionPlan-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-executionPlan-generation/pom.xml b/legend-engine-core/legend-engine-core-executionPlan-generation/pom.xml index c57a58439a4..5c8f58f3a04 100644 --- a/legend-engine-core/legend-engine-core-executionPlan-generation/pom.xml +++ b/legend-engine-core/legend-engine-core-executionPlan-generation/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-core - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-external-shared-format-model/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-external-shared-format-model/pom.xml index 00c9263a2ce..3f391d204f9 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-external-shared-format-model/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-external-shared-format-model/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler-api/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler-api/pom.xml index 8f65a7cd6bb..1d294c8a7ad 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler-api/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-compiler-api diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/pom.xml index aca4bf1dc65..0ba068241e7 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-compiler diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRuntimeBuilder.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRuntimeBuilder.java index 792570cc0af..c10e4d4d7b9 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRuntimeBuilder.java +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRuntimeBuilder.java @@ -126,7 +126,7 @@ public static void 
buildEngineRuntime(EngineRuntime engineRuntime, Root_meta_cor Root_meta_core_runtime_ConnectionStore connectionStore = new Root_meta_core_runtime_ConnectionStore_Impl("") ._connection(connection) - ._element(getElement(storePointer.path, storePointer.sourceInformation, context)); + ._element(getStore(storePointer.path, storePointer.sourceInformation, context)); pureRuntime._connectionStoresAdd(connectionStore); }); }); @@ -155,7 +155,7 @@ public static void buildEngineRuntime(EngineRuntime engineRuntime, Root_meta_cor connection.accept(new ConnectionSecondPassBuilder(context, pureConnection)); final Root_meta_core_runtime_ConnectionStore connectionStore = new Root_meta_core_runtime_ConnectionStore_Impl("", null, context.pureModel.getClass("meta::core::runtime::ConnectionStore")) ._connection(pureConnection) - ._element(getElement(storeConnections.store.path, storeConnections.store.sourceInformation, context)); + ._element(getStore(storeConnections.store.path, storeConnections.store.sourceInformation, context)); pureRuntime._connectionStoresAdd(connectionStore); }); @@ -222,7 +222,7 @@ public static Root_meta_core_runtime_Runtime buildPureRuntime(Runtime runtime, C connection.accept(new ConnectionSecondPassBuilder(context, pureConnection)); final Root_meta_core_runtime_ConnectionStore connectionStore = new Root_meta_core_runtime_ConnectionStore_Impl("", null, context.pureModel.getClass("meta::core::runtime::ConnectionStore")) ._connection(pureConnection) - ._element(getElement(connection.element, connection.sourceInformation, context)); + ._element(getStore(connection.element, connection.sourceInformation, context)); pureRuntime._connectionStoresAdd(connectionStore); }); return pureRuntime; @@ -238,23 +238,11 @@ else if (runtime instanceof RuntimePointer) throw new UnsupportedOperationException(); } - public static Object getElement(String element, SourceInformation sourceInformation, CompileContext context) + public static Object getStore(String element, SourceInformation sourceInformation, CompileContext context) { return element.equals("ModelStore") ? 
new Root_meta_external_store_model_ModelStore_Impl("", null, context.pureModel.getClass("meta::external::store::model::ModelStore")) - : resolveElementSafe(element, sourceInformation, context); - } - - private static Object resolveElementSafe(String element, SourceInformation sourceInformation, CompileContext context) - { - try - { - return context.resolveStore(element, sourceInformation); - } - catch (EngineException e) - { - return element; - } + : context.resolveStore(element, sourceInformation); } /** diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/handlers/Handlers.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/handlers/Handlers.java index d71acb85d72..9f2ddfd01dc 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/handlers/Handlers.java +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/handlers/Handlers.java @@ -619,6 +619,8 @@ public Handlers(PureModel pureModel) register(m(h("meta::pure::mutation::save_T_MANY__RootGraphFetchTree_1__Mapping_1__Runtime_1__T_MANY_", false, ps -> res(ps.get(0)._genericType(), "zeroMany"), ps -> true))); register("meta::pure::tds::extensions::firstNotNull_T_MANY__T_$0_1$_", false, ps -> res(ps.get(0)._genericType(), "zeroOne")); + + register("meta::pure::functions::hash::hash_String_1__HashType_1__String_1_", true, ps -> res("String","one")); // Extensions CompileContext context = this.pureModel.getContext(); @@ -922,6 +924,11 @@ private void registerStrings() register("meta::pure::functions::string::trim_String_1__String_1_", true, ps -> res("String", "one")); register("meta::pure::functions::string::ltrim_String_1__String_1_", true, ps -> res("String", "one")); register("meta::pure::functions::string::rtrim_String_1__String_1_", true, ps -> res("String", "one")); + register(m(m(h("meta::pure::functions::string::lpad_String_1__Integer_1__String_1_", false, ps -> res("String", "one"), ps -> ps.size() == 2)), + m(h("meta::pure::functions::string::lpad_String_1__Integer_1__String_1__String_1_", false, ps -> res("String", "one"), ps -> true)))); + + register(m(m(h("meta::pure::functions::string::rpad_String_1__Integer_1__String_1_", false, ps -> res("String", "one"), ps -> ps.size() == 2)), + m(h("meta::pure::functions::string::rpad_String_1__Integer_1__String_1__String_1_", false, ps -> res("String", "one"), ps -> true)))); register("meta::pure::functions::string::matches_String_1__String_1__Boolean_1_", true, ps -> res("Boolean", "one")); register("meta::pure::functions::string::isAlphaNumeric_String_1__Boolean_1_", false, ps -> res("Boolean", "one")); register("meta::pure::functions::string::isNoLongerThan_String_$0_1$__Integer_1__Boolean_1_", false, ps -> res("Boolean", "one")); @@ -1885,6 +1892,7 @@ private Map buildDispatch() map.put("meta::pure::functions::date::weekOfYear_Date_1__Integer_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "Date", "StrictDate", "DateTime", "LatestDate").contains(ps.get(0)._genericType()._rawType()._name())); map.put("meta::pure::functions::date::year_Date_$0_1$__Integer_$0_1$_", (List ps) 
-> ps.size() == 1 && matchZeroOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "Date", "StrictDate", "DateTime", "LatestDate").contains(ps.get(0)._genericType()._rawType()._name())); map.put("meta::pure::functions::date::year_Date_1__Integer_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "Date", "StrictDate", "DateTime", "LatestDate").contains(ps.get(0)._genericType()._rawType()._name())); + map.put("meta::pure::functions::hash::hash_String_1__HashType_1__String_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name())) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "HashType".equals(ps.get(1)._genericType()._rawType()._name()))); map.put("meta::pure::functions::lang::cast_Any_m__T_1__T_m_", (List ps) -> ps.size() == 2 && isOne(ps.get(1)._multiplicity())); map.put("meta::pure::functions::lang::compare_T_1__T_1__Integer_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && isOne(ps.get(1)._multiplicity())); map.put("meta::pure::functions::lang::eval_Function_1__T_n__V_m_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || check(funcType(ps.get(0)._genericType()), (FunctionType ft) -> check(ft._parameters().toList(), (List nps) -> nps.size() == 1)))); @@ -2053,6 +2061,12 @@ private Map buildDispatch() map.put("meta::pure::functions::string::toString_Any_1__String_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity())); map.put("meta::pure::functions::string::toUpper_String_1__String_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name()))); map.put("meta::pure::functions::string::trim_String_1__String_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::ltrim_String_1__String_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::rtrim_String_1__String_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::lpad_String_1__Integer_1__String_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name())) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Integer".equals(ps.get(1)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::lpad_String_1__Integer_1__String_1__String_1_", (List ps) -> ps.size() == 3 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name())) && isOne(ps.get(1)._multiplicity()) && 
("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Integer".equals(ps.get(1)._genericType()._rawType()._name())) && isOne(ps.get(2)._multiplicity()) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "String".equals(ps.get(2)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::rpad_String_1__Integer_1__String_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name())) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Integer".equals(ps.get(1)._genericType()._rawType()._name()))); + map.put("meta::pure::functions::string::rpad_String_1__Integer_1__String_1__String_1_", (List ps) -> ps.size() == 3 && isOne(ps.get(0)._multiplicity()) && ("Nil".equals(ps.get(0)._genericType()._rawType()._name()) || "String".equals(ps.get(0)._genericType()._rawType()._name())) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Integer".equals(ps.get(1)._genericType()._rawType()._name())) && isOne(ps.get(2)._multiplicity()) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "String".equals(ps.get(2)._genericType()._rawType()._name()))); map.put("meta::pure::graphFetch::calculateSourceTree_RootGraphFetchTree_1__Mapping_1__Extension_MANY__RootGraphFetchTree_1_", (List ps) -> ps.size() == 3 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "RootGraphFetchTree", "ExtendedRootGraphFetchTree", "RoutedRootGraphFetchTree", "SerializeTopRootGraphFetchTree").contains(ps.get(0)._genericType()._rawType()._name()) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Mapping".equals(ps.get(1)._genericType()._rawType()._name())) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "Extension".equals(ps.get(2)._genericType()._rawType()._name()))); map.put("meta::pure::graphFetch::execution::graphFetchChecked_T_MANY__RootGraphFetchTree_1__Checked_MANY_", (List ps) -> ps.size() == 2 && isOne(ps.get(1)._multiplicity()) && Sets.immutable.with("Nil", "RootGraphFetchTree", "ExtendedRootGraphFetchTree", "RoutedRootGraphFetchTree", "SerializeTopRootGraphFetchTree").contains(ps.get(1)._genericType()._rawType()._name())); map.put("meta::pure::graphFetch::execution::graphFetch_T_MANY__RootGraphFetchTree_1__Integer_1__T_MANY_", (List ps) -> ps.size() == 3 && isOne(ps.get(1)._multiplicity()) && Sets.immutable.with("Nil", "RootGraphFetchTree", "ExtendedRootGraphFetchTree", "RoutedRootGraphFetchTree", "SerializeTopRootGraphFetchTree").contains(ps.get(1)._genericType()._rawType()._name()) && isOne(ps.get(2)._multiplicity()) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "Integer".equals(ps.get(2)._genericType()._rawType()._name()))); @@ -2079,8 +2093,6 @@ private Map buildDispatch() map.put("meta::pure::tds::distinct_TabularDataSet_1__TabularDataSet_1_", (List ps) -> ps.size() == 1 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name())); map.put("meta::pure::tds::drop_TabularDataSet_1__Integer_1__TabularDataSet_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name()) && 
isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "Integer".equals(ps.get(1)._genericType()._rawType()._name()))); map.put("meta::pure::tds::extend_TabularDataSet_1__BasicColumnSpecification_MANY__TabularDataSet_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || "BasicColumnSpecification".equals(ps.get(1)._genericType()._rawType()._name()))); - map.put("meta::pure::tds::extensions::columnValueDifference_TabularDataSet_1__TabularDataSet_1__String_$1_MANY$__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", (List ps) -> ps.size() == 5 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name()) && isOne(ps.get(1)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(1)._genericType()._rawType()._name()) && matchOneMany(ps.get(2)._multiplicity()) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "String".equals(ps.get(2)._genericType()._rawType()._name())) && matchOneMany(ps.get(3)._multiplicity()) && ("Nil".equals(ps.get(3)._genericType()._rawType()._name()) || "String".equals(ps.get(3)._genericType()._rawType()._name())) && matchOneMany(ps.get(4)._multiplicity()) && ("Nil".equals(ps.get(4)._genericType()._rawType()._name()) || "String".equals(ps.get(4)._genericType()._rawType()._name()))); - map.put("meta::pure::tds::extensions::columnValueDifference_TabularDataSet_1__TabularDataSet_1__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", (List ps) -> ps.size() == 4 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name()) && isOne(ps.get(1)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(1)._genericType()._rawType()._name()) && matchOneMany(ps.get(2)._multiplicity()) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "String".equals(ps.get(2)._genericType()._rawType()._name())) && matchOneMany(ps.get(3)._multiplicity()) && ("Nil".equals(ps.get(3)._genericType()._rawType()._name()) || "String".equals(ps.get(3)._genericType()._rawType()._name()))); map.put("meta::pure::tds::filter_TabularDataSet_1__Function_1__TabularDataSet_1_", (List ps) -> ps.size() == 2 && isOne(ps.get(0)._multiplicity()) && Sets.immutable.with("Nil", "TabularDataSet", "TabularDataSetImplementation", "TableTDS").contains(ps.get(0)._genericType()._rawType()._name()) && isOne(ps.get(1)._multiplicity()) && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || check(funcType(ps.get(1)._genericType()), (FunctionType ft) -> isOne(ft._returnMultiplicity()) && ("Nil".equals(ft._returnType()._rawType()._name()) || "Boolean".equals(ft._returnType()._rawType()._name())) && check(ft._parameters().toList(), (List nps) -> nps.size() == 1 && isOne(nps.get(0)._multiplicity()) && Sets.immutable.with("TDSRow", "Any").contains(nps.get(0)._genericType()._rawType()._name()))))); map.put("meta::pure::tds::groupByWithWindowSubset_K_MANY__Function_MANY__AggregateValue_MANY__String_MANY__String_MANY__String_MANY__TabularDataSet_1_", (List ps) -> 
ps.size() == 6 && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || check(funcType(ps.get(1)._genericType()), (FunctionType ft) -> check(ft._parameters().toList(), (List nps) -> nps.size() == 1 && isOne(nps.get(0)._multiplicity())))) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "AggregateValue".equals(ps.get(2)._genericType()._rawType()._name())) && ("Nil".equals(ps.get(3)._genericType()._rawType()._name()) || "String".equals(ps.get(3)._genericType()._rawType()._name())) && ("Nil".equals(ps.get(4)._genericType()._rawType()._name()) || "String".equals(ps.get(4)._genericType()._rawType()._name())) && ("Nil".equals(ps.get(5)._genericType()._rawType()._name()) || "String".equals(ps.get(5)._genericType()._rawType()._name()))); map.put("meta::pure::tds::groupBy_K_MANY__Function_MANY__AggregateValue_MANY__String_MANY__TabularDataSet_1_", (List ps) -> ps.size() == 4 && ("Nil".equals(ps.get(1)._genericType()._rawType()._name()) || check(funcType(ps.get(1)._genericType()), (FunctionType ft) -> check(ft._parameters().toList(), (List nps) -> nps.size() == 1 && isOne(nps.get(0)._multiplicity())))) && ("Nil".equals(ps.get(2)._genericType()._rawType()._name()) || "AggregateValue".equals(ps.get(2)._genericType()._rawType()._name())) && ("Nil".equals(ps.get(3)._genericType()._rawType()._name()) || "String".equals(ps.get(3)._genericType()._rawType()._name()))); diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar-api/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar-api/pom.xml index ca8ab38350f..f77d7ed2ef7 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar-api/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-grammar-api diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar/pom.xml index ea064956592..a9ba1412713 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-grammar diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/pom.xml index 3137cbabb3a..23ec895b294 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/pom.xml +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/pom.xml @@ -19,13 +19,24 @@ org.finos.legend.engine legend-engine-core-language-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-modelManager-sdlc Legend Engine - Language Pure - Model Manager - SDLC + + org.finos.legend.shared + legend-shared-pac4j-gitlab + + + * + * + + + + org.finos.legend.engine diff --git 
a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCFetcher.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCFetcher.java
new file mode 100644
index 00000000000..1b467eefe74
--- /dev/null
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCFetcher.java
@@ -0,0 +1,113 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.language.pure.modelManager.sdlc;
+
+import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
+import java.util.List;
+import java.util.function.Function;
+import javax.security.auth.Subject;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.eclipse.collections.api.list.MutableList;
+import org.eclipse.collections.impl.utility.ListIterate;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.alloy.AlloySDLCLoader;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.pure.PureServerLoader;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.workspace.WorkspaceSDLCLoader;
+import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureSDLC;
+import org.finos.legend.engine.protocol.pure.v1.model.context.SDLCVisitor;
+import org.finos.legend.engine.protocol.pure.v1.model.context.WorkspaceSDLC;
+import org.finos.legend.engine.shared.core.kerberos.SubjectTools;
+import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException;
+import org.pac4j.core.profile.CommonProfile;
+
+final class SDLCFetcher implements SDLCVisitor<PureModelContextData>
+{
+    private final Span parentSpan;
+    private final String clientVersion;
+    private final Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider;
+    private final MutableList<CommonProfile> pm;
+    private final PureServerLoader pureLoader;
+    private final AlloySDLCLoader alloyLoader;
+    private final WorkspaceSDLCLoader workspaceLoader;
+
+    public SDLCFetcher(Span parentSpan, String clientVersion, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider, MutableList<CommonProfile> pm, PureServerLoader pureLoader, AlloySDLCLoader alloyLoader, WorkspaceSDLCLoader workspaceLoader)
+    {
+        this.parentSpan = parentSpan;
+        this.clientVersion = clientVersion;
+        this.httpClientProvider = httpClientProvider;
+        this.pm = pm;
+        this.pureLoader = pureLoader;
+        this.alloyLoader = alloyLoader;
+        this.workspaceLoader = workspaceLoader;
+    }
+
+    @Override
+    public PureModelContextData visit(AlloySDLC sdlc)
+    {
+        parentSpan.setTag("sdlc", "alloy");
+        try (Scope ignore = GlobalTracer.get().buildSpan("Request Alloy Metadata").startActive(true))
+        {
+            PureModelContextData loadedProject = this.alloyLoader.loadAlloyProject(pm, sdlc, clientVersion, this.httpClientProvider);
+            loadedProject.origin.sdlcInfo.packageableElementPointers = sdlc.packageableElementPointers;
+            List<String> missingPaths = this.alloyLoader.checkAllPathsExist(loadedProject, sdlc);
+            if (missingPaths.isEmpty())
+            {
+                return loadedProject;
+            }
+            else
+            {
+                throw new EngineException("The following entities: " + missingPaths + " do not exist in the project data loaded from the metadata server. " +
+                        "Please make sure the corresponding GitLab pipeline for version " + (this.alloyLoader.isLatestRevision(sdlc) ? "latest" : sdlc.version) + " has completed, and that the metadata server has been updated with the corresponding entities, " +
+                        "by confirming the data returned from this API.");
+            }
+        }
+    }
+
+    @Override
+    public PureModelContextData visit(PureSDLC pureSDLC)
+    {
+        parentSpan.setTag("sdlc", "pure");
+        try (Scope ignore = GlobalTracer.get().buildSpan("Request Pure Metadata").startActive(true))
+        {
+            Subject subject = SubjectTools.getCurrentSubject();
+
+            return ListIterate.injectInto(
+                    new PureModelContextData.Builder(),
+                    pureSDLC.packageableElementPointers,
+                    (builder, pointers) -> builder.withPureModelContextData(this.pureLoader.loadPurePackageableElementPointer(pm, pointers, clientVersion, subject == null ? "" : "?auth=kerberos", pureSDLC.overrideUrl))
+            ).distinct().sorted().build();
+        }
+    }
+
+    @Override
+    public PureModelContextData visit(WorkspaceSDLC sdlc)
+    {
+        parentSpan.setTag("sdlc", "workspace");
+        parentSpan.setTag("project", sdlc.project);
+        parentSpan.setTag("workspace", sdlc.getWorkspace());
+        parentSpan.setTag("isGroupWorkspace", sdlc.isGroupWorkspace);
+
+        try (Scope scope = GlobalTracer.get().buildSpan("Request Workspace Metadata").startActive(true))
+        {
+            PureModelContextData loadedProject = this.workspaceLoader.loadWorkspace(pm, sdlc, this.httpClientProvider);
+            PureModelContextData sdlcDependenciesPMCD = this.workspaceLoader.getSDLCDependenciesPMCD(pm, this.clientVersion, sdlc, this.httpClientProvider);
+            return loadedProject.combine(sdlcDependenciesPMCD);
+        }
+    }
+}
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCLoader.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCLoader.java
index c98d111985b..5ffeff7bdaa 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCLoader.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/SDLCLoader.java
@@ -20,7 +20,6 @@
 import io.opentracing.tag.Tags;
 import io.opentracing.util.GlobalTracer;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
 import java.util.function.Supplier;
@@ -29,6 +28,7 @@
 import org.apache.http.HttpStatus;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpRequestBase;
 import org.apache.http.impl.client.BasicCookieStore;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.util.EntityUtils;
@@ -36,12 +36,12 @@
 import org.eclipse.collections.api.list.MutableList;
 import org.eclipse.collections.api.set.primitive.IntSet;
 import org.eclipse.collections.impl.factory.primitive.IntSets;
-import org.eclipse.collections.impl.utility.ListIterate;
 import org.finos.legend.engine.language.pure.modelManager.ModelLoader;
 import org.finos.legend.engine.language.pure.modelManager.ModelManager;
 import org.finos.legend.engine.language.pure.modelManager.sdlc.alloy.AlloySDLCLoader;
 import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration;
 import org.finos.legend.engine.language.pure.modelManager.sdlc.pure.PureServerLoader;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.workspace.WorkspaceSDLCLoader;
 import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC;
 import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext;
 import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
@@ -70,6 +70,7 @@ public class SDLCLoader implements ModelLoader
     private final Supplier<Subject> subjectProvider;
     private final PureServerLoader pureLoader;
    private final AlloySDLCLoader alloyLoader;
+    private final WorkspaceSDLCLoader workspaceLoader;
     private final Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider;

     public SDLCLoader(MetaDataServerConfiguration metaDataServerConfiguration, Supplier<Subject> subjectProvider)
@@ -89,18 +90,21 @@ public SDLCLoader(MetaDataServerConfiguration metaDataServerConfiguration, Suppl
     public SDLCLoader(MetaDataServerConfiguration metaDataServerConfiguration, Supplier<Subject> subjectProvider, PureServerLoader pureLoader, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider)
     {
-        this.subjectProvider = subjectProvider;
-        this.pureLoader = pureLoader;
-        this.alloyLoader = new AlloySDLCLoader(metaDataServerConfiguration);
-        this.httpClientProvider = httpClientProvider;
+        this(metaDataServerConfiguration, subjectProvider, pureLoader, httpClientProvider, new AlloySDLCLoader(metaDataServerConfiguration));
     }

     public SDLCLoader(MetaDataServerConfiguration metaDataServerConfiguration, Supplier<Subject> subjectProvider, PureServerLoader pureLoader, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider, AlloySDLCLoader alloyLoader)
+    {
+        this(subjectProvider, pureLoader, httpClientProvider, alloyLoader, new WorkspaceSDLCLoader(metaDataServerConfiguration.sdlc));
+    }
+
+    public SDLCLoader(Supplier<Subject> subjectProvider, PureServerLoader pureLoader, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider, AlloySDLCLoader alloyLoader, WorkspaceSDLCLoader workspaceLoader)
     {
         this.subjectProvider = subjectProvider;
         this.pureLoader = pureLoader;
         this.alloyLoader = alloyLoader;
         this.httpClientProvider = httpClientProvider;
+        this.workspaceLoader = workspaceLoader;
     }

     private Subject getSubject()
@@ -119,6 +123,7 @@ private Subject getSubject()
     @Override
     public void setModelManager(ModelManager modelManager)
     {
+        this.workspaceLoader.setModelManager(modelManager);
     }

     @Override
@@ -168,55 +173,18 @@ public PureModelContextData load(MutableList<CommonProfile> pm, PureModelContext
         PureModelContextPointer context = (PureModelContextPointer) ctx;
         Assert.assertTrue(clientVersion != null, () -> "Client version should be set when pulling metadata from the metadata repository");

-        Function0<PureModelContextData> fetchMetadata;
-
-        final Subject subject = getSubject();
-
-        if (context.sdlcInfo instanceof PureSDLC)
-        {
-            fetchMetadata = () ->
-            {
-                parentSpan.setTag("sdlc", "pure");
-                try (Scope scope = GlobalTracer.get().buildSpan("Request Pure Metadata").startActive(true))
-                {
-                    return ListIterate.injectInto(
-                            new PureModelContextData.Builder(),
-                            context.sdlcInfo.packageableElementPointers,
-                            (builder, pointers) -> builder.withPureModelContextData(this.pureLoader.loadPurePackageableElementPointer(pm, pointers, clientVersion, subject == null ? "" : "?auth=kerberos", ((PureSDLC) context.sdlcInfo).overrideUrl))
-                    ).distinct().sorted().build();
-                }
-            };
-        }
-        else if (context.sdlcInfo instanceof AlloySDLC)
-        {
-            fetchMetadata = () ->
-            {
-                parentSpan.setTag("sdlc", "alloy");
-                try (Scope scope = GlobalTracer.get().buildSpan("Request Alloy Metadata").startActive(true))
-                {
-                    AlloySDLC sdlc = (AlloySDLC) context.sdlcInfo;
-                    PureModelContextData loadedProject = this.alloyLoader.loadAlloyProject(pm, sdlc, clientVersion, this.httpClientProvider);
-                    loadedProject.origin.sdlcInfo.packageableElementPointers = sdlc.packageableElementPointers;
-                    List<String> missingPaths = this.alloyLoader.checkAllPathsExist(loadedProject, sdlc);
-                    if (missingPaths.isEmpty())
-                    {
-                        return loadedProject;
-                    }
-                    else
-                    {
-                        throw new EngineException("The following entities:" + missingPaths + " do not exist in the project data loaded from the metadata server. " +
-                                "Please make sure the corresponding Gitlab pipeline for version " + (this.alloyLoader.isLatestRevision(sdlc) ? "latest" : sdlc.version) + " has completed and also metadata server has updated with corresponding entities " +
-                                "by confirming the data returned from this API .");
-                    }
-                }
-            };
-        }
-        else
-        {
-            throw new UnsupportedOperationException("To Code");
-        }
+        SDLCFetcher fetcher = new SDLCFetcher(
+            parentSpan,
+            clientVersion,
+            this.httpClientProvider,
+            pm,
+            this.pureLoader,
+            this.alloyLoader,
+            this.workspaceLoader
+        );

-        PureModelContextData metaData = subject == null ? fetchMetadata.value() : exec(subject, fetchMetadata::value);
+        Subject subject = getSubject();
+        PureModelContextData metaData = subject == null ? context.sdlcInfo.accept(fetcher) : exec(subject, () -> context.sdlcInfo.accept(fetcher));

         if (metaData.origin != null)
         {
@@ -234,6 +202,11 @@
     public static PureModelContextData loadMetadataFromHTTPURL(MutableList<CommonProfile> pm, LoggingEventType startEvent, LoggingEventType stopEvent, String url, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider)
+    {
+        return loadMetadataFromHTTPURL(pm, startEvent, stopEvent, url, httpClientProvider, null);
+    }
+
+    public static PureModelContextData loadMetadataFromHTTPURL(MutableList<CommonProfile> pm, LoggingEventType startEvent, LoggingEventType stopEvent, String url, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider, Function<String, HttpRequestBase> httpRequestProvider)
     {
         Scope scope = GlobalTracer.get().scopeManager().active();
         CloseableHttpClient httpclient;
@@ -260,15 +233,24 @@ public static PureModelContextData loadMetadataFromHTTPURL(MutableList<CommonProfile>
                 "Engine was unable to load information from the Pure SDLC link");
         LOGGER.info(new LogInfo(pm, stopEvent, (double) System.currentTimeMillis() - start).toString());
@@ -298,14 +280,14 @@ public static PureModelContextData loadMetadataFromHTTPURL(MutableList<CommonProfile>
         if (statusCode < 200 || statusCode >= 300)
         {
-            String msg = EntityUtils.toString(entity);
+            String msg = entity != null ?
EntityUtils.toString(entity) : response.getStatusLine().getReasonPhrase(); response.close(); - throw new EngineException("Error response from " + httpGet.getURI() + ", HTTP" + statusCode + "\n" + msg); + throw new EngineException("Error response from " + httpRequest.getURI() + ", HTTP" + statusCode + "\n" + msg); } return entity; diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPac4jConfiguration.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPac4jConfiguration.java new file mode 100644 index 00000000000..966ee9c4adc --- /dev/null +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPac4jConfiguration.java @@ -0,0 +1,26 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.language.pure.modelManager.sdlc.configuration; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") +@JsonSubTypes({ + @JsonSubTypes.Type(value = MetadataServerPrivateAccessTokenConfiguration.class, name = "privateAccessToken"), +}) +public abstract class MetadataServerPac4jConfiguration +{ +} diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPrivateAccessTokenConfiguration.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPrivateAccessTokenConfiguration.java new file mode 100644 index 00000000000..4ccae839e82 --- /dev/null +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/MetadataServerPrivateAccessTokenConfiguration.java @@ -0,0 +1,20 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
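The `_type` discriminator declared on MetadataServerPac4jConfiguration means the concrete subtype is chosen by name at deserialization time. A hedged sketch of what that looks like with plain jackson-databind (the header value in the JSON is made up for the example):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetadataServerPac4jConfiguration;

public class Pac4jConfigDeserializationSketch
{
    public static void main(String[] args) throws Exception
    {
        // "_type": "privateAccessToken" routes to MetadataServerPrivateAccessTokenConfiguration
        String json = "{\"_type\": \"privateAccessToken\", \"accessTokenHeaderName\": \"X-Example-PAT\"}";
        MetadataServerPac4jConfiguration config = new ObjectMapper().readValue(json, MetadataServerPac4jConfiguration.class);
        System.out.println(config.getClass().getSimpleName());
    }
}
```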
+ +package org.finos.legend.engine.language.pure.modelManager.sdlc.configuration; + +public class MetadataServerPrivateAccessTokenConfiguration extends MetadataServerPac4jConfiguration +{ + public String accessTokenHeaderName; +} diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/ServerConnectionConfiguration.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/ServerConnectionConfiguration.java index ae0b74c93b1..4be7f20226e 100644 --- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/ServerConnectionConfiguration.java +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/configuration/ServerConnectionConfiguration.java @@ -26,6 +26,7 @@ public class ServerConnectionConfiguration public String host; public Integer port; public String prefix = ""; + public MetadataServerPac4jConfiguration pac4j; public ServerConnectionConfiguration() { diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/workspace/WorkspaceSDLCLoader.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/workspace/WorkspaceSDLCLoader.java new file mode 100644 index 00000000000..e756df13f84 --- /dev/null +++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/main/java/org/finos/legend/engine/language/pure/modelManager/sdlc/workspace/WorkspaceSDLCLoader.java @@ -0,0 +1,191 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
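For context, the new `pac4j` field slots into the existing host/port/prefix connection block. A sketch of how an SDLC connection might be wired up in code, with placeholder values throughout (none of these are defaults shipped by this change; the prefix mirrors the one used in TestSDLCLoader):

```java
import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetadataServerPrivateAccessTokenConfiguration;
import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.ServerConnectionConfiguration;

public class SdlcConnectionConfigSketch
{
    public static void main(String[] args)
    {
        ServerConnectionConfiguration sdlc = new ServerConnectionConfiguration();
        sdlc.host = "localhost"; // placeholder
        sdlc.port = 6100;        // placeholder
        sdlc.prefix = "/sdlc";

        // Attach a private-access-token block so prepareHttpRequest adds the PAT header
        MetadataServerPrivateAccessTokenConfiguration pat = new MetadataServerPrivateAccessTokenConfiguration();
        pat.accessTokenHeaderName = "Private-Token"; // placeholder header name
        sdlc.pac4j = pat;
    }
}
```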
+//
+
+package org.finos.legend.engine.language.pure.modelManager.sdlc.workspace;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.opentracing.Scope;
+import io.opentracing.util.GlobalTracer;
+import java.io.InputStream;
+import java.security.PrivilegedAction;
+import java.util.List;
+import java.util.function.Function;
+import javax.security.auth.Subject;
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.eclipse.collections.api.list.MutableList;
+import org.finos.legend.engine.language.pure.modelManager.ModelManager;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.SDLCLoader;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetadataServerPrivateAccessTokenConfiguration;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.ServerConnectionConfiguration;
+import org.finos.legend.engine.protocol.Protocol;
+import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer;
+import org.finos.legend.engine.protocol.pure.v1.model.context.WorkspaceSDLC;
+import org.finos.legend.engine.shared.core.ObjectMapperFactory;
+import org.finos.legend.engine.shared.core.kerberos.HttpClientBuilder;
+import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper;
+import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType;
+import org.finos.legend.server.pac4j.gitlab.GitlabPersonalAccessTokenProfile;
+import org.pac4j.core.profile.CommonProfile;
+
+public class WorkspaceSDLCLoader
+{
+    private static final TypeReference<List<SDLCProjectDependency>> SDLC_PROJECT_DEPENDENCY_TYPE = new TypeReference<List<SDLCProjectDependency>>()
+    {
+    };
+
+    private final ServerConnectionConfiguration sdlcServerConnectionConfig;
+    private final ObjectMapper mapper;
+    private ModelManager modelManager;
+
+    public WorkspaceSDLCLoader(ServerConnectionConfiguration sdlcServerConnectionConfig)
+    {
+        this.sdlcServerConnectionConfig = sdlcServerConnectionConfig;
+        this.mapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports();
+    }
+
+    public PureModelContextData loadWorkspace(MutableList<CommonProfile> pm, WorkspaceSDLC sdlc, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider)
+    {
+        return this.doAs(pm, () ->
+        {
+            String url = sdlcServerConnectionConfig.getBaseUrl() + "/api/projects/" + sdlc.project + (sdlc.isGroupWorkspace ? "/groupWorkspaces/" : "/workspaces/") + sdlc.getWorkspace() + "/pureModelContextData";
+            return SDLCLoader.loadMetadataFromHTTPURL(pm, LoggingEventType.METADATA_REQUEST_ALLOY_PROJECT_START, LoggingEventType.METADATA_REQUEST_ALLOY_PROJECT_STOP, url, httpClientProvider, x -> prepareHttpRequest(pm, x));
+        });
+    }
+
+    public void setModelManager(ModelManager modelManager)
+    {
+        this.modelManager = modelManager;
+    }
+
+    public PureModelContextData getSDLCDependenciesPMCD(MutableList<CommonProfile> pm, String clientVersion, WorkspaceSDLC sdlc, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider)
+    {
+        return this.doAs(pm, () ->
+        {
+            CloseableHttpClient httpclient;
+
+            if (httpClientProvider != null)
+            {
+                httpclient = httpClientProvider.apply(pm);
+            }
+            else
+            {
+                httpclient = (CloseableHttpClient) HttpClientBuilder.getHttpClient(new BasicCookieStore());
+            }
+
+            try (
+                    CloseableHttpClient client = httpclient;
+                    Scope scope = GlobalTracer.get().buildSpan("Load project upstream dependencies").startActive(true)
+            )
+            {
+                String url = String.format("%s/api/projects/%s/%s/%s/revisions/HEAD/upstreamProjects",
+                        sdlcServerConnectionConfig.getBaseUrl(),
+                        sdlc.project,
+                        sdlc.isGroupWorkspace ? "groupWorkspaces" : "workspaces",
+                        sdlc.getWorkspace());
+
+                HttpGet httpRequest = this.prepareHttpRequest(pm, url);
+                HttpEntity entity = SDLCLoader.execHttpRequest(scope.span(), client, httpRequest);
+
+                try (InputStream content = entity.getContent())
+                {
+                    List<SDLCProjectDependency> dependencies = mapper.readValue(content, SDLC_PROJECT_DEPENDENCY_TYPE);
+
+                    PureModelContextData.Builder builder = PureModelContextData.newBuilder();
+
+                    dependencies.forEach(dependency ->
+                    {
+                        builder.addPureModelContextData(this.loadDependencyData(pm, clientVersion, dependency));
+                    });
+
+                    builder.removeDuplicates();
+                    return builder.build();
+                }
+            }
+            catch (Exception e)
+            {
+                throw new RuntimeException(e);
+            }
+        });
+    }
+
+    private PureModelContextData doAs(MutableList<CommonProfile> pm, PrivilegedAction<PureModelContextData> action)
+    {
+        Subject kerberosCredential = ProfileManagerHelper.extractSubject(pm);
+        return kerberosCredential == null ? action.run() : Subject.doAs(kerberosCredential, action);
+    }
+
+    private HttpGet prepareHttpRequest(MutableList<CommonProfile> pm, String url)
+    {
+        HttpGet httpRequest = null;
+
+        if (this.sdlcServerConnectionConfig.pac4j != null && this.sdlcServerConnectionConfig.pac4j instanceof MetadataServerPrivateAccessTokenConfiguration)
+        {
+            String patHeaderName = ((MetadataServerPrivateAccessTokenConfiguration) this.sdlcServerConnectionConfig.pac4j).accessTokenHeaderName;
+            MutableList<GitlabPersonalAccessTokenProfile> patProfiles = pm.selectInstancesOf(GitlabPersonalAccessTokenProfile.class);
+            if (patProfiles.getFirst() != null)
+            {
+                httpRequest = new HttpGet(String.format("%s?client_name=%s", url, patProfiles.getFirst().getClientName()));
+                httpRequest.addHeader(new BasicHeader(patHeaderName, patProfiles.getFirst().getPersonalAccessToken()));
+            }
+        }
+
+        if (httpRequest == null)
+        {
+            httpRequest = new HttpGet(url);
+        }
+
+        return httpRequest;
+    }
+
+    private PureModelContextData loadDependencyData(MutableList<CommonProfile> profiles, String clientVersion, SDLCProjectDependency dependency)
+    {
+        PureModelContextPointer pointer = new PureModelContextPointer();
+        AlloySDLC sdlcInfo = new AlloySDLC();
+        sdlcInfo.groupId = dependency.getGroupId();
+        sdlcInfo.artifactId = dependency.getArtifactId();
+        sdlcInfo.version = dependency.getVersionId();
+        pointer.sdlcInfo = sdlcInfo;
+        pointer.serializer = new Protocol("pure", clientVersion);
+        return this.modelManager.loadData(pointer, clientVersion, profiles);
+    }
+
+    private static class SDLCProjectDependency
+    {
+        public String projectId;
+        public String versionId;
+
+        public String getGroupId()
+        {
+            return projectId.split(":")[0];
+        }
+
+        public String getArtifactId()
+        {
+            return projectId.split(":")[1];
+        }
+
+        public String getVersionId()
+        {
+            return versionId;
+        }
+    }
+}
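SDLCProjectDependency above treats `projectId` as Maven-style `groupId:artifactId` coordinates, which the WireMock stubs in the test below confirm. A standalone restatement of that parsing:

```java
public class DependencyCoordinatesSketch
{
    public static void main(String[] args)
    {
        // Same shape as the upstreamProjects payload stubbed in TestSDLCLoader
        String projectId = "org.finos.legend.dependency:models";
        String groupId = projectId.split(":")[0];    // org.finos.legend.dependency
        String artifactId = projectId.split(":")[1]; // models
        System.out.println(groupId + ":" + artifactId);
    }
}
```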
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/test/java/org/finos/legend/engine/language/pure/modelManager/sdlc/TestSDLCLoader.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/test/java/org/finos/legend/engine/language/pure/modelManager/sdlc/TestSDLCLoader.java
index 41f8e0a7981..ebf7cb8cbf5 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/test/java/org/finos/legend/engine/language/pure/modelManager/sdlc/TestSDLCLoader.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager-sdlc/src/test/java/org/finos/legend/engine/language/pure/modelManager/sdlc/TestSDLCLoader.java
@@ -16,21 +16,29 @@
 package org.finos.legend.engine.language.pure.modelManager.sdlc;

 import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.github.tomakehurst.wiremock.client.WireMock;
 import com.github.tomakehurst.wiremock.junit.WireMockClassRule;
 import com.github.tomakehurst.wiremock.stubbing.Scenario;
 import io.opentracing.mock.MockSpan;
 import io.opentracing.mock.MockTracer;
 import io.opentracing.util.GlobalTracer;
+import java.util.List;
+import java.util.stream.Collectors;
 import javax.security.auth.Subject;
 import org.eclipse.collections.api.factory.Lists;
+import org.finos.legend.engine.language.pure.modelManager.ModelManager;
 import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration;
 import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.ServerConnectionConfiguration;
 import org.finos.legend.engine.protocol.Protocol;
 import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC;
 import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
 import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer;
+import org.finos.legend.engine.protocol.pure.v1.model.context.WorkspaceSDLC;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.Class;
 import org.finos.legend.engine.shared.core.ObjectMapperFactory;
+import org.finos.legend.engine.shared.core.deployment.DeploymentMode;
 import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException;
 import org.junit.After;
 import org.junit.Assert;
@@ -50,6 +58,8 @@ public class TestSDLCLoader

     private static final MockTracer tracer = new MockTracer();

+    private static final String CLIENT_VERSION = "v1_33_0";
+
     @BeforeClass
     public static void setUpClass()
     {
@@ -77,7 +87,7 @@ public void testSdlcLoaderRetriesOnSomeHttpResponses() throws Exception
         configureWireMockForRetries();
         SDLCLoader sdlcLoader = createSDLCLoader();

-        PureModelContextData pmcdLoaded = sdlcLoader.load(Lists.fixedSize.empty(), pointer, "v1_32_0", tracer.activeSpan());
+        PureModelContextData pmcdLoaded = sdlcLoader.load(Lists.fixedSize.empty(), pointer, CLIENT_VERSION, tracer.activeSpan());

         Assert.assertNotNull(pmcdLoaded);
         Object tries = tracer.finishedSpans()
@@ -101,7 +111,7 @@ public void testSdlcLoaderDoesNotRetryOnHardFailures() throws Exception

         try
         {
-            sdlcLoader.load(Lists.fixedSize.empty(), pointer, "v1_32_0", tracer.activeSpan());
+            sdlcLoader.load(Lists.fixedSize.empty(), pointer, CLIENT_VERSION, tracer.activeSpan());
             Assert.fail("Should throw");
         }
         catch (EngineException e)
@@ -110,6 +120,51 @@ public void testSdlcLoaderDoesNotRetryOnHardFailures() throws Exception
         }
     }

+    @Test
+    public void testSdlcLoaderForWorkspacesWithoutDependency() throws Exception
+    {
+        WorkspaceSDLC sdlcInfo = new WorkspaceSDLC();
+        sdlcInfo.project = "proj-1234";
+        sdlcInfo.isGroupWorkspace = true;
+        sdlcInfo.version = "workspaceAbc";
+
+        PureModelContextPointer pointer = new PureModelContextPointer();
+        pointer.sdlcInfo = sdlcInfo;
+
+        configureWireMockForRetries();
+        SDLCLoader sdlcLoader = createSDLCLoader();
+        PureModelContextData pmcdLoaded = sdlcLoader.load(Lists.fixedSize.empty(), pointer, CLIENT_VERSION, tracer.activeSpan());
+        Assert.assertNotNull(pmcdLoaded);
+        Assert.assertEquals(1, pmcdLoaded.getElements().size());
+        Assert.assertEquals("pkg::pkg::myClass", pmcdLoaded.getElements().get(0).getPath());
+    }
+
+    @Test
+    public void testSdlcLoaderForWorkspacesWithDependency() throws Exception
+    {
+        WorkspaceSDLC sdlcInfo = new WorkspaceSDLC();
+        sdlcInfo.project = "proj-1235";
+        sdlcInfo.isGroupWorkspace = false;
+        sdlcInfo.version = "workspaceAbc";
+
+        PureModelContextPointer pointer = new PureModelContextPointer();
+        pointer.sdlcInfo = sdlcInfo;
+
+        configureWireMockForRetries();
+        SDLCLoader sdlcLoader = createSDLCLoader();
+
+        ModelManager modelManager = new ModelManager(DeploymentMode.TEST, tracer, sdlcLoader);
+
+        PureModelContextData pmcdLoaded = modelManager.loadData(pointer, CLIENT_VERSION, Lists.fixedSize.empty());
+
+        Assert.assertNotNull(pmcdLoaded);
+        Assert.assertEquals(2, pmcdLoaded.getElements().size());
+
+        List<String> paths = pmcdLoaded.getElements().stream().map(PackageableElement::getPath).sorted().collect(Collectors.toList());
+
Assert.assertEquals("pkg::pkg::myAnotherClass", paths.get(0)); + Assert.assertEquals("pkg::pkg::myClass", paths.get(1)); + } + private static PureModelContextPointer getPureModelContextPointer() { AlloySDLC sdlcInfo = new AlloySDLC(); @@ -127,6 +182,7 @@ private SDLCLoader createSDLCLoader() MetaDataServerConfiguration serverConfiguration = new MetaDataServerConfiguration(); serverConfiguration.alloy = new ServerConnectionConfiguration(); serverConfiguration.pure = new ServerConnectionConfiguration(); + serverConfiguration.sdlc = new ServerConnectionConfiguration(); serverConfiguration.alloy.host = "localhost"; serverConfiguration.alloy.port = rule.port(); @@ -136,36 +192,70 @@ private SDLCLoader createSDLCLoader() serverConfiguration.pure.port = rule.port(); serverConfiguration.pure.prefix = "/pure"; + serverConfiguration.sdlc.host = "localhost"; + serverConfiguration.sdlc.port = rule.port(); + serverConfiguration.sdlc.prefix = "/sdlc"; + return new SDLCLoader(serverConfiguration, Subject::new); } private static void configureWireMockForRetries() throws JsonProcessingException { + ObjectMapper objectMapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports(); + PureModelContextData data = PureModelContextData.newPureModelContextData(new Protocol(), new PureModelContextPointer(), Lists.fixedSize.empty()); - String pmcdJson = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports().writeValueAsString(data); + String pmcdJson = objectMapper.writeValueAsString(data); - WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=v1_32_0") + WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=" + CLIENT_VERSION) .inScenario("RETRY_FAILURES") .whenScenarioStateIs(Scenario.STARTED) .willReturn(WireMock.aResponse().withStatus(503).withBody("a failure")) .willSetStateTo("FAILED_1")); - WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=v1_32_0") + WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=" + CLIENT_VERSION) .inScenario("RETRY_FAILURES") .whenScenarioStateIs("FAILED_1") .willReturn(WireMock.aResponse().withStatus(503).withBody("a failure")) .willSetStateTo("FAILED_2")); - WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=v1_32_0") + WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=" + CLIENT_VERSION) .inScenario("RETRY_FAILURES") .whenScenarioStateIs("FAILED_2") .willReturn(WireMock.okJson(pmcdJson)) .willSetStateTo("FAILED_3")); + + + Class t = new Class(); + t.name = "myClass"; + t._package = "pkg::pkg"; + PureModelContextData data2 = PureModelContextData.newPureModelContextData(new Protocol(), new PureModelContextPointer(), Lists.fixedSize.with(t)); + String pmcdJson2 = objectMapper.writeValueAsString(data2); + + Class t2 = new Class(); + t2.name = "myAnotherClass"; + t2._package = "pkg::pkg"; + PureModelContextData dataDep = PureModelContextData.newPureModelContextData(new Protocol(), new PureModelContextPointer(), Lists.fixedSize.with(t2)); + String pmcdJsonDep = objectMapper.writeValueAsString(dataDep); + + WireMock.stubFor(WireMock.get("/sdlc/api/projects/proj-1234/groupWorkspaces/workspaceAbc/pureModelContextData") + 
.willReturn(WireMock.okJson(pmcdJson2)));
+
+        WireMock.stubFor(WireMock.get("/sdlc/api/projects/proj-1234/groupWorkspaces/workspaceAbc/revisions/HEAD/upstreamProjects")
+                .willReturn(WireMock.okJson("[]")));
+
+        WireMock.stubFor(WireMock.get("/sdlc/api/projects/proj-1235/workspaces/workspaceAbc/pureModelContextData")
+                .willReturn(WireMock.okJson(pmcdJson2)));
+
+        WireMock.stubFor(WireMock.get("/sdlc/api/projects/proj-1235/workspaces/workspaceAbc/revisions/HEAD/upstreamProjects")
+                .willReturn(WireMock.okJson("[{\"projectId\": \"org.finos.legend.dependency:models\",\"versionId\": \"2.0.1\"}]")));
+
+        WireMock.stubFor(WireMock.get("/alloy/projects/org.finos.legend.dependency/models/versions/2.0.1/pureModelContextData?clientVersion=" + CLIENT_VERSION)
+                .willReturn(WireMock.okJson(pmcdJsonDep)));
     }

     private static void configureWireMockForNoRetries() throws JsonProcessingException
     {
-        WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=v1_32_0")
+        WireMock.stubFor(WireMock.get("/alloy/projects/groupId/artifactId/versions/1.0.0/pureModelContextData?clientVersion=" + CLIENT_VERSION)
             .willReturn(WireMock.aResponse().withStatus(400).withBody("a failure")));
     }
 }
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/pom.xml
index b0ff121784f..396194e3633 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-language-pure-modelManager
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/src/main/java/org/finos/legend/engine/language/pure/modelManager/ModelManager.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/src/main/java/org/finos/legend/engine/language/pure/modelManager/ModelManager.java
index 6b188225374..123dc45665c 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/src/main/java/org/finos/legend/engine/language/pure/modelManager/ModelManager.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-language-pure-modelManager/src/main/java/org/finos/legend/engine/language/pure/modelManager/ModelManager.java
@@ -18,7 +18,10 @@
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import io.opentracing.Scope;
+import io.opentracing.Tracer;
 import io.opentracing.util.GlobalTracer;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 import org.eclipse.collections.api.block.procedure.Procedure;
 import org.eclipse.collections.api.list.MutableList;
 import org.eclipse.collections.api.tuple.Pair;
@@ -36,10 +39,6 @@
 import org.finos.legend.engine.shared.core.operational.Assert;
 import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException;
 import org.pac4j.core.profile.CommonProfile;
-import org.slf4j.Logger;
-
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;

 public class ModelManager
 {
@@ -57,9 +56,16 @@ public class ModelManager
     public final Cache<PureModelContext, PureModel> pureModelCache = CacheBuilder.newBuilder().recordStats().softValues().expireAfterAccess(30, TimeUnit.MINUTES).build();
     private final DeploymentMode deploymentMode;
     private final MutableList<ModelLoader> modelLoaders;
+    private final Tracer tracer;

     public ModelManager(DeploymentMode mode, ModelLoader... modelLoaders)
     {
+        this(mode, GlobalTracer.get(), modelLoaders);
+    }
+
+    public ModelManager(DeploymentMode mode, Tracer tracer, ModelLoader... modelLoaders)
+    {
+        this.tracer = tracer;
         this.modelLoaders = Lists.mutable.of(modelLoaders);
         this.modelLoaders.forEach((Procedure<ModelLoader>) loader -> loader.setModelManager(this));
         this.deploymentMode = mode;
@@ -104,7 +110,7 @@ public String getLambdaReturnType(Lambda lambda, PureModelContext context, Strin
     // Remove clientVersion
     public PureModelContextData loadData(PureModelContext context, String clientVersion, MutableList<CommonProfile> pm)
     {
-        try (Scope scope = GlobalTracer.get().buildSpan("Load Model").startActive(true))
+        try (Scope scope = tracer.buildSpan("Load Model").startActive(true))
         {
             scope.span().setTag("context", context.getClass().getSimpleName());
             if (context instanceof PureModelContextData)
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-api/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-api/pom.xml
index f0d06addb59..98a0ebdbc0b 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-api/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-api/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-protocol-api
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/pom.xml
index 13abe3f5e6f..5651e0f131e 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-protocol-generation-pure
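The ModelManager change above makes the tracer injectable instead of hard-wiring GlobalTracer, which is what lets TestSDLCLoader assert on finished spans. A minimal sketch of that usage (no loaders registered here, purely to show the constructor):

```java
import io.opentracing.mock.MockTracer;
import org.finos.legend.engine.language.pure.modelManager.ModelManager;
import org.finos.legend.engine.shared.core.deployment.DeploymentMode;

public class TracerInjectionSketch
{
    public static void main(String[] args)
    {
        // Spans created by loadData would be recorded on this MockTracer instead of the global one
        MockTracer tracer = new MockTracer();
        ModelManager modelManager = new ModelManager(DeploymentMode.TEST, tracer);
        System.out.println(tracer.finishedSpans().size()); // 0 until loadData is called
    }
}
```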
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/src/main/resources/core_protocol_generation/generation.pure b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/src/main/resources/core_protocol_generation/generation.pure
index ed2a6a417fa..83e534eebb8 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/src/main/resources/core_protocol_generation/generation.pure
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation-pure/src/main/resources/core_protocol_generation/generation.pure
@@ -513,12 +513,18 @@ function meta::protocols::generation::java::conventions(purePackage:String[1], j
                 | '.' + $packageStr;
     );
     pair(
-        $c->identifier($package),
+        javaIdentifier($package),
         $c->identifier($x.name->toOne())->toUpperFirstCharacter()->toOne()
     );}
   );
}

+function <<access.private>> meta::protocols::generation::java::javaIdentifier(name: String[1]):String[1]
+{
+  let keywordMap = javaKeywords()->buildKeywordReplacementMap();
+  $name->sanitizeIdentifier($keywordMap);
+}
+
function meta::protocols::generation::java::addProvidedTypesFromDependencies(conventions: Conventions[1], dependencies: Configuration[*]): Conventions[1]
{
  $dependencies->fold({config: Configuration[1], c1: Conventions[1] |
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation/pom.xml
index 93c6220db52..0087d0f6f18 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-generation/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-protocol-generation
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/pom.xml
index 71aa64d49f5..08cea3c2fa0 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-protocol-pure
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/PureClientVersions.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/PureClientVersions.java
index 5cd5f97d1da..5f722504544 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/PureClientVersions.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/PureClientVersions.java
@@ -28,7 +28,7 @@ public class PureClientVersions
         assert !hasRepeatedVersions(versions) : "Repeated version id :" + versions.toBag().selectByOccurrences(i -> i > 1).toSet().makeString("[", ", ", "]");
     }

-    public static String production = "v1_32_0";
+    public static String production = "v1_33_0";

     static boolean hasRepeatedVersions(ImmutableList<String> versions)
     {
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/AlloySDLC.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/AlloySDLC.java
index b261bff20d9..9e8456e87c5 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/AlloySDLC.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/AlloySDLC.java
@@ -50,4 +50,10 @@ public int hashCode()
     {
         return Objects.hashCode(this.project) + 89 * Objects.hashCode(this.version) + 17 * Objects.hashCode(this.groupId) + 17 * Objects.hashCode(artifactId);
     }
+
+    @Override
+    public <T> T accept(SDLCVisitor<T> visitor)
+    {
+        return visitor.visit(this);
+    }
 }
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/PureSDLC.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/PureSDLC.java
index 1de16fb4977..a5c3fbacc9a 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/PureSDLC.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/PureSDLC.java
@@ -40,4 +40,10 @@ public int hashCode()
     {
         return Objects.hash(this.overrideUrl, this.version, this.baseVersion, this.packageableElementPointers);
     }
+
+    @Override
+    public <T> T accept(SDLCVisitor<T> visitor)
+    {
+        return visitor.visit(this);
+    }
 }
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLC.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLC.java
index 88ca3ab4a84..d4034434c1f 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLC.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLC.java
@@ -16,7 +16,6 @@
 import com.fasterxml.jackson.annotation.JsonSubTypes;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
-
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
@@ -24,7 +23,8 @@
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type")
 @JsonSubTypes({
     @JsonSubTypes.Type(value = PureSDLC.class, name = "pure"),
-    @JsonSubTypes.Type(value = AlloySDLC.class, name = "alloy")
+    @JsonSubTypes.Type(value = AlloySDLC.class, name = "alloy"),
+    @JsonSubTypes.Type(value = WorkspaceSDLC.class, name = "workspace")
 })
 public abstract class SDLC
 {
@@ -52,4 +52,9 @@ public int hashCode()
     {
         return Objects.hash(version, packageableElementPointers);
     }
+
+    public <T> T accept(SDLCVisitor<T> visitor)
+    {
+        throw new UnsupportedOperationException("Not implemented");
+    }
 }
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLCVisitor.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLCVisitor.java
new file mode 100644
index 00000000000..f0712e8e930
--- /dev/null
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/SDLCVisitor.java
@@ -0,0 +1,25 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.protocol.pure.v1.model.context;
+
+public interface SDLCVisitor<T>
+{
+    T visit(AlloySDLC alloySDLC);
+
+    T visit(PureSDLC pureSDLC);
+
+    T visit(WorkspaceSDLC workspaceSDLC);
+}
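The interplay of SDLCVisitor&lt;T&gt; with the accept overrides on SDLC and its subtypes is classic double dispatch: SDLCLoader no longer needs instanceof chains over the SDLC flavours. A self-contained toy mirror of the pattern, using stand-in types rather than the real protocol classes:

```java
// Stand-in types for illustration only; the real classes live in
// org.finos.legend.engine.protocol.pure.v1.model.context.
public class VisitorSketch
{
    interface SDLCVisitor<T> { T visit(PureSDLC s); T visit(AlloySDLC s); T visit(WorkspaceSDLC s); }

    // The base type throws, matching how unknown SDLC subtypes fail fast.
    abstract static class SDLC { <T> T accept(SDLCVisitor<T> v) { throw new UnsupportedOperationException("Not implemented"); } }
    static class PureSDLC extends SDLC { @Override <T> T accept(SDLCVisitor<T> v) { return v.visit(this); } }
    static class AlloySDLC extends SDLC { @Override <T> T accept(SDLCVisitor<T> v) { return v.visit(this); } }
    static class WorkspaceSDLC extends SDLC { @Override <T> T accept(SDLCVisitor<T> v) { return v.visit(this); } }

    public static void main(String[] args)
    {
        SDLCVisitor<String> labeller = new SDLCVisitor<String>()
        {
            @Override public String visit(PureSDLC s) { return "pure"; }
            @Override public String visit(AlloySDLC s) { return "alloy"; }
            @Override public String visit(WorkspaceSDLC s) { return "workspace"; }
        };
        SDLC sdlc = new WorkspaceSDLC();
        System.out.println(sdlc.accept(labeller)); // prints "workspace", no instanceof needed
    }
}
```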
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/WorkspaceSDLC.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/WorkspaceSDLC.java
new file mode 100644
index 00000000000..fd2ea504951
--- /dev/null
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/context/WorkspaceSDLC.java
@@ -0,0 +1,61 @@
+// Copyright 2020 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.protocol.pure.v1.model.context;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+public class WorkspaceSDLC extends SDLC
+{
+    @JsonProperty(value = "project")
+    public String project;
+
+    @JsonProperty(value = "isGroupWorkspace")
+    public boolean isGroupWorkspace;
+
+    @JsonIgnore
+    public String getWorkspace()
+    {
+        return this.version;
+    }
+
+    @Override
+    public boolean equals(Object o)
+    {
+        if (this == o)
+        {
+            return true;
+        }
+        if ((o == null) || (o.getClass() != this.getClass()))
+        {
+            return false;
+        }
+        WorkspaceSDLC that = (WorkspaceSDLC) o;
+        return Objects.equals(this.project, that.project) && Objects.equals(this.version, that.version) && Objects.equals(this.isGroupWorkspace, that.isGroupWorkspace);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        return Objects.hashCode(this.project) + 89 * Objects.hashCode(this.version) + 17 * Objects.hashCode(this.isGroupWorkspace);
+    }
+
+    @Override
+    public <T> T accept(SDLCVisitor<T> visitor)
+    {
+        return visitor.visit(this);
+    }
+}
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/test/java/org/finos/legend/engine/protocol/test/TestProtocolUpdates.java b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/test/java/org/finos/legend/engine/protocol/test/TestProtocolUpdates.java
index 550f911bbc6..6b4fbb5a5e2 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/test/java/org/finos/legend/engine/protocol/test/TestProtocolUpdates.java
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol-pure/src/test/java/org/finos/legend/engine/protocol/test/TestProtocolUpdates.java
@@ -24,6 +24,6 @@ public class TestProtocolUpdates
     public void testProductionProtocolVersion()
     {
         String productionProtocolVersion = PureClientVersions.production;
-        Assert.assertEquals("v1_32_0", productionProtocolVersion);
+        Assert.assertEquals("v1_33_0", productionProtocolVersion);
     }
 }
diff --git a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol/pom.xml b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol/pom.xml
index 3222c5f3767..5fa2d8a0d74 100644
--- a/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/legend-engine-protocol/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-language-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
     legend-engine-protocol
diff --git a/legend-engine-core/legend-engine-core-language-pure/pom.xml b/legend-engine-core/legend-engine-core-language-pure/pom.xml
index 0897b6cb4e3..41534e5d6ec 100644
--- a/legend-engine-core/legend-engine-core-language-pure/pom.xml
+++ b/legend-engine-core/legend-engine-core-language-pure/pom.xml
@@ -18,7 +18,7 @@
     org.finos.legend.engine
     legend-engine-core
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
diff --git a/legend-engine-core/legend-engine-core-query-pure/legend-engine-query-pure/pom.xml b/legend-engine-core/legend-engine-core-query-pure/legend-engine-query-pure/pom.xml
index 7cc07a0c3f1..fadab77e656 100644
--- a/legend-engine-core/legend-engine-core-query-pure/legend-engine-query-pure/pom.xml
+++ b/legend-engine-core/legend-engine-core-query-pure/legend-engine-query-pure/pom.xml
@@ -19,7 +19,7 @@
     org.finos.legend.engine
     legend-engine-core-query-pure
-    4.32.1-SNAPSHOT
+    4.35.4-SNAPSHOT
     4.0.0
legend-engine-query-pure diff --git a/legend-engine-core/legend-engine-core-query-pure/pom.xml b/legend-engine-core/legend-engine-core-query-pure/pom.xml index 1f0f2e790de..ad7609fc061 100644 --- a/legend-engine-core/legend-engine-core-query-pure/pom.xml +++ b/legend-engine-core/legend-engine-core-query-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-core - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-shared/legend-engine-shared-core/pom.xml b/legend-engine-core/legend-engine-core-shared/legend-engine-shared-core/pom.xml index 75001d41b25..bd31e3ef550 100644 --- a/legend-engine-core/legend-engine-core-shared/legend-engine-shared-core/pom.xml +++ b/legend-engine-core/legend-engine-core-shared/legend-engine-shared-core/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-shared - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-shared-core diff --git a/legend-engine-core/legend-engine-core-shared/legend-engine-shared-javaCompiler/pom.xml b/legend-engine-core/legend-engine-core-shared/legend-engine-shared-javaCompiler/pom.xml index d9a7fc82689..c1140e7193f 100644 --- a/legend-engine-core/legend-engine-core-shared/legend-engine-shared-javaCompiler/pom.xml +++ b/legend-engine-core/legend-engine-core-shared/legend-engine-shared-javaCompiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-shared - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-shared-javaCompiler diff --git a/legend-engine-core/legend-engine-core-shared/pom.xml b/legend-engine-core/legend-engine-core-shared/pom.xml index a674da193be..3b19fae1fd6 100644 --- a/legend-engine-core/legend-engine-core-shared/pom.xml +++ b/legend-engine-core/legend-engine-core-shared/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-core - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-data-generation/pom.xml b/legend-engine-core/legend-engine-core-test/legend-engine-test-data-generation/pom.xml index d7286cac2cd..33b98d58ee8 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-test-data-generation/pom.xml +++ b/legend-engine-core/legend-engine-core-test/legend-engine-test-data-generation/pom.xml @@ -3,7 +3,7 @@ org.finos.legend.engine legend-engine-core-test - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/pom.xml b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/pom.xml index 96f215753bc..7a555052b84 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/pom.xml +++ b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-test - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/test/runner/mapping/MappingTestRunner.java b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/test/runner/mapping/MappingTestRunner.java index 81fb2474bcc..e402ce14b76 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/test/runner/mapping/MappingTestRunner.java +++ 
b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/test/runner/mapping/MappingTestRunner.java @@ -60,7 +60,7 @@ import java.util.*; import java.util.function.Consumer; -import static org.finos.legend.engine.language.pure.compiler.toPureGraph.HelperRuntimeBuilder.getElement; +import static org.finos.legend.engine.language.pure.compiler.toPureGraph.HelperRuntimeBuilder.getStore; @Deprecated public class MappingTestRunner @@ -102,7 +102,7 @@ public void setupTestData() CompileContext context = this.pureModel.getContext(); Root_meta_core_runtime_ConnectionStore connectionStore = new Root_meta_core_runtime_ConnectionStore_Impl("") ._connection(conn.accept(connectionVisitor)) - ._element(getElement(conn.element, conn.elementSourceInformation, context)); + ._element(getStore(conn.element, conn.elementSourceInformation, context)); this.runtime._connectionStoresAdd(connectionStore); }); } diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/testable/mapping/extension/MappingTestRunner.java b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/testable/mapping/extension/MappingTestRunner.java index 52e11c1d323..89c8d97ed58 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/testable/mapping/extension/MappingTestRunner.java +++ b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-mapping/src/main/java/org/finos/legend/engine/testable/mapping/extension/MappingTestRunner.java @@ -19,7 +19,6 @@ import org.eclipse.collections.api.tuple.Pair; import org.eclipse.collections.impl.tuple.Tuples; import org.eclipse.collections.impl.utility.ListIterate; -import org.finos.legend.engine.language.pure.compiler.toPureGraph.CompileContext; import org.finos.legend.engine.language.pure.compiler.toPureGraph.ConnectionFirstPassBuilder; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.plan.execution.PlanExecutor; @@ -57,7 +56,7 @@ import java.util.stream.Collectors; import static org.finos.legend.engine.language.pure.compiler.toPureGraph.HelperModelBuilder.getElementFullPath; -import static org.finos.legend.engine.language.pure.compiler.toPureGraph.HelperRuntimeBuilder.getElement; +import static org.finos.legend.engine.language.pure.compiler.toPureGraph.HelperRuntimeBuilder.getStore; public class MappingTestRunner implements TestRunner { @@ -142,7 +141,7 @@ private TestResult executeMappingTest(MappingTest mappingTest, MappingTestRunne Connection conn = connection.getOne(); Root_meta_core_runtime_ConnectionStore connectionStore = new Root_meta_core_runtime_ConnectionStore_Impl("") ._connection(conn.accept(context.getConnectionVisitor())) - ._element(getElement(conn.element, conn.elementSourceInformation, context.getPureModel().getContext())); + ._element(getStore(conn.element, conn.elementSourceInformation, context.getPureModel().getContext())); runtime._connectionStoresAdd(connectionStore); }); handleGenerationOfPlan(connections.stream().map(Pair::getOne).collect(Collectors.toList()), runtime, context); diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-shared/pom.xml b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-shared/pom.xml index 9d19e01a47e..3344ab5f87a 100644 --- 
a/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-shared/pom.xml +++ b/legend-engine-core/legend-engine-core-test/legend-engine-test-runner-shared/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-test - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-test-runner-shared diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-test-server-shared/pom.xml b/legend-engine-core/legend-engine-core-test/legend-engine-test-server-shared/pom.xml index eed9506b68f..978a685b682 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-test-server-shared/pom.xml +++ b/legend-engine-core/legend-engine-core-test/legend-engine-test-server-shared/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-test - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-test-server-shared diff --git a/legend-engine-core/legend-engine-core-test/legend-engine-testable/pom.xml b/legend-engine-core/legend-engine-core-test/legend-engine-testable/pom.xml index 7d455c55c97..983fded224f 100644 --- a/legend-engine-core/legend-engine-core-test/legend-engine-testable/pom.xml +++ b/legend-engine-core/legend-engine-core-test/legend-engine-testable/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-core-test - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-testable diff --git a/legend-engine-core/legend-engine-core-test/pom.xml b/legend-engine-core/legend-engine-core-test/pom.xml index eabebcd002a..bd291804b58 100644 --- a/legend-engine-core/legend-engine-core-test/pom.xml +++ b/legend-engine-core/legend-engine-core-test/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-core - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-core/pom.xml b/legend-engine-core/pom.xml index b2c14141b16..d2d5783f50d 100644 --- a/legend-engine-core/pom.xml +++ b/legend-engine-core/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/pom.xml b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/pom.xml index b51cee8d273..abcfaa09029 100644 --- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/pom.xml +++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-code - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-code-compiled-core diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/legend/test/handlersTest.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/legend/test/handlersTest.pure index 48d8a454c6e..09e3d2fbff2 100644 --- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/legend/test/handlersTest.pure +++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/legend/test/handlersTest.pure @@ -372,6 +372,10 @@ Class meta::legend::test::handlers::model::TestString length(){$this.string->length()}:Integer[1]; ltrim(){$this.string->ltrim()}:String[1]; + lpad(){$this.string->lpad(1)}:String[1]; + lpad2(){$this.string->lpad(1, '0')}:String[1]; + rpad(){$this.string->rpad(1)}:String[1]; + rpad2(){$this.string->rpad(1, '0')}:String[1]; 
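// Note on the four handler entries just added: they register both arities of the new
// lpad/rpad functions introduced later in this diff (core/pure/corefunctions/stringExtension.pure).
// A quick, hypothetical illustration of the semantics they exercise, assuming $this.string == 'ab':
//   'ab'->lpad(1)       => 'a'      -- a target length below the string length truncates
//   'ab'->lpad(5, '0')  => '000ab'  -- pads on the left up to the target length
//   'ab'->rpad(5, '0')  => 'ab000'  -- pads on the right up to the target length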
parseBoolean(){$this.string->parseBoolean()}:Boolean[1]; parseDate(){$this.string->parseDate()}:Date[1]; @@ -390,7 +394,7 @@ Class meta::legend::test::handlers::model::TestString toString(){3->toString()}:String[1]; toUpper(){$this.string->toUpper()}:String[1]; trim(){$this.string->trim()}:String[1]; - + hashString(){$this.string->meta::pure::functions::hash::hash(meta::pure::functions::hash::HashType.MD5)}:String[1]; } Class meta::legend::test::handlers::model::TestDate diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/metaExtension.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/metaExtension.pure index 6bee2a3fe0e..d32252f31ec 100644 --- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/metaExtension.pure +++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/metaExtension.pure @@ -121,15 +121,37 @@ function {doc.doc = 'Get all properties on the provided type / class'} ->concatenate($class.qualifiedPropertiesFromAssociations) } -function {doc.doc = 'Get all nested types present in this property tree of this class'} +function {doc.doc = 'Get all nested types present in the property tree of this class or its hierarchy.'} meta::pure::functions::meta::allNestedPropertyTypes(class : Class[1]) : Type[*] { - let propertyTypes = $class.properties->map(p | $p.genericType.rawType->toOne()); + let propertyTypes = $class->meta::pure::functions::meta::hierarchicalProperties()->map(p | $p.genericType.rawType->toOne()); let propertiesThatAreClasses = $propertyTypes->filter(type | $type->instanceOf(Class))->cast(@Class); - $class->concatenate($propertyTypes->concatenate($propertiesThatAreClasses->map(class |$class->allNestedPropertyTypes()))); + $class->concatenate($class->validGeneralizations())->concatenate($propertyTypes->concatenate($propertiesThatAreClasses->map(class |$class->allNestedPropertyTypes())))->removeDuplicates(); } -function {doc.doc = 'Get defined and inheritied properties on the provided class'} +function {doc.doc = 'Get the explicitly defined generalizations.'} + meta::pure::functions::meta::validGeneralizations(type : Type[1]) : Class[*] +{ + $type.generalizations.general.rawType->filter(c | $c != Any && $c != Enum)->cast(@Class); +} + +function {doc.doc = 'Recursively get all explicitly defined generalizations.'} + meta::pure::functions::meta::hierarchicalAllGeneralizations(type : Type[1]) : Class[*] +{ + let oneLevel = $type->validGeneralizations(); + $oneLevel->concatenate($oneLevel->map(c | $c->validGeneralizations()))->removeDuplicates(); +} + +function {doc.doc = 'Get all inherited properties on the provided class'} + meta::pure::functions::meta::hierarchicalAllProperties(class:Class[1]):AbstractProperty[*] +{ + if($class==Any, + | [], + | $class->allProperties()->concatenate($class.generalizations->map(g| hierarchicalProperties($g.general.rawType->cast(@Class)->toOne())))->removeDuplicates() + ); +} + +function {doc.doc = 'Get defined and inherited properties on the provided class'} meta::pure::functions::meta::hierarchicalProperties(class:Class[1]):Property[*] { if($class==Any, @@ -138,7 +160,7 @@ function {doc.doc = 'Get defined and inheritied properties on the provided class ); } -function {doc.doc = 'Get defined and inheritied properties via associations 
on the provided class'}
+function {doc.doc = 'Get defined and inherited properties via associations on the provided class'}
  meta::pure::functions::meta::hierarchicalPropertiesFromAssociations(class:Class[1]):Property[*]
 {
    if($class==Any,
@@ -147,7 +169,7 @@ function {doc.doc = 'Get defined and inheritied properties via associations on t
    );
 }
 
-function {doc.doc = 'Get defined and inheritied qualified properties on the provided class'}
+function {doc.doc = 'Get defined and inherited qualified properties on the provided class'}
  meta::pure::functions::meta::hierarchicalQualifiedProperties(class:Class[1]):AbstractProperty[*]
 {
    if($class==Any,
diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/stringExtension.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/stringExtension.pure
index 2ec81292d02..390c64a9cbd 100644
--- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/stringExtension.pure
+++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/stringExtension.pure
@@ -167,6 +167,36 @@ function {doc.doc = 'Un-camel case / humanize the provided string using provided
    ->joinStrings(' ');
 }
 
+function meta::pure::functions::string::lpad(str:String[1], length:Integer[1]):String[1]
+{
+   lpad($str, $length, ' ')
+}
+
+function meta::pure::functions::string::lpad(str:String[1], length:Integer[1], char:String[1]):String[1]
+{
+   pad($str, $length, $char, true)
+}
+
+function meta::pure::functions::string::rpad(str:String[1], length:Integer[1]):String[1]
+{
+   rpad($str, $length, ' ')
+}
+
+function meta::pure::functions::string::rpad(str:String[1], length:Integer[1], char:String[1]):String[1]
+{
+   pad($str, $length, $char, false)
+}
+
+function <<access.private>> meta::pure::functions::string::pad(str:String[1], length:Integer[1], char:String[1], left:Boolean[1]):String[1]
+{
+   let result = if ($str->length() > $length,
+                    | $str->substring(0, $length),
+                    | range($length - $str->length())->fold({acc, s | if ($left, | $char + $s, | $s + $char)}, $str->toOne())
+                );
+
+   if ($result->length() > $length, | $result->substring(0, $length), | $result);
+}
+
 function {doc.doc = 'Split the string and select the part'}
  meta::pure::functions::string::splitPart(str:String[0..1], token:String[1], part:Integer[1]):String[0..1]
 {
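The fold in pad above is worth tracing once, since it drives all four entry points: for lpad('abcd', 7, '_'), range(7 - 4) yields three fold steps, each prepending the pad character, so 'abcd' becomes '_abcd', then '__abcd', then '___abcd'; the trailing guard only matters for multi-character pad strings, where the last repetition can overshoot the target length. A small test-style sketch of those cases (not part of the patch, function name hypothetical):

function <<test.Test>> meta::pure::functions::string::tests::testPadSketch():Boolean[1]
{
   assertEquals('___abcd', lpad('abcd', 7, '_'));   // three prepends, no truncation needed
   assertEquals('abcd___', rpad('abcd', 7, '_'));   // same fold, appending instead of prepending
   assertEquals('xyxyx', lpad('ab', 5, 'xy'));      // fold yields 'xyxyxyab' (8 chars); the guard trims to 5
}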
diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/tests/stringExtension.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/tests/stringExtension.pure
index 92134809154..8d0324b7bd8 100644
--- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/tests/stringExtension.pure
+++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/corefunctions/tests/stringExtension.pure
@@ -110,6 +110,22 @@ function <<test.Test>> {test.excludePlatform = 'Java compiled'} meta::pure::func
    assertEquals('abab', repeatString('ab', 2));
 }
 
+function <<test.Test>> meta::pure::functions::string::tests::testLpad():Boolean[1]
+{
+   assertEquals('abc', lpad('abcd', 3, '_'));
+   assertEquals('______abcd', lpad('abcd', 10, '_'));
+   assertEquals('      abcd', lpad('abcd', 10));
+   assertEquals('abcd', lpad('abcd', 10, ''));
+}
+
+function <<test.Test>> meta::pure::functions::string::tests::testRpad():Boolean[1]
+{
+   assertEquals('abc', rpad('abcd', 3, '_'));
+   assertEquals('abcd______', rpad('abcd', 10, '_'));
+   assertEquals('abcd      ', rpad('abcd', 10));
+   assertEquals('abcd', rpad('abcd', 10, ''));
+}
+
 function <<test.Test>> meta::pure::functions::string::tests::testSplitPart():Boolean[1]
 {
    assertEquals([], splitPart([], 'a', 1));
diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/mapping/modelToModel.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/mapping/modelToModel.pure
index f2e97ad2f94..8a670e34456 100644
--- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/mapping/modelToModel.pure
+++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/mapping/modelToModel.pure
@@ -63,7 +63,7 @@ Class meta::external::store::model::JsonModelConnection extends PureModelConnect
    url : String[1];
 }
 
-Class meta::external::store::model::JsonDataRecord
+Class meta::pure::mapping::modelToModel::JsonDataRecord
 {
    number : Integer[1];
    record : String[1];
@@ -75,7 +75,7 @@ Class meta::external::store::model::XmlModelConnection extends PureModelConnecti
    url : String[1];
 }
 
-Class meta::external::store::model::XmlDataRecord
+Class meta::pure::mapping::modelToModel::XmlDataRecord
 {
    number : Integer[1];
    record : String[1];
diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/router/routing/router_routing.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/router/routing/router_routing.pure
index 47cab133ee6..7e7aebb9845 100644
--- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/router/routing/router_routing.pure
+++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/pure/router/routing/router_routing.pure
@@ -731,6 +731,10 @@ function meta::pure::router::routing::shouldStopFunctions(extensions:meta::pure:
    tdsContains_T_1__Function_MANY__TabularDataSet_1__Boolean_1_,
    tdsContains_T_1__Function_MANY__String_MANY__TabularDataSet_1__Function_1__Boolean_1_,
    splitPart_String_$0_1$__String_1__Integer_1__String_$0_1$_,
+   lpad_String_1__Integer_1__String_1_,
+   lpad_String_1__Integer_1__String_1__String_1_,
+   rpad_String_1__Integer_1__String_1_,
+   rpad_String_1__Integer_1__String_1__String_1_,
    meta::pure::tds::extensions::firstNotNull_T_MANY__T_$0_1$_,
    meta::pure::functions::date::calendar::annualized_Date_1__String_1__Date_1__Number_$0_1$__Number_$0_1$_,
    meta::pure::functions::date::calendar::cme_Date_1__String_1__Date_1__Number_$0_1$__Number_$0_1$_,
diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/store/m2m/tests/legend/constraints.pure b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/store/m2m/tests/legend/constraints.pure
index 5655669e235..f82278765af 100644
--- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/store/m2m/tests/legend/constraints.pure
+++ 
b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-core/src/main/resources/core/store/m2m/tests/legend/constraints.pure @@ -63,6 +63,26 @@ meta::pure::mapping::modelToModel::test::alloy::constraints::testQueryOnTypeWith assert(jsonEquivalent('{"name":"FirmX"}'->parseJSON(), $result.values->toOne()->parseJSON())); } +function <> +{ serverVersion.start='v1_19_0' } +meta::pure::mapping::modelToModel::test::alloy::constraints::testGraphFetchWithPackageConflictingPropertyName():Boolean[1] +{ + let result = execute( |Firm.all()->graphFetchChecked(#{Firm {organization} }#)->serialize(#{Firm {organization} }#), + m2mconstraintmapping1, + ^Runtime( + connectionStores = ^ConnectionStore( + element=^ModelStore(), + connection= ^JsonModelConnection( + class=_Firm, + url='data:application/json,{"name":"FirmX","org":"orgA"}' + ) + ) + ), + meta::pure::extension::defaultExtensions() + ); + assert(jsonEquivalent('{"defects":[{"path":[],"enforcementLevel":"Error","ruleType":"ClassConstraint","externalId":null,"id":"0","ruleDefinerPath":"meta::pure::mapping::modelToModel::test::alloy::constraints::Firm","message":"Constraint :[0] violated in the Class Firm"}],"source":{"defects":[],"source":{"number":1,"record":"{\\"name\\":\\"FirmX\\",\\"org\\":\\"orgA\\"}"},"value":{"org":"orgA","employees":[]}},"value":{"organization":"orgA"}}'->parseJSON(), $result.values->toOne()->parseJSON())); +} + function <> { serverVersion.start='v1_19_0' } meta::pure::mapping::modelToModel::test::alloy::constraints::testQueryOnSourceTypeWithFailingConstraintSucceedsWithConstraintsDisabled():Boolean[1] @@ -386,6 +406,7 @@ Class meta::pure::mapping::modelToModel::test::alloy::constraints::Firm { name: String[1]; employees: meta::pure::mapping::modelToModel::test::alloy::constraints::Person[*]; + organization: String[1]; } Class meta::pure::mapping::modelToModel::test::alloy::constraints::Firm2 @@ -403,6 +424,7 @@ Class meta::pure::mapping::modelToModel::test::alloy::constraints::_Firm { name: String[1]; employees:meta::pure::mapping::modelToModel::test::alloy::constraints::_Person[*]; + org: String[1]; } Class meta::pure::mapping::modelToModel::test::alloy::constraints::_Firm2 @@ -680,7 +702,8 @@ Mapping meta::pure::mapping::modelToModel::test::alloy::constraints::m2mconstrai { ~src meta::pure::mapping::modelToModel::test::alloy::constraints::_Firm name: $src.name, - employees: $src.employees + employees: $src.employees, + organization: $src.org } meta::pure::mapping::modelToModel::test::alloy::constraints::Person: Pure diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-functions/pom.xml b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-functions/pom.xml index cafc8094775..1b548aaad6d 100644 --- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-functions/pom.xml +++ b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-compiled-functions/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-code - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-code-compiled-functions diff --git a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-core-extension/pom.xml b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-core-extension/pom.xml index 7fa3a90ac59..43fa36225c8 100644 --- a/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-core-extension/pom.xml +++ 
b/legend-engine-pure/legend-engine-pure-code/legend-engine-pure-code-core-extension/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-code - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-code-core-extension diff --git a/legend-engine-pure/legend-engine-pure-code/pom.xml b/legend-engine-pure/legend-engine-pure-code/pom.xml index fcf48b09e5d..bf7a7802ed1 100644 --- a/legend-engine-pure/legend-engine-pure-code/pom.xml +++ b/legend-engine-pure/legend-engine-pure-code/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-metadata-pure/pom.xml b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-metadata-pure/pom.xml index 4d7ef4be307..94119e44ec4 100644 --- a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-metadata-pure/pom.xml +++ b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-metadata-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-ide - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-pure/pom.xml b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-pure/pom.xml index eccd598d078..902e3b9a872 100644 --- a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-pure/pom.xml +++ b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-ide - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/pom.xml b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/pom.xml index 17a0b4aefdf..943af4072c7 100644 --- a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/pom.xml +++ b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-ide - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/src/main/java/org/finos/legend/engine/ide/PureIDELight.java b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/src/main/java/org/finos/legend/engine/ide/PureIDELight.java index 0008f70baee..5a20a181b37 100644 --- a/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/src/main/java/org/finos/legend/engine/ide/PureIDELight.java +++ b/legend-engine-pure/legend-engine-pure-ide/legend-engine-pure-ide-light/src/main/java/org/finos/legend/engine/ide/PureIDELight.java @@ -45,6 +45,7 @@ protected MutableList buildRepositories(SourceLocationCon .with(this.buildCore("legend-engine-xts-mastery/legend-engine-xt-mastery-pure", "mastery")) .with(this.buildCore("legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-pure", "function_activator")) .with(this.buildCore("legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure", "snowflakeapp")) + .with(this.buildCore("legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure", "bigqueryfunction")) .with(this.buildCore("legend-engine-xts-hostedService/legend-engine-xt-hostedService-pure", "hostedservice")) .with(this.buildCore("legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure", "relational")) 
.with(this.buildCore("legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-javaPlatformBinding-pure", "relational-java-platform-binding")) diff --git a/legend-engine-pure/legend-engine-pure-ide/pom.xml b/legend-engine-pure/legend-engine-pure-ide/pom.xml index fe62d799f87..db9b7685a12 100644 --- a/legend-engine-pure/legend-engine-pure-ide/pom.xml +++ b/legend-engine-pure/legend-engine-pure-ide/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-diagram-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-diagram-java/pom.xml index 3e69a6bdf58..7d977c23f1c 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-diagram-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-diagram-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-dsl-diagram-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-graph-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-graph-java/pom.xml index 336a660b58a..4e794efe7e4 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-graph-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-graph-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-dsl-graph-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-mapping-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-mapping-java/pom.xml index a6bb9845859..9292eebdca4 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-mapping-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-mapping-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-dsl-mapping-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-path-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-path-java/pom.xml index cf4adf171aa..b4d57b5ef77 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-path-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-dsl-path-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-dsl-path-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-java/pom.xml 
b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-java/pom.xml index b2e106b6fd1..e121d7d02c8 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-functions-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-json-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-json-java/pom.xml index 8f9acf9465b..05a6fb366fa 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-json-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-functions-json-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-functions-json-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-java/pom.xml index 36fd4464f5b..ad841e4923b 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-store-relational-java/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-store-relational-java/pom.xml index 78d84ca6879..3293c9491bb 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-store-relational-java/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/legend-engine-pure-platform-store-relational-java/pom.xml @@ -22,7 +22,7 @@ org.finos.legend.engine legend-engine-pure-platform-modular-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-pure-platform-store-relational-java diff --git a/legend-engine-pure/legend-engine-pure-platform-modular-generation/pom.xml b/legend-engine-pure/legend-engine-pure-platform-modular-generation/pom.xml index ff3867b33eb..e85cf24b8e4 100644 --- a/legend-engine-pure/legend-engine-pure-platform-modular-generation/pom.xml +++ b/legend-engine-pure/legend-engine-pure-platform-modular-generation/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-compiler/pom.xml b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-compiler/pom.xml index 523ae9a2a96..375cf80e8d6 100644 --- a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-compiler/pom.xml +++ 
b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-compiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-runtime - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-execution/pom.xml b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-execution/pom.xml index 0385771f6ca..35beccf887d 100644 --- a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-execution/pom.xml +++ b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-runtime - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-extensions/pom.xml b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-extensions/pom.xml index 51ad8bec715..c4d2abed6d2 100644 --- a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-extensions/pom.xml +++ b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-pure-runtime-extensions/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-pure-runtime - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-xt-java-runtime-compiler/pom.xml b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-xt-java-runtime-compiler/pom.xml index 752fc2e25bc..e5449b79ac0 100644 --- a/legend-engine-pure/legend-engine-pure-runtime/legend-engine-xt-java-runtime-compiler/pom.xml +++ b/legend-engine-pure/legend-engine-pure-runtime/legend-engine-xt-java-runtime-compiler/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-pure-runtime - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-java-runtime-compiler diff --git a/legend-engine-pure/legend-engine-pure-runtime/pom.xml b/legend-engine-pure/legend-engine-pure-runtime/pom.xml index 6a851e58074..04e055772e1 100644 --- a/legend-engine-pure/legend-engine-pure-runtime/pom.xml +++ b/legend-engine-pure/legend-engine-pure-runtime/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-pure - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-pure/pom.xml b/legend-engine-pure/pom.xml index 644726cea75..f44b083da6e 100644 --- a/legend-engine-pure/pom.xml +++ b/legend-engine-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-api/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-api/pom.xml index aa2b8da6975..560c9747ba8 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-binding - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-pure/pom.xml index 1e0af19bad8..cc9cb7761e9 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-pure/pom.xml +++ 
b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/legend-engine-xt-analytics-binding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-binding - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/pom.xml index 37a5cbfbda1..1c5ad184d2a 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-binding/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-api/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-api/pom.xml index 3dd304e3490..236419909d9 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-class - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-pure/pom.xml index cfce69a9758..a5b87b70295 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-pure/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/legend-engine-xt-analytics-class-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-class - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/pom.xml index 1c1f5539889..81a400683f4 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-class/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-class/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-api/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-api/pom.xml index d5d303dac6e..8bfae5633b8 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-function - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-pure/pom.xml index f645ad5f1f5..b5674331734 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-pure/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/legend-engine-xt-analytics-function-pure/pom.xml @@ -19,7 
+19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-function - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/pom.xml index bbc6593b0b6..cf8427ebbf7 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-function/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-function/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-api/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-api/pom.xml index 51b4a91ff31..73ad805d9dd 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-api/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-analytics-lineage org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-pure/pom.xml index 2d1a0fb8727..1baf84e4088 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-pure/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/legend-engine-xt-analytics-lineage-pure/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-analytics-lineage org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/pom.xml index dd62aee9f06..5f437ed1733 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-lineage/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-api/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-api/pom.xml index 9e81fbb0417..55b054729fe 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-api/pom.xml @@ -3,7 +3,7 @@ legend-engine-xts-analytics-mapping org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 Legend Engine - XT - Analytics - Mapping - API @@ -62,10 +62,6 @@ org.finos.legend.engine legend-engine-xt-analytics-function-pure - - org.finos.legend.engine - legend-engine-xt-analytics-function-pure - org.finos.legend.engine legend-engine-xt-analytics-binding-pure diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-protocol/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-protocol/pom.xml index 2a2f2f46698..8691aed6548 100644 --- 
a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-protocol/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-mapping - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 Legend Engine - XT - Analytics - Mapping - Protocol diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/pom.xml index 42c581d3582..1fa255c0d6e 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/pom.xml @@ -3,7 +3,7 @@ legend-engine-xts-analytics-mapping org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/analyticsTest.pure b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/analyticsTest.pure index 1df60231c2b..5a1cc6275a3 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/analyticsTest.pure +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/analyticsTest.pure @@ -20,6 +20,11 @@ function meta::analytics::mapping::modelCoverage::test::generateModelCoverageAna meta::analytics::mapping::modelCoverage::analyze($mapping, false, false, false); } +function meta::analytics::mapping::modelCoverage::test::generateModelCoverageAnalyticsWithMappedEntityInfo(mapping: Mapping[1]): MappingModelCoverageAnalysisResult[1] +{ + meta::analytics::mapping::modelCoverage::analyze($mapping, true, false, false); +} + // Relational function <> meta::analytics::mapping::modelCoverage::test::testSimpleRelationalMappingCoverage():Boolean[1] { @@ -111,6 +116,41 @@ function <> meta::analytics::mapping::modelCove assertContains($result.mappedEntities.path, 'meta::analytics::mapping::modelCoverage::test::Target_meta::analytics::mapping::modelCoverage::test::Source_autoMapped_shared'); } + +function <> meta::analytics::mapping::modelCoverage::test::testAutoMappedComplexPropertiesMappingCoverageWithMappedEntityInfo():Boolean[1] +{ + let grammar = 'Class meta::analytics::mapping::modelCoverage::test::Target\n'+ + '{\n'+ + ' tgtId: String[1];\n'+ + ' shared: meta::analytics::mapping::modelCoverage::test::Shared[1];\n'+ + '}\n'+ + 'Class meta::analytics::mapping::modelCoverage::test::Shared\n'+ + '{\n'+ + ' sharedId: String[1];\n'+ + '}\n'+ + 'Class meta::analytics::mapping::modelCoverage::test::Source\n'+ + '{\n'+ + ' srcId: String[1];\n'+ + ' shared: meta::analytics::mapping::modelCoverage::test::Shared[1];\n'+ + '}\n'+ + '###Mapping\n'+ + 'Mapping meta::analytics::mapping::modelCoverage::test::simpleAutoMappedMapping\n'+ + '(\n'+ + ' *meta::analytics::mapping::modelCoverage::test::Target:Pure\n'+ + ' {\n'+ + ' ~src meta::analytics::mapping::modelCoverage::test::Source\n'+ + ' tgtId: 
$src.srcId,\n'+ + ' shared: $src.shared\n'+ + ' }\n'+ + ')\n'; + let elements = meta::legend::compileLegendGrammar($grammar); + let autoMappedMapping = $elements->filter(e|$e->instanceOf(Mapping))->at(0)->cast(@Mapping); + let result = meta::analytics::mapping::modelCoverage::test::generateModelCoverageAnalyticsWithMappedEntityInfo($autoMappedMapping); + assert($result.mappedEntities->size() == 2); + assertContains($result.mappedEntities.path, 'meta::analytics::mapping::modelCoverage::test::Target_meta::analytics::mapping::modelCoverage::test::Source_autoMapped_shared'); + assert($result.mappedEntities->filter(m |$m.info->isNotEmpty())->size() == 2); +} + function <> meta::analytics::mapping::modelCoverage::test::testCyclicalAutoMappedComplexPropertiesMappingCoverage():Boolean[1] { let grammar = 'Class meta::analytics::mapping::modelCoverage::test::Target\n'+ diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/mappedEntityBuilder.pure b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/mappedEntityBuilder.pure index 4272359f4aa..387366be166 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/mappedEntityBuilder.pure +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/legend-engine-xt-analytics-mapping-pure/src/main/resources/core_analytics_mapping/modelCoverage/mappedEntityBuilder.pure @@ -301,9 +301,9 @@ function meta::analytics::mapping::modelCoverage::buildEntity( let srcClassPath = if($pureInstanceSetImplementation.srcClass->isNotEmpty(),|$pureInstanceSetImplementation.srcClass->toOne()->cast(@Class)->elementToPath(),|''); $targetClassPath + '_' + $srcClassPath;, |$pm.targetSetImplementationId); - processAutoMappedPropertyMapping($pm.property->cast(@AbstractProperty), $id + '_autoMapped_', $config, []);); + processAutoMappedPropertyMapping($pm.property->cast(@AbstractProperty), $id + '_autoMapped_', $isRootEntity, $config, []);); let autoMappedPrimitiveProperties = if($setImplementation->instanceOf(PureInstanceSetImplementation), - |buildAutoMappedProperty($class, if($setImplementation->cast(@PureInstanceSetImplementation).srcClass->isEmpty(),|[],|$setImplementation->cast(@PureInstanceSetImplementation).srcClass->cast(@Class)->toOne()), $properties, $config), + |buildAutoMappedProperty($class, if($setImplementation->cast(@PureInstanceSetImplementation).srcClass->isEmpty(),|[],|$setImplementation->cast(@PureInstanceSetImplementation).srcClass->cast(@Class)->toOne()), $properties, $isRootEntity, $config), |[]); buildEntity( $class, @@ -318,6 +318,7 @@ function meta::analytics::mapping::modelCoverage::buildEntity( function meta::analytics::mapping::modelCoverage::processAutoMappedPropertyMapping( property: AbstractProperty[1], prefix: String[1], + isRootEntity: Boolean[1], config: AnalysisConfiguration[1], visited: Pair, String>[*] ): AutoMappedHelperClass[1] @@ -344,8 +345,9 @@ function meta::analytics::mapping::modelCoverage::processAutoMappedPropertyMappi | ^MappedPropertyInfo(type = $property.genericType.rawType->toOne()->mapType(), multiplicity = $property.multiplicity->toOne()), | [])); let newVisited = $visited->concatenate([^Pair, String>(first = $property.genericType.rawType->toOne()->cast(@Class), second = 
$entityPath)]); - let children = $property.genericType.rawType->toOne()->cast(@Class).properties->map(p | $p->processAutoMappedPropertyMapping($entityPath + '_' , $config, $newVisited)); - let currentMappedEntity = ^MappedEntity(path=$entityPath, properties = $children->map(c|$c.mappedProperties)); + let propertyType = $property.genericType.rawType->toOne()->cast(@Class); + let children = $propertyType.properties->map(p | $p->processAutoMappedPropertyMapping($entityPath + '_' , $isRootEntity, $config, $newVisited)); + let currentMappedEntity = ^MappedEntity(path=$entityPath, properties = $children->map(c|$c.mappedProperties), info = if($config.returnMappedEntityInfo == true, | ^MappedEntityInfo(isRootEntity = $isRootEntity, classPath = $propertyType->elementToPath()), | [])); ^AutoMappedHelperClass(mappedProperties = [$entityMappedProp], mappedEntities=[$currentMappedEntity]->concatenate($children->map(c|$c.mappedEntities))); ); ) @@ -355,6 +357,7 @@ function meta::analytics::mapping::modelCoverage::buildAutoMappedProperty( class:Class[1], srcClass: Class[0..1], mappedProperties: MappedProperty[*], + isRootEntity: Boolean[1], config: AnalysisConfiguration[1] ): MappedProperty[*] { diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/pom.xml index dc4f11e89ef..6aca9cd512f 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-mapping/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-generation/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-generation/pom.xml index 8a2cef1d5ef..9e5c2b619b2 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-generation/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-analytics-search - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-pure/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-pure/pom.xml index 8d556d18a90..a747c5a2ded 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-pure/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/legend-engine-xt-analytics-search-pure/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-analytics-search org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/pom.xml index 1ea29707898..5e38c424c7d 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-search/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-search/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement-api/pom.xml 
b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement-api/pom.xml index bda221c6705..9fd99c8c5e9 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement-api/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement-api/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-analytics-store org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-analytics-store-entitlement-api diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement/pom.xml index 21d5ba85b04..4997496a343 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/legend-engine-xt-analytics-store-entitlement/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-analytics-store org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/pom.xml b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/pom.xml index 8ea1c598453..8850eca838d 100644 --- a/legend-engine-xts-analytics/legend-engine-xts-analytics-store/pom.xml +++ b/legend-engine-xts-analytics/legend-engine-xts-analytics-store/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-analytics - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-analytics/pom.xml b/legend-engine-xts-analytics/pom.xml index 9f1109bd3cb..45d257de6ee 100644 --- a/legend-engine-xts-analytics/pom.xml +++ b/legend-engine-xts-analytics/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-pure/pom.xml b/legend-engine-xts-arrow/legend-engine-xt-arrow-pure/pom.xml index 03b2e282a85..0bcbb78ef25 100644 --- a/legend-engine-xts-arrow/legend-engine-xt-arrow-pure/pom.xml +++ b/legend-engine-xts-arrow/legend-engine-xt-arrow-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-arrow - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/pom.xml b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/pom.xml index 0b0db2cf8ba..417dc7e7f9d 100644 --- a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/pom.xml +++ b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/pom.xml @@ -3,7 +3,7 @@ org.finos.legend.engine legend-engine-xts-arrow - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowDataWriter.java b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowDataWriter.java index 330d78225c6..381604cbea7 100644 --- a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowDataWriter.java +++ b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowDataWriter.java @@ -16,11 +16,9 @@ import java.nio.charset.Charset; import java.util.Calendar; -import java.util.HashMap; -import 
java.util.Locale;
+import java.util.GregorianCalendar;
 import java.util.TimeZone;
 
-import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;
 import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
 import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
 import org.apache.arrow.adapter.jdbc.LegendArrowVectorIterator;
@@ -32,39 +30,40 @@
 import java.io.IOException;
 import java.io.OutputStream;
-import java.sql.ResultSet;
 import java.sql.SQLException;
+import org.finos.legend.engine.plan.execution.stores.relational.result.RelationalResult;
 
 public class ArrowDataWriter extends ExternalFormatWriter implements AutoCloseable
 {
     private final LegendArrowVectorIterator iterator;
     private final BufferAllocator allocator;
 
-    public ArrowDataWriter(ResultSet resultSet) throws SQLException
+    public ArrowDataWriter(RelationalResult resultSet) throws SQLException
     {
-        HashMap map = new HashMap();
-        this.allocator = new RootAllocator();
-        JdbcToArrowConfig config = new JdbcToArrowConfigBuilder(allocator, Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT)).build();
-        this.iterator = LegendArrowVectorIterator.create(resultSet, config);
+        Calendar calendar = resultSet.getRelationalDatabaseTimeZone() == null ?
+                new GregorianCalendar(TimeZone.getTimeZone("GMT")) :
+                new GregorianCalendar(TimeZone.getTimeZone(resultSet.getRelationalDatabaseTimeZone()));
+        JdbcToArrowConfig config = new JdbcToArrowConfigBuilder(allocator, calendar).setReuseVectorSchemaRoot(true).build();
+        this.iterator = LegendArrowVectorIterator.create(resultSet.getResultSet(), config);
     }
 
     @Override
     public void writeData(OutputStream outputStream) throws IOException
     {
-        try
+        try (VectorSchemaRoot vector = iterator.next();
+             ArrowStreamWriter writer = new ArrowStreamWriter(vector, null, outputStream);
+        )
         {
+            writer.start();
+            writer.writeBatch();
             while (this.iterator.hasNext())
             {
-                try (VectorSchemaRoot vector = iterator.next();
-                     ArrowStreamWriter writer = new ArrowStreamWriter(vector, null, outputStream)
-                )
-                {
-                    writer.start();
-                    writer.writeBatch();
-                }
+                iterator.next();
+                writer.writeBatch();
+            }
             }
         catch (Exception e)
diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowRuntimeExtension.java b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowRuntimeExtension.java
index 372a03f85ac..326a6053071 100644
--- a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowRuntimeExtension.java
+++ b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/main/java/org/finos/legend/engine/external/format/arrow/ArrowRuntimeExtension.java
@@ -65,7 +65,7 @@ public Result executeExternalizeTDSExecutionNode(ExternalFormatExternalizeTDSExe
 
     private Result streamArrowFromRelational(RelationalResult relationalResult) throws SQLException, IOException
     {
-        return new ExternalFormatSerializeResult(new ArrowDataWriter(relationalResult.getResultSet()), relationalResult, CONTENT_TYPE);
+        return new ExternalFormatSerializeResult(new ArrowDataWriter(relationalResult), relationalResult, CONTENT_TYPE);
     }
 
diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowNodeExecutor.java b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowNodeExecutor.java
index 0c388a95af5..6db2d4232d4 100644
--- a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowNodeExecutor.java
+++ 
b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowNodeExecutor.java @@ -14,6 +14,7 @@ import java.io.FileOutputStream; import java.io.IOException; +import java.util.TimeZone; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.VectorSchemaRoot; @@ -31,6 +32,7 @@ import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.model.result.SQLResultColumn; import org.finos.legend.engine.shared.core.api.request.RequestContext; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; @@ -45,7 +47,6 @@ public class TestArrowNodeExecutor { - @Test public void testExternalize() throws Exception { @@ -57,7 +58,7 @@ public void testExternalize() throws Exception mockExecutionNode.connection = mockDatabaseConnection; Mockito.when(mockDatabaseConnection.accept(any())).thenReturn(false); - try (Connection conn = DriverManager.getConnection("jdbc:h2:~/test", "sa", ""); + try (Connection conn = DriverManager.getConnection("jdbc:h2:~/test;TIME ZONE=America/New_York", "sa", ""); ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { //setup table @@ -70,7 +71,7 @@ public void testExternalize() throws Exception conn.createStatement().execute("INSERT INTO testtable (testInt, testString, testDate, testBool) VALUES(1,'A', '2020-01-01 00:00:00-05:00',true),( 2,null, '2020-01-01 00:00:00-02:00',false ),( 3,'B', '2020-01-01 00:00:00-05:00',false )"); conn.createStatement().execute("INSERT INTO testtableJoin (testIntR, testStringR) VALUES(6,'A'), (1,'B')"); - RelationalResult result = new RelationalResult(FastList.newListWith(new RelationalExecutionActivity("SELECT * FROM testtable left join testtableJoin on testtable.testInt=testtableJoin.testIntR", null)), mockExecutionNode, FastList.newListWith(new SQLResultColumn("testInt", "INTEGER"), new SQLResultColumn("testStringR", "VARCHAR"), new SQLResultColumn("testString", "VARCHAR"), new SQLResultColumn("testDate", "TIMESTAMP"), new SQLResultColumn("testBool", "TIMESTAMP")), null, "GMT", conn, null, null, null, new RequestContext()); + RelationalResult result = new RelationalResult(FastList.newListWith(new RelationalExecutionActivity("SELECT * FROM testtable left join testtableJoin on testtable.testInt=testtableJoin.testIntR", null)), mockExecutionNode, FastList.newListWith(new SQLResultColumn("testInt", "INTEGER"), new SQLResultColumn("testStringR", "VARCHAR"), new SQLResultColumn("testString", "VARCHAR"), new SQLResultColumn("testDate", "TIMESTAMP"), new SQLResultColumn("testBool", "TIMESTAMP")), null, "America/New_York", conn, null, null, null, new RequestContext()); ExternalFormatSerializeResult nodeExecute = (ExternalFormatSerializeResult) extension.executeExternalizeTDSExecutionNode(node, result, null, null); @@ -97,7 +98,7 @@ public void testExternalizeAsString() throws Exception mockExecutionNode.connection = mockDatabaseConnection; Mockito.when(mockDatabaseConnection.accept(any())).thenReturn(false); - try (Connection conn = DriverManager.getConnection("jdbc:h2:~/test", "sa", ""); + try (Connection conn = DriverManager.getConnection("jdbc:h2:~/test;TIME ZONE=America/New_York", "sa", ""); ) { @@ -106,7 +107,7 @@ public void testExternalizeAsString() throws Exception conn.createStatement().execute("Create Table testtable (testInt INTEGER, testString VARCHAR(255), testDate TIMESTAMP, testBool BOOLEAN)"); conn.createStatement().execute("INSERT INTO testtable (testInt, 
diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowQueries.java b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowQueries.java
index 2f0dfbb49c7..529873fee2a 100644
--- a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowQueries.java
+++ b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/java/TestArrowQueries.java
@@ -55,6 +55,7 @@
 public class TestArrowQueries
 {
+
     @Test
     public void runTest()
     {
@@ -62,6 +63,7 @@
             ByteArrayOutputStream baos = new ByteArrayOutputStream();
         )
         {
+
             ObjectMapper objectMapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports();
             ExecuteInput input = objectMapper.readValue(getClass().getClassLoader().getResource("arrowService.json"), ExecuteInput.class);
@@ -81,7 +83,8 @@
                 .build();
             StreamingResult streamingResult = (StreamingResult) executor.executeWithArgs(executeArgs);
             streamingResult.stream(baos, SerializationFormat.DEFAULT);
-            assertAndValidateArrow(new ByteArrayInputStream(baos.toByteArray()), "expectedArrowServiceData.arrow");
+            assertAndValidateArrow(new ByteArrayInputStream(baos.toByteArray()), "expectedArrowServiceData.arrow");
+
         }
         catch (Exception e)
         {
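`assertAndValidateArrow` (a test helper not shown in this diff) presumably replays the serialized stream against the checked-in `expectedArrowServiceData.arrow` fixture. A comparable assertion, sketched under that assumption with illustrative names, could read both streams back with `ArrowStreamReader` and compare schemas and row counts:

```java
import java.io.InputStream;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.ipc.ArrowStreamReader;
import org.junit.Assert;

public final class ArrowStreamAssertions
{
    public static void assertSameSchemaAndRowCount(InputStream actual, InputStream expected) throws Exception
    {
        try (BufferAllocator allocator = new RootAllocator();
             ArrowStreamReader actualReader = new ArrowStreamReader(actual, allocator);
             ArrowStreamReader expectedReader = new ArrowStreamReader(expected, allocator))
        {
            // Schemas must match before any batch comparison makes sense.
            Assert.assertEquals(expectedReader.getVectorSchemaRoot().getSchema(),
                    actualReader.getVectorSchemaRoot().getSchema());
            Assert.assertEquals(countRows(expectedReader), countRows(actualReader));
        }
    }

    private static long countRows(ArrowStreamReader reader) throws Exception
    {
        long rows = 0;
        while (reader.loadNextBatch()) // loads each record batch into the reader's root
        {
            rows += reader.getVectorSchemaRoot().getRowCount();
        }
        return rows;
    }
}
```

A deeper check would also compare vector contents batch by batch; schema plus row count is the cheapest meaningful invariant.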
"Quantity" }, - { - "_type": "string", - "value": "Settlement Date Time" - }, { "_type": "string", "value": "Trade Date" @@ -447,6 +422,7 @@ { "connection": { "_type": "RelationalDatabaseConnection", + "timeZone" : "America/New_York", "authenticationStrategy": { "_type": "h2Default" }, diff --git a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/resources/expectedArrowServiceData.arrow b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/resources/expectedArrowServiceData.arrow index e556d7a94a0..c03b1b8aa2d 100644 Binary files a/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/resources/expectedArrowServiceData.arrow and b/legend-engine-xts-arrow/legend-engine-xt-arrow-runtime/src/test/resources/expectedArrowServiceData.arrow differ diff --git a/legend-engine-xts-arrow/pom.xml b/legend-engine-xts-arrow/pom.xml index 242bd3bae07..8e1f2e008e0 100644 --- a/legend-engine-xts-arrow/pom.xml +++ b/legend-engine-xts-arrow/pom.xml @@ -4,7 +4,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xts-arrow diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismConfiguration.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismConfiguration.java deleted file mode 100644 index 11ca50b9ce1..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismConfiguration.java +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.finos.legend.connection; - -import org.eclipse.collections.api.block.function.Function0; -import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.api.list.ImmutableList; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; - -import java.util.ArrayList; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; - -public class AuthenticationMechanismConfiguration -{ - private final AuthenticationMechanism authenticationMechanism; - private final ImmutableList> authenticationConfigurationTypes; - private final Function0 defaultAuthenticationConfigurationGenerator; - - private AuthenticationMechanismConfiguration(AuthenticationMechanism authenticationMechanism, List> authenticationConfigurationTypes, Function0 defaultAuthenticationConfigurationGenerator) - { - this.authenticationMechanism = Objects.requireNonNull(authenticationMechanism, "Authentication mechanism is missing"); - this.authenticationConfigurationTypes = Lists.immutable.withAll(authenticationConfigurationTypes); - this.defaultAuthenticationConfigurationGenerator = defaultAuthenticationConfigurationGenerator; - } - - public AuthenticationMechanism getAuthenticationMechanism() - { - return authenticationMechanism; - } - - public ImmutableList> getAuthenticationConfigurationTypes() - { - return authenticationConfigurationTypes; - } - - public Function0 getDefaultAuthenticationConfigurationGenerator() - { - return defaultAuthenticationConfigurationGenerator; - } - - public static class Builder - { - private final AuthenticationMechanism authenticationMechanism; - private final Set> authenticationConfigurationTypes = new LinkedHashSet<>(); - private Function0 defaultAuthenticationConfigurationGenerator; - - public Builder(AuthenticationMechanism authenticationMechanism) - { - this.authenticationMechanism = authenticationMechanism; - } - - public Builder withAuthenticationConfigurationType(Class authenticationConfigurationType) - { - this.authenticationConfigurationTypes.add(authenticationConfigurationType); - return this; - } - - public Builder withAuthenticationConfigurationTypes(List> authenticationConfigurationTypes) - { - this.authenticationConfigurationTypes.addAll(authenticationConfigurationTypes); - return this; - } - - @SafeVarargs - public final Builder withAuthenticationConfigurationTypes(Class... 
authenticationConfigurationTypes) - { - this.authenticationConfigurationTypes.addAll(Lists.mutable.of(authenticationConfigurationTypes)); - return this; - } - - public Builder withDefaultAuthenticationConfigurationGenerator(Function0 defaultAuthenticationConfigurationGenerator) - { - this.defaultAuthenticationConfigurationGenerator = defaultAuthenticationConfigurationGenerator; - return this; - } - - public AuthenticationMechanismConfiguration build() - { - return new AuthenticationMechanismConfiguration( - this.authenticationMechanism, - new ArrayList<>(this.authenticationConfigurationTypes), - this.defaultAuthenticationConfigurationGenerator - ); - } - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java deleted file mode 100644 index 7751f5bdecc..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection; - -import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.api.list.ImmutableList; -import org.eclipse.collections.api.map.ImmutableMap; -import org.eclipse.collections.api.map.MutableMap; -import org.eclipse.collections.impl.factory.Maps; -import org.eclipse.collections.impl.utility.ListIterate; -import org.finos.legend.authentication.vault.CredentialVault; -import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.CredentialVaultSecret; -import org.finos.legend.engine.shared.core.identity.Identity; - -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * This is the runtime instance of configuration for Legend Engine, the place we package common configs, - * such as vaults, that can be passed to various parts of engine, authentication, connection factory, etc. 
- */ -public class LegendEnvironment -{ - protected final ImmutableList vaults; - protected final ImmutableMap, CredentialVault> vaultsIndex; - protected final ImmutableMap storeSupportsIndex; - - protected LegendEnvironment(List vaults, Map storeSupportsIndex) - { - this.vaults = Lists.immutable.withAll(vaults); - MutableMap, CredentialVault> vaultsIndex = Maps.mutable.empty(); - for (CredentialVault vault : vaults) - { - vaultsIndex.put(vault.getSecretType(), vault); - } - this.vaultsIndex = vaultsIndex.toImmutable(); - this.storeSupportsIndex = Maps.immutable.withAll(storeSupportsIndex); - } - - public String lookupVaultSecret(CredentialVaultSecret credentialVaultSecret, Identity identity) throws Exception - { - Class secretClass = credentialVaultSecret.getClass(); - if (!this.vaultsIndex.containsKey(secretClass)) - { - throw new RuntimeException(String.format("Can't find secret: credential vault for secret of type '%s' has not been registered", secretClass.getSimpleName())); - } - CredentialVault vault = this.vaultsIndex.get(secretClass); - return vault.lookupSecret(credentialVaultSecret, identity); - } - - public StoreSupport findStoreSupport(String identifier) - { - return Objects.requireNonNull(this.storeSupportsIndex.get(identifier), String.format("Can't find store support with identifier '%s'", identifier)); - } - - public static class Builder - { - private final List vaults = Lists.mutable.empty(); - private final Map storeSupportsIndex = new LinkedHashMap<>(); - - public Builder() - { - } - - public Builder withVaults(List vaults) - { - this.vaults.addAll(vaults); - return this; - } - - public Builder withVaults(CredentialVault... vaults) - { - this.vaults.addAll(Lists.mutable.with(vaults)); - return this; - } - - public Builder withVault(CredentialVault vault) - { - this.vaults.add(vault); - return this; - } - - public Builder withStoreSupports(List storeSupports) - { - storeSupports.forEach(this::registerStoreSupport); - return this; - } - - public Builder withStoreSupports(StoreSupport... storeSupports) - { - ListIterate.forEach(Lists.mutable.with(storeSupports), this::registerStoreSupport); - return this; - } - - public Builder withStoreSupport(StoreSupport storeSupport) - { - this.registerStoreSupport(storeSupport); - return this; - } - - private void registerStoreSupport(StoreSupport storeSupport) - { - if (this.storeSupportsIndex.containsKey(storeSupport.getIdentifier())) - { - throw new RuntimeException(String.format("Can't register store support: found multiple store supports with identifier '%s'", storeSupport.getIdentifier())); - } - this.storeSupportsIndex.put(storeSupport.getIdentifier(), storeSupport); - } - - public LegendEnvironment build() - { - return new LegendEnvironment(this.vaults, this.storeSupportsIndex); - } - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstance.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstance.java deleted file mode 100644 index 2ced76c35a8..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstance.java +++ /dev/null @@ -1,221 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection; - -import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.api.list.ImmutableList; -import org.eclipse.collections.impl.utility.ListIterate; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; -import org.finos.legend.connection.protocol.ConnectionSpecification; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * A StoreInstance represents a named instance of a Store. - */ -public final class StoreInstance -{ - private final String identifier; - private final StoreSupport storeSupport; - private final ConnectionSpecification connectionSpecification; - private final Map authenticationMechanismConfigurationIndex; - private final Map, AuthenticationMechanism> authenticationMechanismIndex; - - private StoreInstance(String identifier, StoreSupport storeSupport, List authenticationMechanismConfigurations, ConnectionSpecification connectionSpecification) - { - this.identifier = Objects.requireNonNull(identifier, "Can't create store instance: identifier is missing"); - this.storeSupport = storeSupport; - this.connectionSpecification = Objects.requireNonNull(connectionSpecification, "Connection specification is missing"); - - Map authenticationMechanismConfigurationIndex = new LinkedHashMap<>(); - - if (authenticationMechanismConfigurations.isEmpty()) - { - for (AuthenticationMechanism authenticationMechanism : this.storeSupport.getAuthenticationMechanisms()) - { - authenticationMechanismConfigurationIndex.put(authenticationMechanism, this.storeSupport.getAuthenticationMechanismConfiguration(authenticationMechanism)); - } - } - else - { - for (AuthenticationMechanismConfiguration authenticationMechanismConfiguration : authenticationMechanismConfigurations) - { - AuthenticationMechanism authenticationMechanism = authenticationMechanismConfiguration.getAuthenticationMechanism(); - if (authenticationMechanismConfigurationIndex.containsKey(authenticationMechanism)) - { - throw new RuntimeException(String.format("Found multiple configurations for authentication mechanism '%s'", authenticationMechanism.getLabel())); - } - AuthenticationMechanismConfiguration configFromStoreSupport = this.storeSupport.getAuthenticationMechanismConfiguration(authenticationMechanism); - if (configFromStoreSupport == null) - { - throw new RuntimeException(String.format("Authentication mechanism '%s' is not covered by store support '%s'. 
Supported mechanism(s):\n%s", - authenticationMechanism.getLabel(), - this.storeSupport.getIdentifier(), - ListIterate.collect(this.storeSupport.getAuthenticationMechanisms(), mechanism -> "- " + mechanism.getLabel()).makeString("\n") - )); - } - ImmutableList> authenticationConfigTypesFromStoreSupport = configFromStoreSupport.getAuthenticationConfigurationTypes(); - List> authenticationConfigurationTypes = Lists.mutable.empty(); - for (Class authenticationConfigurationType : authenticationMechanismConfiguration.getAuthenticationConfigurationTypes()) - { - if (!authenticationConfigTypesFromStoreSupport.contains(authenticationConfigurationType)) - { - throw new RuntimeException(String.format("Authentication configuration type '%s' is not covered by store support '%s' for authentication mechanism '%s'. Supported configuration type(s):\n%s", - authenticationConfigurationType.getSimpleName(), - this.storeSupport.getIdentifier(), - authenticationMechanism.getLabel(), - authenticationConfigTypesFromStoreSupport.collect(type -> "- " + type.getSimpleName()).makeString("\n") - )); - } - else - { - authenticationConfigurationTypes.add(authenticationConfigurationType); - } - } - authenticationMechanismConfigurationIndex.put(authenticationMechanism, new AuthenticationMechanismConfiguration.Builder(authenticationMechanism) - // NOTE: if no configuration type is specified, it means the store instance supports all configuration types configured for that mechanism in the store support - .withAuthenticationConfigurationTypes(!authenticationConfigurationTypes.isEmpty() ? authenticationConfigurationTypes : authenticationConfigTypesFromStoreSupport.toList()) - .withDefaultAuthenticationConfigurationGenerator(authenticationMechanismConfiguration.getDefaultAuthenticationConfigurationGenerator() != null ? 
authenticationMechanismConfiguration.getDefaultAuthenticationConfigurationGenerator() : configFromStoreSupport.getDefaultAuthenticationConfigurationGenerator()) - .build()); - - } - - } - - this.authenticationMechanismConfigurationIndex = authenticationMechanismConfigurationIndex; - Map, AuthenticationMechanism> authenticationMechanismIndex = new LinkedHashMap<>(); - authenticationMechanismConfigurationIndex.forEach((authenticationMechanism, authenticationMechanismConfiguration) -> - { - if (authenticationMechanismConfiguration.getAuthenticationConfigurationTypes().isEmpty()) - { - throw new RuntimeException(String.format("No authentication configuration type is associated with authentication mechanism '%s'", authenticationMechanism.getLabel())); - } - authenticationMechanismConfiguration.getAuthenticationConfigurationTypes().forEach(configurationType -> - { - authenticationMechanismIndex.put(configurationType, authenticationMechanism); - }); - }); - this.authenticationMechanismIndex = authenticationMechanismIndex; - } - - public String getIdentifier() - { - return identifier; - } - - public StoreSupport getStoreSupport() - { - return storeSupport; - } - - public List getAuthenticationMechanisms() - { - return new ArrayList<>(this.authenticationMechanismConfigurationIndex.keySet()); - } - - public List> getAuthenticationConfigurationTypes() - { - return new ArrayList<>(this.authenticationMechanismIndex.keySet()); - } - - public AuthenticationMechanism getAuthenticationMechanism(Class authenticationConfigurationType) - { - return this.authenticationMechanismIndex.get(authenticationConfigurationType); - } - - public ConnectionSpecification getConnectionSpecification() - { - return connectionSpecification; - } - - public AuthenticationMechanismConfiguration getAuthenticationMechanismConfiguration(AuthenticationMechanism authenticationMechanism) - { - return authenticationMechanismConfigurationIndex.get(authenticationMechanism); - } - - public T getConnectionSpecification(Class clazz) - { - if (!this.connectionSpecification.getClass().equals(clazz)) - { - throw new RuntimeException(String.format("Can't get connection specification of type '%s' for store '%s'", clazz.getSimpleName(), this.identifier)); - } - return (T) this.connectionSpecification; - } - - public static class Builder - { - private final LegendEnvironment environment; - private String identifier; - private String storeSupportIdentifier; - private final List authenticationMechanismConfigurations = Lists.mutable.empty(); - private ConnectionSpecification connectionSpecification; - - public Builder(LegendEnvironment environment) - { - this.environment = environment; - } - - public Builder withIdentifier(String identifier) - { - this.identifier = identifier; - return this; - } - - public Builder withStoreSupportIdentifier(String storeSupportIdentifier) - { - this.storeSupportIdentifier = storeSupportIdentifier; - return this; - } - - public Builder withAuthenticationMechanismConfiguration(AuthenticationMechanismConfiguration authenticationMechanismConfiguration) - { - this.authenticationMechanismConfigurations.add(authenticationMechanismConfiguration); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(List authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(authenticationMechanismConfigurations); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(AuthenticationMechanismConfiguration... 
authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(Lists.mutable.of(authenticationMechanismConfigurations)); - return this; - } - - public Builder withConnectionSpecification(ConnectionSpecification connectionSpecification) - { - this.connectionSpecification = connectionSpecification; - return this; - } - - public StoreInstance build() - { - return new StoreInstance( - this.identifier, - this.environment.findStoreSupport(Objects.requireNonNull(this.storeSupportIdentifier, "Store support identifier is missing")), - this.authenticationMechanismConfigurations, - this.connectionSpecification - ); - } - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreSupport.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreSupport.java deleted file mode 100644 index fb36a14dc05..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreSupport.java +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection; - -import org.eclipse.collections.api.factory.Lists; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * A StoreSupport describes the capabilities supported by a Store. - * For now, it describes the authentication mechanisms. 
- */ -public class StoreSupport -{ - private final String identifier; - private final Map authenticationMechanismConfigurationIndex; - - protected StoreSupport(String identifier, List authenticationMechanismConfigurations) - { - this.identifier = Objects.requireNonNull(identifier, "Identifier is missing"); - - Map authenticationMechanismConfigurationIndex = new LinkedHashMap<>(); - Map, AuthenticationMechanism> authenticationConfigurationTypeIndex = new LinkedHashMap<>(); - for (AuthenticationMechanismConfiguration authenticationMechanismConfiguration : authenticationMechanismConfigurations) - { - AuthenticationMechanism authenticationMechanism = authenticationMechanismConfiguration.getAuthenticationMechanism(); - if (authenticationMechanismConfigurationIndex.containsKey(authenticationMechanism)) - { - throw new RuntimeException(String.format("Found multiple configurations for authentication mechanism '%s'", authenticationMechanism.getLabel())); - } - authenticationMechanismConfigurationIndex.put(authenticationMechanism, authenticationMechanismConfiguration); - authenticationMechanismConfiguration.getAuthenticationConfigurationTypes().forEach(authenticationConfigurationType -> - { - if (authenticationConfigurationTypeIndex.containsKey(authenticationConfigurationType)) - { - throw new RuntimeException(String.format("Authentication configuration type '%s' is associated with multiple authentication mechanisms", authenticationConfigurationType.getSimpleName())); - } - authenticationConfigurationTypeIndex.put(authenticationConfigurationType, authenticationMechanism); - }); - } - - this.authenticationMechanismConfigurationIndex = authenticationMechanismConfigurationIndex; - this.authenticationMechanismConfigurationIndex.forEach((authenticationMechanism, authenticationMechanismConfiguration) -> - { - if (authenticationMechanismConfiguration.getAuthenticationConfigurationTypes().isEmpty()) - { - throw new RuntimeException(String.format("No authentication configuration type is associated with authentication mechanism '%s'", authenticationMechanism.getLabel())); - } - }); - } - - public String getIdentifier() - { - return identifier; - } - - public AuthenticationMechanismConfiguration getAuthenticationMechanismConfiguration(AuthenticationMechanism authenticationMechanism) - { - return authenticationMechanismConfigurationIndex.get(authenticationMechanism); - } - - public List getAuthenticationMechanisms() - { - return new ArrayList<>(this.authenticationMechanismConfigurationIndex.keySet()); - } - - public static class Builder - { - private String identifier; - private final List authenticationMechanismConfigurations = Lists.mutable.empty(); - - public Builder withIdentifier(String identifier) - { - this.identifier = identifier; - return this; - } - - public Builder withAuthenticationMechanismConfiguration(AuthenticationMechanismConfiguration authenticationMechanismConfiguration) - { - this.authenticationMechanismConfigurations.add(authenticationMechanismConfiguration); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(List authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(authenticationMechanismConfigurations); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(AuthenticationMechanismConfiguration... 
authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(Lists.mutable.of(authenticationMechanismConfigurations)); - return this; - } - - public StoreSupport build() - { - return new StoreSupport( - this.identifier, - this.authenticationMechanismConfigurations - ); - } - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/DefaultStoreInstanceProvider.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/DefaultStoreInstanceProvider.java deleted file mode 100644 index 718b6404929..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/DefaultStoreInstanceProvider.java +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection.impl; - -import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.api.factory.Maps; -import org.eclipse.collections.api.map.ImmutableMap; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.StoreInstanceProvider; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -public class DefaultStoreInstanceProvider implements StoreInstanceProvider -{ - private final ImmutableMap storeInstancesIndex; - - private DefaultStoreInstanceProvider(Map storeInstancesIndex) - { - - this.storeInstancesIndex = Maps.immutable.withAll(storeInstancesIndex); - } - - @Override - public StoreInstance lookup(String identifier) - { - return Objects.requireNonNull(this.storeInstancesIndex.get(identifier), String.format("Can't find store instance with identifier '%s'", identifier)); - } - - public static class Builder - { - private final Map storeInstancesIndex = new HashMap<>(); - - public Builder() - { - - } - - public Builder withStoreInstances(List storeInstances) - { - storeInstances.forEach(this::registerStoreInstance); - return this; - } - - public Builder withStoreInstances(StoreInstance... 
storeInstances) - { - Lists.mutable.with(storeInstances).forEach(this::registerStoreInstance); - return this; - } - - public Builder withStoreInstance(StoreInstance storeInstance) - { - this.registerStoreInstance(storeInstance); - return this; - } - - private void registerStoreInstance(StoreInstance storeInstance) - { - if (this.storeInstancesIndex.containsKey(storeInstance.getIdentifier())) - { - throw new RuntimeException(String.format("Found multiple store instances with identifier '%s'", storeInstance.getIdentifier())); - } - this.storeInstancesIndex.put(storeInstance.getIdentifier(), storeInstance); - } - - public DefaultStoreInstanceProvider build() - { - return new DefaultStoreInstanceProvider(this.storeInstancesIndex); - } - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/InstrumentedStoreInstanceProvider.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/InstrumentedStoreInstanceProvider.java deleted file mode 100644 index c3438eab9fd..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/InstrumentedStoreInstanceProvider.java +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection.impl; - -import org.eclipse.collections.api.factory.Maps; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.StoreInstanceProvider; - -import java.util.Map; -import java.util.Objects; - -/** - * This is the instrumented version of {@link StoreInstanceProvider} which is used for testing. 
- */ -public class InstrumentedStoreInstanceProvider implements StoreInstanceProvider -{ - private final Map storeInstancesIndex = Maps.mutable.empty(); - - public void injectStoreInstance(StoreInstance storeInstance) - { - if (this.storeInstancesIndex.containsKey(storeInstance.getIdentifier())) - { - throw new RuntimeException(String.format("Found multiple store instances with identifier '%s'", storeInstance.getIdentifier())); - } - this.storeInstancesIndex.put(storeInstance.getIdentifier(), storeInstance); - } - - @Override - public StoreInstance lookup(String identifier) - { - return Objects.requireNonNull(this.storeInstancesIndex.get(identifier), String.format("Can't find store instance with identifier '%s'", identifier)); - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanismType.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanismType.java deleted file mode 100644 index b60214e8c54..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanismType.java +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.finos.legend.connection.protocol; - -import org.finos.legend.connection.impl.ApiKeyAuthenticationConfiguration; -import org.finos.legend.connection.impl.EncryptedPrivateKeyPairAuthenticationConfiguration; -import org.finos.legend.connection.impl.KerberosAuthenticationConfiguration; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; -import org.finos.legend.engine.shared.core.identity.Credential; -import org.finos.legend.engine.shared.core.identity.credential.ApiTokenCredential; -import org.finos.legend.engine.shared.core.identity.credential.LegendKerberosCredential; -import org.finos.legend.engine.shared.core.identity.credential.OAuthCredential; -import org.finos.legend.engine.shared.core.identity.credential.PlaintextUserPasswordCredential; -import org.finos.legend.engine.shared.core.identity.credential.PrivateKeyCredential; - -public enum AuthenticationMechanismType implements AuthenticationMechanism -{ - USER_PASSWORD("UsernamePassword"), - API_KEY("APIKey"), - KEY_PAIR("KeyPair"), - KERBEROS("Kerberos"), - OAUTH("OAuth"); - - private final String label; - - private AuthenticationMechanismType(String label) - { - this.label = label; - } - - @Override - public String getLabel() - { - return this.label; - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/StoreSupportTest.java b/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/StoreSupportTest.java deleted file mode 100644 index 3a7f85d05ff..00000000000 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/StoreSupportTest.java +++ /dev/null @@ -1,254 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.finos.legend.connection; - -import org.eclipse.collections.api.factory.Lists; -import org.finos.legend.connection.impl.DefaultStoreInstanceProvider; -import org.finos.legend.connection.impl.EncryptedPrivateKeyPairAuthenticationConfiguration; -import org.finos.legend.connection.impl.KerberosAuthenticationConfiguration; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; -import org.finos.legend.connection.protocol.ConnectionSpecification; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -public class StoreSupportTest -{ - @Test - public void testValidateStoreSupportBuilder() - { - // success - new StoreSupport.Builder() - .withIdentifier("test") - .build(); - - // failure - Exception exception; - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreSupport.Builder().build(); - }); - Assertions.assertEquals("Identifier is missing", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) - .build(), - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) - .build() - ).build(); - }); - Assertions.assertEquals("Found multiple configurations for authentication mechanism 'UsernamePassword'", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) - .build(), - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KERBEROS) - .withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) - .build() - ).build(); - }); - Assertions.assertEquals("Authentication configuration type 'UserPasswordAuthenticationConfiguration' is associated with multiple authentication mechanisms", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .build() - ).build(); - }); - Assertions.assertEquals("No authentication configuration type is associated with authentication mechanism 'UsernamePassword'", exception.getMessage()); - } - - @Test - public void testValidateStoreInstanceBuilder() - { - LegendEnvironment environment = new LegendEnvironment.Builder() - .withStoreSupport(new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes( - UserPasswordAuthenticationConfiguration.class, - EncryptedPrivateKeyPairAuthenticationConfiguration.class - ) - .build(), - 
new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KERBEROS) - .withAuthenticationConfigurationTypes(KerberosAuthenticationConfiguration.class) - .build() - ) - .build()) - .build(); - - // success - StoreInstance testStore = new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .build() - ) - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - Assertions.assertArrayEquals(new AuthenticationMechanism[]{AuthenticationMechanismType.USER_PASSWORD}, testStore.getAuthenticationMechanisms().toArray()); - - // make sure if no auth mechanisms is specified, all mechanisms will be supported - StoreInstance testStore2 = new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - Assertions.assertArrayEquals(new AuthenticationMechanism[]{AuthenticationMechanismType.USER_PASSWORD, AuthenticationMechanismType.KERBEROS}, testStore2.getAuthenticationMechanisms().toArray()); - - // make sure if no authentication configuration type is specified, all types will be supported - StoreInstance testStore3 = new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withConnectionSpecification(new TestConnectionSpecification()) - .withAuthenticationMechanismConfiguration(new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).build()) - .build(); - Assertions.assertArrayEquals(Lists.mutable.of( - UserPasswordAuthenticationConfiguration.class, - EncryptedPrivateKeyPairAuthenticationConfiguration.class - ).toArray(), testStore3.getAuthenticationMechanismConfiguration(AuthenticationMechanismType.USER_PASSWORD).getAuthenticationConfigurationTypes().toArray()); - - // failure - Exception exception; - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .build(); - }); - Assertions.assertEquals("Connection specification is missing", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .build(), - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .build() - ) - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - }); - Assertions.assertEquals("Found multiple configurations for authentication mechanism 'UsernamePassword'", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.API_KEY) - .build() - ) - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - }); - Assertions.assertEquals("Authentication mechanism 'APIKey' is not covered by store support 'test'. 
Supported mechanism(s):\n" + - "- UsernamePassword\n" + - "- Kerberos", exception.getMessage()); - - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes(KerberosAuthenticationConfiguration.class) - .build() - ) - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - }); - Assertions.assertEquals("Authentication configuration type 'KerberosAuthenticationConfiguration' is not covered by store support 'test' for authentication mechanism 'UsernamePassword'. Supported configuration type(s):\n" + - "- UserPasswordAuthenticationConfiguration\n" + - "- EncryptedPrivateKeyPairAuthenticationConfiguration", exception.getMessage()); - } - - private static class TestConnectionSpecification extends ConnectionSpecification - { - @Override - public String shortId() - { - return null; - } - } - - @Test - public void testStoreInstanceManagement() - { - LegendEnvironment environment = new LegendEnvironment.Builder() - .withStoreSupport(new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD) - .withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) - .build(), - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.KERBEROS) - .withAuthenticationConfigurationTypes(KerberosAuthenticationConfiguration.class) - .build() - ) - .build()) - .build(); - - StoreInstance storeInstance = new StoreInstance.Builder(environment) - .withIdentifier("test-store") - .withStoreSupportIdentifier("test") - .withConnectionSpecification(new TestConnectionSpecification()) - .build(); - - StoreInstanceProvider storeInstanceProvider = new DefaultStoreInstanceProvider.Builder().withStoreInstance(storeInstance).build(); - - // failure - Exception exception; - - // error: store already registered - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - new DefaultStoreInstanceProvider.Builder().withStoreInstances(storeInstance, storeInstance).build(); - }); - Assertions.assertEquals("Found multiple store instances with identifier 'test-store'", exception.getMessage()); - - // error: store not found - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - storeInstanceProvider.lookup("unknown"); - }); - Assertions.assertEquals("Can't find store instance with identifier 'unknown'", exception.getMessage()); - } -} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-grammar/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-grammar/pom.xml index d60499a7b02..8c63af6d882 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-grammar/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-core/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-core/pom.xml index 9f76cb56a53..2f6d6a7aa7d 100644 --- 
a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-core/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-core/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-gcp-federation/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-gcp-federation/pom.xml index 88ecd494b67..c8aae068c31 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-gcp-federation/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-gcp-federation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-vault-aws/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-vault-aws/pom.xml index 21d94edcf1e..681b8673670 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-vault-aws/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-implementation-vault-aws/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -54,7 +54,7 @@ software.amazon.awssdk auth - 2.17.129 + ${amazon.awssdk.version} software.amazon.awssdk @@ -101,7 +101,7 @@ software.amazon.awssdk secretsmanager - 2.17.129 + ${amazon.awssdk.version} io.netty diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/pom.xml index 5e4d86a9054..02efc5f4179 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-pure/pom.xml b/legend-engine-xts-authentication/legend-engine-xt-authentication-pure/pom.xml index c532fd18084..91ce8fefd78 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-pure/pom.xml +++ b/legend-engine-xts-authentication/legend-engine-xt-authentication-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-authentication - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-authentication/pom.xml b/legend-engine-xts-authentication/pom.xml index 2e6ece66d89..6df60d90e1c 100644 --- a/legend-engine-xts-authentication/pom.xml +++ b/legend-engine-xts-authentication/pom.xml @@ -14,11 +14,12 @@ See the License for the specific language governing permissions and limitations under the License. 
--> - + org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -28,7 +29,6 @@ legend-engine-xt-authentication-grammar - legend-engine-xt-authentication-connection-factory legend-engine-xt-authentication-implementation-core legend-engine-xt-authentication-implementation-gcp-federation legend-engine-xt-authentication-implementation-vault-aws diff --git a/legend-engine-xts-avro/legend-engine-xt-avro-pure/pom.xml b/legend-engine-xts-avro/legend-engine-xt-avro-pure/pom.xml index da269d7de3f..8da89c74c34 100644 --- a/legend-engine-xts-avro/legend-engine-xt-avro-pure/pom.xml +++ b/legend-engine-xts-avro/legend-engine-xt-avro-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-avro - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-avro/legend-engine-xt-avro/pom.xml b/legend-engine-xts-avro/legend-engine-xt-avro/pom.xml index de3143e9cab..a7a72c1b369 100644 --- a/legend-engine-xts-avro/legend-engine-xt-avro/pom.xml +++ b/legend-engine-xts-avro/legend-engine-xt-avro/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-avro - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-avro/pom.xml b/legend-engine-xts-avro/pom.xml index 933307d3023..080390f01f5 100644 --- a/legend-engine-xts-avro/pom.xml +++ b/legend-engine-xts-avro/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/pom.xml b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/pom.xml new file mode 100644 index 00000000000..f7b825763cc --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/pom.xml @@ -0,0 +1,223 @@ + + + + + org.finos.legend.engine + legend-engine-xts-bigqueryFunction + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-bigqueryFunction-api + jar + Legend Engine - XT - BigQuery Function - API + + + + + org.finos.legend.pure + legend-pure-m3-core + + + + + + org.finos.legend.engine + legend-engine-language-pure-compiler + + + org.finos.legend.engine + legend-engine-language-pure-modelManager + + + org.finos.legend.engine + legend-engine-pure-code-compiled-core + + + org.finos.legend.engine + legend-engine-pure-code-core-extension + + + org.finos.legend.engine + legend-engine-executionPlan-generation + + + org.finos.legend.engine + legend-engine-shared-core + + + org.finos.legend.engine + legend-engine-xt-functionActivator-pure + + + org.finos.legend.engine + legend-engine-xt-functionActivator-deployment + + + org.finos.legend.engine + legend-engine-xt-functionActivator-protocol + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-functionActivator-api + + + org.finos.legend.engine + legend-engine-xt-relationalStore-pure + + + org.finos.legend.engine + legend-engine-xt-relationalStore-bigquery-pure + + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-pure + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-protocol + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-compiler + runtime + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-grammar + runtime + + + org.finos.legend.engine + legend-engine-language-pure-dsl-generation + + + + + javax.ws.rs + javax.ws.rs-api + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + + + com.google.cloud + google-cloud-bigquery + 
2.29.0 + + + org.conscrypt + conscrypt-openjdk-uber + + + io.netty + netty-buffer + + + io.netty + netty-common + + + org.apache.arrow + * + + + org.checkerframework + checker-qual + + + com.google.errorprone + error_prone_annotations + + + javax.annotation + javax.annotation-api + + + com.google.j2objc + j2objc-annotations + + + commons-logging + commons-logging + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + org.apache.arrow + arrow-format + + + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.slf4j + slf4j-api + + + + + + junit + junit + test + + + org.finos.legend.engine + legend-engine-xt-relationalStore-grammar + test + + + org.finos.legend.engine + legend-engine-configuration + test + + + org.glassfish.jersey.core + jersey-common + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionError.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionError.java new file mode 100644 index 00000000000..d167c28bd59 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionError.java @@ -0,0 +1,30 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.language.bigqueryFunction.api; + +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.api.list.ImmutableList; +import org.finos.legend.engine.functionActivator.service.FunctionActivatorError; + +public class BigQueryFunctionError extends FunctionActivatorError +{ + public ImmutableList foundSQLs; + + public BigQueryFunctionError(String message, Iterable foundSQLs) + { + super(message); + this.foundSQLs = Lists.immutable.withAll(foundSQLs); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionService.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionService.java new file mode 100644 index 00000000000..2d703545042 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/api/BigQueryFunctionService.java @@ -0,0 +1,102 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.language.bigqueryFunction.api;
+
+import org.eclipse.collections.api.RichIterable;
+import org.eclipse.collections.api.block.function.Function;
+import org.eclipse.collections.api.list.MutableList;
+import org.eclipse.collections.impl.factory.Lists;
+import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionArtifact;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionContent;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentConfiguration;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentResult;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
+import org.finos.legend.engine.functionActivator.service.FunctionActivatorError;
+import org.finos.legend.engine.functionActivator.service.FunctionActivatorService;
+import org.finos.legend.engine.language.bigqueryFunction.deployment.*;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel;
+import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionProtocolExtension;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext;
+import org.finos.legend.pure.generated.*;
+import org.finos.legend.engine.shared.core.identity.Identity;
+
+import java.util.List;
+
+public class BigQueryFunctionService implements FunctionActivatorService<Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction, BigQueryFunctionDeploymentConfiguration, BigQueryFunctionDeploymentResult>
+{
+    private final BigQueryFunctionDeploymentManager bigQueryFunctionDeploymentManager;
+
+    public BigQueryFunctionService()
+    {
+        this.bigQueryFunctionDeploymentManager = new BigQueryFunctionDeploymentManager();
+    }
+
+    @Override
+    public FunctionActivatorInfo info(PureModel pureModel, String version)
+    {
+        return new FunctionActivatorInfo(
+                "BigQuery Function",
+                "Create a BigQuery Function that can activate in BigQuery.",
+                "meta::protocols::pure::" + version + "::metamodel::function::activator::bigQueryFunction::BigQueryFunction",
+                BigQueryFunctionProtocolExtension.packageJSONType,
+                pureModel);
+    }
+
+    @Override
+    public boolean supports(Root_meta_external_function_activator_FunctionActivator functionActivator)
+    {
+        return functionActivator instanceof Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction;
+    }
+
+    @Override
+    public MutableList<? extends FunctionActivatorError> validate(Identity identity, PureModel pureModel, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction activator, PureModelContext inputModel, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions)
+    {
+        BigQueryFunctionArtifact artifact = BigQueryFunctionGenerator.generateArtifact(pureModel, activator, routerExtensions);
+        return this.validateArtifact(artifact);
+    }
+
+    @Override
+    public BigQueryFunctionDeploymentResult publishToSandbox(Identity identity, PureModel pureModel, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction activator, PureModelContext inputModel, List<BigQueryFunctionDeploymentConfiguration> runtimeConfigurations, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions)
+    {
+        BigQueryFunctionArtifact artifact = BigQueryFunctionGenerator.generateArtifact(pureModel, activator, routerExtensions);
+        MutableList<? extends FunctionActivatorError> validationErrors = this.validateArtifact(artifact);
+
+        Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration deploymentConfiguration = ((Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration) activator._activationConfiguration());
+        return validationErrors.notEmpty() ?
+                new BigQueryFunctionDeploymentResult(validationErrors.collect(e -> e.message)) :
+                this.bigQueryFunctionDeploymentManager.deployImpl(artifact, deploymentConfiguration);
+    }
+
+    @Override
+    public BigQueryFunctionArtifact renderArtifact(PureModel pureModel, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction activator, PureModelContext inputModel, String clientVersion, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions)
+    {
+        return BigQueryFunctionGenerator.generateArtifact(pureModel, activator, routerExtensions);
+    }
+
+    @Override
+    public List<BigQueryFunctionDeploymentConfiguration> selectConfig(List<FunctionActivatorDeploymentConfiguration> configurations)
+    {
+        return Lists.mutable.withAll(configurations).select(e -> e instanceof BigQueryFunctionDeploymentConfiguration).collect(e -> (BigQueryFunctionDeploymentConfiguration) e);
+    }
+
+    private MutableList<? extends FunctionActivatorError> validateArtifact(BigQueryFunctionArtifact artifact)
+    {
+        int size = ((BigQueryFunctionContent) artifact.content).sqlExpressions.size();
+        return size == 1 ?
+                Lists.fixedSize.empty() :
+                Lists.fixedSize.with(new BigQueryFunctionError("BigQuery Function can't be used with a plan containing '" + size + "' SQL expressions", ((BigQueryFunctionContent) artifact.content).sqlExpressions));
+    }
+}
diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifactGenerationExtension.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionArtifactGenerationExtension.java
similarity index 74%
rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifactGenerationExtension.java
rename to legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionArtifactGenerationExtension.java
index 1d916df4b67..0dd3ca8877d 100644
--- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifactGenerationExtension.java
+++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionArtifactGenerationExtension.java
@@ -12,22 +12,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
- -package org.finos.legend.engine.language.snowflakeApp.deployment; +package org.finos.legend.engine.language.bigqueryFunction.deployment; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.language.pure.dsl.generation.extension.Artifact; import org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.PackageableElement; import java.util.List; -public class SnowflakeAppArtifactGenerationExtension implements ArtifactGenerationExtension +public class BigQueryFunctionArtifactGenerationExtension implements ArtifactGenerationExtension { - private static final String ROOT_PATH = "snowflakeApp"; - private static final String FILENAME = "snowflakeArtifact.json"; + private static final String ROOT_PATH = "bigQueryFunction"; @Override public String getKey() @@ -39,15 +36,11 @@ public String getKey() public boolean canGenerate(PackageableElement element) { return false; - // return element instanceof Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; } - @Override public List generate(PackageableElement element, PureModel pureModel, PureModelContextData data, String clientVersion) { return null; - } - } diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionDeploymentManager.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionDeploymentManager.java new file mode 100644 index 00000000000..7f8af0d9566 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionDeploymentManager.java @@ -0,0 +1,100 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
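+
+// For orientation: a sketch of what a successful deployment below amounts to in
+// BigQuery DDL terms, assuming an illustrative project "my-project", dataset
+// "my_dataset" and function name "my_fn" (none of these names come from this change):
+//
+//   CREATE TABLE FUNCTION `my-project.my_dataset.my_fn`()
+//   AS (
+//     -- the artifact's single SQL expression, with the default dataset rewritten
+//     -- to the fully qualified `my-project.my_dataset` form
+//     SELECT ...
+//   );
+//
+// The manager below achieves the same effect through the BigQuery client API, creating
+// a routine of type TABLE_VALUED_FUNCTION rather than issuing DDL text directly.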
+
+package org.finos.legend.engine.language.bigqueryFunction.deployment;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.RoutineId;
+import com.google.cloud.bigquery.RoutineInfo;
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.impl.utility.Iterate;
+import org.finos.legend.engine.functionActivator.deployment.DeploymentManager;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionArtifact;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionContent;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentConfiguration;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentResult;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact;
+import org.finos.legend.engine.shared.core.identity.Identity;
+import org.finos.legend.pure.generated.Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration;
+import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * These deployment functions assume that the artifact has already been validated.
+ */
+public class BigQueryFunctionDeploymentManager implements DeploymentManager<BigQueryFunctionArtifact, BigQueryFunctionDeploymentResult>
+{
+    private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryFunctionDeploymentManager.class);
+
+    @Override
+    public boolean canDeploy(FunctionActivatorArtifact activatorArtifact)
+    {
+        return activatorArtifact instanceof BigQueryFunctionArtifact;
+    }
+
+    @Override
+    public BigQueryFunctionDeploymentResult deploy(Identity identity, BigQueryFunctionArtifact artifact)
+    {
+        return new BigQueryFunctionDeploymentResult("", false);
+    }
+
+    @Override
+    public BigQueryFunctionDeploymentResult deploy(Identity identity, BigQueryFunctionArtifact artifact, List<BigQueryFunctionDeploymentConfiguration> availableRuntimeConfigurations)
+    {
+        return new BigQueryFunctionDeploymentResult("", false);
+    }
+
+    public BigQueryFunctionDeploymentResult deployImpl(BigQueryFunctionArtifact artifact, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration deploymentConfiguration)
+    {
+        LOGGER.info("Starting deployment");
+        BigQueryFunctionDeploymentResult result;
+        try
+        {
+            Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification datasourceSpecification = (Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification) deploymentConfiguration._target()._datasourceSpecification();
+            String dataset = datasourceSpecification._defaultDataset();
+            String projectId = datasourceSpecification._projectId();
+
+            BigQueryFunctionContent functionContent = (BigQueryFunctionContent) artifact.content;
+            BigQuery bigQuery = BigQueryOptions.newBuilder().setProjectId(projectId).build().getService();
+            RoutineId routineId = RoutineId.of(projectId, dataset, functionContent.functionName);
+
+            String sqlExpression = Iterate.getOnly(functionContent.sqlExpressions);
+            String sourceProjectId = artifact.sourceProjectId;
+            String sourceDefaultDataset = artifact.sourceDefaultDataset;
+            // TODO: Include projectId in core relational BigQuery SQL statement construction
+            String fullyQualifiedSqlExpression = sqlExpression.replace(sourceDefaultDataset, String.format("`%s.%s`",
sourceProjectId, sourceDefaultDataset)); + RoutineInfo routineInfo = + RoutineInfo + .newBuilder(routineId) + .setRoutineType("TABLE_VALUED_FUNCTION") + .setLanguage("SQL") + .setBody(fullyQualifiedSqlExpression) + .build(); + bigQuery.create(routineInfo); + + LOGGER.info("Completed deployment successfully"); + result = new BigQueryFunctionDeploymentResult(functionContent.functionName, true); + } + catch (Exception e) + { + LOGGER.info("Completed deployment with error"); + result = new BigQueryFunctionDeploymentResult(Lists.mutable.with(e.getMessage())); + } + return result; + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionGenerator.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionGenerator.java new file mode 100644 index 00000000000..aecad31fe04 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/java/org/finos/legend/engine/language/bigqueryFunction/deployment/BigQueryFunctionGenerator.java @@ -0,0 +1,61 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package org.finos.legend.engine.language.bigqueryFunction.deployment;
+
+import org.eclipse.collections.api.RichIterable;
+import org.eclipse.collections.api.block.function.Function;
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.tuple.Pair;
+import org.eclipse.collections.impl.tuple.Tuples;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel;
+import org.finos.legend.engine.plan.generation.PlanGenerator;
+import org.finos.legend.engine.plan.platform.PlanPlatform;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionArtifact;
+import org.finos.legend.pure.generated.*;
+import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.FunctionDefinition;
+import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction;
+
+public class BigQueryFunctionGenerator
+{
+    public static BigQueryFunctionArtifact generateArtifact(PureModel pureModel, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction activator, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions)
+    {
+        Pair<Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification, RichIterable<String>> artifactDetails = extractArtifactDetails(pureModel, activator, routerExtensions);
+        Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification bigQueryDatasourceSpecification = artifactDetails.getOne();
+        RichIterable<String> sqlExpressions = artifactDetails.getTwo();
+        return new BigQueryFunctionArtifact(activator._functionName(), Lists.mutable.withAll(sqlExpressions), bigQueryDatasourceSpecification._projectId(), bigQueryDatasourceSpecification._defaultDataset());
+    }
+
+    private static Pair<Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification, RichIterable<String>> extractArtifactDetails(PureModel pureModel, Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction activator, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions)
+    {
+        PackageableFunction<?> function = activator._function();
+        Root_meta_pure_executionPlan_ExecutionPlan executionPlan = PlanGenerator.generateExecutionPlanAsPure((FunctionDefinition<?>) function, null, null, null, pureModel, PlanPlatform.JAVA, null, routerExtensions.apply(pureModel));
+        RichIterable<Root_meta_relational_mapping_SQLExecutionNode> sqlExecutionNodes =
+                collectAllNodes(executionPlan._rootExecutionNode()).selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class);
+
+        Root_meta_external_store_relational_runtime_RelationalDatabaseConnection relationalDatabaseConnection = (Root_meta_external_store_relational_runtime_RelationalDatabaseConnection) sqlExecutionNodes.getAny()._connection();
+        Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification bigQueryDatasourceSpecification = ((Root_meta_pure_alloy_connections_alloy_specification_BigQueryDatasourceSpecification) relationalDatabaseConnection._datasourceSpecification());
+
+        return Tuples.pair(
+                bigQueryDatasourceSpecification,
+                sqlExecutionNodes
+                        .collect(Root_meta_relational_mapping_SQLExecutionNode::_sqlQuery)
+                        .select(x -> !x.toLowerCase().startsWith("alter")));
+    }
+
+    private static RichIterable<Root_meta_pure_executionPlan_ExecutionNode> collectAllNodes(Root_meta_pure_executionPlan_ExecutionNode node)
+    {
+        return Lists.mutable.with(node).withAll(node._executionNodes().flatCollect(BigQueryFunctionGenerator::collectAllNodes));
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.functionActivator.service.FunctionActivatorService b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.functionActivator.service.FunctionActivatorService
new file
mode 100644 index 00000000000..6fafe23bd24 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.functionActivator.service.FunctionActivatorService @@ -0,0 +1 @@ +org.finos.legend.engine.language.bigqueryFunction.api.BigQueryFunctionService \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension new file mode 100644 index 00000000000..5d10f14211b --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.bigqueryFunction.deployment.BigQueryFunctionArtifactGenerationExtension diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/test/java/org/finos/legend/engine/language/bigqueryFunction/api/TestValidation.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/test/java/org/finos/legend/engine/language/bigqueryFunction/api/TestValidation.java new file mode 100644 index 00000000000..9bcf6f77dd9 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-api/src/test/java/org/finos/legend/engine/language/bigqueryFunction/api/TestValidation.java @@ -0,0 +1,47 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package org.finos.legend.engine.language.bigqueryFunction.api; + +import com.fasterxml.jackson.core.type.TypeReference; +import org.finos.legend.engine.functionActivator.api.FunctionActivatorAPI; +import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo; +import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; +import org.finos.legend.engine.language.pure.modelManager.ModelManager; +import org.finos.legend.engine.pure.code.core.PureCoreExtensionLoader; +import org.finos.legend.engine.shared.core.ObjectMapperFactory; +import org.finos.legend.engine.shared.core.deployment.DeploymentMode; +import org.junit.Assert; +import org.junit.Test; + +import javax.ws.rs.core.Response; +import java.util.List; + +public class TestValidation +{ + private final FunctionActivatorAPI api = new FunctionActivatorAPI(new ModelManager(DeploymentMode.TEST), (PureModel pureModel) -> PureCoreExtensionLoader.extensions().flatCollect(e -> e.extraPureCoreExtensions(pureModel.getExecutionSupport()))); + + @Test + public void testList() throws Exception + { + Response response = api.list(null); + System.out.println(response.getEntity().toString()); + List info = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports().readValue(response.getEntity().toString(), new TypeReference>(){}); + Assert.assertEquals(1, info.size()); + Assert.assertEquals("BigQuery Function", info.get(0).name); + Assert.assertEquals("Create a BigQuery Function that can activate in BigQuery.", info.get(0).description); + Assert.assertEquals("meta::protocols::pure::vX_X_X::metamodel::function::activator::bigQueryFunction::BigQueryFunction", info.get(0).configuration.topElement); + Assert.assertEquals(8, info.get(0).configuration.model.size()); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/pom.xml b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/pom.xml new file mode 100644 index 00000000000..025c97a21a3 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/pom.xml @@ -0,0 +1,102 @@ + + + + + org.finos.legend.engine + legend-engine-xts-bigqueryFunction + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-bigqueryFunction-compiler + jar + Legend Engine - XT - BigQuery Function - Compiler + + + + + + org.finos.legend.pure + legend-pure-m3-core + + + + + + org.finos.legend.engine + legend-engine-language-pure-compiler + + + org.finos.legend.engine + legend-engine-xt-functionActivator-pure + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-relationalStore-pure + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-pure + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-protocol + + + + + + org.eclipse.collections + eclipse-collections-api + + + + + + junit + junit + test + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-grammar + test + + + org.finos.legend.engine + legend-engine-language-pure-grammar + test + + + org.finos.legend.engine + legend-engine-language-pure-grammar + test-jar + test + + + org.finos.legend.engine + legend-engine-language-pure-compiler + test-jar + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/BigQueryFunctionCompilerExtension.java 
b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/BigQueryFunctionCompilerExtension.java
new file mode 100644
index 00000000000..50d127e75b1
--- /dev/null
+++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/BigQueryFunctionCompilerExtension.java
@@ -0,0 +1,84 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.language.bigqueryFunction.compiler.toPureGraph;
+
+import org.eclipse.collections.api.factory.Lists;
+import org.finos.legend.engine.code.core.CoreFunctionActivatorCodeRepositoryProvider;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.CompileContext;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.Processor;
+import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunction;
+import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionDeploymentConfiguration;
+import org.finos.legend.pure.generated.*;
+import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction;
+import org.finos.legend.pure.m3.navigation.function.FunctionDescriptor;
+
+import java.util.Collections;
+
+public class BigQueryFunctionCompilerExtension implements CompilerExtension
+{
+    // Here only for dependency check error ...
+    CoreFunctionActivatorCodeRepositoryProvider forDependencies;
+
+    @Override
+    public CompilerExtension build()
+    {
+        return new BigQueryFunctionCompilerExtension();
+    }
+
+    @Override
+    public Iterable<? extends Processor<?>> getExtraProcessors()
+    {
+        return Lists.fixedSize.of(
+                Processor.newProcessor(
+                        BigQueryFunction.class,
+                        Lists.fixedSize.with(BigQueryFunctionDeploymentConfiguration.class),
+                        this::buildBigQueryFunction
+                ),
+                Processor.newProcessor(
+                        BigQueryFunctionDeploymentConfiguration.class,
+                        this::buildDeploymentConfig
+                )
+        );
+    }
+
+    public Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction buildBigQueryFunction(BigQueryFunction bigQueryFunction, CompileContext context)
+    {
+        try
+        {
+            PackageableFunction<?> func = (PackageableFunction<?>) context.resolvePackageableElement(FunctionDescriptor.functionDescriptorToId(bigQueryFunction.function), bigQueryFunction.sourceInformation);
+            return new Root_meta_external_function_activator_bigQueryFunction_BigQueryFunction_Impl(
+                    bigQueryFunction.name,
+                    null,
+                    context.pureModel.getClass("meta::external::function::activator::bigQueryFunction::BigQueryFunction"))
+                    ._functionName(bigQueryFunction.functionName)
+                    ._function(func)
+                    ._description(bigQueryFunction.description)
+                    ._owner(bigQueryFunction.owner)
+                    ._activationConfiguration(bigQueryFunction.activationConfiguration != null ?
buildDeploymentConfig((BigQueryFunctionDeploymentConfiguration) bigQueryFunction.activationConfiguration, context) : null); + } + catch (Exception e) + { + throw new RuntimeException(e); + } + } + + public Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration buildDeploymentConfig(BigQueryFunctionDeploymentConfiguration configuration, CompileContext context) + { + return new Root_meta_external_function_activator_bigQueryFunction_BigQueryFunctionDeploymentConfiguration_Impl("") + ._target((Root_meta_external_store_relational_runtime_RelationalDatabaseConnection) context.resolveConnection(configuration.activationConnection.connection, configuration.sourceInformation)); + // ._stage(context.pureModel.getEnumValue("meta::external::function::activator::DeploymentStage", configuration.stage.name())); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension new file mode 100644 index 00000000000..8441e6e90c5 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.bigqueryFunction.compiler.toPureGraph.BigQueryFunctionCompilerExtension \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/test/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/test/TestBigQueryFunctionCompilationFromGrammar.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/test/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/test/TestBigQueryFunctionCompilationFromGrammar.java new file mode 100644 index 00000000000..413445fd0f6 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-compiler/src/test/java/org/finos/legend/engine/language/bigqueryFunction/compiler/toPureGraph/test/TestBigQueryFunctionCompilationFromGrammar.java @@ -0,0 +1,67 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package org.finos.legend.engine.language.bigqueryFunction.compiler.toPureGraph.test; + +import org.finos.legend.engine.language.pure.compiler.test.TestCompilationFromGrammar; +import org.junit.Test; + +public class TestBigQueryFunctionCompilationFromGrammar extends TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite +{ + @Override + public String getDuplicatedElementTestCode() + { + return "Class anything::Name {}\n" + + "###Mapping\n" + + "Mapping anything::somethingelse ()\n" + + "###BigQuery\n" + + "BigQueryFunction anything::Name\n" + + "{" + + " functionName : 'name';\n" + + " function : a::f():String[1];" + + "}\n"; + } + + @Override + public String getDuplicatedElementTestExpectedErrorMessage() + { + return "COMPILATION error at [5:1-7:32]: Duplicated element 'anything::Name'"; + } + + @Test + public void testHappyPath() + { + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test( + "function a::f():String[1]{'ok';}\n" + + "###BigQuery\n" + + "BigQueryFunction app::pack::MyApp\n" + + "{" + + " functionName : 'name';\n" + + " function : a::f():String[1];" + + "}\n", null); + } + + @Test + public void testFunctionError() + { + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test( + "function a::f():String[1]{'ok';}\n" + + "###BigQuery\n" + + "BigQueryFunction app::pack::MyApp\n" + + "{" + + " functionName : 'name';\n" + + " function : a::fz():String[1];" + + "}\n", " at [3:1-5:33]: Error in 'app::pack::MyApp': org.finos.legend.engine.shared.core.operational.errorManagement.EngineException: Can't find the packageable element 'a::fz__String_1_'"); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/pom.xml b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/pom.xml new file mode 100644 index 00000000000..cef65fa04f2 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/pom.xml @@ -0,0 +1,163 @@ + + + + + org.finos.legend.engine + legend-engine-xts-bigqueryFunction + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-bigqueryFunction-grammar + jar + Legend Engine - XT - BigQuery Function - Grammar + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + copy-antlr-core-grammar + initialize + + unpack + + + + + org.finos.legend.engine + legend-engine-language-pure-grammar + jar + false + ${project.build.directory} + antlr/*.g4 + + + + + + + + org.antlr + antlr4-maven-plugin + + + + antlr4 + + + true + true + true + target/antlr + ${project.build.directory}/generated-sources + + + + + + + + + + + + + + + + + + + + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-protocol + + + org.finos.legend.engine + legend-engine-language-pure-grammar + + + org.finos.legend.engine + legend-engine-protocol-pure + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.antlr + antlr4-runtime + compile + + + + + + junit + junit + test + + + org.finos.legend.engine + legend-engine-shared-core + test-jar + test + + + org.finos.legend.engine + legend-engine-language-pure-grammar + test-jar + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionLexerGrammar.g4 
b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionLexerGrammar.g4 new file mode 100644 index 00000000000..469bf5791b5 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionLexerGrammar.g4 @@ -0,0 +1,15 @@ +lexer grammar BigQueryFunctionLexerGrammar; + +import M3LexerGrammar; + +BIGQUERY_FUNCTION: 'BigQueryFunction'; +BIGQUERY_FUNCTION__FUNCTION_NAME: 'functionName'; +BIGQUERY_FUNCTION__DESCRIPTION: 'description'; +BIGQUERY_FUNCTION__FUNCTION: 'function'; +BIGQUERY_FUNCTION__OWNER: 'owner'; +BIGQUERY_FUNCTION__ACTIVATION: 'activationConfiguration'; + +// ------------------------------------- CONFIGURATION ------------------------------- +CONFIGURATION: 'BigQueryFunctionDeploymentConfiguration'; +ACTIVATION_CONNECTION: 'activationConnection'; +DEPLOYMENT_STAGE: 'stage'; \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionParserGrammar.g4 b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionParserGrammar.g4 new file mode 100644 index 00000000000..7ece7329da8 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/BigQueryFunctionParserGrammar.g4 @@ -0,0 +1,67 @@ +parser grammar BigQueryFunctionParserGrammar; + +import M3ParserGrammar; + +options +{ + tokenVocab = BigQueryFunctionLexerGrammar; +} + +identifier: VALID_STRING | STRING | + BIGQUERY_FUNCTION | + BIGQUERY_FUNCTION__FUNCTION_NAME | + BIGQUERY_FUNCTION__DESCRIPTION | + BIGQUERY_FUNCTION__FUNCTION | + BIGQUERY_FUNCTION__OWNER | + BIGQUERY_FUNCTION__ACTIVATION| + CONFIGURATION| DEPLOYMENT_STAGE + | ACTIVATION_CONNECTION | + ALL | + LET | + ALL_VERSIONS | + ALL_VERSIONS_IN_RANGE | + TO_BYTES_FUNCTION + ; +// -------------------------------------- DEFINITION -------------------------------------- + +definition: (bigQueryFunction | deploymentConfig)* + EOF +; +bigQueryFunction: BIGQUERY_FUNCTION stereotypes? taggedValues? 
qualifiedName + BRACE_OPEN + ( + functionName + | description + | function + | owner + | activation + )* + BRACE_CLOSE; + +stereotypes: LESS_THAN LESS_THAN stereotype (COMMA stereotype)* GREATER_THAN GREATER_THAN; +stereotype: qualifiedName DOT identifier; +taggedValues: BRACE_OPEN taggedValue (COMMA taggedValue)* BRACE_CLOSE; +taggedValue: qualifiedName DOT identifier EQUAL STRING; + +functionName: BIGQUERY_FUNCTION__FUNCTION_NAME COLON STRING SEMI_COLON; + +description: BIGQUERY_FUNCTION__DESCRIPTION COLON STRING SEMI_COLON; + +function: BIGQUERY_FUNCTION__FUNCTION COLON functionIdentifier SEMI_COLON; + +owner : BIGQUERY_FUNCTION__OWNER COLON STRING SEMI_COLON; + +activation: BIGQUERY_FUNCTION__ACTIVATION COLON qualifiedName SEMI_COLON ; + +// ----------------------------------- Deployment ------------------------------------------------------ +deploymentConfig: CONFIGURATION qualifiedName + BRACE_OPEN + activationConnection + BRACE_CLOSE +; + +activationConnection: ACTIVATION_CONNECTION COLON qualifiedName SEMI_COLON +; + +stage: DEPLOYMENT_STAGE COLON STRING SEMI_COLON +; \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionGrammarParserExtension.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionGrammarParserExtension.java new file mode 100644 index 00000000000..a2c960bbd38 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionGrammarParserExtension.java @@ -0,0 +1,70 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
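+
+// A concrete example of the section syntax the grammar above accepts, in the shape
+// exercised by the roundtrip and parsing tests later in this change (the element and
+// connection names are illustrative):
+//
+//   ###BigQuery
+//   BigQueryFunction xx::MyApp
+//   {
+//     functionName : 'MyApp';
+//     function : zxx(Integer[1]):String[1];
+//     activationConfiguration : xx::TestConnection;
+//   }
+//
+//   BigQueryFunctionDeploymentConfiguration xx::MyConfig
+//   {
+//     activationConnection : xx::TestConnection;
+//   }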
+ +package org.finos.legend.engine.language.bigqueryFunction.grammar.from; + +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.eclipse.collections.impl.factory.Lists; +import org.finos.legend.engine.language.pure.grammar.from.ParserErrorListener; +import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParserContext; +import org.finos.legend.engine.language.pure.grammar.from.SectionSourceCode; +import org.finos.legend.engine.language.pure.grammar.from.SourceCodeParserInfo; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.BigQueryFunctionLexerGrammar; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.BigQueryFunctionParserGrammar; +import org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension; +import org.finos.legend.engine.language.pure.grammar.from.extension.SectionParser; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.DefaultCodeSection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.Section; + +import java.util.function.Consumer; + +public class BigQueryFunctionGrammarParserExtension implements PureGrammarParserExtension +{ + public static final String NAME = "BigQuery"; + + @Override + public Iterable getExtraSectionParsers() + { + return Lists.fixedSize.of(SectionParser.newParser(NAME, BigQueryFunctionGrammarParserExtension::parseSection)); + } + + private static Section parseSection(SectionSourceCode sectionSourceCode, Consumer elementConsumer, PureGrammarParserContext context) + { + SourceCodeParserInfo parserInfo = getBigQueryFunctionInfo(sectionSourceCode); + DefaultCodeSection section = new DefaultCodeSection(); + section.parserName = sectionSourceCode.sectionType; + section.sourceInformation = parserInfo.sourceInformation; + + BigQueryFunctionTreeWalker walker = new BigQueryFunctionTreeWalker(parserInfo.input, parserInfo.walkerSourceInformation, elementConsumer, section); + walker.visit((BigQueryFunctionParserGrammar.DefinitionContext) parserInfo.rootContext); + + return section; + } + + private static SourceCodeParserInfo getBigQueryFunctionInfo(SectionSourceCode sectionSourceCode) + { + CharStream input = CharStreams.fromString(sectionSourceCode.code); + ParserErrorListener errorListener = new ParserErrorListener(sectionSourceCode.walkerSourceInformation, BigQueryFunctionParserGrammar.VOCABULARY); + BigQueryFunctionLexerGrammar lexer = new BigQueryFunctionLexerGrammar(input); + lexer.removeErrorListeners(); + lexer.addErrorListener(errorListener); + BigQueryFunctionParserGrammar parser = new BigQueryFunctionParserGrammar(new CommonTokenStream(lexer)); + parser.removeErrorListeners(); + parser.addErrorListener(errorListener); + return new SourceCodeParserInfo(sectionSourceCode.code, input, sectionSourceCode.sourceInformation, sectionSourceCode.walkerSourceInformation, lexer, parser, parser.definition()); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionTreeWalker.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionTreeWalker.java new file mode 100644 index 00000000000..306d4a91388 --- 
/dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/from/BigQueryFunctionTreeWalker.java @@ -0,0 +1,140 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.language.bigqueryFunction.grammar.from; + +import org.antlr.v4.runtime.CharStream; +import org.eclipse.collections.impl.factory.Lists; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.language.pure.grammar.from.ParseTreeWalkerSourceInformation; +import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParserUtility; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.BigQueryFunctionParserGrammar; +import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.StereotypePtr; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.TagPtr; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.TaggedValue; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.DefaultCodeSection; +import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunction; + +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +public class BigQueryFunctionTreeWalker +{ + private final CharStream input; + private final ParseTreeWalkerSourceInformation walkerSourceInformation; + private final Consumer elementConsumer; + private final DefaultCodeSection section; + + public BigQueryFunctionTreeWalker(CharStream input, ParseTreeWalkerSourceInformation walkerSourceInformation, Consumer elementConsumer, DefaultCodeSection section) + { + this.input = input; + this.walkerSourceInformation = walkerSourceInformation; + this.elementConsumer = elementConsumer; + this.section = section; + } + + public void visit(BigQueryFunctionParserGrammar.DefinitionContext ctx) + { + if (ctx.bigQueryFunction() != null && !ctx.bigQueryFunction().isEmpty()) + { + ctx.bigQueryFunction().stream().map(this::visitBigQueryFunction).peek(e -> this.section.elements.add(e.getPath())).forEach(this.elementConsumer); + } + if (ctx.deploymentConfig() != null && !ctx.deploymentConfig().isEmpty()) + { + ctx.deploymentConfig().stream().map(this::visitDeploymentConfig).peek(e -> this.section.elements.add(e.getPath())).forEach(this.elementConsumer); + } + } + + private BigQueryFunctionDeploymentConfiguration visitDeploymentConfig(BigQueryFunctionParserGrammar.DeploymentConfigContext ctx) + { + 
BigQueryFunctionDeploymentConfiguration config = new BigQueryFunctionDeploymentConfiguration(); + ConnectionPointer pointer = new ConnectionPointer(); + pointer.connection = PureGrammarParserUtility.fromQualifiedName(ctx.activationConnection().qualifiedName().packagePath() == null + ? Collections.emptyList() : ctx.activationConnection().qualifiedName().packagePath().identifier(), ctx.activationConnection().qualifiedName().identifier()); + pointer.sourceInformation = walkerSourceInformation.getSourceInformation(ctx.activationConnection().qualifiedName()); + config.activationConnection = pointer; + return config; + } + + private BigQueryFunction visitBigQueryFunction(BigQueryFunctionParserGrammar.BigQueryFunctionContext ctx) + { + BigQueryFunction bigQueryFunction = new BigQueryFunction(); + bigQueryFunction.name = PureGrammarParserUtility.fromIdentifier(ctx.qualifiedName().identifier()); + bigQueryFunction._package = ctx.qualifiedName().packagePath() == null ? "" : PureGrammarParserUtility.fromPath(ctx.qualifiedName().packagePath().identifier()); + bigQueryFunction.sourceInformation = walkerSourceInformation.getSourceInformation(ctx); + bigQueryFunction.stereotypes = ctx.stereotypes() == null ? Lists.mutable.empty() : this.visitStereotypes(ctx.stereotypes()); + bigQueryFunction.taggedValues = ctx.taggedValues() == null ? Lists.mutable.empty() : this.visitTaggedValues(ctx.taggedValues()); + + BigQueryFunctionParserGrammar.FunctionNameContext functionNameContext = PureGrammarParserUtility.validateAndExtractRequiredField(ctx.functionName(), "functionName", bigQueryFunction.sourceInformation); + bigQueryFunction.functionName = PureGrammarParserUtility.fromGrammarString(functionNameContext.STRING().getText(), true); + BigQueryFunctionParserGrammar.FunctionContext functionContext = PureGrammarParserUtility.validateAndExtractRequiredField(ctx.function(), "function", bigQueryFunction.sourceInformation); + bigQueryFunction.function = functionContext.functionIdentifier().getText(); + BigQueryFunctionParserGrammar.OwnerContext ownerContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.owner(), "owner", bigQueryFunction.sourceInformation); + if (ownerContext != null) + { + bigQueryFunction.owner = PureGrammarParserUtility.fromGrammarString(ownerContext.STRING().getText(), true); + } + BigQueryFunctionParserGrammar.DescriptionContext descriptionContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.description(), "description", bigQueryFunction.sourceInformation); + if (descriptionContext != null) + { + bigQueryFunction.description = PureGrammarParserUtility.fromGrammarString(descriptionContext.STRING().getText(), true); + } + BigQueryFunctionParserGrammar.ActivationContext activationContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.activation(), "activationConfiguration", bigQueryFunction.sourceInformation); + if (activationContext != null) + { + BigQueryFunctionDeploymentConfiguration config = new BigQueryFunctionDeploymentConfiguration(); + ConnectionPointer connectionPointer = new ConnectionPointer(); + connectionPointer.connection = activationContext.qualifiedName().getText(); + config.activationConnection = connectionPointer; + + bigQueryFunction.activationConfiguration = config; + } + return bigQueryFunction; + } + + private List visitTaggedValues(BigQueryFunctionParserGrammar.TaggedValuesContext ctx) + { + return ListIterate.collect(ctx.taggedValue(), taggedValueContext -> + { + TaggedValue taggedValue = new TaggedValue(); + TagPtr tagPtr = 
new TagPtr(); + taggedValue.tag = tagPtr; + tagPtr.profile = PureGrammarParserUtility.fromQualifiedName(taggedValueContext.qualifiedName().packagePath() == null ? Collections.emptyList() : taggedValueContext.qualifiedName().packagePath().identifier(), taggedValueContext.qualifiedName().identifier()); + tagPtr.value = PureGrammarParserUtility.fromIdentifier(taggedValueContext.identifier()); + taggedValue.value = PureGrammarParserUtility.fromGrammarString(taggedValueContext.STRING().getText(), true); + taggedValue.tag.profileSourceInformation = this.walkerSourceInformation.getSourceInformation(taggedValueContext.qualifiedName()); + taggedValue.tag.sourceInformation = this.walkerSourceInformation.getSourceInformation(taggedValueContext.identifier()); + taggedValue.sourceInformation = this.walkerSourceInformation.getSourceInformation(taggedValueContext); + return taggedValue; + }); + } + + private List visitStereotypes(BigQueryFunctionParserGrammar.StereotypesContext ctx) + { + return ListIterate.collect(ctx.stereotype(), stereotypeContext -> + { + StereotypePtr stereotypePtr = new StereotypePtr(); + stereotypePtr.profile = PureGrammarParserUtility.fromQualifiedName(stereotypeContext.qualifiedName().packagePath() == null ? Collections.emptyList() : stereotypeContext.qualifiedName().packagePath().identifier(), stereotypeContext.qualifiedName().identifier()); + stereotypePtr.value = PureGrammarParserUtility.fromIdentifier(stereotypeContext.identifier()); + stereotypePtr.profileSourceInformation = this.walkerSourceInformation.getSourceInformation(stereotypeContext.qualifiedName()); + stereotypePtr.sourceInformation = this.walkerSourceInformation.getSourceInformation(stereotypeContext); + return stereotypePtr; + }); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/to/BigQueryFunctionGrammarComposer.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/to/BigQueryFunctionGrammarComposer.java new file mode 100644 index 00000000000..7c2f5ef1d8a --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/java/org/finos/legend/engine/language/bigqueryFunction/grammar/to/BigQueryFunctionGrammarComposer.java @@ -0,0 +1,90 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
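+
+// The composer below is the inverse of the tree walker above: given a BigQueryFunction
+// protocol element, renderBigQueryFunction re-emits the section text, e.g. (element and
+// function names taken from the roundtrip tests in this change):
+//
+//   BigQueryFunction xx::MyApp
+//   {
+//     functionName : 'MyApp';
+//     function : zxx(Integer[1]):String[1];
+//   }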
+
+package org.finos.legend.engine.language.bigqueryFunction.grammar.to;
+
+import org.eclipse.collections.api.block.function.Function3;
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.list.MutableList;
+import org.eclipse.collections.impl.utility.Iterate;
+import org.eclipse.collections.impl.utility.ListIterate;
+import org.finos.legend.engine.language.bigqueryFunction.grammar.from.BigQueryFunctionGrammarParserExtension;
+import org.finos.legend.engine.language.pure.grammar.to.PureGrammarComposerContext;
+import org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension;
+import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunction;
+import org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionDeploymentConfiguration;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement;
+
+import java.util.Collections;
+import java.util.List;
+
+import static org.finos.legend.engine.language.pure.grammar.to.HelperDomainGrammarComposer.renderAnnotations;
+
+public class BigQueryFunctionGrammarComposer implements PureGrammarComposerExtension
+{
+    private static String renderElement(PackageableElement element)
+    {
+        if (element instanceof BigQueryFunction)
+        {
+            return renderBigQueryFunction((BigQueryFunction) element);
+        }
+        return "/* Can't transform element '" + element.getPath() + "' in this section */";
+    }
+
+    private static String renderBigQueryFunction(BigQueryFunction app)
+    {
+        String packageName = app._package == null || app._package.isEmpty() ? app.name : app._package + "::" + app.name;
+
+        return "BigQueryFunction " + renderAnnotations(app.stereotypes, app.taggedValues) + packageName + "\n" +
+                "{\n" +
+                "   functionName : '" + app.functionName + "';\n" +
+                "   function : " + app.function + ";\n" +
+                (app.owner == null ? "" : "   owner : '" + app.owner + "';\n") +
+                (app.description == null ? "" : "   description : '" + app.description + "';\n") +
+                (app.activationConfiguration == null ? "" : "   activationConfiguration : " + ((BigQueryFunctionDeploymentConfiguration) app.activationConfiguration).activationConnection.connection + ";\n") +
+                "}";
+    }
+
+    @Override
+    public List<Function3<List<PackageableElement>, PureGrammarComposerContext, String, String>> getExtraSectionComposers()
+    {
+        return Lists.fixedSize.with((elements, context, sectionName) ->
+        {
+            if (!BigQueryFunctionGrammarParserExtension.NAME.equals(sectionName))
+            {
+                return null;
+            }
+            return ListIterate.collect(elements, element ->
+            {
+                if (element instanceof BigQueryFunction)
+                {
+                    return renderBigQueryFunction((BigQueryFunction) element);
+                }
+                return "/* Can't transform element '" + element.getPath() + "' in this section */";
+            }).makeString("\n\n");
+        });
+    }
+
+    @Override
+    public List<Function3<List<PackageableElement>, PureGrammarComposerContext, List<String>, PureGrammarComposerExtension.PureFreeSectionGrammarComposerResult>> getExtraFreeSectionComposers()
+    {
+        return Collections.singletonList((elements, context, composedSections) ->
+        {
+            MutableList<PackageableElement> composableElements = Iterate.select(elements, e -> (e instanceof BigQueryFunction), Lists.mutable.empty());
+            return composableElements.isEmpty()
+                    ?
null + : new PureFreeSectionGrammarComposerResult(composableElements.asLazy().collect(BigQueryFunctionGrammarComposer::renderElement).makeString("###" + BigQueryFunctionGrammarParserExtension.NAME + "\n", "\n\n", ""), composableElements); + }); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension new file mode 100644 index 00000000000..5db153ec50a --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.bigqueryFunction.grammar.from.BigQueryFunctionGrammarParserExtension \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension new file mode 100644 index 00000000000..c96ed6ec60c --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.bigqueryFunction.grammar.to.BigQueryFunctionGrammarComposer \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryFunctionRoundtrip.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryFunctionRoundtrip.java new file mode 100644 index 00000000000..be4a0658090 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryFunctionRoundtrip.java @@ -0,0 +1,46 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package org.finos.legend.engine.language.bigqueryFunction.grammar.test;
+
+import org.finos.legend.engine.language.pure.grammar.test.TestGrammarRoundtrip;
+import org.junit.Test;
+
+public class TestBigQueryFunctionRoundtrip extends TestGrammarRoundtrip.TestGrammarRoundtripTestSuite
+{
+    @Test
+    public void testBigQueryFunction()
+    {
+        test("###BigQuery\n" +
+                "BigQueryFunction <<a::A.test>> {a::A.val = 'ok'} xx::MyApp\n" +
+                "{\n" +
+                "   functionName : 'MyApp';\n" +
+                "   function : zxx(Integer[1]):String[1];\n" +
+                "   owner : 'pierre';\n" +
+                "   description : 'A super nice app!';\n" +
+                "   activationConfiguration : com::gs::test::TestConnection;\n" +
+                "}\n");
+    }
+
+    @Test
+    public void testBigQueryFunctionMinimal()
+    {
+        test("###BigQuery\n" +
+                "BigQueryFunction xx::MyApp\n" +
+                "{\n" +
+                "   functionName : 'MyApp';\n" +
+                "   function : zxx(Integer[1]):String[1];\n" +
+                "}\n");
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryParsing.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryParsing.java new file mode 100644 index 00000000000..e700a9fd43e --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-grammar/src/test/java/org/finos/legend/engine/language/bigqueryFunction/grammar/test/TestBigQueryParsing.java @@ -0,0 +1,74 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.language.bigqueryFunction.grammar.test;
+
+import org.antlr.v4.runtime.Vocabulary;
+import org.eclipse.collections.impl.list.mutable.ListAdapter;
+import org.finos.legend.engine.language.pure.grammar.from.antlr4.BigQueryFunctionParserGrammar;
+import org.finos.legend.engine.language.pure.grammar.test.TestGrammarParser;
+import org.junit.Test;
+
+import java.util.List;
+
+public class TestBigQueryParsing extends TestGrammarParser.TestGrammarParserTestSuite
+{
+    @Override
+    public Vocabulary getParserGrammarVocabulary()
+    {
+        return BigQueryFunctionParserGrammar.VOCABULARY;
+    }
+
+    @Override
+    public String getParserGrammarIdentifierInclusionTestCode(List<String> keywords)
+    {
+        return "###BigQuery\n" +
+                "BigQueryFunction " + ListAdapter.adapt(keywords).makeString("::") + "\n" +
+                "{\n" +
+                "   function : a::f():String[1];" +
+                "   functionName : 'sass';\n" +
+                "}\n";
+    }
+
+    @Test
+    public void testGetParserErrorWrongProperty()
+    {
+        test("###BigQuery\n" +
+                "BigQueryFunction x::A\n" +
+                "{\n" +
+                "   functioName : 'sass';\n" +
+                "}\n", "PARSER error at [4:4-14]: Unexpected token 'functioName'.
Valid alternatives: ['functionName', 'description', 'function', 'owner', 'activationConfiguration']");
+    }
+
+    @Test
+    public void testGetParserErrorMissingApplicationName()
+    {
+        test("###BigQuery\n" +
+                "BigQueryFunction x::A\n" +
+                "{\n" +
+                "   owner : 'pierre';\n" +
+                "}\n", "PARSER error at [2:1-5:1]: Field 'functionName' is required");
+    }
+
+    @Test
+    public void testGetParserErrorMissingFunction()
+    {
+        test("###BigQuery\n" +
+                "BigQueryFunction x::A\n" +
+                "{\n" +
+                "   functionName : 'MyApp';\n" +
+                "   owner : 'pierre';\n" +
+                "}\n", "PARSER error at [2:1-6:1]: Field 'function' is required");
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/pom.xml b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/pom.xml new file mode 100644 index 00000000000..71a5db746db --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/pom.xml @@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.finos.legend.engine</groupId>
+        <artifactId>legend-engine-xts-bigqueryFunction</artifactId>
+        <version>4.35.4-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>legend-engine-xt-bigqueryFunction-protocol</artifactId>
+    <packaging>jar</packaging>
+    <name>Legend Engine - XT - BigQuery Function - Protocol</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-protocol-pure</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-functionActivator-protocol</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.collections</groupId>
+            <artifactId>eclipse-collections-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-relationalStore-protocol</artifactId>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionArtifact.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionArtifact.java new file mode 100644 index 00000000000..236d48af71d --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionArtifact.java @@ -0,0 +1,31 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
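A minimal sketch of driving the new grammar end to end, assuming PureGrammarParser picks up the ###BigQuery section extension from the classpath (the class name and main method are illustrative only):

import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParser;
import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;

public class ParserSketch
{
    public static void main(String[] args)
    {
        // Parses a BigQueryFunction element out of a ###BigQuery section
        PureModelContextData data = PureGrammarParser.newInstance().parseModel(
                "###BigQuery\n" +
                "BigQueryFunction xx::MyApp\n" +
                "{\n" +
                "   functionName : 'MyApp';\n" +
                "   function : zxx(Integer[1]):String[1];\n" +
                "}\n");
        System.out.println(data.getElements().size());
    }
}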
+
+package org.finos.legend.engine.protocol.bigqueryFunction.deployment;
+
+import org.eclipse.collections.api.list.MutableList;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact;
+
+public class BigQueryFunctionArtifact extends FunctionActivatorArtifact
+{
+    public String sourceProjectId;
+    public String sourceDefaultDataset;
+
+    public BigQueryFunctionArtifact(String name, MutableList<String> sqlExpressions, String sourceProjectId, String sourceDefaultDataset)
+    {
+        this.content = new BigQueryFunctionContent(name, sqlExpressions);
+        this.sourceProjectId = sourceProjectId;
+        this.sourceDefaultDataset = sourceDefaultDataset;
+    }
+}
diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppContent.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionContent.java similarity index 55% rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppContent.java rename to legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionContent.java index 388b537959b..48b09329cbe 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppContent.java +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionContent.java @@ -12,22 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License.
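For orientation, a hedged sketch of how the artifact above might be assembled by a deployment step; the SQL text, project id, and dataset below are placeholders, not values produced by this patch:

import org.eclipse.collections.api.factory.Lists;
import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionArtifact;

public class ArtifactSketch
{
    public static void main(String[] args)
    {
        // The artifact couples the generated routine bodies with the source
        // BigQuery project/dataset they were generated against.
        BigQueryFunctionArtifact artifact = new BigQueryFunctionArtifact(
                "MyApp",                                              // functionName
                Lists.mutable.with("CREATE OR REPLACE FUNCTION ..."), // placeholder SQL
                "my-gcp-project",                                     // placeholder project id
                "my_dataset");                                        // placeholder dataset
        System.out.println(artifact.sourceProjectId);
    }
}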
-package org.finos.legend.engine.language.snowflakeApp.deployment;
+package org.finos.legend.engine.protocol.bigqueryFunction.deployment;
 
-import org.eclipse.collections.api.RichIterable;
-import org.eclipse.collections.api.factory.Lists;
 import org.eclipse.collections.api.list.MutableList;
-import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentContent;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent;
 
-public class SnowflakeAppContent extends FunctionActivatorDeploymentContent
+public class BigQueryFunctionContent extends FunctionActivatorDeploymentContent
 {
-    public MutableList<String> sqlExpressions = Lists.mutable.empty();
+    public final MutableList<String> sqlExpressions;
 
-    public String applicationName;
+    public final String functionName;
 
-    public SnowflakeAppContent(String name, MutableList<String> sqlExpressions)
+    public BigQueryFunctionContent(String name, MutableList<String> sqlExpressions)
     {
-        this.applicationName = name;
+        this.functionName = name;
         this.sqlExpressions = sqlExpressions;
     }
 }
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentConfiguration.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentConfiguration.java new file mode 100644 index 00000000000..f8383226996 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentConfiguration.java @@ -0,0 +1,33 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
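A short sketch of the deployment-side configuration defined below, which simply wraps the relational connection resolved from the activator's activationConfiguration; the bare connection here is a placeholder, not a working BigQuery connection:

import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentConfiguration;
import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection;

public class DeploymentConfigSketch
{
    public static void main(String[] args)
    {
        // Placeholder connection; a real one carries a BigQuery datasource specification
        RelationalDatabaseConnection connection = new RelationalDatabaseConnection();
        BigQueryFunctionDeploymentConfiguration configuration = new BigQueryFunctionDeploymentConfiguration(connection);
        System.out.println(configuration);
    }
}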
+
+package org.finos.legend.engine.protocol.bigqueryFunction.deployment;
+
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection;
+
+public class BigQueryFunctionDeploymentConfiguration extends FunctionActivatorDeploymentConfiguration
+{
+    RelationalDatabaseConnection connection;
+
+    public BigQueryFunctionDeploymentConfiguration()
+    {
+        //jackson
+    }
+
+    public BigQueryFunctionDeploymentConfiguration(RelationalDatabaseConnection connection)
+    {
+        this.connection = connection;
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentResult.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentResult.java new file mode 100644 index 00000000000..9093ae4c4f2 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/deployment/BigQueryFunctionDeploymentResult.java @@ -0,0 +1,34 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.protocol.bigqueryFunction.deployment;
+
+import org.eclipse.collections.api.list.MutableList;
+import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult;
+
+public class BigQueryFunctionDeploymentResult extends DeploymentResult
+{
+    public MutableList<String> errors;
+
+    public BigQueryFunctionDeploymentResult(String activatorIdentifier, boolean result)
+    {
+        this.successful = result;
+        this.activatorIdentifier = activatorIdentifier;
+    }
+
+    public BigQueryFunctionDeploymentResult(MutableList<String> errors)
+    {
+        this.errors = errors;
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunction.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunction.java new file mode 100644 index 00000000000..89cc7e74bf2 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunction.java @@ -0,0 +1,27 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.protocol.bigqueryFunction.metamodel; + +import org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator; + +//------------------------------------------------------------ +// Should be generated out of the Pure protocol specification +//------------------------------------------------------------ +public class BigQueryFunction extends FunctionActivator +{ + public String functionName; + public String description; + public String owner; +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionDeploymentConfiguration.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionDeploymentConfiguration.java new file mode 100644 index 00000000000..790a0ae97a0 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionDeploymentConfiguration.java @@ -0,0 +1,28 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.protocol.bigqueryFunction.metamodel; + +import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer; + +public class BigQueryFunctionDeploymentConfiguration extends DeploymentConfiguration +{ + public ConnectionPointer activationConnection; + + public BigQueryFunctionDeploymentConfiguration() + { + + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionProtocolExtension.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionProtocolExtension.java new file mode 100644 index 00000000000..56e3b30ab3f --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/java/org/finos/legend/engine/protocol/bigqueryFunction/metamodel/BigQueryFunctionProtocolExtension.java @@ -0,0 +1,63 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.protocol.bigqueryFunction.metamodel;
+
+import org.eclipse.collections.api.block.function.Function0;
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.factory.Maps;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionArtifact;
+import org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionContent;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent;
+import org.finos.legend.engine.protocol.pure.v1.extension.ProtocolSubTypeInfo;
+import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement;
+
+import java.util.List;
+import java.util.Map;
+
+public class BigQueryFunctionProtocolExtension implements PureProtocolExtension
+{
+    public static String packageJSONType = "bigQueryFunction";
+
+    @Override
+    public List<Function0<List<ProtocolSubTypeInfo<?>>>> getExtraProtocolSubTypeInfoCollectors()
+    {
+        return Lists.fixedSize.with(() -> Lists.mutable.with(
+                ProtocolSubTypeInfo.newBuilder(PackageableElement.class)
+                        .withSubtype(BigQueryFunction.class, packageJSONType)
+                        .build(),
+                ProtocolSubTypeInfo.newBuilder(PackageableElement.class)
+                        .withSubtype(BigQueryFunctionDeploymentConfiguration.class, packageJSONType + "Config")
+                        .build(),
+                ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentConfiguration.class)
+                        .withSubtype(org.finos.legend.engine.protocol.bigqueryFunction.deployment.BigQueryFunctionDeploymentConfiguration.class, "bigQueryFunctionDeploymentConfig")
+                        .build(),
+                ProtocolSubTypeInfo.newBuilder(FunctionActivatorArtifact.class)
+                        .withSubtype(BigQueryFunctionArtifact.class, "bigQueryFunctionArtifact")
+                        .build(),
+                ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentContent.class)
+                        .withSubtype(BigQueryFunctionContent.class, "bigQueryFunctionDeploymentContent")
+                        .build()
+        ));
+    }
+
+    @Override
+    public Map<Class<? extends PackageableElement>, String> getExtraProtocolToClassifierPathMap()
+    {
+        return Maps.mutable.with(BigQueryFunction.class, "meta::external::function::activator::bigQueryFunction::BigQueryFunction");
+    }
+}
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension new file mode 100644 index 00000000000..7ab4de68c73 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension @@ -0,0 +1 @@
+org.finos.legend.engine.protocol.bigqueryFunction.metamodel.BigQueryFunctionProtocolExtension
\ No newline at end of file
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/pom.xml b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/pom.xml new file mode 100644 index 00000000000..ccabe4842a4 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/pom.xml @@ -0,0 +1,156 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.finos.legend.engine</groupId>
+        <artifactId>legend-engine-xts-bigqueryFunction</artifactId>
+        <version>4.35.4-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>legend-engine-xt-bigqueryFunction-pure</artifactId>
+    <packaging>jar</packaging>
+    <name>Legend Engine - XT - BigQuery Function - PAR/JAVA</name>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.finos.legend.pure</groupId>
+                <artifactId>legend-pure-maven-generation-par</artifactId>
+                <configuration>
+                    <sourceDirectory>src/main/resources</sourceDirectory>
+                    <purePlatformVersion>${legend.pure.version}</purePlatformVersion>
+                    <repositories>
+                        <repository>core_bigqueryfunction</repository>
+                    </repositories>
+                    <extraRepositories>
+                        <extraRepository>${project.basedir}/src/main/resources/core_bigqueryfunction.definition.json</extraRepository>
+                    </extraRepositories>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>build-pure-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.finos.legend.engine</groupId>
+                        <artifactId>legend-engine-xt-functionActivator-pure</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.finos.legend.pure</groupId>
+                        <artifactId>legend-pure-m2-dsl-diagram-grammar</artifactId>
+                        <version>${legend.pure.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.finos.legend.engine</groupId>
+                        <artifactId>legend-engine-xt-relationalStore-pure</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.finos.legend.pure</groupId>
+                <artifactId>legend-pure-maven-generation-java</artifactId>
+                <executions>
+                    <execution>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>build-pure-compiled-jar</goal>
+                        </goals>
+                        <configuration>
+                            <generateMetadata>true</generateMetadata>
+                            <useSingleDir>true</useSingleDir>
+                            <generationType>modular</generationType>
+                            <useClassPathRepositoryFilter>true</useClassPathRepositoryFilter>
+                            <repositories>
+                                <repository>core_bigqueryfunction</repository>
+                            </repositories>
+                        </configuration>
+                    </execution>
+                </executions>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.finos.legend.engine</groupId>
+                        <artifactId>legend-engine-xt-functionActivator-pure</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.finos.legend.pure</groupId>
+                        <artifactId>legend-pure-m2-dsl-diagram-grammar</artifactId>
+                        <version>${legend.pure.version}</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>org.finos.legend.engine</groupId>
+                        <artifactId>legend-engine-xt-relationalStore-pure</artifactId>
+                        <version>${project.version}</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+        </plugins>
+    </build>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.finos.legend.pure</groupId>
+            <artifactId>legend-pure-m4</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.pure</groupId>
+            <artifactId>legend-pure-m3-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.pure</groupId>
+            <artifactId>legend-pure-runtime-java-engine-compiled</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-pure-platform-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-pure-code-compiled-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-pure-platform-functions-java</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-relationalStore-pure</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-functionActivator-pure</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.collections</groupId>
+            <artifactId>eclipse-collections</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.collections</groupId>
+            <artifactId>eclipse-collections-api</artifactId>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/java/org/finos/legend/pure/code/core/CoreBigQueryFunctionCodeRepositoryProvider.java b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/java/org/finos/legend/pure/code/core/CoreBigQueryFunctionCodeRepositoryProvider.java new file mode 100644 index 00000000000..2817e421404 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/java/org/finos/legend/pure/code/core/CoreBigQueryFunctionCodeRepositoryProvider.java @@ -0,0 +1,28 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
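The subtype registrations in BigQueryFunctionProtocolExtension above drive Jackson's polymorphic handling of the protocol JSON. A hedged round-trip sketch, assuming the protocol module is on the classpath (the JSON body and class name are illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory;
import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement;

public class ProtocolSketch
{
    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = PureProtocolObjectMapperFactory.getNewObjectMapper();
        // "_type": "bigQueryFunction" resolves to the BigQueryFunction class registered above
        PackageableElement element = mapper.readValue(
                "{\"_type\":\"bigQueryFunction\",\"name\":\"MyApp\",\"package\":\"xx\",\"functionName\":\"MyApp\"}",
                PackageableElement.class);
        System.out.println(element.getClass().getSimpleName());
    }
}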
+ +package org.finos.legend.pure.code.core; + +import org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepository; +import org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider; +import org.finos.legend.pure.m3.serialization.filesystem.repository.GenericCodeRepository; + +public class CoreBigQueryFunctionCodeRepositoryProvider implements CodeRepositoryProvider +{ + @Override + public CodeRepository repository() + { + return GenericCodeRepository.build("core_bigqueryfunction.definition.json"); + } +} diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider new file mode 100644 index 00000000000..a6852036018 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider @@ -0,0 +1 @@ +org.finos.legend.pure.code.core.CoreBigQueryFunctionCodeRepositoryProvider \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction.definition.json b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction.definition.json new file mode 100644 index 00000000000..0987cf1d134 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction.definition.json @@ -0,0 +1,9 @@ +{ + "name": "core_bigqueryfunction", + "pattern": "(meta::external::function::activator::bigQueryFunction|meta::protocols)(::.*)?", + "dependencies": [ + "platform", + "core_function_activator", + "core_relational" + ] +} \ No newline at end of file diff --git a/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction/metamodel/metamodel.pure b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction/metamodel/metamodel.pure new file mode 100644 index 00000000000..0bfb77ad25d --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/legend-engine-xt-bigqueryFunction-pure/src/main/resources/core_bigqueryfunction/metamodel/metamodel.pure @@ -0,0 +1,26 @@ +import meta::external::function::activator::*; + +Class meta::external::function::activator::bigQueryFunction::BigQueryFunction extends FunctionActivator +{ + functionName : String[1]; + description : String[0..1]; + owner : String[0..1]; +} + +Class meta::external::function::activator::bigQueryFunction::BigQueryFunctionDeploymentConfiguration extends DeploymentConfiguration +{ + target: meta::external::store::relational::runtime::RelationalDatabaseConnection[1]; +} + +Class meta::external::function::activator::bigQueryFunction::BigQueryFunctionDeploymentResult extends DeploymentResult +{ + +} + +// This section needs to be code generated from the section above +Class meta::protocols::pure::vX_X_X::metamodel::function::activator::bigQueryFunction::BigQueryFunction extends meta::protocols::pure::vX_X_X::metamodel::function::activator::FunctionActivator +{ + functionName : String[1]; + description : String[0..1]; + owner : 
String[0..1];
+}
\ No newline at end of file
diff --git a/legend-engine-xts-bigqueryFunction/pom.xml b/legend-engine-xts-bigqueryFunction/pom.xml new file mode 100644 index 00000000000..9a4969c1220 --- /dev/null +++ b/legend-engine-xts-bigqueryFunction/pom.xml @@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.finos.legend.engine</groupId>
+        <artifactId>legend-engine</artifactId>
+        <version>4.35.4-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>legend-engine-xts-bigqueryFunction</artifactId>
+    <packaging>pom</packaging>
+    <name>Legend Engine - XTS - BigQuery Function</name>
+
+    <modules>
+        <module>legend-engine-xt-bigqueryFunction-api</module>
+        <module>legend-engine-xt-bigqueryFunction-compiler</module>
+        <module>legend-engine-xt-bigqueryFunction-grammar</module>
+        <module>legend-engine-xt-bigqueryFunction-protocol</module>
+        <module>legend-engine-xt-bigqueryFunction-pure</module>
+    </modules>
+</project>
\ No newline at end of file
diff --git a/legend-engine-xts-changetoken/legend-engine-xt-changetoken-compiler/pom.xml b/legend-engine-xts-changetoken/legend-engine-xt-changetoken-compiler/pom.xml index 435dcb74a7b..4c340522714 100644 --- a/legend-engine-xts-changetoken/legend-engine-xt-changetoken-compiler/pom.xml +++ b/legend-engine-xts-changetoken/legend-engine-xt-changetoken-compiler/pom.xml @@ -19,7 +19,7 @@
     <artifactId>legend-engine-xts-changetoken</artifactId>
     <groupId>org.finos.legend.engine</groupId>
-    <version>4.32.1-SNAPSHOT</version>
+    <version>4.35.4-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-changetoken/legend-engine-xt-changetoken-pure/pom.xml b/legend-engine-xts-changetoken/legend-engine-xt-changetoken-pure/pom.xml index 100dc295cf2..e6d40325a45 100644 --- a/legend-engine-xts-changetoken/legend-engine-xt-changetoken-pure/pom.xml +++ b/legend-engine-xts-changetoken/legend-engine-xt-changetoken-pure/pom.xml @@ -19,7 +19,7 @@
     <groupId>org.finos.legend.engine</groupId>
     <artifactId>legend-engine-xts-changetoken</artifactId>
-    <version>4.32.1-SNAPSHOT</version>
+    <version>4.35.4-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-changetoken/pom.xml b/legend-engine-xts-changetoken/pom.xml index e9202e6ce65..5d1cef3261d 100644 --- a/legend-engine-xts-changetoken/pom.xml +++ b/legend-engine-xts-changetoken/pom.xml @@ -18,7 +18,7 @@
     <groupId>org.finos.legend.engine</groupId>
     <artifactId>legend-engine</artifactId>
-    <version>4.32.1-SNAPSHOT</version>
+    <version>4.35.4-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-compiler/pom.xml b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/pom.xml new file mode 100644 index 00000000000..b9f95efb51d --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/pom.xml @@ -0,0 +1,73 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.finos.legend.engine</groupId>
+        <artifactId>legend-engine-xts-connection</artifactId>
+        <version>4.35.4-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>legend-engine-xt-connection-compiler</artifactId>
+    <packaging>jar</packaging>
+    <name>Legend Engine - XT - Connection - Compiler</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-connection-protocol</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-connection-pure-metamodel</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-language-pure-compiler</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.finos.legend.pure</groupId>
+            <artifactId>legend-pure-m3-core</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.eclipse.collections</groupId>
+            <artifactId>eclipse-collections-api</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-api</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-engine</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/ConnectionCompilerExtension.java b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/ConnectionCompilerExtension.java new file mode 100644 index 00000000000..3276c0f3d49 --- /dev/null +++
b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/ConnectionCompilerExtension.java @@ -0,0 +1,51 @@
+// Copyright 2020 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.language.pure.compiler.toPureGraph;
+
+import org.eclipse.collections.api.factory.Maps;
+import org.eclipse.collections.api.map.MutableMap;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension;
+import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.Processor;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection;
+import org.finos.legend.pure.generated.Root_meta_pure_metamodel_connection_Connection;
+import org.finos.legend.pure.generated.Root_meta_pure_metamodel_connection_Connection_Impl;
+
+import java.util.Collections;
+
+public class ConnectionCompilerExtension implements CompilerExtension
+{
+    static final MutableMap<String, Root_meta_pure_metamodel_connection_Connection> connectionsIndex = Maps.mutable.empty();
+
+    @Override
+    public CompilerExtension build()
+    {
+        return new ConnectionCompilerExtension();
+    }
+
+    @Override
+    public Iterable<? extends Processor<?>> getExtraProcessors()
+    {
+        return Collections.singletonList(Processor.newProcessor(
+                Connection.class,
+                (element, context) ->
+                {
+                    // @HACKY: new-connection-framework
+                    Root_meta_pure_metamodel_connection_Connection metamodel = new Root_meta_pure_metamodel_connection_Connection_Impl(element.name, null, context.pureModel.getClass("meta::pure::metamodel::connection::Connection"))._name(element.name);
+                    connectionsIndex.put(context.pureModel.buildPackageString(element._package, element.name), metamodel);
+                    metamodel._rawValue(element);
+                    return metamodel;
+                }));
+    }
+}
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension new file mode 100644 index 00000000000..c6f728c13a9 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-compiler/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension @@ -0,0 +1 @@
+org.finos.legend.engine.language.pure.compiler.toPureGraph.ConnectionCompilerExtension
diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/pom.xml b/legend-engine-xts-connection/legend-engine-xt-connection-factory/pom.xml similarity index 84% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/pom.xml rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/pom.xml index e0d1268d796..363f8907ab9 100644 ---
a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/pom.xml +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/pom.xml @@ -18,14 +18,14 @@
     <groupId>org.finos.legend.engine</groupId>
-    <artifactId>legend-engine-xts-authentication</artifactId>
-    <version>4.32.1-SNAPSHOT</version>
+    <artifactId>legend-engine-xts-connection</artifactId>
+    <version>4.35.4-SNAPSHOT</version>
 </parent>
 <modelVersion>4.0.0</modelVersion>
 
-<artifactId>legend-engine-xt-authentication-connection-factory</artifactId>
+<artifactId>legend-engine-xt-connection-factory</artifactId>
 <packaging>jar</packaging>
-<name>Legend Engine - XT - Authentication - Connection Factory</name>
+<name>Legend Engine - XT - Connection - Factory</name>
@@ -41,6 +41,14 @@
     <groupId>org.finos.legend.engine</groupId>
     <artifactId>legend-engine-xt-authentication-protocol</artifactId>
 </dependency>
+<dependency>
+    <groupId>org.finos.legend.engine</groupId>
+    <artifactId>legend-engine-protocol-pure</artifactId>
+</dependency>
+<dependency>
+    <groupId>org.finos.legend.engine</groupId>
+    <artifactId>legend-engine-xt-connection-protocol</artifactId>
+</dependency>
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanism.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanism.java new file mode 100644 index 00000000000..7e982189eba --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanism.java @@ -0,0 +1,95 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
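A small sketch of the builder this file defines; the mechanism type and configuration class below are stand-ins (real ones are contributed by ConnectionExtension implementations, not by this patch):

import org.finos.legend.connection.AuthenticationMechanism;
import org.finos.legend.connection.AuthenticationMechanismType;
import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;

public class MechanismSketch
{
    // Stand-in mechanism type, for illustration only
    enum DemoMechanismType implements AuthenticationMechanismType
    {
        USERNAME_PASSWORD;

        @Override
        public String getIdentifier()
        {
            return "UsernamePassword";
        }
    }

    static AuthenticationMechanism demo(Class<? extends AuthenticationConfiguration> configurationType)
    {
        // Pairs a mechanism type with the configuration classes it accepts
        return AuthenticationMechanism.builder()
                .type(DemoMechanismType.USERNAME_PASSWORD)
                .authenticationConfigurationType(configurationType)
                .build();
    }
}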
+
+package org.finos.legend.connection;
+
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.list.ImmutableList;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
+
+import java.util.ArrayList;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+public class AuthenticationMechanism
+{
+    private final AuthenticationMechanismType authenticationMechanismType;
+    private final ImmutableList<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes;
+
+    private AuthenticationMechanism(AuthenticationMechanismType authenticationMechanismType, List<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes)
+    {
+        this.authenticationMechanismType = Objects.requireNonNull(authenticationMechanismType, "Authentication mechanism is missing");
+        this.authenticationConfigurationTypes = Lists.immutable.withAll(authenticationConfigurationTypes);
+    }
+
+    public AuthenticationMechanismType getAuthenticationMechanismType()
+    {
+        return authenticationMechanismType;
+    }
+
+    public ImmutableList<Class<? extends AuthenticationConfiguration>> getAuthenticationConfigurationTypes()
+    {
+        return authenticationConfigurationTypes;
+    }
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+    {
+        private AuthenticationMechanismType authenticationMechanismType;
+        private final Set<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes = new LinkedHashSet<>();
+
+        private Builder()
+        {
+        }
+
+        public Builder type(AuthenticationMechanismType authenticationMechanismType)
+        {
+            this.authenticationMechanismType = authenticationMechanismType;
+            return this;
+        }
+
+        public Builder authenticationConfigurationType(Class<? extends AuthenticationConfiguration> authenticationConfigurationType)
+        {
+            this.authenticationConfigurationTypes.add(authenticationConfigurationType);
+            return this;
+        }
+
+        public Builder authenticationConfigurationTypes(List<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes)
+        {
+            this.authenticationConfigurationTypes.addAll(authenticationConfigurationTypes);
+            return this;
+        }
+
+        @SafeVarargs
+        public final Builder authenticationConfigurationTypes(Class<? extends AuthenticationConfiguration>...
authenticationConfigurationTypes)
+        {
+            this.authenticationConfigurationTypes.addAll(Lists.mutable.of(authenticationConfigurationTypes));
+            return this;
+        }
+
+        public AuthenticationMechanism build()
+        {
+            return new AuthenticationMechanism(
+                    this.authenticationMechanismType,
+                    new ArrayList<>(this.authenticationConfigurationTypes)
+            );
+        }
+    }
+}
diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstanceProvider.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismType.java similarity index 88% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstanceProvider.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismType.java index 8b83db33c91..15c061ee6f9 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/StoreInstanceProvider.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/AuthenticationMechanismType.java @@ -14,7 +14,7 @@
 package org.finos.legend.connection;
 
-public interface StoreInstanceProvider
+public interface AuthenticationMechanismType
 {
-    StoreInstance lookup(String identifier);
+    String getIdentifier();
 }
diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java similarity index 80% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java index 90e10882b38..96f879da05f 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Authenticator.java @@ -16,8 +16,7 @@
 import org.eclipse.collections.api.factory.Lists;
 import org.eclipse.collections.api.list.ImmutableList;
-import org.finos.legend.connection.protocol.AuthenticationConfiguration;
-import org.finos.legend.connection.protocol.AuthenticationMechanism;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
 import org.finos.legend.engine.shared.core.identity.Credential;
 import org.finos.legend.engine.shared.core.identity.Identity;
@@ -26,8 +25,8 @@ public class Authenticator
 {
-    private final StoreInstance storeInstance;
-    private final AuthenticationMechanism authenticationMechanism;
+    private final Connection connection;
+    private final AuthenticationMechanismType authenticationMechanismType;
     private final AuthenticationConfiguration authenticationConfiguration;
     private final Class<? extends Credential> sourceCredentialType;
     private final Class<? extends Credential> targetCredentialType;
@@ -35,10 +34,10 @@ public class Authenticator
     private final ConnectionBuilder connectionBuilder;
     private final LegendEnvironment environment;
 
-    public
Authenticator(StoreInstance storeInstance, AuthenticationMechanism authenticationMechanism, AuthenticationConfiguration authenticationConfiguration, Class<? extends Credential> sourceCredentialType, Class<? extends Credential> targetCredentialType, List<CredentialBuilder> credentialBuilders, ConnectionBuilder connectionBuilder, LegendEnvironment environment)
+    public Authenticator(Connection connection, AuthenticationMechanismType authenticationMechanismType, AuthenticationConfiguration authenticationConfiguration, Class<? extends Credential> sourceCredentialType, Class<? extends Credential> targetCredentialType, List<CredentialBuilder> credentialBuilders, ConnectionBuilder connectionBuilder, LegendEnvironment environment)
     {
-        this.storeInstance = storeInstance;
-        this.authenticationMechanism = authenticationMechanism;
+        this.connection = connection;
+        this.authenticationMechanismType = authenticationMechanismType;
         this.authenticationConfiguration = authenticationConfiguration;
         this.sourceCredentialType = sourceCredentialType;
         this.targetCredentialType = targetCredentialType;
@@ -74,9 +73,9 @@ public CRED makeCredential(Identity identity) throws Exception
         return (CRED) credential;
     }
 
-    public AuthenticationMechanism getAuthenticationMechanism()
+    public AuthenticationMechanismType getAuthenticationMechanism()
     {
-        return authenticationMechanism;
+        return authenticationMechanismType;
     }
 
     public AuthenticationConfiguration getAuthenticationConfiguration()
@@ -84,9 +83,9 @@ public AuthenticationConfiguration getAuthenticationConfiguration()
         return authenticationConfiguration;
     }
 
-    public StoreInstance getStoreInstance()
+    public Connection getConnection()
     {
-        return storeInstance;
+        return connection;
     }
 
     public Class<? extends Credential> getSourceCredentialType()
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Connection.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Connection.java new file mode 100644 index 00000000000..a352b763636 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/Connection.java @@ -0,0 +1,271 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
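A hedged sketch of the assembly path for the class that follows; all inputs are placeholders, and the mechanism list is deliberately left empty so the DatabaseSupport defaults apply (see the constructor below):

import org.finos.legend.connection.Connection;
import org.finos.legend.connection.DatabaseSupport;
import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification;

public class ConnectionSketch
{
    static Connection demo(DatabaseSupport support, ConnectionSpecification specification, AuthenticationConfiguration authenticationConfiguration)
    {
        // With no explicit mechanisms, the connection inherits every mechanism
        // declared by the DatabaseSupport
        return Connection.builder()
                .databaseSupport(support)
                .identifier("demo::MyConnection") // placeholder identifier
                .connectionSpecification(specification)
                .authenticationConfiguration(authenticationConfiguration)
                .build();
    }
}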
+
+package org.finos.legend.connection;
+
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.factory.Maps;
+import org.eclipse.collections.api.list.ImmutableList;
+import org.eclipse.collections.api.map.ImmutableMap;
+import org.eclipse.collections.impl.utility.ListIterate;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+public final class Connection
+{
+    private final String identifier;
+    private final DatabaseSupport databaseSupport;
+    private final ConnectionSpecification connectionSpecification;
+    private final AuthenticationConfiguration authenticationConfiguration;
+    private final ImmutableMap<String, AuthenticationMechanism> authenticationMechanismsIndex;
+    private final ImmutableList<AuthenticationMechanismType> authenticationMechanismTypes;
+    private final ImmutableMap<Class<? extends AuthenticationConfiguration>, AuthenticationMechanismType> authenticationConfigurationTypesMap;
+
+    private final ImmutableList<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes;
+
+    private Connection(String identifier, DatabaseSupport databaseSupport, List<AuthenticationMechanism> authenticationMechanisms, ConnectionSpecification connectionSpecification, AuthenticationConfiguration authenticationConfiguration)
+    {
+        this.identifier = Objects.requireNonNull(identifier, "Can't create connection: identifier is missing");
+        this.databaseSupport = databaseSupport;
+        this.connectionSpecification = Objects.requireNonNull(connectionSpecification, "Connection specification is missing");
+        this.authenticationConfiguration = Objects.requireNonNull(authenticationConfiguration, "Authentication configuration is missing");
+
+        Map<String, AuthenticationMechanism> authenticationMechanismsIndex = new LinkedHashMap<>();
+        List<AuthenticationMechanismType> authenticationMechanismTypes = Lists.mutable.empty();
+        if (authenticationMechanisms.isEmpty())
+        {
+            for (AuthenticationMechanismType authenticationMechanismType : this.databaseSupport.getAuthenticationMechanismTypes())
+            {
+                authenticationMechanismsIndex.put(authenticationMechanismType.getIdentifier(), this.databaseSupport.getAuthenticationMechanism(authenticationMechanismType));
+            }
+            authenticationMechanismTypes.addAll(this.databaseSupport.getAuthenticationMechanismTypes().toList());
+        }
+        else
+        {
+            for (AuthenticationMechanism authenticationMechanism : authenticationMechanisms)
+            {
+                AuthenticationMechanismType authenticationMechanismType = authenticationMechanism.getAuthenticationMechanismType();
+                // if no mechanism is specified, it means the connection supports all mechanisms specified in the database support
+                if (authenticationMechanismsIndex.containsKey(authenticationMechanismType.getIdentifier()))
+                {
+                    throw new RuntimeException(String.format("Found multiple configurations for authentication mechanism '%s'", authenticationMechanismType.getIdentifier()));
+                }
+                AuthenticationMechanism authenticationMechanismsFromDatabaseSupport = this.databaseSupport.getAuthenticationMechanism(authenticationMechanismType);
+                if (authenticationMechanismsFromDatabaseSupport == null)
+                {
+                    throw new RuntimeException(String.format("Authentication mechanism '%s' is not covered by database support '%s'.
Supported mechanism(s):\n%s",
+                            authenticationMechanismType.getIdentifier(),
+                            this.databaseSupport.getDatabaseType().getIdentifier(),
+                            this.databaseSupport.getAuthenticationMechanismTypes().collect(mechanism -> "- " + mechanism.getIdentifier()).makeString("\n")
+                    ));
+                }
+                ImmutableList<Class<? extends AuthenticationConfiguration>> authenticationConfigTypesFromDatabaseSupport = authenticationMechanismsFromDatabaseSupport.getAuthenticationConfigurationTypes();
+                List<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes = Lists.mutable.empty();
+                for (Class<? extends AuthenticationConfiguration> authenticationConfigurationType : authenticationMechanism.getAuthenticationConfigurationTypes())
+                {
+                    if (!authenticationConfigTypesFromDatabaseSupport.contains(authenticationConfigurationType))
+                    {
+                        throw new RuntimeException(String.format("Authentication configuration type '%s' is not covered by database support '%s' for authentication mechanism '%s'. Supported configuration type(s):\n%s",
+                                authenticationConfigurationType.getSimpleName(),
+                                this.databaseSupport.getDatabaseType().getIdentifier(),
+                                authenticationMechanismType.getIdentifier(),
+                                authenticationConfigTypesFromDatabaseSupport.collect(type -> "- " + type.getSimpleName()).makeString("\n")
+                        ));
+                    }
+                    else
+                    {
+                        authenticationConfigurationTypes.add(authenticationConfigurationType);
+                    }
+                }
+                authenticationMechanismsIndex.put(authenticationMechanismType.getIdentifier(), AuthenticationMechanism
+                        .builder()
+                        .type(authenticationMechanismType)
+                        // if no configuration type is specified, it means the connection supports all configuration types configured for that mechanism in the database support
+                        .authenticationConfigurationTypes(!authenticationConfigurationTypes.isEmpty() ? authenticationConfigurationTypes : authenticationConfigTypesFromDatabaseSupport.toList())
+                        .build());
+                authenticationMechanismTypes.add(authenticationMechanismType);
+            }
+        }
+        this.authenticationMechanismsIndex = Maps.immutable.withAll(authenticationMechanismsIndex);
+        this.authenticationMechanismTypes = Lists.immutable.withAll(authenticationMechanismTypes);
+
+        Map<Class<? extends AuthenticationConfiguration>, AuthenticationMechanismType> authenticationConfigurationTypesMap = new LinkedHashMap<>();
+        List<Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypes = Lists.mutable.empty();
+        authenticationMechanismsIndex.values().forEach((authenticationMechanism) ->
+        {
+            authenticationMechanism.getAuthenticationConfigurationTypes().forEach(configurationType ->
+            {
+                authenticationConfigurationTypesMap.put(configurationType, authenticationMechanism.getAuthenticationMechanismType());
+                authenticationConfigurationTypes.add(configurationType);
+            });
+        });
+        this.authenticationConfigurationTypesMap = Maps.immutable.withAll(authenticationConfigurationTypesMap);
+        this.authenticationConfigurationTypes = Lists.immutable.withAll(authenticationConfigurationTypes);
+
+        if (!this.authenticationConfigurationTypesMap.containsKey(this.authenticationConfiguration.getClass()))
+        {
+            throw new RuntimeException(String.format("Specified authentication configuration of type '%s' is not compatible.
Supported configuration type(s):\n%s",
+                    this.authenticationConfiguration.getClass().getSimpleName(),
+                    this.getAuthenticationConfigurationTypes().collect(type -> "- " + type.getSimpleName()).makeString("\n")
+            ));
+        }
+    }
+
+    public String getIdentifier()
+    {
+        return identifier;
+    }
+
+    public DatabaseSupport getDatabaseSupport()
+    {
+        return databaseSupport;
+    }
+
+    public ConnectionSpecification getConnectionSpecification()
+    {
+        return connectionSpecification;
+    }
+
+    public AuthenticationConfiguration getAuthenticationConfiguration()
+    {
+        return authenticationConfiguration;
+    }
+
+    public ImmutableList<AuthenticationMechanismType> getAuthenticationMechanisms()
+    {
+        return this.authenticationMechanismTypes;
+    }
+
+    public ImmutableList<Class<? extends AuthenticationConfiguration>> getAuthenticationConfigurationTypes()
+    {
+        return this.authenticationConfigurationTypes;
+    }
+
+    public AuthenticationMechanismType getAuthenticationMechanism(Class<? extends AuthenticationConfiguration> authenticationConfigurationType)
+    {
+        return this.authenticationConfigurationTypesMap.get(authenticationConfigurationType);
+    }
+
+    public AuthenticationMechanism getAuthenticationMechanism(AuthenticationMechanismType authenticationMechanismType)
+    {
+        return authenticationMechanismsIndex.get(authenticationMechanismType.getIdentifier());
+    }
+
+    public <T extends ConnectionSpecification> T getConnectionSpecification(Class<T> clazz)
+    {
+        if (!this.connectionSpecification.getClass().equals(clazz))
+        {
+            throw new RuntimeException(String.format("Can't get connection specification of type '%s' for store '%s'", clazz.getSimpleName(), this.identifier));
+        }
+        return (T) this.connectionSpecification;
+    }
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+    {
+        private DatabaseSupport databaseSupport;
+        private String identifier;
+        private final List<AuthenticationMechanism> authenticationMechanisms = Lists.mutable.empty();
+        private ConnectionSpecification connectionSpecification;
+        private AuthenticationConfiguration authenticationConfiguration;
+
+        private Builder()
+        {
+        }
+
+        public Builder databaseSupport(DatabaseSupport databaseSupport)
+        {
+            this.databaseSupport = databaseSupport;
+            return this;
+        }
+
+        public Builder identifier(String identifier)
+        {
+            this.identifier = identifier;
+            return this;
+        }
+
+        public Builder authenticationMechanism(AuthenticationMechanism authenticationMechanism)
+        {
+            this.authenticationMechanisms.add(authenticationMechanism);
+            return this;
+        }
+
+        public Builder authenticationMechanisms(List<AuthenticationMechanism> authenticationMechanisms)
+        {
+            this.authenticationMechanisms.addAll(authenticationMechanisms);
+            return this;
+        }
+
+        public Builder authenticationMechanisms(AuthenticationMechanism... authenticationMechanisms)
+        {
+            this.authenticationMechanisms.addAll(Lists.mutable.of(authenticationMechanisms));
+            return this;
+        }
+
+        public Builder connectionSpecification(ConnectionSpecification connectionSpecification)
+        {
+            this.connectionSpecification = connectionSpecification;
+            return this;
+        }
+
+        public Builder authenticationConfiguration(AuthenticationConfiguration authenticationConfiguration)
+        {
+            this.authenticationConfiguration = authenticationConfiguration;
+            return this;
+        }
+
+        public Builder fromProtocol(org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection protocol, LegendEnvironment environment)
+        {
+            return this
+                    .databaseSupport(environment.getDatabaseSupport(environment.getDatabaseType(protocol.databaseType)))
+                    .identifier(protocol.getPath())
+                    .authenticationMechanisms(
+                            protocol.authenticationMechanisms != null
+                                    ?
ListIterate.collect(protocol.authenticationMechanisms, mechanism ->
+                                            AuthenticationMechanism
+                                                    .builder()
+                                                    .type(environment.getAuthenticationMechanism(mechanism.authenticationMechanismType))
+                                                    .authenticationConfigurationTypes(
+                                                            ListIterate.collect(mechanism.configurationTypes, environment::getAuthenticationConfigurationType)
+                                                    )
+                                                    .build())
+                                    : Lists.mutable.empty()
+                    )
+                    .connectionSpecification(protocol.connectionSpecification)
+                    .authenticationConfiguration(protocol.authenticationConfiguration);
+        }
+
+
+        public Connection build()
+        {
+            return new Connection(
+                    this.identifier,
+                    this.databaseSupport,
+                    this.authenticationMechanisms,
+                    this.connectionSpecification,
+                    this.authenticationConfiguration
+            );
+        }
+    }
+}
diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java similarity index 97% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java index 49de64a0f9c..344982d55eb 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionBuilder.java @@ -14,7 +14,7 @@
 package org.finos.legend.connection;
 
-import org.finos.legend.connection.protocol.ConnectionSpecification;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification;
 import org.finos.legend.engine.shared.core.identity.Credential;
 import org.finos.legend.engine.shared.core.identity.Identity;
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseType.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionExtension.java similarity index 68% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseType.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionExtension.java index 3982c57e35e..58fb2362ee1 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseType.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionExtension.java @@ -14,23 +14,18 @@
 package org.finos.legend.connection;
 
-public enum DatabaseType implements Database
-{
-    H2("H2"),
-    POSTGRES("Postgres"),
-    BIG_QUERY("BigQuery"),
-    SNOWFLAKE("Snowflake");
-
-    private final String label;
+import java.util.Collections;
+import java.util.List;
 
-    private DatabaseType(String label)
+public interface ConnectionExtension
+{
+    default List<DatabaseType> getExtraDatabaseTypes()
     {
-        this.label = label;
+        return Collections.emptyList();
     }
 
-    @Override
-    public String getLabel()
+    default List<AuthenticationMechanismType> getExtraAuthenticationMechanismTypes()
     {
-        return this.label;
+        return
Collections.emptyList(); } } diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java similarity index 65% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java index 5eca86a7c23..aedb2732661 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionFactory.java @@ -14,12 +14,9 @@ package org.finos.legend.connection; -import org.eclipse.collections.api.block.function.Function0; import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.impl.utility.ListIterate; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; @@ -39,14 +36,12 @@ public class ConnectionFactory { private final LegendEnvironment environment; - private final StoreInstanceProvider storeInstanceProvider; private final Map credentialBuildersIndex = new LinkedHashMap<>(); private final Map connectionBuildersIndex = new LinkedHashMap<>(); - private ConnectionFactory(LegendEnvironment environment, StoreInstanceProvider storeInstanceProvider, List credentialBuilders, List connectionBuilders) + private ConnectionFactory(LegendEnvironment environment, List credentialBuilders, List connectionBuilders) { - this.environment = environment; - this.storeInstanceProvider = storeInstanceProvider; + this.environment = Objects.requireNonNull(environment, "environment is missing"); for (ConnectionBuilder builder : connectionBuilders) { this.connectionBuildersIndex.put(new ConnectionBuilder.Key(builder.getConnectionSpecificationType(), builder.getCredentialType()), builder); @@ -62,84 +57,52 @@ public LegendEnvironment getEnvironment() return environment; } - public Authenticator getAuthenticator(Identity identity, String storeInstanceIdentifier, AuthenticationMechanism authenticationMechanism) + public Authenticator getAuthenticator(Identity identity, Connection connection, AuthenticationConfiguration authenticationConfiguration) { - return this.getAuthenticator(identity, this.storeInstanceProvider.lookup(storeInstanceIdentifier), authenticationMechanism); - } - - public Authenticator getAuthenticator(Identity identity, StoreInstance storeInstance, AuthenticationMechanism authenticationMechanism) - { - AuthenticationMechanismConfiguration authenticationMechanismConfiguration = Objects.requireNonNull(storeInstance.getAuthenticationMechanismConfiguration(authenticationMechanism), String.format("Store '%s' does not support 
authentication mechanism '%s'. Supported mechanism(s):\n%s", - storeInstance.getIdentifier(), - authenticationMechanism.getLabel(), - ListIterate.collect(storeInstance.getAuthenticationMechanisms(), mechanism -> "- " + mechanism.getLabel()).makeString("\n") - )); - Function0 generator = authenticationMechanismConfiguration.getDefaultAuthenticationConfigurationGenerator(); - AuthenticationConfiguration authenticationConfiguration = Objects.requireNonNull(generator != null ? generator.get() : null, String.format("Can't auto-generate authentication configuration for store '%s' with authentication mechanism '%s'. Please provide a configuration of one of the following type(s):\n%s", - storeInstance.getIdentifier(), - authenticationMechanism.getLabel(), - authenticationMechanismConfiguration.getAuthenticationConfigurationTypes().collect(configType -> "- " + configType.getSimpleName()).makeString("\n") - )); - return this.getAuthenticator(identity, storeInstance, authenticationMechanism, authenticationConfiguration); - } - - public Authenticator getAuthenticator(Identity identity, String storeInstanceIdentifier, AuthenticationConfiguration authenticationConfiguration) - { - return this.getAuthenticator(identity, this.storeInstanceProvider.lookup(storeInstanceIdentifier), authenticationConfiguration); - } - - public Authenticator getAuthenticator(Identity identity, StoreInstance storeInstance, AuthenticationConfiguration authenticationConfiguration) - { - AuthenticationMechanism authenticationMechanism = Objects.requireNonNull(storeInstance.getAuthenticationMechanism(authenticationConfiguration.getClass()), String.format("Store '%s' does not accept authentication configuration type '%s'. Supported configuration type(s):\n%s", - storeInstance.getIdentifier(), + AuthenticationMechanismType authenticationMechanismType = Objects.requireNonNull(connection.getAuthenticationMechanism(authenticationConfiguration.getClass()), String.format("Connection '%s' is not compatible with authentication configuration type '%s'. 
Supported configuration type(s):\n%s", + connection.getIdentifier(), authenticationConfiguration.getClass().getSimpleName(), - ListIterate.collect(storeInstance.getAuthenticationConfigurationTypes(), configType -> "- " + configType.getSimpleName()).makeString("\n") + connection.getAuthenticationConfigurationTypes().collect(configType -> "- " + configType.getSimpleName()).makeString("\n") )); - return this.getAuthenticator(identity, storeInstance, authenticationMechanism, authenticationConfiguration); + return this.getAuthenticator(identity, connection, authenticationMechanismType, authenticationConfiguration); } - private Authenticator getAuthenticator(Identity identity, StoreInstance storeInstance, AuthenticationMechanism authenticationMechanism, AuthenticationConfiguration authenticationConfiguration) + private Authenticator getAuthenticator(Identity identity, Connection connection, AuthenticationMechanismType authenticationMechanismType, AuthenticationConfiguration authenticationConfiguration) { - AuthenticationFlowResolver.ResolutionResult result = AuthenticationFlowResolver.run(this.credentialBuildersIndex, this.connectionBuildersIndex, identity, authenticationMechanism, authenticationConfiguration, storeInstance.getConnectionSpecification()); + AuthenticationFlowResolver.ResolutionResult result = AuthenticationFlowResolver.run(this.credentialBuildersIndex, this.connectionBuildersIndex, identity, authenticationMechanismType, authenticationConfiguration, connection.getConnectionSpecification()); if (result == null) { - throw new RuntimeException(String.format("No authentication flow for store '%s' can be resolved for the specified identity (authentication configuration: %s, connection specification: %s)", - storeInstance.getIdentifier(), + throw new RuntimeException(String.format("No authentication flow for connection '%s' can be resolved for the specified identity (authentication configuration: %s, connection specification: %s)", + connection.getIdentifier(), authenticationConfiguration.getClass().getSimpleName(), - storeInstance.getConnectionSpecification().getClass().getSimpleName() + connection.getConnectionSpecification().getClass().getSimpleName() )); } - return new Authenticator(storeInstance, authenticationMechanism, authenticationConfiguration, result.sourceCredentialType, result.targetCredentialType, result.flow, connectionBuildersIndex.get(new ConnectionBuilder.Key(storeInstance.getConnectionSpecification().getClass(), result.targetCredentialType)), this.environment); + return new Authenticator(connection, authenticationMechanismType, authenticationConfiguration, result.sourceCredentialType, result.targetCredentialType, result.flow, connectionBuildersIndex.get(new ConnectionBuilder.Key(connection.getConnectionSpecification().getClass(), result.targetCredentialType)), this.environment); } - public Authenticator getAuthenticator(Identity identity, String storeInstanceIdentifier) - { - return this.getAuthenticator(identity, this.storeInstanceProvider.lookup(storeInstanceIdentifier)); - } - - public Authenticator getAuthenticator(Identity identity, StoreInstance storeInstance) + public Authenticator getAuthenticator(Identity identity, Connection connection) { Authenticator authenticator = null; - for (AuthenticationMechanism authenticationMechanism : storeInstance.getAuthenticationMechanisms()) + for (AuthenticationMechanismType authenticationMechanismType : connection.getAuthenticationMechanisms()) { - AuthenticationMechanismConfiguration authenticationMechanismConfiguration = 
storeInstance.getAuthenticationMechanismConfiguration(authenticationMechanism); - Function0 generator = authenticationMechanismConfiguration.getDefaultAuthenticationConfigurationGenerator(); - AuthenticationConfiguration authenticationConfiguration = generator != null ? generator.get() : null; + AuthenticationMechanism authenticationMechanism = connection.getAuthenticationMechanism(authenticationMechanismType); + AuthenticationConfiguration authenticationConfiguration = connection.getAuthenticationConfiguration(); if (authenticationConfiguration != null) { - AuthenticationFlowResolver.ResolutionResult result = AuthenticationFlowResolver.run(this.credentialBuildersIndex, this.connectionBuildersIndex, identity, authenticationMechanism, authenticationConfiguration, storeInstance.getConnectionSpecification()); + AuthenticationFlowResolver.ResolutionResult result = AuthenticationFlowResolver.run(this.credentialBuildersIndex, this.connectionBuildersIndex, identity, authenticationMechanismType, authenticationConfiguration, connection.getConnectionSpecification()); if (result != null) { - authenticator = new Authenticator(storeInstance, authenticationMechanism, authenticationConfiguration, result.sourceCredentialType, result.targetCredentialType, result.flow, connectionBuildersIndex.get(new ConnectionBuilder.Key(storeInstance.getConnectionSpecification().getClass(), result.targetCredentialType)), this.environment); + authenticator = new Authenticator(connection, authenticationMechanismType, authenticationConfiguration, result.sourceCredentialType, result.targetCredentialType, result.flow, connectionBuildersIndex.get(new ConnectionBuilder.Key(connection.getConnectionSpecification().getClass(), result.targetCredentialType)), this.environment); break; } } } if (authenticator == null) { - throw new RuntimeException(String.format("No authentication flow for store '%s' can be resolved for the specified identity. Try specifying an authentication mechanism or authentication configuration. Supported configuration type(s):\n%s", - storeInstance.getIdentifier(), - ListIterate.collect(storeInstance.getAuthenticationConfigurationTypes(), configType -> "- " + configType.getSimpleName() + " (" + storeInstance.getAuthenticationMechanism(configType).getLabel() + ")").makeString("\n") + throw new RuntimeException(String.format("No authentication flow for connection '%s' can be resolved for the specified identity. Try specifying another authentication configuration. Supported configuration type(s):\n%s", + connection.getIdentifier(), + connection.getAuthenticationConfigurationTypes().collect(configType -> "- " + configType.getSimpleName() + " (" + connection.getAuthenticationMechanism(configType).getIdentifier() + ")").makeString("\n") )); } return authenticator; @@ -175,7 +138,7 @@ private static class AuthenticationFlowResolver *

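* For illustration, consider hypothetical types named after the ones used in this module's tests:
* an identity holding Credential_A, a credential builder that derives Credential_B from Credential_A
* under AuthenticationConfiguration_X, and a connection builder that accepts Credential_B. The
* resulting graph then contains the path:
*
*   (Identity) --> [Credential_A] --> [Credential_B] --> ((Connection))
*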
* With this setup, we can use a basic graph search algorithm (e.g. BFS) to resolve the shortest path to build a connection */ - private AuthenticationFlowResolver(Map credentialBuildersIndex, Map connectionBuildersIndex, Identity identity, AuthenticationConfiguration authenticationConfiguration, AuthenticationMechanism authenticationMechanism, ConnectionSpecification connectionSpecification) + private AuthenticationFlowResolver(Map credentialBuildersIndex, Map connectionBuildersIndex, Identity identity, AuthenticationConfiguration authenticationConfiguration, AuthenticationMechanismType authenticationMechanismType, ConnectionSpecification connectionSpecification) { // add start node (i.e. identity node) this.startNode = new FlowNode(identity); @@ -228,10 +191,10 @@ private void processEdge(FlowNode node, FlowNode adjacentNode) /** * Resolves the authentication flow in order to build a connection for a specified identity */ - public static ResolutionResult run(Map credentialBuildersIndex, Map connectionBuildersIndex, Identity identity, AuthenticationMechanism authenticationMechanism, AuthenticationConfiguration authenticationConfiguration, ConnectionSpecification connectionSpecification) + public static ResolutionResult run(Map credentialBuildersIndex, Map connectionBuildersIndex, Identity identity, AuthenticationMechanismType authenticationMechanismType, AuthenticationConfiguration authenticationConfiguration, ConnectionSpecification connectionSpecification) { // using BFS algo to search for the shortest (non-cyclic) path - AuthenticationFlowResolver state = new AuthenticationFlowResolver(credentialBuildersIndex, connectionBuildersIndex, identity, authenticationConfiguration, authenticationMechanism, connectionSpecification); + AuthenticationFlowResolver state = new AuthenticationFlowResolver(credentialBuildersIndex, connectionBuildersIndex, identity, authenticationConfiguration, authenticationMechanismType, connectionSpecification); boolean found = false; Set visitedNodes = new HashSet<>(); // Create a set to keep track of visited vertices @@ -371,76 +334,74 @@ public ResolutionResult(List flow, Class T getConnection(Identity identity, StoreInstance storeInstance, AuthenticationConfiguration authenticationConfiguration) throws Exception - { - return this.getConnection(identity, this.getAuthenticator(identity, storeInstance, authenticationConfiguration)); - } - - public T getConnection(Identity identity, String storeInstanceIdentifier, AuthenticationConfiguration authenticationConfiguration) throws Exception + public T getConnection(Identity identity, Connection connection, AuthenticationConfiguration authenticationConfiguration) throws Exception { - return this.getConnection(identity, this.getAuthenticator(identity, storeInstanceIdentifier, authenticationConfiguration)); + return this.getConnection(identity, this.getAuthenticator(identity, connection, authenticationConfiguration)); } - public T getConnection(Identity identity, StoreInstance storeInstance) throws Exception + public T getConnection(Identity identity, Connection connection) throws Exception { - return this.getConnection(identity, this.getAuthenticator(identity, storeInstance)); + return this.getConnection(identity, this.getAuthenticator(identity, connection)); } - public T getConnection(Identity identity, String storeInstanceIdentifier) throws Exception + public T getConnection(Identity identity, Authenticator authenticator) throws Exception { - return this.getConnection(identity, this.getAuthenticator(identity, 
storeInstanceIdentifier)); + ConnectionBuilder flow = (ConnectionBuilder) authenticator.getConnectionBuilder(); + return flow.getConnection(authenticator.getConnection().getConnectionSpecification(), flow.getAuthenticatorCompatible(authenticator), identity); } - public T getConnection(Identity identity, Authenticator authenticator) throws Exception + public static Builder builder() { - ConnectionBuilder flow = (ConnectionBuilder) authenticator.getConnectionBuilder(); - return flow.getConnection(authenticator.getStoreInstance().getConnectionSpecification(), flow.getAuthenticatorCompatible(authenticator), identity); + return new Builder(); } public static class Builder { - private final LegendEnvironment environment; - private final StoreInstanceProvider storeInstanceProvider; + private LegendEnvironment environment; private final List credentialBuilders = Lists.mutable.empty(); private final List connectionBuilders = Lists.mutable.empty(); - public Builder(LegendEnvironment environment, StoreInstanceProvider storeInstanceProvider) + private Builder() + { + } + + public Builder environment(LegendEnvironment environment) { this.environment = environment; - this.storeInstanceProvider = storeInstanceProvider; + return this; } - public Builder withCredentialBuilders(List credentialBuilders) + public Builder credentialBuilders(List credentialBuilders) { this.credentialBuilders.addAll(credentialBuilders); return this; } - public Builder withCredentialBuilders(CredentialBuilder... credentialBuilders) + public Builder credentialBuilders(CredentialBuilder... credentialBuilders) { this.credentialBuilders.addAll(Lists.mutable.with(credentialBuilders)); return this; } - public Builder withCredentialBuilder(CredentialBuilder credentialBuilder) + public Builder credentialBuilder(CredentialBuilder credentialBuilder) { this.credentialBuilders.add(credentialBuilder); return this; } - public Builder withConnectionBuilders(List connectionBuilders) + public Builder connectionBuilders(List connectionBuilders) { this.connectionBuilders.addAll(connectionBuilders); return this; } - public Builder withConnectionBuilders(ConnectionBuilder... connectionBuilders) + public Builder connectionBuilders(ConnectionBuilder... 
connectionBuilders) { this.connectionBuilders.addAll(Lists.mutable.with(connectionBuilders)); return this; } - public Builder withConnectionBuilder(ConnectionBuilder connectionBuilder) + public Builder connectionBuilder(ConnectionBuilder connectionBuilder) { this.connectionBuilders.add(connectionBuilder); return this; @@ -459,7 +420,6 @@ public ConnectionFactory build() return new ConnectionFactory( this.environment, - this.storeInstanceProvider, this.credentialBuilders, this.connectionBuilders ); diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionManager.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionManager.java similarity index 100% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/ConnectionManager.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/ConnectionManager.java diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java similarity index 96% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java index 0f4726abfd7..404074863a1 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/CredentialBuilder.java @@ -14,7 +14,7 @@ package org.finos.legend.connection; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseSupport.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseSupport.java new file mode 100644 index 00000000000..7649cb9aedc --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseSupport.java @@ -0,0 +1,165 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package org.finos.legend.connection;
+
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.factory.Maps;
+import org.eclipse.collections.api.list.ImmutableList;
+import org.eclipse.collections.api.map.ImmutableMap;
+import org.eclipse.collections.impl.utility.ListIterate;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A DatabaseSupport describes the capabilities supported by a database.
+ * For now, it only describes the supported authentication mechanisms.
+ */
+public final class DatabaseSupport
+{
+    private final DatabaseType databaseType;
+    private final ImmutableMap<String, AuthenticationMechanism> authenticationMechanismsIndex;
+    private final ImmutableList<AuthenticationMechanismType> authenticationMechanismTypes;
+
+    private DatabaseSupport(DatabaseType databaseType, List<AuthenticationMechanism> authenticationMechanisms)
+    {
+        this.databaseType = Objects.requireNonNull(databaseType, "Database type is missing");
+
+        Map<String, AuthenticationMechanism> authenticationMechanismsIndex = new LinkedHashMap<>();
+        List<AuthenticationMechanismType> authenticationMechanismTypes = Lists.mutable.empty();
+        Map<Class<? extends AuthenticationConfiguration>, AuthenticationMechanismType> authenticationConfigurationTypeIndex = new LinkedHashMap<>();
+        for (AuthenticationMechanism authenticationMechanism : authenticationMechanisms)
+        {
+            AuthenticationMechanismType authenticationMechanismType = authenticationMechanism.getAuthenticationMechanismType();
+            if (authenticationMechanismsIndex.containsKey(authenticationMechanismType.getIdentifier()))
+            {
+                throw new RuntimeException(String.format("Found multiple authentication mechanisms with type '%s'", authenticationMechanismType.getIdentifier()));
+            }
+            authenticationMechanismsIndex.put(authenticationMechanismType.getIdentifier(), authenticationMechanism);
+            authenticationMechanism.getAuthenticationConfigurationTypes().forEach(authenticationConfigurationType ->
+            {
+                if (authenticationConfigurationTypeIndex.containsKey(authenticationConfigurationType))
+                {
+                    throw new RuntimeException(String.format("Authentication configuration type '%s' is associated with multiple authentication mechanisms", authenticationConfigurationType.getSimpleName()));
+                }
+                authenticationConfigurationTypeIndex.put(authenticationConfigurationType, authenticationMechanismType);
+            });
+            authenticationMechanismTypes.add(authenticationMechanism.getAuthenticationMechanismType());
+        }
+
+        this.authenticationMechanismTypes = Lists.immutable.withAll(authenticationMechanismTypes);
+        this.authenticationMechanismsIndex = Maps.immutable.withAll(authenticationMechanismsIndex);
+
+        authenticationMechanisms.forEach((authenticationMechanism) ->
+        {
+            if (authenticationMechanism.getAuthenticationConfigurationTypes().isEmpty())
+            {
+                throw new RuntimeException(String.format("No authentication configuration type is associated with authentication mechanism '%s'", authenticationMechanism.getAuthenticationMechanismType().getIdentifier()));
+            }
+        });
+    }
+
+    public DatabaseType getDatabaseType()
+    {
+        return this.databaseType;
+    }
+
+    public AuthenticationMechanism getAuthenticationMechanism(AuthenticationMechanismType authenticationMechanismType)
+    {
+        return authenticationMechanismsIndex.get(authenticationMechanismType.getIdentifier());
+    }
+
+    public ImmutableList<AuthenticationMechanismType> getAuthenticationMechanismTypes()
+    {
+        return this.authenticationMechanismTypes;
+    }
+
+    public static void verifyDatabaseType(DatabaseSupport databaseSupport, DatabaseType databaseType)
+    {
+        if (!databaseType.equals(databaseSupport.getDatabaseType()))
+        {
+            throw new RuntimeException(String.format("Expected database type '%s', but got '%s'", databaseType.getIdentifier(), databaseSupport.getDatabaseType().getIdentifier()));
+        }
+    }
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+    {
+        private DatabaseType databaseType;
+        private final List<AuthenticationMechanism> authenticationMechanisms = Lists.mutable.empty();
+
+        private Builder()
+        {
+        }
+
+        public Builder type(DatabaseType databaseType)
+        {
+            this.databaseType = databaseType;
+            return this;
+        }
+
+        public Builder authenticationMechanism(AuthenticationMechanism authenticationMechanism)
+        {
+            this.authenticationMechanisms.add(authenticationMechanism);
+            return this;
+        }
+
+        public Builder authenticationMechanisms(List<AuthenticationMechanism> authenticationMechanisms)
+        {
+            this.authenticationMechanisms.addAll(authenticationMechanisms);
+            return this;
+        }
+
+        public Builder authenticationMechanisms(AuthenticationMechanism... authenticationMechanisms)
+        {
+            this.authenticationMechanisms.addAll(Lists.mutable.of(authenticationMechanisms));
+            return this;
+        }
+
+        public Builder fromProtocol(org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.DatabaseSupport databaseSupport, LegendEnvironment environment)
+        {
+            return this
+                    .type(environment.getDatabaseType(databaseSupport.databaseType))
+                    .authenticationMechanisms(
+                            databaseSupport.authenticationMechanisms != null
+                                    ? ListIterate.collect(databaseSupport.authenticationMechanisms, mechanism ->
+                                            AuthenticationMechanism
+                                                    .builder()
+                                                    .type(environment.getAuthenticationMechanism(mechanism.authenticationMechanismType))
+                                                    .authenticationConfigurationTypes(
+                                                            ListIterate.collect(mechanism.configurationTypes, environment::getAuthenticationConfigurationType)
+                                                    )
+                                                    .build())
+                                    : Lists.mutable.empty()
+                    );
+        }
+
+        public DatabaseSupport build()
+        {
+            return new DatabaseSupport(
+                    this.databaseType,
+                    this.authenticationMechanisms
+            );
+        }
+    }
+}
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/Database.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseType.java
similarity index 91%
rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/Database.java
rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseType.java
index e999c689c69..19292292433 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/Database.java
+++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/DatabaseType.java
@@ -14,7 +14,7 @@
 package org.finos.legend.connection;

-public interface Database
+public interface DatabaseType
 {
-    String getLabel();
+    String getIdentifier();
 }
diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java
similarity index 80%
rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java
rename to
legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java index a02600a70a6..71065b9e4fc 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentityFactory.java @@ -21,6 +21,7 @@ import org.finos.legend.engine.shared.core.identity.factory.DefaultIdentityFactory; import java.util.List; +import java.util.Objects; public class IdentityFactory { @@ -28,7 +29,7 @@ public class IdentityFactory private IdentityFactory(LegendEnvironment environment) { - this.environment = environment; + this.environment = Objects.requireNonNull(environment, "environment is missing"); } // TODO: @akphi - this clones the logic from IdentityFactoryProvider, we should think about unifying them @@ -41,11 +42,11 @@ public Identity createIdentity(IdentitySpecification identitySpecification) // TODO: @akphi - should we restrict here that we can only either specify the subject/profiles? if (identitySpecification.getSubject() != null) { - credentials.addAll(DEFAULT.makeIdentity(identitySpecification.getSubject()).getCredentials().toList()); + return DEFAULT.makeIdentity(identitySpecification.getSubject()); } if (!identitySpecification.getProfiles().isEmpty()) { - credentials.addAll(DEFAULT.makeIdentity(Lists.mutable.withAll(identitySpecification.getProfiles())).getCredentials().toList()); + return DEFAULT.makeIdentity(Lists.mutable.withAll(identitySpecification.getProfiles())); } if (credentials.isEmpty()) { @@ -54,13 +55,23 @@ public Identity createIdentity(IdentitySpecification identitySpecification) return new Identity(identitySpecification.getName(), credentials); } + public static Builder builder() + { + return new Builder(); + } + public static class Builder { - private final LegendEnvironment environment; + private LegendEnvironment environment; + + private Builder() + { + } - public Builder(LegendEnvironment environment) + public Builder environment(LegendEnvironment environment) { this.environment = environment; + return this; } public IdentityFactory build() diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java similarity index 85% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java index 7a07c2f6989..50e60a670e8 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/IdentitySpecification.java @@ -20,7 +20,6 @@ import javax.security.auth.Subject; import java.util.List; -import java.util.Objects; public class IdentitySpecification { @@ -57,6 +56,11 @@ public List getCredentials() return credentials; } + public static Builder builder() + { + return new Builder(); + } + public static class Builder { private String 
name;
@@ -64,37 +68,41 @@ public static class Builder
         private Subject subject;
         private final List<Credential> credentials = Lists.mutable.empty();

-        public Builder withName(String name)
+        private Builder()
+        {
+        }
+
+        public Builder name(String name)
         {
             this.name = name;
             return this;
         }

-        public Builder withProfiles(List<CommonProfile> profiles)
+        public Builder profiles(List<CommonProfile> profiles)
         {
             this.profiles.addAll(profiles);
             return this;
         }

-        public Builder withProfile(CommonProfile profile)
+        public Builder profile(CommonProfile profile)
         {
             this.profiles.add(profile);
             return this;
         }

-        public Builder withSubject(Subject subject)
+        public Builder subject(Subject subject)
         {
             this.subject = subject;
             return this;
         }

-        public Builder withCredentials(List<Credential> credentials)
+        public Builder credentials(List<Credential> credentials)
         {
             this.credentials.addAll(credentials);
             return this;
         }

-        public Builder withCredential(Credential credential)
+        public Builder credential(Credential credential)
         {
             this.credentials.add(credential);
             return this;
diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java
new file mode 100644
index 00000000000..6a9d44531d0
--- /dev/null
+++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/LegendEnvironment.java
@@ -0,0 +1,209 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.connection;
+
+import org.eclipse.collections.api.block.function.Function0;
+import org.eclipse.collections.api.factory.Lists;
+import org.eclipse.collections.api.list.ImmutableList;
+import org.eclipse.collections.api.list.MutableList;
+import org.eclipse.collections.api.map.ImmutableMap;
+import org.eclipse.collections.impl.factory.Maps;
+import org.eclipse.collections.impl.utility.LazyIterate;
+import org.finos.legend.authentication.vault.CredentialVault;
+import org.finos.legend.connection.impl.CoreAuthenticationMechanismType;
+import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtensionLoader;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.CredentialVaultSecret;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration;
+import org.finos.legend.engine.shared.core.identity.Identity;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.ServiceLoader;
+
+/**
+ * This is the runtime configuration instance for Legend Engine: the place where we package common
+ * configs, such as vaults, that can be passed to various parts of the engine, e.g. authentication
+ * and the connection factory.
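+ * <p>
+ * Illustrative usage (a sketch only; the vault implementation and the database type shown here are
+ * placeholders for whatever a deployment actually registers):
+ * <pre>{@code
+ * LegendEnvironment environment = LegendEnvironment.builder()
+ *         .vaults(new SystemPropertiesCredentialVault())
+ *         .databaseSupport(DatabaseSupport.builder()
+ *                 .type(postgresDatabaseType) // a DatabaseType contributed via a ConnectionExtension
+ *                 .authenticationMechanism(AuthenticationMechanism.builder()
+ *                         .type(CoreAuthenticationMechanismType.USER_PASSWORD)
+ *                         .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class)
+ *                         .build())
+ *                 .build())
+ *         .build();
+ * }</pre>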
+ */
+public class LegendEnvironment
+{
+    protected final ImmutableList<CredentialVault> vaults;
+    protected final ImmutableList<ConnectionExtension> connectionExtensions;
+    protected final ImmutableMap<String, AuthenticationMechanismType> authenticationMechanismsTypesIndex;
+    protected final ImmutableMap<String, DatabaseType> databaseTypesIndex;
+    protected final ImmutableMap<String, Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypesIndex;
+    protected final ImmutableMap<Class<? extends CredentialVaultSecret>, CredentialVault> vaultsIndex;
+    protected final ImmutableMap<String, DatabaseSupport> databaseSupportsIndex;
+
+    protected LegendEnvironment(List<CredentialVault> vaults, List<DatabaseSupport> databaseSupports)
+    {
+        this.vaults = Lists.immutable.withAll(vaults);
+        Map<Class<? extends CredentialVaultSecret>, CredentialVault> vaultsIndex = new LinkedHashMap<>();
+        for (CredentialVault vault : vaults)
+        {
+            vaultsIndex.put(vault.getSecretType(), vault);
+        }
+        this.vaultsIndex = Maps.immutable.withAll(vaultsIndex);
+
+        MutableList<ConnectionExtension> connectionExtensions = Lists.mutable.withAll(ServiceLoader.load(ConnectionExtension.class));
+        this.connectionExtensions = connectionExtensions.toImmutable();
+
+        // load authentication mechanism types
+        List<AuthenticationMechanismType> authenticationMechanismTypes = Lists.mutable.of(CoreAuthenticationMechanismType.values());
+        authenticationMechanismTypes.addAll(connectionExtensions.flatCollect(ConnectionExtension::getExtraAuthenticationMechanismTypes));
+        Map<String, AuthenticationMechanismType> authenticationMechanismsTypesIndex = new LinkedHashMap<>();
+        authenticationMechanismTypes.forEach(authenticationMechanism ->
+        {
+            if (authenticationMechanismsTypesIndex.containsKey(authenticationMechanism.getIdentifier()))
+            {
+                throw new RuntimeException(String.format("Found multiple authentication mechanisms with identifier '%s'", authenticationMechanism.getIdentifier()));
+            }
+            authenticationMechanismsTypesIndex.put(authenticationMechanism.getIdentifier(), authenticationMechanism);
+        });
+        this.authenticationMechanismsTypesIndex = Maps.immutable.withAll(authenticationMechanismsTypesIndex);
+
+        // load database types
+        List<DatabaseType> databaseTypes = connectionExtensions.flatCollect(ConnectionExtension::getExtraDatabaseTypes);
+        Map<String, DatabaseType> databaseTypesIndex = new LinkedHashMap<>();
+        databaseTypes.forEach(databaseType ->
+        {
+            if (databaseTypesIndex.containsKey(databaseType.getIdentifier()))
+            {
+                throw new RuntimeException(String.format("Found multiple database types with identifier '%s'", databaseType.getIdentifier()));
+            }
+            databaseTypesIndex.put(databaseType.getIdentifier(), databaseType);
+        });
+        this.databaseTypesIndex = Maps.immutable.withAll(databaseTypesIndex);
+
+        // load authentication configuration types
+        Map<String, Class<? extends AuthenticationConfiguration>> authenticationConfigurationTypesIndex = new LinkedHashMap<>();
+        PureProtocolExtensionLoader.extensions().forEach(extension ->
+                LazyIterate.flatCollect(extension.getExtraProtocolSubTypeInfoCollectors(), Function0::value).forEach(info ->
+                {
+                    info.getSubTypes().forEach(subType ->
+                    {
+                        if (AuthenticationConfiguration.class.isAssignableFrom(subType.getOne()))
+                        {
+                            if (authenticationConfigurationTypesIndex.containsKey(subType.getTwo()))
+                            {
+                                throw new RuntimeException(String.format("Found multiple authentication configuration types with identifier '%s'", subType.getTwo()));
+                            }
+                            authenticationConfigurationTypesIndex.put(subType.getTwo(), (Class<? extends AuthenticationConfiguration>) subType.getOne());
+                        }
+                    });
+                }));
+        this.authenticationConfigurationTypesIndex = Maps.immutable.withAll(authenticationConfigurationTypesIndex);
+
+        // load database supports
+        Map<String, DatabaseSupport> databaseSupportsIndex = new LinkedHashMap<>();
+        databaseSupports.forEach(databaseSupport ->
+        {
+            if (databaseSupportsIndex.containsKey(databaseSupport.getDatabaseType().getIdentifier()))
+            {
+                throw new RuntimeException(String.format("Found multiple database supports for type '%s'", databaseSupport.getDatabaseType().getIdentifier()));
+            }
+            databaseSupportsIndex.put(databaseSupport.getDatabaseType().getIdentifier(), databaseSupport);
+        });
+        this.databaseSupportsIndex = Maps.immutable.withAll(databaseSupportsIndex);
+    }
+
+    public String lookupVaultSecret(CredentialVaultSecret credentialVaultSecret, Identity identity) throws Exception
+    {
+        Class<? extends CredentialVaultSecret> secretClass = credentialVaultSecret.getClass();
+        if (!this.vaultsIndex.containsKey(secretClass))
+        {
+            throw new RuntimeException(String.format("Can't find secret: credential vault for secret of type '%s' has not been registered", secretClass.getSimpleName()));
+        }
+        CredentialVault vault = this.vaultsIndex.get(secretClass);
+        return vault.lookupSecret(credentialVaultSecret, identity);
+    }
+
+    public AuthenticationMechanismType getAuthenticationMechanism(String identifier)
+    {
+        return Objects.requireNonNull(this.authenticationMechanismsTypesIndex.get(identifier), String.format("Can't find authentication mechanism with identifier '%s'", identifier));
+    }
+
+    public DatabaseType getDatabaseType(String identifier)
+    {
+        return Objects.requireNonNull(this.databaseTypesIndex.get(identifier), String.format("Can't find database type with identifier '%s'", identifier));
+    }
+
+    public Class<? extends AuthenticationConfiguration> getAuthenticationConfigurationType(String identifier)
+    {
+        return Objects.requireNonNull(this.authenticationConfigurationTypesIndex.get(identifier), String.format("Can't find authentication configuration type with identifier '%s'", identifier));
+    }
+
+    public DatabaseSupport getDatabaseSupport(DatabaseType databaseType)
+    {
+        return Objects.requireNonNull(this.databaseSupportsIndex.get(databaseType.getIdentifier()), String.format("Can't find database support with database type '%s'", databaseType.getIdentifier()));
+    }
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+    {
+        private final List<CredentialVault> vaults = Lists.mutable.empty();
+        private final List<DatabaseSupport> databaseSupports = Lists.mutable.empty();
+
+        private Builder()
+        {
+        }
+
+        public Builder vaults(List<CredentialVault> vaults)
+        {
+            this.vaults.addAll(vaults);
+            return this;
+        }
+
+        public Builder vaults(CredentialVault... vaults)
+        {
+            this.vaults.addAll(Lists.mutable.with(vaults));
+            return this;
+        }
+
+        public Builder vault(CredentialVault vault)
+        {
+            this.vaults.add(vault);
+            return this;
+        }
+
+        public Builder databaseSupports(List<DatabaseSupport> databaseSupports)
+        {
+            this.databaseSupports.addAll(databaseSupports);
+            return this;
+        }
+
+        public Builder databaseSupports(DatabaseSupport...
databaseSupports) + { + this.databaseSupports.addAll(Lists.mutable.with(databaseSupports)); + return this; + } + + public Builder databaseSupport(DatabaseSupport databaseSupport) + { + this.databaseSupports.add(databaseSupport); + return this; + } + + public LegendEnvironment build() + { + return new LegendEnvironment(this.vaults, this.databaseSupports); + } + } +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/CoreAuthenticationMechanismType.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/CoreAuthenticationMechanismType.java new file mode 100644 index 00000000000..860b89cf708 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/CoreAuthenticationMechanismType.java @@ -0,0 +1,39 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.connection.impl; + +import org.finos.legend.connection.AuthenticationMechanismType; + +public enum CoreAuthenticationMechanismType implements AuthenticationMechanismType +{ + USER_PASSWORD("UsernamePassword"), + API_KEY("APIKey"), + KEY_PAIR("KeyPair"), + KERBEROS("Kerberos"), + OAUTH("OAuth"); + + private final String identifier; + + private CoreAuthenticationMechanismType(String identifier) + { + this.identifier = identifier; + } + + @Override + public String getIdentifier() + { + return this.identifier; + } +} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java similarity index 93% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java index a36c3add1cc..1dd67d45e49 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosCredentialExtractor.java @@ -16,6 +16,7 @@ import org.finos.legend.connection.CredentialBuilder; import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.KerberosAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.engine.shared.core.identity.credential.LegendKerberosCredential; diff --git 
a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java similarity index 97% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java index 46cc4ddc880..29c084f136c 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/KeyPairCredentialBuilder.java @@ -26,6 +26,7 @@ import org.eclipse.collections.impl.factory.Strings; import org.finos.legend.connection.CredentialBuilder; import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.engine.shared.core.identity.credential.PrivateKeyCredential; diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java similarity index 92% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java rename to legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java index 7cec18bd446..98fa2b6d675 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordCredentialBuilder.java @@ -16,6 +16,7 @@ import org.finos.legend.connection.CredentialBuilder; import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.engine.shared.core.identity.credential.PlaintextUserPasswordCredential; diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java similarity index 68% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java rename to 
legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java index df1398edd7c..7c935f6cdc5 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/ConnectionFactoryTest.java @@ -15,10 +15,8 @@ package org.finos.legend.connection; import org.eclipse.collections.api.factory.Lists; -import org.finos.legend.connection.impl.InstrumentedStoreInstanceProvider; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanism; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import org.junit.jupiter.api.Assertions; @@ -40,61 +38,39 @@ public void testGetConnection_WithFailures() throws Exception new ConnectionBuilder_A() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build(), - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.Y) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_Y.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.Y) + .authenticationConfigurationTypes(AuthenticationConfiguration_Y.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test"); // success - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X())); + env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X()))); Exception exception; - // error: store not found - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "unknown", new AuthenticationConfiguration_X())); - }); - Assertions.assertEquals("Can't find store instance with identifier 'unknown'", exception.getMessage()); - - // error: unsupported authentication mechanism - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", TestAuthenticationMechanismType.Z)); - }); - Assertions.assertEquals("Store 'test' does not support authentication mechanism 'Z'. 
Supported mechanism(s):\n" + - "- X\n" + - "- Y", exception.getMessage()); - - // error: authentication mechanism does not come with a default config generator - exception = Assertions.assertThrows(RuntimeException.class, () -> - { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", TestAuthenticationMechanismType.X)); - }); - Assertions.assertEquals("Can't auto-generate authentication configuration for store 'test' with authentication mechanism 'X'. Please provide a configuration of one of the following type(s):\n" + - "- AuthenticationConfiguration_X", exception.getMessage()); - // error: unsupported authentication configuration exception = Assertions.assertThrows(RuntimeException.class, () -> { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_Z())); + env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X()), new AuthenticationConfiguration_Z())); }); - Assertions.assertEquals("Store 'test' does not accept authentication configuration type 'AuthenticationConfiguration_Z'. Supported configuration type(s):\n" + + Assertions.assertEquals("Connection 'test::connectionX' is not compatible with authentication configuration type 'AuthenticationConfiguration_Z'. Supported configuration type(s):\n" + "- AuthenticationConfiguration_X\n" + "- AuthenticationConfiguration_Y", exception.getMessage()); // error: unresolvable authentication flow exception = Assertions.assertThrows(RuntimeException.class, () -> { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_Y())); + env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X()), new AuthenticationConfiguration_Y())); }); - Assertions.assertEquals("No authentication flow for store 'test' can be resolved for the specified identity (authentication configuration: AuthenticationConfiguration_Y, connection specification: TestConnectionSpecification)", exception.getMessage()); + Assertions.assertEquals("No authentication flow for connection 'test::connectionX' can be resolved for the specified identity (authentication configuration: AuthenticationConfiguration_Y, connection specification: TestConnectionSpecification)", exception.getMessage()); } /** @@ -113,17 +89,19 @@ public void testGetConnection_WithSimpleFlow() throws Exception new ConnectionBuilder_B() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build(), - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.Y) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_Y.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.Y) + .authenticationConfigurationTypes(AuthenticationConfiguration_Y.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test"); - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test", new 
AuthenticationConfiguration_X()); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential.class, Lists.mutable.with( "Credential->Credential_A [AuthenticationConfiguration_X]" ), ConnectionBuilder_A.class); @@ -146,14 +124,15 @@ public void testGetConnection_WithSpecificBuilderOrder() throws Exception new ConnectionBuilder_B() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test"); - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X()); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential.class, Lists.mutable.with( "Credential->Credential_B [AuthenticationConfiguration_X]" ), ConnectionBuilder_B.class); @@ -175,14 +154,15 @@ public void testGetConnection_WithChainFlow() throws Exception new ConnectionBuilder_C() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test"); - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X()); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential.class, Lists.mutable.with( "Credential->Credential_A [AuthenticationConfiguration_X]", "Credential_A->Credential_B [AuthenticationConfiguration_X]", @@ -206,14 +186,15 @@ public void testGetConnection_WithShortestFlowResolved() throws Exception new ConnectionBuilder_C() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test", new Credential_B()); - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X()); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential_B.class, Lists.mutable.with( "Credential_B->Credential_C [AuthenticationConfiguration_X]" ), ConnectionBuilder_C.class); @@ 
-223,7 +204,7 @@ public void testGetConnection_WithShortestFlowResolved() throws Exception * Test Case: A -> B -> [Connection] */ @Test - public void testGetConnection_WithNoAuthConfigProvided() throws Exception + public void testGetConnection_WithCustomAuthConfigProvided() throws Exception { TestEnv env = TestEnv.create( Lists.mutable.with( @@ -234,23 +215,25 @@ public void testGetConnection_WithNoAuthConfigProvided() throws Exception new ConnectionBuilder_B() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build(), - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.Y) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_Y.class) - .withDefaultAuthenticationConfigurationGenerator(AuthenticationConfiguration_Y::new) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.Y) + .authenticationConfigurationTypes(AuthenticationConfiguration_Y.class) .build(), - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.Z) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_Z.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.Z) + .authenticationConfigurationTypes(AuthenticationConfiguration_Z.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test", new Credential_A()); // success - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test"); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X()), new AuthenticationConfiguration_Y()); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential_A.class, Lists.mutable.with( "Credential_A->Credential_B [AuthenticationConfiguration_Y]" ), ConnectionBuilder_B.class); @@ -258,9 +241,9 @@ public void testGetConnection_WithNoAuthConfigProvided() throws Exception // error: unresolvable authentication flow Exception exception = Assertions.assertThrows(RuntimeException.class, () -> { - env.connectionFactory.getAuthenticator(new Identity("test"), "test"); + env.connectionFactory.getAuthenticator(new Identity("test"), env.newStore("test::connectionX", new AuthenticationConfiguration_X())); }); - Assertions.assertEquals("No authentication flow for store 'test' can be resolved for the specified identity. Try specifying an authentication mechanism or authentication configuration. Supported configuration type(s):\n" + + Assertions.assertEquals("No authentication flow for connection 'test::connectionX' can be resolved for the specified identity. Try specifying another authentication configuration. 
Supported configuration type(s):\n" + "- AuthenticationConfiguration_X (X)\n" + "- AuthenticationConfiguration_Y (Y)\n" + "- AuthenticationConfiguration_Z (Z)", exception.getMessage()); @@ -280,14 +263,15 @@ public void testGetConnection_WithCredentialExtractor() throws Exception new ConnectionBuilder_A() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test", new Credential_A()); - Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X()); + Authenticator authenticator = env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env.connectionFactory, authenticator, Credential_A.class, Lists.mutable.with( "Credential_A->Credential_A [AuthenticationConfiguration_X]" ), ConnectionBuilder_A.class); @@ -301,13 +285,14 @@ public void testGetConnection_WithCredentialExtractor() throws Exception new ConnectionBuilder_A() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); - authenticator = env2.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X()); + authenticator = env2.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X())); assertAuthenticator(identity, env2.connectionFactory, authenticator, Credential_A.class, Lists.mutable.with( "Credential_A->Credential_A [AuthenticationConfiguration_X]" ), ConnectionBuilder_A.class); @@ -328,18 +313,20 @@ public void testGetConnection_WithoutCredentialExtractor() throws Exception new ConnectionBuilder_A() ), Lists.mutable.with( - new AuthenticationMechanismConfiguration.Builder(TestAuthenticationMechanismType.X) - .withAuthenticationConfigurationTypes(AuthenticationConfiguration_X.class) + AuthenticationMechanism.builder() + .type(TestAuthenticationMechanismType.X) + .authenticationConfigurationTypes(AuthenticationConfiguration_X.class) .build() ) - ).newStore("test", Lists.mutable.empty()); + ); Identity identity = new Identity("test", new Credential_A()); Exception exception = Assertions.assertThrows(RuntimeException.class, () -> { - env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, "test", new AuthenticationConfiguration_X())); + env.connectionFactory.getConnection(identity, env.connectionFactory.getAuthenticator(identity, env.newStore("test::connectionX", new AuthenticationConfiguration_X()))); }); - Assertions.assertEquals("No authentication flow for store 'test' can be resolved for the specified identity (authentication configuration: AuthenticationConfiguration_X, connection specification: TestConnectionSpecification)", exception.getMessage()); + Assertions.assertEquals("No authentication flow for connection 'test::connectionX' can be 
resolved for the specified identity. Try specifying another authentication configuration. Supported configuration type(s):\n" + + "- AuthenticationConfiguration_X (X)", exception.getMessage()); } private void assertAuthenticator(Identity identity, ConnectionFactory connectionFactory, Authenticator authenticator, Class sourceCredentialType, List credentialBuilders, Class connectionBuilderType) throws Exception @@ -353,33 +340,38 @@ private void assertAuthenticator(Identity identity, ConnectionFactory connection private static class TestEnv { final LegendEnvironment environment; - final InstrumentedStoreInstanceProvider storeInstanceProvider; final ConnectionFactory connectionFactory; - private TestEnv(List credentialBuilders, List connectionBuilders, List authenticationMechanismConfigurations) + private TestEnv(List credentialBuilders, List connectionBuilders, List authenticationMechanisms) { - this.environment = new LegendEnvironment.Builder() - .withStoreSupport(new StoreSupport.Builder() - .withIdentifier("test") - .withAuthenticationMechanismConfigurations(authenticationMechanismConfigurations) + this.environment = LegendEnvironment.builder() + .databaseSupport(DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .authenticationMechanisms(authenticationMechanisms) .build()) .build(); - this.storeInstanceProvider = new InstrumentedStoreInstanceProvider(); - this.connectionFactory = new ConnectionFactory.Builder(this.environment, this.storeInstanceProvider) - .withCredentialBuilders(credentialBuilders) - .withConnectionBuilders(connectionBuilders) + this.connectionFactory = ConnectionFactory.builder() + .environment(this.environment) + .credentialBuilders(credentialBuilders) + .connectionBuilders(connectionBuilders) + .build(); + } + + Connection newStore(String identifier, AuthenticationConfiguration authenticationConfiguration, List authenticationMechanisms) + { + DatabaseSupport databaseSupport = this.environment.getDatabaseSupport(TestDatabaseType.TEST); + return Connection.builder() + .databaseSupport(databaseSupport) + .identifier(identifier) + .authenticationMechanisms(authenticationMechanisms) + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(authenticationConfiguration) .build(); } - TestEnv newStore(String identifier, List authenticationMechanismConfigurations) + Connection newStore(String identifier, AuthenticationConfiguration authenticationConfiguration) { - this.storeInstanceProvider.injectStoreInstance(new StoreInstance.Builder(this.environment) - .withIdentifier(identifier) - .withStoreSupportIdentifier("test") - .withAuthenticationMechanismConfigurations(authenticationMechanismConfigurations) - .withConnectionSpecification(new TestConnectionSpecification()) - .build()); - return this; + return newStore(identifier, authenticationConfiguration, Lists.mutable.empty()); } static TestEnv create() @@ -387,12 +379,26 @@ static TestEnv create() return new TestEnv(Lists.mutable.empty(), Lists.mutable.empty(), Lists.mutable.empty()); } - static TestEnv create(List credentialBuilders, List connectionBuilders, List authenticationMechanismConfigurations) + static TestEnv create(List credentialBuilders, List connectionBuilders, List authenticationMechanisms) { - return new TestEnv(credentialBuilders, connectionBuilders, authenticationMechanismConfigurations); + return new TestEnv(credentialBuilders, connectionBuilders, authenticationMechanisms); } } + // -------------------------- Database Type ------------------------------- + + private 
enum TestDatabaseType implements DatabaseType + { + TEST() + { + @Override + public String getIdentifier() + { + return "Test"; + } + } + } + // -------------------------- Credential ------------------------------- private static class Credential_A implements Credential @@ -436,14 +442,14 @@ public String shortId() } } - private enum TestAuthenticationMechanismType implements AuthenticationMechanism + private enum TestAuthenticationMechanismType implements AuthenticationMechanismType { X, Y, Z; @Override - public String getLabel() + public String getIdentifier() { return this.toString(); } diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/DatabaseSupportTest.java b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/DatabaseSupportTest.java new file mode 100644 index 00000000000..42c8f979abb --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-factory/src/test/java/org/finos/legend/connection/DatabaseSupportTest.java @@ -0,0 +1,273 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.connection; + +import org.eclipse.collections.api.factory.Lists; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ApiKeyAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.KerberosAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class DatabaseSupportTest +{ + @Test + public void testValidateDatabaseSupportBuilder() + { + // success + DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .build(); + + Exception exception; + + // error: missing database type + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + DatabaseSupport.builder().build(); + }); + Assertions.assertEquals("Database type is missing", exception.getMessage()); + + // error: multiple authentication mechanisms with the same type + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) + .build(), + AuthenticationMechanism.builder() +
.type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) + .build() + ).build(); + }); + Assertions.assertEquals("Found multiple authentication mechanisms with type 'UsernamePassword'", exception.getMessage()); + + // error: one authentication configuration is associated with multiple authentication mechanisms + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) + .build(), + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.KERBEROS) + .authenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class) + .build() + ).build(); + }); + Assertions.assertEquals("Authentication configuration type 'UserPasswordAuthenticationConfiguration' is associated with multiple authentication mechanisms", exception.getMessage()); + + // error: no authentication configuration is associated with an authentication mechanism + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .build() + ).build(); + }); + Assertions.assertEquals("No authentication configuration type is associated with authentication mechanism 'UsernamePassword'", exception.getMessage()); + } + + @Test + public void testValidateConnectionBuilder() + { + DatabaseSupport databaseSupport = DatabaseSupport.builder() + .type(TestDatabaseType.TEST) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes( + UserPasswordAuthenticationConfiguration.class, + EncryptedPrivateKeyPairAuthenticationConfiguration.class + ) + .build(), + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.KERBEROS) + .authenticationConfigurationTypes(KerberosAuthenticationConfiguration.class) + .build() + ) + .build(); + + // success + UserPasswordAuthenticationConfiguration userPasswordAuthenticationConfiguration = new UserPasswordAuthenticationConfiguration("some-user", new PropertiesFileSecret("some-secret")); + Connection testStore = Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .build() + ) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .connectionSpecification(new TestConnectionSpecification()) + .build(); + Assertions.assertArrayEquals(new AuthenticationMechanismType[]{CoreAuthenticationMechanismType.USER_PASSWORD}, testStore.getAuthenticationMechanisms().toArray()); + + // success: make sure that if no auth mechanism is specified, all mechanisms will be supported + Connection testStore2 = Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .build(); + Assertions.assertArrayEquals(new AuthenticationMechanismType[]{CoreAuthenticationMechanismType.USER_PASSWORD, CoreAuthenticationMechanismType.KERBEROS},
testStore2.getAuthenticationMechanisms().toArray()); + + // success: make sure that if no authentication configuration type is specified, all types will be supported + Connection testStore3 = Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .authenticationMechanism(AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD).build()) + .build(); + Assertions.assertArrayEquals(Lists.mutable.of( + UserPasswordAuthenticationConfiguration.class, + EncryptedPrivateKeyPairAuthenticationConfiguration.class + ).toArray(), testStore3.getAuthenticationMechanism(CoreAuthenticationMechanismType.USER_PASSWORD).getAuthenticationConfigurationTypes().toArray()); + + // failure: missing connection specification + Exception exception; + + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .build(); + }); + Assertions.assertEquals("Connection specification is missing", exception.getMessage()); + + // failure: specified authentication configuration is not covered by any authentication mechanisms + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(new ApiKeyAuthenticationConfiguration()) + .build(); + }); + Assertions.assertEquals("Specified authentication configuration of type 'ApiKeyAuthenticationConfiguration' is not compatible. Supported configuration type(s):\n" + + "- UserPasswordAuthenticationConfiguration\n" + + "- EncryptedPrivateKeyPairAuthenticationConfiguration\n" + + "- KerberosAuthenticationConfiguration", exception.getMessage()); + + // failure: missing authentication configuration + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .connectionSpecification(new TestConnectionSpecification()) + .build(); + }); + Assertions.assertEquals("Authentication configuration is missing", exception.getMessage()); + + // failure: multiple configurations for one authentication mechanism + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .build(), + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .build() + ) + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .build(); + }); + Assertions.assertEquals("Found multiple configurations for authentication mechanism 'UsernamePassword'", exception.getMessage()); + + // failure: specified an authentication mechanism that is not covered by the database support + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.API_KEY) + .build() + ) + .connectionSpecification(new
TestConnectionSpecification()) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .build(); + }); + Assertions.assertEquals("Authentication mechanism 'APIKey' is not covered by database support 'Test'. Supported mechanism(s):\n" + + "- UsernamePassword\n" + + "- Kerberos", exception.getMessage()); + + // failure: mismatch in authentication configuration types coverage with database support for an authentication mechanism + exception = Assertions.assertThrows(RuntimeException.class, () -> + { + Connection.builder() + .databaseSupport(databaseSupport) + .identifier("test::connection") + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD) + .authenticationConfigurationTypes(KerberosAuthenticationConfiguration.class) + .build() + ) + .connectionSpecification(new TestConnectionSpecification()) + .authenticationConfiguration(userPasswordAuthenticationConfiguration) + .build(); + }); + Assertions.assertEquals("Authentication configuration type 'KerberosAuthenticationConfiguration' is not covered by database support 'Test' for authentication mechanism 'UsernamePassword'. Supported configuration type(s):\n" + + "- UserPasswordAuthenticationConfiguration\n" + + "- EncryptedPrivateKeyPairAuthenticationConfiguration", exception.getMessage()); + } + + private enum TestDatabaseType implements DatabaseType + { + TEST() + { + @Override + public String getIdentifier() + { + return "Test"; + } + } + } + + private static class TestConnectionSpecification extends ConnectionSpecification + { + @Override + public String shortId() + { + return null; + } + } +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/pom.xml b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/pom.xml new file mode 100644 index 00000000000..02369bc2133 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/pom.xml @@ -0,0 +1,140 @@ + + + + + org.finos.legend.engine + legend-engine-xts-connection + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-connection-grammar + jar + Legend Engine - XT - Connection - Grammar + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + initialize + + unpack + + + + + org.finos.legend.engine + legend-engine-language-pure-grammar + jar + false + ${project.build.directory} + antlr/*.g4 + + + + + + + + org.antlr + antlr4-maven-plugin + + + + antlr4 + + + true + true + true + target/antlr + ${project.build.directory}/generated-sources + + + + + + + + + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-protocol + + + org.finos.legend.engine + legend-engine-xt-connection-protocol + + + org.finos.legend.engine + legend-engine-language-pure-grammar + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + com.fasterxml.jackson.core + jackson-databind + + + com.fasterxml.jackson.core + jackson-annotations + + + + + + org.antlr + antlr4-runtime + compile + + + + + + junit + junit + test + + + org.finos.legend.engine + legend-engine-language-pure-grammar + test-jar + test + + + + diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionLexerGrammar.g4 b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionLexerGrammar.g4 new file 
mode 100644 index 00000000000..1e4efe86491 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionLexerGrammar.g4 @@ -0,0 +1,10 @@ +lexer grammar ConnectionLexerGrammar; + +import M3LexerGrammar; + + +// ------------------------------------ KEYWORD -------------------------------------- + +DATABASE_CONNECTION: 'DatabaseConnection'; + +RAW_VALUE: 'rawValue'; diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionParserGrammar.g4 b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionParserGrammar.g4 new file mode 100644 index 00000000000..502449fee80 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/ConnectionParserGrammar.g4 @@ -0,0 +1,32 @@ +parser grammar ConnectionParserGrammar; + +import M3ParserGrammar; + +options +{ + tokenVocab = ConnectionLexerGrammar; +} + +// -------------------------------------- IDENTIFIER -------------------------------------- + +identifier: VALID_STRING | STRING + | ALL | LET | ALL_VERSIONS | ALL_VERSIONS_IN_RANGE | TO_BYTES_FUNCTION // from M3Parser + | DATABASE_CONNECTION | RAW_VALUE +; + +// -------------------------------------- DEFINITION -------------------------------------- + +definition: (databaseConnectionElement)* + EOF +; +databaseConnectionElement: DATABASE_CONNECTION qualifiedName + BRACE_OPEN + ( + rawValue + )* + BRACE_CLOSE +; +rawValue: RAW_VALUE COLON ISLAND_OPEN (rawValueContent)* SEMI_COLON +; +rawValueContent: ISLAND_START | ISLAND_BRACE_OPEN | ISLAND_CONTENT | ISLAND_HASH | ISLAND_BRACE_CLOSE | ISLAND_END +; \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParseTreeWalker.java b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParseTreeWalker.java new file mode 100644 index 00000000000..4f39265160a --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParseTreeWalker.java @@ -0,0 +1,73 @@ +// Copyright 2020 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
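The new '###DatabaseConnection' section deliberately has almost no grammar of its own: the element body travels as an opaque JSON island ('rawValue: #{ ... }#;') that the walker below reassembles from island fragments and hands to the Pure protocol ObjectMapper. A minimal, self-contained sketch of that fragment handling follows; the class name and sample payload are illustrative assumptions, not code from this change:

public class RawValueIslandSketch
{
    public static void main(String[] args)
    {
        String json = "{ \"_type\": \"databaseConnection\" }";
        // The rawValueContent fragments collected by the walker include the closing
        // ISLAND_END token ('}#'), which is why ConnectionParseTreeWalker trims the
        // last two characters before deserializing the text into a Connection.
        String fragments = json + "}#";
        String recovered = fragments.length() > 0 ? fragments.substring(0, fragments.length() - 2) : fragments;
        System.out.println(recovered.equals(json)); // prints: true
    }
}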
+ +package org.finos.legend.engine.language.pure.grammar.from; + +import org.antlr.v4.runtime.CharStream; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.ConnectionParserGrammar; +import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory; +import org.finos.legend.engine.protocol.pure.v1.model.SourceInformation; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.DefaultCodeSection; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection; + +import java.util.function.Consumer; + +public class ConnectionParseTreeWalker +{ + private final CharStream input; + private final ParseTreeWalkerSourceInformation walkerSourceInformation; + private final Consumer<PackageableElement> elementConsumer; + private final DefaultCodeSection section; + + public ConnectionParseTreeWalker(CharStream input, ParseTreeWalkerSourceInformation walkerSourceInformation, Consumer<PackageableElement> elementConsumer, DefaultCodeSection section) + { + this.input = input; + this.walkerSourceInformation = walkerSourceInformation; + this.elementConsumer = elementConsumer; + this.section = section; + } + + public void visit(ConnectionParserGrammar.DefinitionContext ctx) + { + ctx.databaseConnectionElement().stream().map(this::visitElement).peek(e -> this.section.elements.add(e.getPath())).forEach(this.elementConsumer); + } + + private Connection visitElement(ConnectionParserGrammar.DatabaseConnectionElementContext ctx) + { + SourceInformation sourceInformation = walkerSourceInformation.getSourceInformation(ctx); + ConnectionParserGrammar.RawValueContext rawValueContext = PureGrammarParserUtility.validateAndExtractRequiredField(ctx.rawValue(), "rawValue", sourceInformation); + Connection connection; + try + { + StringBuilder text = new StringBuilder(); + for (ConnectionParserGrammar.RawValueContentContext fragment : rawValueContext.rawValueContent()) + { + text.append(fragment.getText()); + } + String rawValueText = text.length() > 0 ? text.substring(0, text.length() - 2) : text.toString(); + connection = PureProtocolObjectMapperFactory.getNewObjectMapper().readValue(rawValueText, Connection.class); + } + catch (Exception e) + { + throw new RuntimeException(e); + } + + connection.name = PureGrammarParserUtility.fromIdentifier(ctx.qualifiedName().identifier()); + connection._package = ctx.qualifiedName().packagePath() == null ? "" : PureGrammarParserUtility.fromPath(ctx.qualifiedName().packagePath().identifier()); + connection.sourceInformation = sourceInformation; + + return connection; + } +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParserExtension.java b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParserExtension.java new file mode 100644 index 00000000000..c2718631b4c --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/from/ConnectionParserExtension.java @@ -0,0 +1,64 @@ +// Copyright 2020 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.language.pure.grammar.from; + +import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.eclipse.collections.impl.factory.Lists; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.ConnectionLexerGrammar; +import org.finos.legend.engine.language.pure.grammar.from.antlr4.ConnectionParserGrammar; +import org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension; +import org.finos.legend.engine.language.pure.grammar.from.extension.SectionParser; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.DefaultCodeSection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.Section; + +import java.util.function.Consumer; + +public class ConnectionParserExtension implements PureGrammarParserExtension +{ + public static final String NAME = "DatabaseConnection"; + + @Override + public Iterable<? extends SectionParser> getExtraSectionParsers() + { + return Lists.immutable.with(SectionParser.newParser(NAME, ConnectionParserExtension::parseSection)); + } + + private static Section parseSection(SectionSourceCode sectionSourceCode, Consumer<PackageableElement> elementConsumer, PureGrammarParserContext pureGrammarParserContext) + { + SourceCodeParserInfo parserInfo = getConnectionParserInfo(sectionSourceCode); + DefaultCodeSection section = new DefaultCodeSection(); + section.parserName = sectionSourceCode.sectionType; + section.sourceInformation = parserInfo.sourceInformation; + ConnectionParseTreeWalker walker = new ConnectionParseTreeWalker(parserInfo.input, parserInfo.walkerSourceInformation, elementConsumer, section); + walker.visit((ConnectionParserGrammar.DefinitionContext) parserInfo.rootContext); + return section; + } + + private static SourceCodeParserInfo getConnectionParserInfo(SectionSourceCode sectionSourceCode) + { + CharStream input = CharStreams.fromString(sectionSourceCode.code); + ParserErrorListener errorListener = new ParserErrorListener(sectionSourceCode.walkerSourceInformation); + ConnectionLexerGrammar lexer = new ConnectionLexerGrammar(input); + lexer.removeErrorListeners(); + lexer.addErrorListener(errorListener); + ConnectionParserGrammar parser = new ConnectionParserGrammar(new CommonTokenStream(lexer)); + parser.removeErrorListeners(); + parser.addErrorListener(errorListener); + return new SourceCodeParserInfo(sectionSourceCode.code, input, sectionSourceCode.sourceInformation, sectionSourceCode.walkerSourceInformation, lexer, parser, parser.definition()); + } +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/to/ConnectionGrammarComposerExtension.java b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/to/ConnectionGrammarComposerExtension.java new file mode 100644 index 00000000000..02608afd877 --- /dev/null +++
b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/java/org/finos/legend/engine/language/pure/grammar/to/ConnectionGrammarComposerExtension.java @@ -0,0 +1,90 @@ +// Copyright 2020 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.language.pure.grammar.to; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import org.eclipse.collections.api.block.function.Function3; +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.impl.utility.LazyIterate; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.language.pure.grammar.from.ConnectionParserExtension; +import org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension; +import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection; + +import java.util.List; + +import static org.finos.legend.engine.language.pure.grammar.to.PureGrammarComposerUtility.getTabString; + +public class ConnectionGrammarComposerExtension implements PureGrammarComposerExtension +{ + @Override + public List<Function3<List<PackageableElement>, PureGrammarComposerContext, String, String>> getExtraSectionComposers() + { + return Lists.fixedSize.with((elements, context, sectionName) -> + { + if (!ConnectionParserExtension.NAME.equals(sectionName)) + { + return null; + } + return ListIterate.collect(elements, element -> + { + if (element instanceof Connection) + { + return renderElement((Connection) element, context); + } + return "/* Can't transform element '" + element.getPath() + "' in this section */"; + }).makeString("\n\n"); + }); + } + + @Override + public List<Function3<List<PackageableElement>, PureGrammarComposerContext, List<String>, PureFreeSectionGrammarComposerResult>> getExtraFreeSectionComposers() + { + return Lists.fixedSize.with((elements, context, composedSections) -> + { + List<Connection> composableElements = ListIterate.selectInstancesOf(elements, Connection.class); + return composableElements.isEmpty() ?
null : new PureFreeSectionGrammarComposerResult(LazyIterate.collect(composableElements, el -> renderElement(el, context)).makeString("###" + ConnectionParserExtension.NAME + "\n", "\n\n", ""), composableElements); + }); + } + + private static String renderElement(Connection element, PureGrammarComposerContext context) + { + String value; + try + { + // @HACKY: new-connection-framework + element.sourceInformation = null; + ObjectMapper objectMapper = PureProtocolObjectMapperFactory.getNewObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + objectMapper.enable(SerializationFeature.INDENT_OUTPUT); + value = objectMapper.writeValueAsString(element); + } + catch (Exception e) + { + throw new RuntimeException(e); + } + + return "DatabaseConnection " + PureGrammarComposerUtility.convertPath(element.getPath()) + "\n" + + "{\n" + + (getTabString() + "rawValue: #{\n" + + ListIterate.collect(Lists.mutable.of(value.split("\n")), line -> getTabString() + line).makeString("\n") + "\n" + + getTabString() + "}#;\n") + + "}"; + } +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension new file mode 100644 index 00000000000..747e1deb95e --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.pure.grammar.from.ConnectionParserExtension \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension new file mode 100644 index 00000000000..889fae4c0bc --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-grammar/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.to.extension.PureGrammarComposerExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.pure.grammar.to.ConnectionGrammarComposerExtension \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/pom.xml b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/pom.xml new file mode 100644 index 00000000000..ca8c0255238 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/pom.xml @@ -0,0 +1,69 @@ + + + + + + org.finos.legend.engine + legend-engine-xts-connection + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-connection-protocol + jar + Legend Engine - XT - Connection - Protocol + + + + + org.finos.legend.engine + legend-engine-xt-authentication-protocol + + + org.finos.legend.engine + legend-engine-protocol-pure + + + + + + com.fasterxml.jackson.core + jackson-annotations + + + + + + org.eclipse.collections + eclipse-collections-api + + + + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + + \ No newline at end of file diff 
--git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/ConnectionProtocolExtension.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/ConnectionProtocolExtension.java new file mode 100644 index 00000000000..10963ed2bc9 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/ConnectionProtocolExtension.java @@ -0,0 +1,61 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.protocol.pure.v1; + +import org.eclipse.collections.api.block.function.Function0; +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.api.factory.Maps; +import org.finos.legend.engine.protocol.pure.v1.extension.ProtocolSubTypeInfo; +import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.KerberosAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; + +import java.util.List; +import java.util.Map; + +public class ConnectionProtocolExtension implements PureProtocolExtension +{ + public static final String CONNECTION_CLASSIFIER_PATH = "meta::pure::metamodel::connection::Connection"; + + @Override + public List<Function0<List<ProtocolSubTypeInfo<?>>>> getExtraProtocolSubTypeInfoCollectors() + { + return Lists.fixedSize.of(() -> Lists.fixedSize.of( + // Packageable Element + ProtocolSubTypeInfo.newBuilder(PackageableElement.class) + // TODO: ideally we should be able to set this as `connection`, but this will clash with `PackageableElement` + .withSubtype(Connection.class, "databaseConnection") + .build(), + // Authentication + ProtocolSubTypeInfo.newBuilder(AuthenticationConfiguration.class) + .withSubtype(EncryptedPrivateKeyPairAuthenticationConfiguration.class, "KeyPair") + .withSubtype(UserPasswordAuthenticationConfiguration.class, "UserPassword") + .withSubtype(KerberosAuthenticationConfiguration.class, "Kerberos") + .build() + )); + } + + @Override + public Map<Class<? extends PackageableElement>, String> getExtraProtocolToClassifierPathMap() + { + return Maps.mutable.with( + Connection.class, CONNECTION_CLASSIFIER_PATH + ); + } +} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/ApiKeyAuthenticationConfiguration.java
b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ApiKeyAuthenticationConfiguration.java similarity index 92% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/ApiKeyAuthenticationConfiguration.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ApiKeyAuthenticationConfiguration.java index 4e22064c7c7..bda1af4cabe 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/ApiKeyAuthenticationConfiguration.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ApiKeyAuthenticationConfiguration.java @@ -12,9 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.connection.impl; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.CredentialVaultSecret; public class ApiKeyAuthenticationConfiguration extends AuthenticationConfiguration diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationConfiguration.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationConfiguration.java similarity index 77% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationConfiguration.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationConfiguration.java index 10774902332..4ae3b429833 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationConfiguration.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationConfiguration.java @@ -12,8 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
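The @JsonTypeInfo annotation added below is what lets the raw JSON island carry polymorphic values: Jackson writes and reads a '_type' discriminator whose names come from the subtype registrations in ConnectionProtocolExtension above ('KeyPair', 'UserPassword', 'Kerberos'). A self-contained sketch of the same mechanism, using hypothetical stand-in classes rather than the real protocol types:

import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;

public class TypeDiscriminatorSketch
{
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type")
    public abstract static class Config
    {
    }

    public static class UserPasswordConfig extends Config
    {
        public String username;
    }

    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();
        // plays the role of ProtocolSubTypeInfo's withSubtype(..., "UserPassword")
        mapper.registerSubtypes(new NamedType(UserPasswordConfig.class, "UserPassword"));

        UserPasswordConfig config = new UserPasswordConfig();
        config.username = "some-user";
        // prints: {"_type":"UserPassword","username":"some-user"}
        System.out.println(mapper.writeValueAsString(config));

        // the '_type' field routes deserialization back to the registered subtype
        Config read = mapper.readValue("{\"_type\":\"UserPassword\",\"username\":\"some-user\"}", Config.class);
        System.out.println(read.getClass().getSimpleName()); // prints: UserPasswordConfig
    }
}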
-package org.finos.legend.connection.protocol; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") public abstract class AuthenticationConfiguration { public abstract String shortId(); } diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationMechanism.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationMechanism.java new file mode 100644 index 00000000000..af5c3ad460a --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/AuthenticationMechanism.java @@ -0,0 +1,23 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; + +import java.util.List; + +public final class AuthenticationMechanism +{ + public String authenticationMechanismType; + public List<String> configurationTypes; +} diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/Connection.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/Connection.java new file mode 100644 index 00000000000..99d989c6be3 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/Connection.java @@ -0,0 +1,28 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; + +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; + +import java.util.List; + +public final class Connection extends PackageableElement +{ + public String databaseType; + public List<AuthenticationMechanism> authenticationMechanisms; + public ConnectionSpecification connectionSpecification; + public AuthenticationConfiguration authenticationConfiguration; +} + diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/ConnectionSpecification.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ConnectionSpecification.java similarity index 77% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/ConnectionSpecification.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ConnectionSpecification.java index 908ab661327..451553e9afb 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/ConnectionSpecification.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/ConnectionSpecification.java @@ -12,8 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.connection.protocol; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") public abstract class ConnectionSpecification { public abstract String shortId(); } diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/DatabaseSupport.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/DatabaseSupport.java new file mode 100644 index 00000000000..fcd17a2c3d0 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/DatabaseSupport.java @@ -0,0 +1,26 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
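DatabaseSupport, defined next, is the per-database registry that DatabaseSupportTest above exercises: at most one mechanism entry per mechanism type, each configuration type claimed by exactly one mechanism, and no mechanism without configuration types. A hedged sketch of how those checks could be written against the AuthenticationMechanism protocol class declared above; the class, method, and variable names are illustrative, not the actual builder implementation:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DatabaseSupportValidationSketch
{
    // relies on the AuthenticationMechanism protocol class declared above
    static void validate(List<AuthenticationMechanism> mechanisms)
    {
        Map<String, AuthenticationMechanism> mechanismByType = new HashMap<>();
        Map<String, String> configTypeOwner = new HashMap<>();
        for (AuthenticationMechanism mechanism : mechanisms)
        {
            // duplicate mechanism entries of the same type are rejected
            if (mechanismByType.put(mechanism.authenticationMechanismType, mechanism) != null)
            {
                throw new RuntimeException("Found multiple authentication mechanisms with type '" + mechanism.authenticationMechanismType + "'");
            }
            // every mechanism must declare at least one configuration type
            if (mechanism.configurationTypes == null || mechanism.configurationTypes.isEmpty())
            {
                throw new RuntimeException("No authentication configuration type is associated with authentication mechanism '" + mechanism.authenticationMechanismType + "'");
            }
            for (String configurationType : mechanism.configurationTypes)
            {
                // a configuration type may belong to only one mechanism
                if (configTypeOwner.put(configurationType, mechanism.authenticationMechanismType) != null)
                {
                    throw new RuntimeException("Authentication configuration type '" + configurationType + "' is associated with multiple authentication mechanisms");
                }
            }
        }
    }
}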
+ +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; + +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +import java.util.List; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") +public class DatabaseSupport +{ + public String databaseType; + public List<AuthenticationMechanism> authenticationMechanisms; +} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/EncryptedPrivateKeyPairAuthenticationConfiguration.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/EncryptedPrivateKeyPairAuthenticationConfiguration.java similarity index 93% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/EncryptedPrivateKeyPairAuthenticationConfiguration.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/EncryptedPrivateKeyPairAuthenticationConfiguration.java index 0bd50613949..e6409c1d142 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/EncryptedPrivateKeyPairAuthenticationConfiguration.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/EncryptedPrivateKeyPairAuthenticationConfiguration.java @@ -12,9 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.connection.impl; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.CredentialVaultSecret; public class EncryptedPrivateKeyPairAuthenticationConfiguration extends AuthenticationConfiguration @@ -25,6 +24,7 @@ public class EncryptedPrivateKeyPairAuthenticationConfiguration extends Authenti public EncryptedPrivateKeyPairAuthenticationConfiguration() { + // jackson } public EncryptedPrivateKeyPairAuthenticationConfiguration(String userName, CredentialVaultSecret privateKey, CredentialVaultSecret passphrase) diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosAuthenticationConfiguration.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/KerberosAuthenticationConfiguration.java similarity index 87% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosAuthenticationConfiguration.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/KerberosAuthenticationConfiguration.java index 61b6ac6594a..ad2dcdfc702 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/KerberosAuthenticationConfiguration.java +++
b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/KerberosAuthenticationConfiguration.java @@ -12,14 +12,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.connection.impl; - -import org.finos.legend.connection.protocol.AuthenticationConfiguration; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; public class KerberosAuthenticationConfiguration extends AuthenticationConfiguration { public KerberosAuthenticationConfiguration() { + // jackson } @Override diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordAuthenticationConfiguration.java b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/UserPasswordAuthenticationConfiguration.java similarity index 91% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordAuthenticationConfiguration.java rename to legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/UserPasswordAuthenticationConfiguration.java index 61e8fb5c096..756de8dd6fd 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/impl/UserPasswordAuthenticationConfiguration.java +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/packageableElement/connection/UserPasswordAuthenticationConfiguration.java @@ -12,9 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License.
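The renames below move the concrete authentication configurations into the protocol module; their no-argument constructors are kept purely so Jackson can instantiate them during deserialization. Application code still uses the vault-reference constructors, as DatabaseSupportTest above does; a small usage sketch (the import paths follow the moved files, the values are the test's placeholders):

import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret;
import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration;

public class AuthConfigUsageSketch
{
    public static void main(String[] args)
    {
        // the password is a reference into a credential vault (here a properties
        // file secret), never an inline plain-text value
        UserPasswordAuthenticationConfiguration configuration =
                new UserPasswordAuthenticationConfiguration("some-user", new PropertiesFileSecret("some-secret"));
        System.out.println(configuration.shortId());
    }
}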
-package org.finos.legend.connection.impl; +package org.finos.legend.engine.protocol.pure.v1.packageableElement.connection; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.CredentialVaultSecret; public class UserPasswordAuthenticationConfiguration extends AuthenticationConfiguration @@ -24,6 +23,7 @@ public class UserPasswordAuthenticationConfiguration extends AuthenticationConfi public UserPasswordAuthenticationConfiguration() { + // jackson } public UserPasswordAuthenticationConfiguration(String username, CredentialVaultSecret password) diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension new file mode 100644 index 00000000000..dfa880debb4 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension @@ -0,0 +1 @@ +org.finos.legend.engine.protocol.pure.v1.ConnectionProtocolExtension \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/pom.xml b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/pom.xml new file mode 100644 index 00000000000..0052f847951 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/pom.xml @@ -0,0 +1,158 @@ + + + + + + org.finos.legend.engine + legend-engine-xts-connection + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-connection-pure-metamodel + jar + Legend Engine - XT - Connection - Pure - Metamodel + + + + + org.finos.legend.pure + legend-pure-maven-generation-par + + src/main/resources + ${legend.pure.version} + + platform + core + core_connection_metamodel + + + ${project.basedir}/src/main/resources/core_connection_metamodel.definition.json + + + + + + generate-sources + + build-pure-jar + + + + + + org.finos.legend.pure + legend-pure-m2-dsl-diagram-grammar + ${legend.pure.version} + + + org.finos.legend.pure + legend-pure-m2-dsl-graph-grammar + ${legend.pure.version} + + + org.finos.legend.engine + legend-engine-pure-code-compiled-core + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-json-pure + ${project.version} + + + + + org.finos.legend.pure + legend-pure-maven-generation-java + + + compile + + build-pure-compiled-jar + + + true + true + modular + true + + core_connection_metamodel + + + + + + + org.finos.legend.pure + legend-pure-m2-dsl-diagram-grammar + ${legend.pure.version} + + + org.finos.legend.pure + legend-pure-m2-dsl-graph-grammar + ${legend.pure.version} + + + org.finos.legend.engine + legend-engine-pure-code-compiled-core + ${project.version} + + + + + + + + + + org.finos.legend.pure + legend-pure-m4 + + + org.finos.legend.pure + legend-pure-m3-core + + + org.finos.legend.pure + legend-pure-runtime-java-engine-compiled + + + + + + org.finos.legend.engine + legend-engine-pure-code-compiled-core + + + org.finos.legend.engine + legend-engine-pure-platform-java + + + + + + org.eclipse.collections + eclipse-collections + + + org.eclipse.collections + eclipse-collections-api + + + + diff --git 
a/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/java/org/finos/legend/pure/code/core/CoreConnectionRepositoryProvider.java b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/java/org/finos/legend/pure/code/core/CoreConnectionRepositoryProvider.java new file mode 100644 index 00000000000..bfa033889df --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/java/org/finos/legend/pure/code/core/CoreConnectionRepositoryProvider.java @@ -0,0 +1,29 @@ +// Copyright 2021 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.pure.code.core; + +import org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepository; +import org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider; +import org.finos.legend.pure.m3.serialization.filesystem.repository.GenericCodeRepository; + +public class CoreConnectionRepositoryProvider implements CodeRepositoryProvider +{ + @Override + public CodeRepository repository() + { + return GenericCodeRepository.build("core_connection_metamodel.definition.json"); + } +} + diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider new file mode 100644 index 00000000000..05623448288 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/META-INF/services/org.finos.legend.pure.m3.serialization.filesystem.repository.CodeRepositoryProvider @@ -0,0 +1 @@ +org.finos.legend.pure.code.core.CoreConnectionRepositoryProvider \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel.definition.json b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel.definition.json new file mode 100644 index 00000000000..63239e854b0 --- /dev/null +++ b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel.definition.json @@ -0,0 +1,9 @@ +{ + "name": "core_connection_metamodel", + "pattern": "(meta::pure::metamodel::connection)(::.*)?", + "dependencies": [ + "platform", + "platform_functions", + "core" + ] +} \ No newline at end of file diff --git a/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel/metamodel.pure b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel/metamodel.pure new file mode 100644 index 00000000000..b6cd3dcfe4a --- /dev/null +++ 
b/legend-engine-xts-connection/legend-engine-xt-connection-pure-metamodel/src/main/resources/core_connection_metamodel/metamodel.pure @@ -0,0 +1,19 @@ +// Copyright 2022 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +Class meta::pure::metamodel::connection::Connection extends PackageableElement +{ + // @HACKY: new-connection-framework + rawValue: Any[1]; +} diff --git a/legend-engine-xts-connection/pom.xml b/legend-engine-xts-connection/pom.xml new file mode 100644 index 00000000000..3dda8473e68 --- /dev/null +++ b/legend-engine-xts-connection/pom.xml @@ -0,0 +1,36 @@ + + + + + org.finos.legend.engine + legend-engine + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xts-connection + pom + Legend Engine - XTS - Connection + + + legend-engine-xt-connection-factory + legend-engine-xt-connection-compiler + legend-engine-xt-connection-grammar + legend-engine-xt-connection-protocol + legend-engine-xt-connection-pure-metamodel + + \ No newline at end of file diff --git a/legend-engine-xts-daml/legend-engine-xt-daml-grammar/pom.xml b/legend-engine-xts-daml/legend-engine-xt-daml-grammar/pom.xml index 9a96c23c453..533e6a9cd49 100644 --- a/legend-engine-xts-daml/legend-engine-xt-daml-grammar/pom.xml +++ b/legend-engine-xts-daml/legend-engine-xt-daml-grammar/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-daml org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-daml/legend-engine-xt-daml-model/pom.xml b/legend-engine-xts-daml/legend-engine-xt-daml-model/pom.xml index 5074517e363..a5cb8ae416a 100644 --- a/legend-engine-xts-daml/legend-engine-xt-daml-model/pom.xml +++ b/legend-engine-xts-daml/legend-engine-xt-daml-model/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-daml org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-daml/legend-engine-xt-daml-pure/pom.xml b/legend-engine-xts-daml/legend-engine-xt-daml-pure/pom.xml index 6ae80cd45ce..c4571a79ee2 100644 --- a/legend-engine-xts-daml/legend-engine-xt-daml-pure/pom.xml +++ b/legend-engine-xts-daml/legend-engine-xt-daml-pure/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-daml org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-daml/pom.xml b/legend-engine-xts-daml/pom.xml index cb7c3d4f020..64883f211ec 100644 --- a/legend-engine-xts-daml/pom.xml +++ b/legend-engine-xts-daml/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/pom.xml b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/pom.xml index 56b7c7bd560..e4e0d7652e9 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/pom.xml +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/pom.xml @@ -1,9 +1,10 @@ - + legend-engine-xts-data-push org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -37,24 +38,57 @@ + + org.finos.legend.engine + legend-engine-shared-core + 
org.finos.legend.engine legend-engine-xt-authentication-implementation-core org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory org.finos.legend.engine legend-engine-xt-relationalStore-connection + + org.finos.legend.engine + legend-engine-xt-relationalStore-snowflake-connection + + + org.finos.legend.engine + legend-engine-language-pure-modelManager-sdlc + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-connection-protocol + + + + + + org.finos.legend.engine + legend-engine-xt-relationalStore-protocol + runtime + + + org.finos.legend.engine + legend-engine-xt-relationalStore-snowflake-protocol + runtime + org.finos.legend.engine legend-engine-xt-relationalStore-postgres-connection runtime - + @@ -73,14 +107,6 @@ - - - - org.eclipse.collections - eclipse-collections-api - - - com.fasterxml.jackson.core @@ -96,12 +122,27 @@ - + - org.apache.httpcomponents - httpclient + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections - + + + + + org.pac4j.jax-rs + core + + + org.pac4j + pac4j-core + + @@ -124,6 +165,65 @@ + + + org.slf4j + slf4j-api + + + + + javax.servlet + javax.servlet-api + + + org.apache.httpcomponents + httpclient + + + commons-codec + commons-codec + + + + + org.apache.httpcomponents + httpcore + + + commons-logging + commons-logging + + + + + + software.amazon.awssdk + s3 + + + software.amazon.awssdk + sdk-core + + + software.amazon.awssdk + regions + + + software.amazon.awssdk + auth + + + + io.deephaven + deephaven-csv + + + io.deephaven + deephaven-csv-fast-double-parser + runtime + + com.fasterxml.jackson.dataformat @@ -178,6 +278,21 @@ dropwizard-testing test + + io.minio + minio + test + + + org.finos.legend.engine + legend-engine-xt-relationalStore-postgres-test-support + test + + + org.testcontainers + testcontainers + test + \ No newline at end of file diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusher.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusher.java new file mode 100644 index 00000000000..b8abecd5280 --- /dev/null +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusher.java @@ -0,0 +1,32 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package org.finos.legend.engine.datapush; + +import org.finos.legend.connection.Connection; +import org.finos.legend.connection.ConnectionFactory; +import org.finos.legend.engine.datapush.data.Data; +import org.finos.legend.engine.shared.core.identity.Identity; + +public abstract class DataPusher +{ + protected ConnectionFactory connectionFactory; + + public void configure(ConnectionFactory connectionFactory) + { + this.connectionFactory = connectionFactory; + } + + public abstract void writeCSV(Identity identity, Connection connection, Data data) throws Exception; +} diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusherProvider.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusherProvider.java new file mode 100644 index 00000000000..c1b56623f75 --- /dev/null +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/DataPusherProvider.java @@ -0,0 +1,22 @@ +// Copyright 2022 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.datapush; + +import org.finos.legend.connection.Connection; + +public interface DataPusherProvider +{ + DataPusher getDataPusher(Connection connection); +} diff --git a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanism.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/CSVData.java similarity index 84% rename from legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanism.java rename to legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/CSVData.java index 7a104453eee..250cc0d0d7a 100644 --- a/legend-engine-xts-authentication/legend-engine-xt-authentication-connection-factory/src/main/java/org/finos/legend/connection/protocol/AuthenticationMechanism.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/CSVData.java @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
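Taken together, DataPusher and DataPusherProvider above form a small SPI: the provider maps a resolved Connection to a pusher, and the server injects the shared ConnectionFactory through configure(...) before invoking writeCSV(...). A hypothetical provider that wires up a do-nothing pusher, shown only to illustrate the contract (not part of this PR):

import org.finos.legend.connection.Connection;
import org.finos.legend.engine.datapush.DataPusher;
import org.finos.legend.engine.datapush.DataPusherProvider;
import org.finos.legend.engine.datapush.data.Data;
import org.finos.legend.engine.shared.core.identity.Identity;

public class LoggingDataPusherProvider implements DataPusherProvider
{
    @Override
    public DataPusher getDataPusher(Connection connection)
    {
        return new DataPusher()
        {
            @Override
            public void writeCSV(Identity identity, Connection connection, Data data) throws Exception
            {
                // A real pusher would use the connectionFactory set via configure(...)
                // to open a connection; this stub only records the call.
                System.out.println("would push CSV for identity " + identity.getName());
            }
        };
    }
}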
-package org.finos.legend.connection.protocol; +package org.finos.legend.engine.datapush.data; -public interface AuthenticationMechanism +public class CSVData extends Data { - String getLabel(); + public String value; } diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/Data.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/Data.java new file mode 100644 index 00000000000..e8350c31bca --- /dev/null +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/data/Data.java @@ -0,0 +1,26 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.datapush.data; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") +@JsonSubTypes({ + @JsonSubTypes.Type(value = CSVData.class, name = "csv"), +}) +public class Data +{ +} diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/impl/SnowflakeWithS3StageDataPusher.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/impl/SnowflakeWithS3StageDataPusher.java new file mode 100644 index 00000000000..e7c0cf26734 --- /dev/null +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/impl/SnowflakeWithS3StageDataPusher.java @@ -0,0 +1,183 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
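Given the @JsonSubTypes registration on Data above, the wire format for pushed data is a small JSON envelope whose "_type" field selects the concrete subclass; "csv" maps to CSVData, with the raw CSV carried in its value field. A short deserialization sketch:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.finos.legend.engine.datapush.data.CSVData;
import org.finos.legend.engine.datapush.data.Data;

public class DataEnvelopeSketch
{
    public static void main(String[] args) throws Exception
    {
        String json = "{\"_type\":\"csv\",\"value\":\"id,name\\n1,alice\"}";
        Data data = new ObjectMapper().readValue(json, Data.class);
        // Resolves to CSVData because of the "csv" subtype registration
        System.out.println(((CSVData) data).value);
    }
}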
+ +package org.finos.legend.engine.datapush.impl; + +import io.deephaven.csv.CsvSpecs; +import io.deephaven.csv.reading.CsvReader; +import io.deephaven.csv.sinks.SinkFactory; +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.connection.Connection; +import org.finos.legend.engine.datapush.DataPusher; +import org.finos.legend.engine.datapush.data.CSVData; +import org.finos.legend.engine.datapush.data.Data; +import org.finos.legend.engine.shared.core.identity.Identity; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.S3ClientBuilder; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; + +import java.io.ByteArrayInputStream; +import java.net.URI; +import java.sql.Statement; +import java.util.UUID; + +public class SnowflakeWithS3StageDataPusher extends DataPusher +{ + private final String tableName; + private final String stageName; + private final S3DataStage s3DataStage; + + public SnowflakeWithS3StageDataPusher(String s3StageBucketName, String s3StageEndpoint, AwsCredentialsProvider s3StageCredentialProvider, String tableName, String stageName) + { + this.tableName = tableName; + this.stageName = stageName; + this.s3DataStage = new S3DataStage(s3StageBucketName, s3StageEndpoint, s3StageCredentialProvider); + } + + @Override + public void writeCSV(Identity identity, Connection connection, Data data) throws Exception + { + // TODO: this is probably not performant for streaming/large CSV, we should think of how to optimize this later + CSVData csvData = (CSVData) data; + CsvSpecs specs = CsvSpecs.csv(); + CsvReader.Result csvParserResult = CsvReader.read(specs, new ByteArrayInputStream(csvData.value.getBytes()), SinkFactory.arrays()); + String filePath = this.s3DataStage.write(identity, csvData); + this.uploadCSVToSnowflake(identity, connection, filePath, csvParserResult); + } + + public void uploadCSVToSnowflake(Identity identity, Connection connection, String filePath, CsvReader.Result csvParserResult) throws Exception + { + java.sql.Connection jdbcConnection = this.connectionFactory.getConnection(identity, connection); + + String tableCreationQuery = String.format("CREATE TABLE %s (%s);", this.tableName, ListIterate.collect( + Lists.mutable.of(csvParserResult.columns()), column -> + { + String dataType = null; + // NOTE: these are specific to Snowflake + // See https://docs.snowflake.com/en/sql-reference/data-types + switch (column.dataType()) + { + case BOOLEAN_AS_BYTE: + dataType = "BOOLEAN"; + break; + case BYTE: + case SHORT: + case INT: + case LONG: + case DATETIME_AS_LONG: + case TIMESTAMP_AS_LONG: + dataType = "INTEGER"; + break; + case FLOAT: + case DOUBLE: + dataType = "FLOAT"; + break; + case STRING: + case CHAR: + dataType = "STRING"; + break; + case CUSTOM: + throw new RuntimeException("Not possible"); + } + // Quote each column name to avoid problems with names that contain spaces + return String.format("\"%s\" %s", column.name(), dataType); + } + ).makeString(",")); + // Give Snowflake the full s3 path to improve performance as no lookup is necessary + // See https://community.snowflake.com/s/question/0D50Z00009Y7eCRSAZ/copy-from-s3-into-table-command-is-extremely-slow + String insertQuery =
String.format("COPY INTO %s FROM @%s/%s file_format = (type = csv skip_header = 1);", this.tableName, this.stageName, filePath); + + try + { + Statement statement = jdbcConnection.createStatement(); + statement.execute(String.format("DROP TABLE IF EXISTS %s;", this.tableName)); + statement.execute(tableCreationQuery); + statement.execute(insertQuery); + statement.close(); + } + catch (Exception e) + { + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + public static class S3DataStage + { + private final String bucket; + private final String endpoint; + private final AwsCredentialsProvider credentialsProvider; + + public S3DataStage(String bucket, String endpoint, AwsCredentialsProvider credentialsProvider) + { + this.bucket = bucket; + this.endpoint = endpoint; + this.credentialsProvider = credentialsProvider; + } + + private static String generateBucketName(Identity identity) + { + return identity.getName().replaceAll("_", "").toLowerCase(); + } + + private static String generateObjectKey() + { + return UUID.randomUUID().toString(); + } + + private static String generateObjectPrefix(Identity identity) + { + return identity.getName(); + } + + private S3Client getS3Client() + { + S3ClientBuilder clientBuilder = S3Client + .builder() + .credentialsProvider(this.credentialsProvider) + .region(Region.US_EAST_1); + if (this.endpoint != null) + { + clientBuilder.endpointOverride(URI.create(this.endpoint)); + } + return clientBuilder.build(); + } + + public String write(Identity identity, Data data) throws Exception + { + CSVData csvData = (CSVData) data; + S3Client s3Client = this.getS3Client(); + String key = String.format("%s/%s", generateObjectPrefix(identity), generateObjectKey()); + try + { + PutObjectRequest putObjectRequest = PutObjectRequest.builder() + .bucket(this.bucket) + .key(key).build(); + RequestBody requestBody = RequestBody.fromString(csvData.value); + PutObjectResponse putObjectResponse = s3Client.putObject(putObjectRequest, requestBody); + } + catch (Exception e) + { + e.printStackTrace(); + throw new RuntimeException(e); + } + return key; + } + } +} diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/BaseDataPushServer.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/BaseDataPushServer.java index 4959fee8f09..1129ce8f1bf 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/BaseDataPushServer.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/BaseDataPushServer.java @@ -15,26 +15,46 @@ package org.finos.legend.engine.datapush.server; import io.dropwizard.setup.Environment; -import org.finos.legend.engine.datapush.server.config.DataPushServerConfiguration; +import org.finos.legend.connection.ConnectionFactory; +import org.finos.legend.connection.IdentityFactory; +import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.engine.datapush.DataPusherProvider; +import org.finos.legend.engine.datapush.server.configuration.DataPushServerConfiguration; import org.finos.legend.engine.datapush.server.resources.DataPushResource; +import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory; import org.finos.legend.engine.server.support.server.BaseServer; - -import java.net.InetAddress; -import java.net.UnknownHostException; +import 
org.finos.legend.engine.shared.core.ObjectMapperFactory; public abstract class BaseDataPushServer extends BaseServer { - protected ServerInfo serverInfo; + protected LegendEnvironment environment; + protected IdentityFactory identityFactory; + protected ConnectionFactory connectionFactory; + protected DataPusherProvider dataPushProvider; + + @Override + public void initialize(io.dropwizard.setup.Bootstrap bootstrap) + { + super.initialize(bootstrap); + + PureProtocolObjectMapperFactory.withPureProtocolExtensions(bootstrap.getObjectMapper()); + ObjectMapperFactory.withStandardConfigurations(bootstrap.getObjectMapper()); + } - private static String getLocalHostName() throws UnknownHostException + @Override + public void run(DataPushServerConfiguration configuration, Environment environment) { - return InetAddress.getLocalHost().getHostName(); + this.environment = this.buildLegendEnvironment(configuration); + this.identityFactory = this.buildIdentityFactory(configuration, this.environment); + this.connectionFactory = this.buildConnectionFactory(configuration, this.environment); + this.dataPushProvider = this.buildDataPushProvider(); + super.run(configuration, environment); } @Override protected void configureServerCore(DataPushServerConfiguration configuration, Environment environment) { - environment.jersey().register(DataPushResource.class); + environment.jersey().register(new DataPushResource(configuration.getMetadataServerConfiguration(), this.environment, this.identityFactory, this.connectionFactory, this.dataPushProvider)); } @Override @@ -43,32 +63,11 @@ protected void configureServerExtension(DataPushServerConfiguration configuratio super.configureServerExtension(configuration, environment); } - public static final class ServerInfo - { - private final String hostName; - private final String initTime; - private final ServerPlatformInfo serverPlatformInfo; + public abstract LegendEnvironment buildLegendEnvironment(DataPushServerConfiguration configuration); - private ServerInfo(String hostName, String initTime, ServerPlatformInfo serverPlatformInfo) - { - this.hostName = hostName; - this.initTime = initTime; - this.serverPlatformInfo = (serverPlatformInfo == null) ? new ServerPlatformInfo(null, null, null) : serverPlatformInfo; - } + public abstract IdentityFactory buildIdentityFactory(DataPushServerConfiguration configuration, LegendEnvironment environment); - public String getHostName() - { - return this.hostName; - } + public abstract ConnectionFactory buildConnectionFactory(DataPushServerConfiguration configuration, LegendEnvironment environment); - public String getInitTime() - { - return this.initTime; - } - - public ServerPlatformInfo getPlatform() - { - return this.serverPlatformInfo; - } - } + public abstract DataPusherProvider buildDataPushProvider(); } diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryBundle.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryBundle.java deleted file mode 100644 index 2b96dfd3940..00000000000 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryBundle.java +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
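Stepping back to SnowflakeWithS3StageDataPusher above: for a CSV whose header row is id,name,score and whose columns deephaven-csv infers as LONG, STRING and DOUBLE, the type mapping and query templates would issue statements roughly equivalent to the following. This is illustrative only; the table and stage names are the demo values hard-coded further down in this diff, and the stage path is the identity-prefix/uuid key built by S3DataStage.

// Illustrative, not emitted verbatim by the code:
//
//   DROP TABLE IF EXISTS DEMO_DB.SCHEMA1.TABLE1;
//   CREATE TABLE DEMO_DB.SCHEMA1.TABLE1 ("id" INTEGER,"name" STRING,"score" FLOAT);
//   COPY INTO DEMO_DB.SCHEMA1.TABLE1 FROM @DEMO_DB.SCHEMA1.STAGE1/<identity>/<uuid>
//       file_format = (type = csv skip_header = 1);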
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.engine.datapush.server; - -import io.dropwizard.Configuration; -import io.dropwizard.ConfiguredBundle; -import io.dropwizard.setup.Bootstrap; -import io.dropwizard.setup.Environment; -import org.finos.legend.authentication.vault.CredentialVault; -import org.finos.legend.authentication.vault.impl.EnvironmentCredentialVault; -import org.finos.legend.authentication.vault.impl.SystemPropertiesCredentialVault; -import org.finos.legend.connection.AuthenticationMechanismConfiguration; -import org.finos.legend.connection.ConnectionFactory; -import org.finos.legend.connection.DatabaseType; -import org.finos.legend.connection.impl.DefaultStoreInstanceProvider; -import org.finos.legend.connection.IdentityFactory; -import org.finos.legend.connection.LegendEnvironment; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstanceProvider; -import org.finos.legend.connection.impl.UserPasswordAuthenticationConfiguration; -import org.finos.legend.connection.protocol.AuthenticationMechanismType; - -import java.util.List; -import java.util.function.Function; - -public class ConnectionFactoryBundle implements ConfiguredBundle -{ - private static LegendEnvironment environment; - private static IdentityFactory identityFactory; - private static StoreInstanceProvider storeInstanceProvider; - private static ConnectionFactory connectionFactory; - private final List credentialVaults; - private final Function configSupplier; - - public ConnectionFactoryBundle(Function configSupplier, List credentialVaults) - { - this.configSupplier = configSupplier; - this.credentialVaults = credentialVaults; - } - - @Override - public void initialize(Bootstrap bootstrap) - { - } - - @Override - public void run(C configuration, Environment environment) - { - ConnectionFactoryBundle.environment = new LegendEnvironment.Builder() - // TODO: @akphi - add a property credential vault and load its content up from the config -// .withVault(propertiesFileCredentialVault) - .withVault(new SystemPropertiesCredentialVault()) - .withVault(new EnvironmentCredentialVault()) - .withVaults(this.credentialVaults) - .withStoreSupport(new RelationalDatabaseStoreSupport.Builder(DatabaseType.POSTGRES) - .withIdentifier("Postgres") - .withAuthenticationMechanismConfigurations( - new AuthenticationMechanismConfiguration.Builder(AuthenticationMechanismType.USER_PASSWORD).withAuthenticationConfigurationTypes(UserPasswordAuthenticationConfiguration.class).build() - ).build()) - .build(); - - identityFactory = new IdentityFactory.Builder(ConnectionFactoryBundle.environment) - .build(); - - storeInstanceProvider = new DefaultStoreInstanceProvider.Builder().build(); - - connectionFactory = new ConnectionFactory.Builder(ConnectionFactoryBundle.environment, storeInstanceProvider) -// .withCredentialBuilderProvider(new DefaultCredentialBuilderProvider()) // can also use service loader -// .withConnectionBuilderProvider(new DefaultConnectionBuilderProvider()) // can also use service loader - .build(); - - // TODO: register store instances - } - - public static 
LegendEnvironment getEnvironment() - { - if (environment == null) - { - throw new IllegalStateException("Environment configuration has not been set!"); - } - return environment; - } - - public static IdentityFactory getIdentityFactory() - { - if (identityFactory == null) - { - throw new IllegalStateException("Identity factory has not been configured properly!"); - } - return identityFactory; - } - - public static ConnectionFactory getConnectionFactory() - { - if (connectionFactory == null) - { - throw new IllegalStateException("Connection factory has not been configured properly!"); - } - return connectionFactory; - } -} diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionModelLoader.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionModelLoader.java new file mode 100644 index 00000000000..0b6daa1ec43 --- /dev/null +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionModelLoader.java @@ -0,0 +1,182 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.datapush.server; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.http.client.CookieStore; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.cookie.Cookie; +import org.apache.http.impl.client.BasicCookieStore; +import org.apache.http.impl.client.CloseableHttpClient; +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.impl.utility.ArrayIterate; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.language.pure.modelManager.sdlc.SDLCLoader; +import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration; +import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection; +import org.finos.legend.engine.shared.core.ObjectMapperFactory; +import org.finos.legend.engine.shared.core.kerberos.HttpClientBuilder; +import org.finos.legend.engine.shared.core.operational.Assert; +import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; +import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType; +import org.pac4j.core.profile.CommonProfile; + +import javax.servlet.http.HttpServletRequest; +import java.util.Date; +import java.util.List; + +public class ConnectionModelLoader +{ + private final MetaDataServerConfiguration metaDataServerConfiguration; + + public ConnectionModelLoader(MetaDataServerConfiguration metaDataServerConfiguration) + { + this.metaDataServerConfiguration = 
metaDataServerConfiguration; + } + + public Connection getConnectionFromSDLCWorkspace(HttpServletRequest request, String projectId, String workspaceId, boolean isGroupWorkspace, String connectionPath) + { + // NOTE: this flow is really meant only for development: here, we have to forward the caller's session cookies to the SDLC server + CookieStore cookieStore = new BasicCookieStore(); + ArrayIterate.forEach(request.getCookies(), c -> cookieStore.addCookie(new DEV_ONLY__HttpClientBuilderCookie(c))); + + try (CloseableHttpClient client = (CloseableHttpClient) HttpClientBuilder.getHttpClient(cookieStore)) + { + if (metaDataServerConfiguration == null || metaDataServerConfiguration.getSdlc() == null) + { + throw new EngineException("Please specify the metadataserver.sdlc information in the server configuration"); + } + HttpGet req = new HttpGet("http://" + metaDataServerConfiguration.getSdlc().host + ":" + metaDataServerConfiguration.getSdlc().port + "/api/projects/" + projectId + (isGroupWorkspace ? "/groupWorkspaces/" : "/workspaces/") + workspaceId + "/pureModelContextData"); + try (CloseableHttpResponse res = client.execute(req)) + { + ObjectMapper mapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports(); + PureModelContextData pureModelContextData = mapper.readValue(res.getEntity().getContent(), PureModelContextData.class); + return ListIterate.select(pureModelContextData.getElements(), element -> element.getPath().equals(connectionPath)).selectInstancesOf(Connection.class).getAny(); + } + } + catch (Exception e) + { + throw new RuntimeException(e); + } + } + + public Connection getConnectionFromProject(List profiles, String groupId, String artifactId, String versionId, String connectionPath) + { + AlloySDLC sdlcInfo = new AlloySDLC(); + sdlcInfo.groupId = groupId; + sdlcInfo.artifactId = artifactId; + sdlcInfo.version = versionId; + Assert.assertTrue(sdlcInfo.project == null, () -> "Accessing metadata services using project id has been discontinued. Please update AlloySDLC to provide group and artifact IDs"); + Assert.assertTrue(sdlcInfo.groupId != null && sdlcInfo.artifactId != null, () -> "AlloySDLC info must contain both group and artifact IDs to access metadata services"); + PureModelContextData pureModelContextData = SDLCLoader.loadMetadataFromHTTPURL(Lists.mutable.withAll(profiles), LoggingEventType.METADATA_REQUEST_ALLOY_PROJECT_START, LoggingEventType.METADATA_REQUEST_ALLOY_PROJECT_STOP, (isLatestRevision(sdlcInfo)) ?
+ metaDataServerConfiguration.getAlloy().getBaseUrl() + "/projects/" + sdlcInfo.groupId + "/" + sdlcInfo.artifactId + "/revisions/latest/pureModelContextData" : + metaDataServerConfiguration.getAlloy().getBaseUrl() + "/projects/" + sdlcInfo.groupId + "/" + sdlcInfo.artifactId + "/versions/" + sdlcInfo.version + "/pureModelContextData"); + return ListIterate.select(pureModelContextData.getElements(), element -> element.getPath().equals(connectionPath)).selectInstancesOf(Connection.class).getAny(); + } + + private boolean isLatestRevision(AlloySDLC alloySDLC) + { + return alloySDLC.version == null || alloySDLC.version.equals("none") || alloySDLC.version.equals("master-SNAPSHOT"); + } + + private static class DEV_ONLY__HttpClientBuilderCookie implements Cookie + { + private final javax.servlet.http.Cookie cookie; + + public DEV_ONLY__HttpClientBuilderCookie(javax.servlet.http.Cookie cookie) + { + this.cookie = cookie; + } + + @Override + public String getName() + { + return this.cookie.getName(); + } + + @Override + public String getValue() + { + return this.cookie.getValue(); + } + + @Override + public String getComment() + { + return this.cookie.getComment(); + } + + @Override + public String getCommentURL() + { + return ""; + } + + @Override + public Date getExpiryDate() + { + if (this.cookie.getMaxAge() >= 0) + { + return new Date(System.currentTimeMillis() + this.cookie.getMaxAge() * 1000L); + } + throw new RuntimeException(""); + } + + @Override + public boolean isPersistent() + { + return true; + } + + @Override + public String getDomain() + { + return "localhost"; + } + + @Override + public String getPath() + { + return "/"; + } + + @Override + public int[] getPorts() + { + return new int[]{}; + } + + @Override + public boolean isSecure() + { + return false; + } + + @Override + public int getVersion() + { + return this.cookie.getVersion(); + } + + @Override + public boolean isExpired(Date date) + { + return false; + } + } +} diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/DataPushServer.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/DataPushServer.java index 3f939298be9..c9e566ca3d2 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/DataPushServer.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/DataPushServer.java @@ -15,10 +15,32 @@ package org.finos.legend.engine.datapush.server; import io.dropwizard.setup.Bootstrap; -import org.eclipse.collections.api.factory.Lists; -import org.finos.legend.engine.datapush.server.config.DataPushServerConfiguration; +import org.finos.legend.authentication.vault.impl.EnvironmentCredentialVault; +import org.finos.legend.authentication.vault.impl.SystemPropertiesCredentialVault; +import org.finos.legend.connection.AuthenticationMechanism; +import org.finos.legend.connection.Connection; +import org.finos.legend.connection.ConnectionFactory; +import org.finos.legend.connection.DatabaseSupport; +import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.IdentityFactory; +import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.connection.impl.CoreAuthenticationMechanismType; +import org.finos.legend.connection.impl.KerberosCredentialExtractor; +import 
org.finos.legend.connection.impl.KeyPairCredentialBuilder; +import org.finos.legend.connection.impl.RelationalDatabaseType; +import org.finos.legend.connection.impl.SnowflakeConnectionBuilder; +import org.finos.legend.connection.impl.StaticJDBCConnectionBuilder; +import org.finos.legend.connection.impl.UserPasswordCredentialBuilder; +import org.finos.legend.engine.datapush.DataPusher; +import org.finos.legend.engine.datapush.DataPusherProvider; +import org.finos.legend.engine.datapush.impl.SnowflakeWithS3StageDataPusher; +import org.finos.legend.engine.datapush.server.configuration.DataPushServerConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration; import org.finos.legend.engine.server.support.server.config.BaseServerConfiguration; import org.finos.legend.server.pac4j.LegendPac4jBundle; +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; public class DataPushServer extends BaseDataPushServer { @@ -33,7 +55,6 @@ public void initialize(Bootstrap bootstrap) { super.initialize(bootstrap); - bootstrap.addBundle(new ConnectionFactoryBundle<>(DataPushServerConfiguration::getConnectionFactoryConfiguration, Lists.mutable.empty())); bootstrap.addBundle(new LegendPac4jBundle<>(BaseServerConfiguration::getPac4jConfiguration)); } @@ -41,4 +62,83 @@ public static void main(String... args) throws Exception { new DataPushServer().run(args); } + + @Override + public LegendEnvironment buildLegendEnvironment(DataPushServerConfiguration configuration) + { + return LegendEnvironment.builder() + .vaults( + new SystemPropertiesCredentialVault(), + new EnvironmentCredentialVault() + ) + .databaseSupports( + DatabaseSupport.builder() + .type(RelationalDatabaseType.POSTGRES) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.USER_PASSWORD).authenticationConfigurationTypes( + UserPasswordAuthenticationConfiguration.class + ).build() + ) + .build(), + DatabaseSupport.builder() + .type(RelationalDatabaseType.SNOWFLAKE) + .authenticationMechanisms( + AuthenticationMechanism.builder() + .type(CoreAuthenticationMechanismType.KEY_PAIR).authenticationConfigurationTypes( + EncryptedPrivateKeyPairAuthenticationConfiguration.class + ).build() + ) + .build() + ).build(); + } + + @Override + public IdentityFactory buildIdentityFactory(DataPushServerConfiguration configuration, LegendEnvironment environment) + { + return IdentityFactory.builder() + .environment(environment) + .build(); + } + + @Override + public ConnectionFactory buildConnectionFactory(DataPushServerConfiguration configuration, LegendEnvironment environment) + { + return ConnectionFactory.builder() + .environment(this.environment) + .credentialBuilders( + new KerberosCredentialExtractor(), + new UserPasswordCredentialBuilder(), + new KeyPairCredentialBuilder() + ) + .connectionBuilders( + new StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword(), + new SnowflakeConnectionBuilder.WithKeyPair() + ) + .build(); + } + + @Override + public DataPusherProvider buildDataPushProvider() + { + return new DataPusherProvider() + { + @Override + public DataPusher getDataPusher(Connection connection) + { + DatabaseType databaseType = connection.getDatabaseSupport().getDatabaseType(); + if 
(RelationalDatabaseType.SNOWFLAKE.equals(databaseType)) + { + String tableName = "DEMO_DB.SCHEMA1.TABLE1"; + String stageName = "DEMO_DB.SCHEMA1.STAGE1"; + return new SnowflakeWithS3StageDataPusher("legend-dpsh1", null, StaticCredentialsProvider.create( + AwsBasicCredentials.create( + "xxxxx", // NOTE: secret - to be removed when committed + "xxxxx" // NOTE: secret - to be removed when committed + )), tableName, stageName); + } + throw new UnsupportedOperationException("Unsupported database type: " + databaseType.getIdentifier()); + } + }; + } } diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryConfiguration.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/ConnectionFactoryConfiguration.java similarity index 91% rename from legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryConfiguration.java rename to legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/ConnectionFactoryConfiguration.java index f3feddde5b1..e5e22b76bb6 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/ConnectionFactoryConfiguration.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/ConnectionFactoryConfiguration.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.datapush.server; +package org.finos.legend.engine.datapush.server.configuration; public class ConnectionFactoryConfiguration { diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/config/DataPushServerConfiguration.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/DataPushServerConfiguration.java similarity index 73% rename from legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/config/DataPushServerConfiguration.java rename to legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/DataPushServerConfiguration.java index 3c0a4fbf1a8..1934f36e667 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/config/DataPushServerConfiguration.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/configuration/DataPushServerConfiguration.java @@ -12,19 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. 
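About the placeholder AWS credentials above ("xxxxx", flagged for removal): AWS SDK v2 ships credential provider chains that avoid hard-coding secrets altogether, so a deployment could construct the S3-staged pusher along these lines (a sketch; the bucket, table and stage names are the demo values from the code above):

import org.finos.legend.engine.datapush.impl.SnowflakeWithS3StageDataPusher;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;

public class SnowflakePusherWiringSketch
{
    public static SnowflakeWithS3StageDataPusher create()
    {
        // DefaultCredentialsProvider resolves credentials from env vars, system
        // properties, profile files, or the instance role, in the SDK's standard order.
        return new SnowflakeWithS3StageDataPusher(
                "legend-dpsh1",                       // S3 stage bucket (demo value)
                null,                                 // no endpoint override
                DefaultCredentialsProvider.create(),
                "DEMO_DB.SCHEMA1.TABLE1",
                "DEMO_DB.SCHEMA1.STAGE1");
    }
}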
-package org.finos.legend.engine.datapush.server.config; +package org.finos.legend.engine.datapush.server.configuration; import com.fasterxml.jackson.annotation.JsonProperty; -import org.finos.legend.engine.datapush.server.ConnectionFactoryConfiguration; +import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration; import org.finos.legend.engine.server.support.server.config.BaseServerConfiguration; public class DataPushServerConfiguration extends BaseServerConfiguration { @JsonProperty("connection") private ConnectionFactoryConfiguration connectionFactoryConfiguration; + @JsonProperty("metadata-server") + private MetaDataServerConfiguration metadataserver; public ConnectionFactoryConfiguration getConnectionFactoryConfiguration() { return connectionFactoryConfiguration; } + + public MetaDataServerConfiguration getMetadataServerConfiguration() + { + return metadataserver; + } } diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/resources/DataPushResource.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/resources/DataPushResource.java index 2490782efc3..6bffdce91bd 100644 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/resources/DataPushResource.java +++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/main/java/org/finos/legend/engine/datapush/server/resources/DataPushResource.java @@ -15,49 +15,206 @@ package org.finos.legend.engine.datapush.server.resources; import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import org.finos.legend.connection.Connection; +import org.finos.legend.connection.ConnectionFactory; +import org.finos.legend.connection.IdentityFactory; +import org.finos.legend.connection.IdentitySpecification; +import org.finos.legend.connection.LegendEnvironment; +import org.finos.legend.engine.datapush.DataPusher; +import org.finos.legend.engine.datapush.DataPusherProvider; +import org.finos.legend.engine.datapush.data.CSVData; +import org.finos.legend.engine.datapush.server.ConnectionModelLoader; +import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration; import org.finos.legend.engine.server.support.server.resources.BaseResource; +import org.finos.legend.engine.shared.core.identity.Identity; +import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper; +import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; +import org.finos.legend.engine.shared.core.operational.errorManagement.ExceptionTool; +import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType; +import org.pac4j.core.profile.CommonProfile; +import org.pac4j.core.profile.ProfileManager; +import org.pac4j.jax.rs.annotations.Pac4JProfileManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; +import javax.ws.rs.DefaultValue; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import java.io.IOException; +import java.util.List; -@Path("/data/push") +@Path("/data-push") @Api("Data 
Push") @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public class DataPushResource extends BaseResource { - public DataPushResource() + private static final String TEXT_CSV = "text/csv"; + private static final Logger LOGGER = LoggerFactory.getLogger(DataPushResource.class); + + private final ConnectionModelLoader connectionModelLoader; + private final LegendEnvironment environment; + private final IdentityFactory identityFactory; + private final ConnectionFactory connectionFactory; + private final DataPusherProvider dataPusherProvider; + + public DataPushResource(MetaDataServerConfiguration metadataserver, LegendEnvironment environment, IdentityFactory identityFactory, ConnectionFactory connectionFactory, DataPusherProvider dataPusherProvider) { + this.environment = environment; + this.identityFactory = identityFactory; + this.connectionFactory = connectionFactory; + this.dataPusherProvider = dataPusherProvider; + this.connectionModelLoader = new ConnectionModelLoader(metadataserver); } - @Path("/location/{location}/datastore/{datastore}/dataset/{dataset}") + @Path("/push/{groupId}/{artifactId}/{versionId}/{connectionPath}") @POST - @ApiOperation("Push data") - public Response push(@PathParam("location") String location, @PathParam("datastore") String datastore, @PathParam("dataset") String dataset) throws IOException + @Consumes({ + // TODO: content type will drive how we interpret the data, right now + // we only support CSV + MediaType.TEXT_PLAIN, + MediaType.TEXT_XML, + MediaType.APPLICATION_JSON, + TEXT_CSV + }) + @Produces(MediaType.APPLICATION_JSON) + public Response pushData( + @PathParam("groupId") String groupId, + @PathParam("artifactId") String artifactId, + @PathParam("versionId") String versionId, + @PathParam("connectionPath") String connectionPath, + String data, + @Context HttpServletRequest request, + @ApiParam(hidden = true) @Pac4JProfileManager ProfileManager profileManager + ) { - return executeWithLogging( - "pushing data\"", - () -> Response.ok().entity(this.pushData(location, datastore, dataset)).build() + List profiles = ProfileManagerHelper.extractProfiles(profileManager); + Identity identity = this.identityFactory.createIdentity( + IdentitySpecification.builder().profiles(profiles).build() ); + org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection connection = this.connectionModelLoader.getConnectionFromProject(profiles, groupId, artifactId, versionId, connectionPath); + + CSVData csvData = new CSVData(); + csvData.value = data; + + try + { + this.pushCSVData(identity, connection, csvData); + return Response.noContent().build(); + } + catch (Exception exception) + { + LOGGER.error("Can't push data:\n", exception); + return handleException(profiles, exception); + } } - private String pushData(String location, String datastore, String dataset) + @Path("/pushDev/{projectId}/{workspaceId}/{connectionPath}") + @POST + @Consumes({ + // TODO: content type will drive how we interpret the data, right now + // we only support CSV + MediaType.TEXT_PLAIN, + MediaType.TEXT_XML, + MediaType.APPLICATION_JSON, + TEXT_CSV + }) + @Produces(MediaType.APPLICATION_JSON) + public Response pushData_Dev( + @PathParam("projectId") String projectId, + @PathParam("workspaceId") String workspaceId, + @PathParam("connectionPath") String connectionPath, + @QueryParam("isGroupWorkspace") @DefaultValue("false") boolean isGroupWorkspace, + String data, + @Context HttpServletRequest request, + @ApiParam(hidden = true) @Pac4JProfileManager 
ProfileManager profileManager + ) { + List profiles = ProfileManagerHelper.extractProfiles(profileManager); + Identity identity = this.identityFactory.createIdentity( + IdentitySpecification.builder().profiles(profiles).build() + ); + org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection connection = this.connectionModelLoader.getConnectionFromSDLCWorkspace(request, projectId, workspaceId, isGroupWorkspace, connectionPath); + + CSVData csvData = new CSVData(); + csvData.value = data; + try { - // TODO - actually push the data - return "ok"; + this.pushCSVData(identity, connection, csvData); + return Response.noContent().build(); + } + catch (Exception exception) + { + LOGGER.error("Can't push data:\n", exception); + return handleException(profiles, exception); + } + } + + private Response handleException(List profiles, Exception exception) + { + Response.Status status = exception instanceof EngineException ? Response.Status.BAD_REQUEST : Response.Status.INTERNAL_SERVER_ERROR; + return ExceptionTool.exceptionManager(exception, LoggingEventType.ERROR_MANAGEMENT_ERROR, status, profiles); + } + + private void pushCSVData(Identity identity, org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection connectionProtocol, CSVData csvData) + { + Connection connection = Connection.builder().fromProtocol(connectionProtocol, this.environment).build(); + try + { + DataPusher dataPusher = this.dataPusherProvider.getDataPusher(connection); + dataPusher.configure(this.connectionFactory); + dataPusher.writeCSV(identity, connection, csvData); } catch (Exception e) { throw new RuntimeException(e); } } + + // ------------------------ DEBUG ----------------------- + // TO BE REMOVED when we stabilize the API and models + + @Path("/pushDev/debug") + @POST + @Consumes(MediaType.APPLICATION_JSON) + @Produces(MediaType.APPLICATION_JSON) + public Response TEMPORARY__pushData_Debug( + DebugInput input, + @Context HttpServletRequest request, + @ApiParam(hidden = true) @Pac4JProfileManager ProfileManager profileManager + ) + { + List profiles = ProfileManagerHelper.extractProfiles(profileManager); + Identity identity = this.identityFactory.createIdentity( + IdentitySpecification.builder().profiles(profiles).build() + ); + CSVData csvData = new CSVData(); + csvData.value = input.data; + + try + { + this.pushCSVData(identity, input.connection, csvData); + return Response.noContent().build(); + } + catch (Exception exception) + { + LOGGER.error("Can't push data:\n", exception); + return handleException(profiles, exception); + } + } + + public static class DebugInput + { + public org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.Connection connection; + public String data; + } } diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java deleted file mode 100644 index d0187255cf0..00000000000 --- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2020 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
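For completeness, a hypothetical client call against the new push endpoint (the host, port and "/api" prefix are assumptions carried over from the old test URL; the request body is raw CSV, a successful push returns 204 No Content, and failures come back as 400/500 JSON):

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class PushClientSketch
{
    public static void main(String[] args)
    {
        Client client = ClientBuilder.newClient();
        Response response = client
                .target("http://localhost:8080/api/data-push/push/org.example/demo-project/1.0.0/model::MyConnection")
                .request()
                .post(Entity.entity("id,name\n1,alice", "text/csv"));
        System.out.println(response.getStatus()); // expect 204 on success
        client.close();
    }
}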
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java
deleted file mode 100644
index d0187255cf0..00000000000
--- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/TestDataPushResource.java
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2020 Goldman Sachs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package org.finos.legend.engine.datapush.server;
-
-import org.apache.http.client.HttpResponseException;
-import org.finos.legend.engine.datapush.server.test.AbstractDataPushServerResourceTest;
-import org.junit.Test;
-
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestDataPushResource extends AbstractDataPushServerResourceTest
-{
-    @Test
-    public void testPost() throws HttpResponseException
-    {
-        Response response = this.clientFor("/api/data/push/location/LOCATION/datastore/STORE/dataset/DATASET").request().post(Entity.entity("{}", MediaType.APPLICATION_JSON_TYPE));
-
-        String responseText = response.readEntity(String.class);
-
-        if (response.getStatus() != 200)
-        {
-            throw new HttpResponseException(response.getStatus(), "Error during http call with status: " + response.getStatus() + " , entity: " + responseText);
-        }
-
-        assertEquals("ok", responseText);
-    }
-}
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/resources/DataPushTestResource.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/resources/DataPushTestResource.java
deleted file mode 100644
index 6053bf0afe3..00000000000
--- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/resources/DataPushTestResource.java
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2020 Goldman Sachs
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package org.finos.legend.engine.datapush.server.resources;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
-import org.finos.legend.engine.server.support.server.resources.BaseResource;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-@Path("/tests/data/push")
-@Api("Data Push - tests")
-@Consumes(MediaType.APPLICATION_JSON)
-@Produces(MediaType.APPLICATION_JSON)
-public class DataPushTestResource extends BaseResource
-{
-    public DataPushTestResource()
-    {
-
-    }
-
-    @Path("/postSomething")
-    @POST
-    @ApiOperation("Test POST")
-    public Response postSomething(Object object)
-    {
-        return executeWithLogging(
-                "testing post \"",
-                () -> Response.ok().entity("{\"post\" : \"ok\"}").build()
-        );
-    }
-
-    @Path("/getSomething")
-    @GET
-    @ApiOperation("Test GET")
-    public Response getSomething()
-    {
-        return executeWithLogging(
-                "testing post \"",
-                () -> Response.ok().entity("{\"get\" : \"ok\"}").build()
-        );
-    }
-}
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/AbstractDataPushServerResourceTest.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/AbstractDataPushServerResourceTest.java
index f83f18063dc..ef44e5f161f 100644
--- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/AbstractDataPushServerResourceTest.java
+++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/AbstractDataPushServerResourceTest.java
@@ -16,7 +16,7 @@
 
 import io.dropwizard.testing.ResourceHelpers;
 import io.dropwizard.testing.junit.DropwizardAppRule;
-import org.finos.legend.engine.datapush.server.config.DataPushServerConfiguration;
+import org.finos.legend.engine.datapush.server.configuration.DataPushServerConfiguration;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Rule;
@@ -50,6 +50,11 @@ private void configureClient()
         this.client.target(getServerUrl()).request().get();
     }
 
+    protected DataPushServerForTest getApplicationInstance()
+    {
+        return APP_RULE.getApplication();
+    }
+
     protected WebTarget clientFor(String url)
     {
         return this.client.target(getServerUrl(url));
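A small sketch of what the new accessor enables in resource tests; the test class and assertion are illustrative only (getEnvironment() is added to DataPushServerForTest later in this patch).

    // Hypothetical test reaching into the running Dropwizard application
    // through the getApplicationInstance() helper added above.
    public class DataPushServerWiringTest extends AbstractDataPushServerResourceTest
    {
        @org.junit.Test
        public void applicationIsAccessible()
        {
            DataPushServerForTest application = getApplicationInstance();
            org.junit.Assert.assertNotNull(application.getEnvironment());
        }
    }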
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/DataPushServerForTest.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/DataPushServerForTest.java
index 212c55dc8b6..70ab113b50c 100644
--- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/DataPushServerForTest.java
+++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/DataPushServerForTest.java
@@ -14,32 +14,71 @@
 
 package org.finos.legend.engine.datapush.server.test;
 
-import io.dropwizard.setup.Environment;
-import org.finos.legend.engine.datapush.server.BaseDataPushServer;
-import org.finos.legend.engine.datapush.server.config.DataPushServerConfiguration;
-import org.finos.legend.engine.datapush.server.resources.DataPushTestResource;
-import org.finos.legend.engine.server.support.server.BaseServer;
+import org.finos.legend.authentication.vault.impl.EnvironmentCredentialVault;
+import org.finos.legend.authentication.vault.impl.SystemPropertiesCredentialVault;
+import org.finos.legend.connection.AuthenticationMechanism;
+import org.finos.legend.connection.ConnectionFactory;
+import org.finos.legend.connection.DatabaseSupport;
+import org.finos.legend.connection.LegendEnvironment;
+import org.finos.legend.connection.impl.CoreAuthenticationMechanismType;
+import org.finos.legend.connection.impl.KerberosCredentialExtractor;
+import org.finos.legend.connection.impl.RelationalDatabaseType;
+import org.finos.legend.connection.impl.StaticJDBCConnectionBuilder;
+import org.finos.legend.connection.impl.UserPasswordCredentialBuilder;
+import org.finos.legend.engine.datapush.server.DataPushServer;
+import org.finos.legend.engine.datapush.server.configuration.DataPushServerConfiguration;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.UserPasswordAuthenticationConfiguration;
 
-public class DataPushServerForTest extends BaseDataPushServer
+public class DataPushServerForTest extends DataPushServer
 {
     public DataPushServerForTest()
     {
     }
 
+    public static void main(String... args) throws Exception
+    {
+        new DataPushServerForTest().run(args);
+    }
+
     @Override
-    protected void configureServerExtension(DataPushServerConfiguration configuration, Environment environment)
+    public LegendEnvironment buildLegendEnvironment(DataPushServerConfiguration configuration)
     {
-        environment.jersey().register(new DataPushTestResource());
+        return LegendEnvironment.builder()
+                .vaults(
+                        new SystemPropertiesCredentialVault(),
+                        new EnvironmentCredentialVault()
+                )
+                .databaseSupports(
+                        DatabaseSupport.builder()
+                                .type(RelationalDatabaseType.POSTGRES)
+                                .authenticationMechanisms(
+                                        AuthenticationMechanism.builder()
+                                                .type(CoreAuthenticationMechanismType.USER_PASSWORD)
+                                                .authenticationConfigurationTypes(
+                                                        UserPasswordAuthenticationConfiguration.class
+                                                ).build()
+                                )
+                                .build()
+                ).build();
     }
 
     @Override
-    protected BaseServer.ServerPlatformInfo newServerPlatformInfo()
+    public ConnectionFactory buildConnectionFactory(DataPushServerConfiguration configuration, LegendEnvironment environment)
     {
-        return new ServerPlatformInfo(null, null, null);
+        return ConnectionFactory.builder()
+                .environment(this.environment)
+                .credentialBuilders(
+                        new KerberosCredentialExtractor(),
+                        new UserPasswordCredentialBuilder()
+                )
+                .connectionBuilders(
+                        new StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword()
+                )
+                .build();
     }
 
-    public static void main(String... args) throws Exception
+    public LegendEnvironment getEnvironment()
     {
-        new DataPushServerForTest().run(args);
+        return environment;
     }
 }
\ No newline at end of file
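A sketch of how the vault wiring above is meant to be used, assuming a credential reference keyed "passwordRef" (the same system property TestDataPushServer clears in tearDown()); the helper name and secret value are hypothetical.

    // Hypothetical helper: because the test server wires a SystemPropertiesCredentialVault,
    // a vault reference such as "passwordRef" can be satisfied with a plain JVM system
    // property for the duration of a test.
    static void runWithTestPassword(Runnable test)
    {
        System.setProperty("passwordRef", "password"); // illustrative secret value
        try
        {
            test.run();
        }
        finally
        {
            System.clearProperty("passwordRef"); // mirrors TestDataPushServer.tearDown()
        }
    }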
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/MinIOS3TestContainerWrapper.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/MinIOS3TestContainerWrapper.java
new file mode 100644
index 00000000000..de1e4d3f2af
--- /dev/null
+++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/MinIOS3TestContainerWrapper.java
@@ -0,0 +1,86 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.datapush.server.test;
+
+import org.junit.Assert;
+import org.testcontainers.DockerClientFactory;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.Network;
+
+public class MinIOS3TestContainerWrapper
+{
+    private Network network;
+    private GenericContainer minioContainer;
+
+    private MinIOS3TestContainerWrapper()
+    {
+    }
+
+    public static MinIOS3TestContainerWrapper build()
+    {
+        return new MinIOS3TestContainerWrapper();
+    }
+
+    public void start() throws Exception
+    {
+        Assert.assertTrue("Docker environment not properly setup", DockerClientFactory.instance().isDockerAvailable());
+
+        this.network = Network.newNetwork();
+        this.initMinio();
+    }
+
+    private void initMinio()
+    {
+//        GenericContainer mc = new GenericContainer<>("minio/mc:RELEASE.2023-08-08T17-23-59Z")
+//                .withNetwork(this.network)
+//                .withEnv("AWS_ACCESS_KEY_ID", "admin")
+//                .withEnv("AWS_SECRET_ACCESS_KEY", "password")
+//                .withEnv("AWS_REGION", "us-east-1")
+//                .withCreateContainerCmdModifier(x -> x.withEntrypoint(
+//                        "/bin/sh",
+//                        "-c",
+//                        "until (/usr/bin/mc config host add minio http://minio:9000 admin password) do echo '...waiting...' && sleep 1; done; " +
+//                                "/usr/bin/mc rm -r --force minio/" + this.getBucketName() + "; " +
+//                                "/usr/bin/mc mb minio/" + this.getBucketName() + "; " +
+//                                "/usr/bin/mc policy set public minio/" + this.getBucketName() + "; " +
+//                                "tail -f /dev/null"
+//                )
+//        );
+
+        this.minioContainer = new GenericContainer<>("minio/minio:RELEASE.2023-08-09T23-30-22Z")
+                .withNetwork(this.network)
+                .withNetworkAliases("minio", "warehouse.minio")
+                .withEnv("MINIO_ROOT_USER", "admin")
+                .withEnv("MINIO_ROOT_PASSWORD", "password")
+                .withEnv("MINIO_DOMAIN", "minio")
+                .withExposedPorts(9000, 9001)
+                .withCommand("server", "/data", "--console-address", ":9001")
+//                .dependsOn(mc)
+        ;
+
+    }
+
+    public void stop() throws Exception
+    {
+        try (
+                Network ignored = this.network;
+                AutoCloseable ignored1 = this.minioContainer;
+        )
+        {
+            // done
+        }
+    }
+}
\ No newline at end of file
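A lifecycle sketch for the wrapper above, using only its own API; TestDataPushServer (the next file) drives the same shape from @Before/@After.

    // Minimal usage sketch: start() fails fast if no Docker daemon is reachable and
    // boots MinIO on ports 9000 (API) / 9001 (console); stop() closes the container
    // and network via try-with-resources.
    static void withMinio() throws Exception
    {
        MinIOS3TestContainerWrapper minio = MinIOS3TestContainerWrapper.build();
        try
        {
            minio.start();
            // ... point an S3-compatible client at mapped port 9000 (admin/password) ...
        }
        finally
        {
            minio.stop();
        }
    }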
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/TestDataPushServer.java b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/TestDataPushServer.java
new file mode 100644
index 00000000000..19a78f80f8f
--- /dev/null
+++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/java/org/finos/legend/engine/datapush/server/test/TestDataPushServer.java
@@ -0,0 +1,92 @@
+// Copyright 2020 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.datapush.server.test;
+
+import org.finos.legend.connection.PostgresTestContainerWrapper;
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestDataPushServer extends AbstractDataPushServerResourceTest
+{
+    private MinIOS3TestContainerWrapper minioContainer;
+
+    private PostgresTestContainerWrapper postgresContainer;
+
+    @Before
+    public void setUp()
+    {
+        try
+        {
+            // TODO: use @ClassRule
+            this.minioContainer = MinIOS3TestContainerWrapper.build();
+            this.minioContainer.start();
+        }
+        catch (Exception e)
+        {
+            Assume.assumeTrue("Can't start MinIO", false);
+        }
+
+        try
+        {
+            // TODO: use @ClassRule
+            this.postgresContainer = PostgresTestContainerWrapper.build();
+            this.postgresContainer.start();
+        }
+        catch (Exception e)
+        {
+            Assume.assumeTrue("Can't start PostgreSQLContainer", false);
+        }
+    }
+
+    @After
+    public void tearDown() throws Exception
+    {
+        if (this.minioContainer != null)
+        {
+            this.minioContainer.stop();
+        }
+
+        if (this.postgresContainer != null)
+        {
+            this.postgresContainer.stop();
+        }
+
+        System.clearProperty("passwordRef");
+    }
+
+    @Test
+    @Ignore
+    public void test()
+    {
+        Response response = this.clientFor("/api/data-push/stage").request().post(Entity.entity("{\n" +
+                "    \"_type\": \"sql\",\n" +
+                "    \"statements\": [\"Drop table if exists FirmTable;\n" +
+                "Create Table FirmTable(id INT, Legal_Name VARCHAR(200));\n" +
+                "Insert into FirmTable (id, Legal_Name) values (1, 'FINOS');\"]\n" +
+                "}", MediaType.APPLICATION_JSON_TYPE));
+        String responseText = response.readEntity(String.class);
+
+        assertEquals("ok", responseText);
+    }
+}
diff --git a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/resources/config-test.yaml b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/resources/config-test.yaml
index 0e02a41dfc4..41376c8a8dc 100644
--- a/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/resources/config-test.yaml
+++ b/legend-engine-xts-data-push/legend-engine-xt-data-push-server/src/test/resources/config-test.yaml
@@ -35,6 +35,17 @@ filterPriorities:
   org.pac4j.j2e.filter.SecurityFilter: 3
   CORS: 4
 
+metadata-server:
+  pure:
+    host: 127.0.0.1
+    port: 8080
+  alloy:
+    host: 127.0.0.1
+    port: 6200
+    prefix: "/depot/api"
+  sdlc:
+    host: localhost
+    port: 6100
 
 logging:
   # Change this to affect library class logging
@@ -43,8 +54,13 @@ logging:
     - type: console
       logFormat: "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%thread] %c - %m%n"
 
+pac4j:
+  callbackPrefix: /api/pac4j
+  clients:
+    - org.pac4j.core.client.direct.AnonymousClient: {}
+
 swagger:
   resourcePackage: org.finos.legend.engine.datapush.server.resources
-  title: Legend Data Push
+  title: Legend Data Push (Test)
   version: local-snapshot
   schemes: []
\ No newline at end of file
diff --git a/legend-engine-xts-data-push/pom.xml b/legend-engine-xts-data-push/pom.xml
index 76051500d88..57fa7a9eddf 100644
--- a/legend-engine-xts-data-push/pom.xml
+++ b/legend-engine-xts-data-push/pom.xml
@@ -3,7 +3,7 @@
     <parent>
         <artifactId>legend-engine</artifactId>
         <groupId>org.finos.legend.engine</groupId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/pom.xml
index e561a5e2d4f..9c09af1c995 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/pom.xml
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/pom.xml
@@ -19,7 +19,7 @@
     <parent>
        <groupId>org.finos.legend.engine</groupId>
        <artifactId>legend-engine-xts-data-space</artifactId>
-       <version>4.32.1-SNAPSHOT</version>
+       <version>4.35.4-SNAPSHOT</version>
    </parent>
 
    <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/main/java/org/finos/legend/engine/api/analytics/DataSpaceAnalytics.java b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/main/java/org/finos/legend/engine/api/analytics/DataSpaceAnalytics.java
index 4f1ff451244..4fd610160ec 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/main/java/org/finos/legend/engine/api/analytics/DataSpaceAnalytics.java
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/main/java/org/finos/legend/engine/api/analytics/DataSpaceAnalytics.java
@@ -95,7 +95,7 @@ public Response analyzeDataSpace(DataSpaceAnalysisInput input, @ApiParam(hidden
         {
             try
             {
-                return ManageConstantResult.manageResult(profiles, DataSpaceAnalyticsHelper.analyzeDataSpace(dataSpace, pureModel, (DataSpace) dataSpaceProtocol, pureModelContextData, input.clientVersion, this.generatorExtensions, this.entitlementServiceExtensions), objectMapper);
+                return ManageConstantResult.manageResult(profiles, DataSpaceAnalyticsHelper.analyzeDataSpace(dataSpace, pureModel, (DataSpace) dataSpaceProtocol, pureModelContextData, input.clientVersion, this.generatorExtensions, this.entitlementServiceExtensions, false), objectMapper);
             }
             catch (Exception e)
             {
@@ -122,7 +122,7 @@ public Response analyzeDataSpaceCoverage(DataSpaceAnalysisInput input, @ApiParam
         {
             try
             {
-                return ManageConstantResult.manageResult(profiles, DataSpaceAnalyticsHelper.analyzeDataSpaceCoverage(dataSpace, pureModel, (DataSpace) dataSpaceProtocol, pureModelContextData, input.clientVersion, this.generatorExtensions, this.entitlementServiceExtensions), objectMapper);
+                return ManageConstantResult.manageResult(profiles, DataSpaceAnalyticsHelper.analyzeDataSpaceCoverage(dataSpace, pureModel, (DataSpace) dataSpaceProtocol, pureModelContextData, input.clientVersion, this.generatorExtensions, this.entitlementServiceExtensions, true), objectMapper);
             }
             catch (Exception e)
             {
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/test/java/org/finos/legend/engine/api/analytics/test/TestDataSpaceAnalyticsApi.java b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/test/java/org/finos/legend/engine/api/analytics/test/TestDataSpaceAnalyticsApi.java
index 56ae1cd555c..77819fe4943 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/test/java/org/finos/legend/engine/api/analytics/test/TestDataSpaceAnalyticsApi.java
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-api/src/test/java/org/finos/legend/engine/api/analytics/test/TestDataSpaceAnalyticsApi.java
@@ -56,7 +56,7 @@ public void testDataSpaceAnalysisWithNotFoundDataSpace() throws IOException
     public void testDataSpaceAnalysis() throws IOException
     {
         PureModelContextData modelContextData = objectMapper.readValue(Objects.requireNonNull(getClass().getClassLoader().getResource("dataSpaceAnalyticsTestData.json")), PureModelContextData.class);
-        String expected =
"{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[],\"taggedValues\":[]}"; + String expected = "{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[],\"taggedValues\":[]}"; testAnalyticsWithVersions(expected, modelContextData, "model::animal::AnimalDS"); } diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-compiler/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-compiler/pom.xml index f8ab50a0fcf..3190c3ab4d6 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-compiler/pom.xml +++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-compiler/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-data-space org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/pom.xml index dfb0bddab8f..c6bafc6d8e6 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/pom.xml +++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-data-space - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/main/java/org/finos/legend/engine/generation/analytics/DataSpaceAnalyticsHelper.java b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/main/java/org/finos/legend/engine/generation/analytics/DataSpaceAnalyticsHelper.java index 22b5fe7c61d..0bfc4306a11 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/main/java/org/finos/legend/engine/generation/analytics/DataSpaceAnalyticsHelper.java +++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/main/java/org/finos/legend/engine/generation/analytics/DataSpaceAnalyticsHelper.java @@ -156,7 +156,7 @@ private static DataSpaceExecutableResult buildExecutableResult(ResultType result return null; } - private static MappingModelCoverageAnalysisResult buildMappingModelCoverageAnalysisResult(Root_meta_analytics_mapping_modelCoverage_MappingModelCoverageAnalysisResult 
mappingModelCoverageAnalysisResult, DataSpaceExecutionContextAnalysisResult excResult, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions, Boolean returnDataSets)
+    private static MappingModelCoverageAnalysisResult buildMappingModelCoverageAnalysisResult(Root_meta_analytics_mapping_modelCoverage_MappingModelCoverageAnalysisResult mappingModelCoverageAnalysisResult, DataSpaceExecutionContextAnalysisResult excResult, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions, Boolean returnDataSets, Boolean returnLightPMCD)
     {
         try
         {
@@ -165,73 +165,76 @@ private static MappingModelCoverageAnalysisResult buildMappingModelCoverageAnaly
             {
                 excResult.datasets = LazyIterate.flatCollect(entitlementServiceExtensions, extension -> extension.generateDatasetSpecifications(null, excResult.defaultRuntime, pureModel.getRuntime(excResult.defaultRuntime), excResult.mapping, pureModel.getMapping(excResult.mapping), pureModelContextData, pureModel)).toList();
             }
-            PureModelContextData.Builder builder = PureModelContextData.newBuilder();
-
-            // Here we prune the bindings to have just packageableIncludes part of ModelUnit
-            // because we only need that as a part of analytics.
-            List<String> bindingPaths = pureModelContextData.getElements().stream().filter(el -> el instanceof Binding).map(b ->
-            {
-                Binding _binding = new Binding();
-                _binding.name = b.name;
-                _binding.contentType = ((Binding) b).contentType;
-                _binding._package = b._package;
-                _binding.modelUnit = ((Binding) b).modelUnit;
-                _binding.modelUnit.packageableElementExcludes = org.eclipse.collections.api.factory.Lists.mutable.empty();
-                builder.addElement(_binding);
-                return b.getPath();
-            }).collect(Collectors.toList());
-            RichIterable<Root_meta_external_format_shared_binding_Binding> bindings = org.eclipse.collections.api.factory.Lists.mutable.ofAll(bindingPaths.stream().map(path ->
-            {
-                Root_meta_external_format_shared_binding_Binding binding;
-                try
-                {
-                    binding = (Root_meta_external_format_shared_binding_Binding) pureModel.getPackageableElement(path);
-                    return binding;
-                }
-                catch (Exception ignored)
-                {
-
-                }
-                return null;
-            }).filter(c -> c != null).collect(Collectors.toList()));
-            Root_meta_analytics_binding_modelCoverage_BindingModelCoverageAnalysisResult bindingAnalysisResult = core_analytics_binding_modelCoverage_analytics.Root_meta_analytics_binding_modelCoverage_getBindingModelCoverage_Binding_MANY__BindingModelCoverageAnalysisResult_1_(bindings, pureModel.getExecutionSupport());
-            List<String> functionPaths = pureModelContextData.getElements().stream().filter(el -> el instanceof Function).map(e -> e.getPath()).collect(Collectors.toList());
-            List<String> allExtraElements = functionPaths;
-            allExtraElements.add(dataSpaceProtocol.getPath());
-            pureModelContextData.getElements().stream().filter(el -> allExtraElements.contains(el.getPath())).forEach(builder::addElement);
-            List<PackageableElement> elements = builder.build().getElements();
-            RichIterable<ConcreteFunctionDefinition<?>> functions = org.eclipse.collections.api.factory.Lists.mutable.ofAll(functionPaths.stream().map(path ->
-            {
-                ConcreteFunctionDefinition<?> function = null;
-                try
-                {
-                    function = pureModel.getConcreteFunctionDefinition_safe(path);
-                    if (function == null)
-                    {
-                        Function _function = (Function) elements.stream().filter(e -> e.getPath().equals(path)).findFirst().get();
-                        function = pureModel.getConcreteFunctionDefinition_safe(path + HelperValueSpecificationGrammarComposer.getFunctionSignature(_function));
-                    }
-                    return function;
-                }
-                catch (Exception ignored)
-                {
-
-                }
-                return null;
-            }).filter(c -> c != null).collect(Collectors.toList()));
-            Root_meta_analytics_function_modelCoverage_FunctionModelCoverageAnalysisResult functionCoverageAnalysisResult = core_analytics_function_modelCoverage_analytics.Root_meta_analytics_function_modelCoverage_getFunctionModelCoverage_ConcreteFunctionDefinition_MANY__FunctionModelCoverageAnalysisResult_1_(org.eclipse.collections.impl.factory.Lists.mutable.ofAll(functions), pureModel.getExecutionSupport());
-            MutableList<Class<?>> coveredClasses = mappingModelCoverageAnalysisResult._classes().toList();
-            List<String> coveredClassesPaths = coveredClasses.stream().map(c -> HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport())).collect(Collectors.toList());
-            coveredClasses = org.eclipse.collections.impl.factory.Lists.mutable.ofAll(Stream.concat(Stream.concat(functionCoverageAnalysisResult._classes().toList().stream().filter(c -> !coveredClassesPaths.contains(HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport()))),
-                    bindingAnalysisResult._classes().toList().stream().filter(c -> !coveredClassesPaths.contains(HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport())))).distinct(),
-                    mappingModelCoverageAnalysisResult._classes().toList().stream()).collect(Collectors.toList()));
-            MutableList<Enumeration<?>> coveredEnumerations = org.eclipse.collections.impl.factory.Lists.mutable.ofAll(Stream.concat(mappingModelCoverageAnalysisResult._enumerations().toList().stream(), functionCoverageAnalysisResult._enumerations().toList().stream()).distinct().collect(Collectors.toList()));
-            PureModelContextData classes = PureModelContextDataGenerator.generatePureModelContextDataFromClasses(coveredClasses, clientVersion, pureModel.getExecutionSupport());
-            PureModelContextData enums = PureModelContextDataGenerator.generatePureModelContextDataFromEnumerations(coveredEnumerations, clientVersion, pureModel.getExecutionSupport());
-            PureModelContextData _profiles = PureModelContextDataGenerator.generatePureModelContextDataFromProfile((RichIterable<Profile>) mappingModelCoverageAnalysisResult._profiles(), clientVersion, pureModel.getExecutionSupport());
-            PureModelContextData associations = PureModelContextDataGenerator.generatePureModelContextDataFromAssociations(mappingModelCoverageAnalysisResult._associations(), clientVersion, pureModel.getExecutionSupport());
-            mappingModelCoverageAnalysisResultProtocol.model = builder.build().combine(classes).combine(enums).combine(_profiles).combine(associations);
+            if (returnLightPMCD)
+            {
+                PureModelContextData.Builder builder = PureModelContextData.newBuilder();
+
+                // Here we prune the bindings to have just packageableIncludes part of ModelUnit
+                // because we only need that as a part of analytics.
+                List<String> bindingPaths = pureModelContextData.getElements().stream().filter(el -> el instanceof Binding).map(b ->
+                {
+                    Binding _binding = new Binding();
+                    _binding.name = b.name;
+                    _binding.contentType = ((Binding) b).contentType;
+                    _binding._package = b._package;
+                    _binding.modelUnit = ((Binding) b).modelUnit;
+                    _binding.modelUnit.packageableElementExcludes = org.eclipse.collections.api.factory.Lists.mutable.empty();
+                    builder.addElement(_binding);
+                    return b.getPath();
+                }).collect(Collectors.toList());
+                RichIterable<Root_meta_external_format_shared_binding_Binding> bindings = org.eclipse.collections.api.factory.Lists.mutable.ofAll(bindingPaths.stream().map(path ->
+                {
+                    Root_meta_external_format_shared_binding_Binding binding;
+                    try
+                    {
+                        binding = (Root_meta_external_format_shared_binding_Binding) pureModel.getPackageableElement(path);
+                        return binding;
+                    }
+                    catch (Exception ignored)
+                    {
+
+                    }
+                    return null;
+                }).filter(c -> c != null).collect(Collectors.toList()));
+                Root_meta_analytics_binding_modelCoverage_BindingModelCoverageAnalysisResult bindingAnalysisResult = core_analytics_binding_modelCoverage_analytics.Root_meta_analytics_binding_modelCoverage_getBindingModelCoverage_Binding_MANY__BindingModelCoverageAnalysisResult_1_(bindings, pureModel.getExecutionSupport());
+                List<String> functionPaths = pureModelContextData.getElements().stream().filter(el -> el instanceof Function).map(e -> e.getPath()).collect(Collectors.toList());
+                List<String> allExtraElements = functionPaths;
+                allExtraElements.add(dataSpaceProtocol.getPath());
+                pureModelContextData.getElements().stream().filter(el -> allExtraElements.contains(el.getPath())).forEach(builder::addElement);
+                List<PackageableElement> elements = builder.build().getElements();
+                RichIterable<ConcreteFunctionDefinition<?>> functions = org.eclipse.collections.api.factory.Lists.mutable.ofAll(functionPaths.stream().map(path ->
+                {
+                    ConcreteFunctionDefinition<?> function = null;
+                    try
+                    {
+                        function = pureModel.getConcreteFunctionDefinition_safe(path);
+                        if (function == null)
+                        {
+                            Function _function = (Function) elements.stream().filter(e -> e.getPath().equals(path)).findFirst().get();
+                            function = pureModel.getConcreteFunctionDefinition_safe(path + HelperValueSpecificationGrammarComposer.getFunctionSignature(_function));
+                        }
+                        return function;
+                    }
+                    catch (Exception ignored)
+                    {
+
+                    }
+                    return null;
+                }).filter(c -> c != null).collect(Collectors.toList()));
+                Root_meta_analytics_function_modelCoverage_FunctionModelCoverageAnalysisResult functionCoverageAnalysisResult = core_analytics_function_modelCoverage_analytics.Root_meta_analytics_function_modelCoverage_getFunctionModelCoverage_ConcreteFunctionDefinition_MANY__FunctionModelCoverageAnalysisResult_1_(org.eclipse.collections.impl.factory.Lists.mutable.ofAll(functions), pureModel.getExecutionSupport());
+                MutableList<Class<?>> coveredClasses = mappingModelCoverageAnalysisResult._classes().toList();
+                List<String> coveredClassesPaths = coveredClasses.stream().map(c -> HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport())).collect(Collectors.toList());
+                coveredClasses = org.eclipse.collections.impl.factory.Lists.mutable.ofAll(Stream.concat(Stream.concat(functionCoverageAnalysisResult._classes().toList().stream().filter(c -> !coveredClassesPaths.contains(HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport()))),
+                        bindingAnalysisResult._classes().toList().stream().filter(c -> !coveredClassesPaths.contains(HelperModelBuilder.getElementFullPath(c, pureModel.getExecutionSupport())))).distinct(),
+                        mappingModelCoverageAnalysisResult._classes().toList().stream()).collect(Collectors.toList()));
+                MutableList<Enumeration<?>> coveredEnumerations = org.eclipse.collections.impl.factory.Lists.mutable.ofAll(Stream.concat(mappingModelCoverageAnalysisResult._enumerations().toList().stream(), functionCoverageAnalysisResult._enumerations().toList().stream()).distinct().collect(Collectors.toList()));
+                PureModelContextData classes = PureModelContextDataGenerator.generatePureModelContextDataFromClasses(coveredClasses, clientVersion, pureModel.getExecutionSupport());
+                PureModelContextData enums = PureModelContextDataGenerator.generatePureModelContextDataFromEnumerations(coveredEnumerations, clientVersion, pureModel.getExecutionSupport());
+                PureModelContextData _profiles = PureModelContextDataGenerator.generatePureModelContextDataFromProfile((RichIterable<Profile>) mappingModelCoverageAnalysisResult._profiles(), clientVersion, pureModel.getExecutionSupport());
+                PureModelContextData associations = PureModelContextDataGenerator.generatePureModelContextDataFromAssociations(mappingModelCoverageAnalysisResult._associations(), clientVersion, pureModel.getExecutionSupport());
+                mappingModelCoverageAnalysisResultProtocol.model = builder.build().combine(classes).combine(enums).combine(_profiles).combine(associations);
+            }
             return mappingModelCoverageAnalysisResultProtocol;
         }
         catch (Exception ignored)
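Call-shape sketch for the widened entry points below; every argument except the new trailing flag is assumed to already be in scope, and the names mirror the signatures in this hunk.

    // Both analysis entry points now take a trailing Boolean: false preserves the
    // previous (full) output, true opts into the lighter graph/PMCD result.
    DataSpaceAnalysisResult full = DataSpaceAnalyticsHelper.analyzeDataSpace(
            dataSpace, pureModel, dataSpaceProtocol, pureModelContextData,
            clientVersion, generatorExtensions, entitlementServiceExtensions, false);
    DataSpaceAnalysisResult light = DataSpaceAnalyticsHelper.analyzeDataSpaceCoverage(
            dataSpace, pureModel, dataSpaceProtocol, pureModelContextData,
            clientVersion, generatorExtensions, entitlementServiceExtensions, true);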
@@ -242,14 +245,15 @@ private static MappingModelCoverageAnalysisResult buildMappingModelCoverageAnaly
     public static DataSpaceAnalysisResult analyzeDataSpace(Root_meta_pure_metamodel_dataSpace_DataSpace dataSpace, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion)
     {
-        return analyzeDataSpace(dataSpace, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)), EntitlementServiceExtensionLoader.extensions());
+        return analyzeDataSpace(dataSpace, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)), EntitlementServiceExtensionLoader.extensions(), false);
     }
 
-    public static DataSpaceAnalysisResult analyzeDataSpaceCoverage(Root_meta_pure_metamodel_dataSpace_DataSpace dataSpace, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions)
+    public static DataSpaceAnalysisResult analyzeDataSpaceCoverage(Root_meta_pure_metamodel_dataSpace_DataSpace dataSpace, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions, Boolean returnLightGraph)
     {
-        Root_meta_pure_metamodel_dataSpace_analytics_DataSpaceCoverageAnalysisResult analysisResult = core_data_space_analytics_analytics.Root_meta_pure_metamodel_dataSpace_analytics_analyzeDataSpaceCoverage_DataSpace_1__PackageableRuntime_MANY__DataSpaceCoverageAnalysisResult_1_(
+        Root_meta_pure_metamodel_dataSpace_analytics_DataSpaceCoverageAnalysisResult analysisResult = core_data_space_analytics_analytics.Root_meta_pure_metamodel_dataSpace_analytics_analyzeDataSpaceCoverage_DataSpace_1__PackageableRuntime_MANY__Boolean_1__DataSpaceCoverageAnalysisResult_1_(
             dataSpace,
             ListIterate.selectInstancesOf(pureModelContextData.getElements(), PackageableRuntime.class).collect(runtime -> pureModel.getPackageableRuntime(runtime.getPath(), runtime.sourceInformation)),
+            returnLightGraph,
             pureModel.getExecutionSupport()
         );
 
@@ -288,7 +292,7 @@ public static DataSpaceAnalysisResult analyzeDataSpaceCoverage(Root_meta_pure_me
             excResult.defaultRuntime = HelperModelBuilder.getElementFullPath(executionContext._defaultRuntime(), pureModel.getExecutionSupport());
             excResult.compatibleRuntimes = ListIterate.collect(executionContextAnalysisResult._compatibleRuntimes().toList(), runtime -> HelperModelBuilder.getElementFullPath(runtime, pureModel.getExecutionSupport()));
             Root_meta_analytics_mapping_modelCoverage_MappingModelCoverageAnalysisResult mappingModelCoverageAnalysisResult = executionContextAnalysisResult._mappingCoverage();
-            excResult.mappingModelCoverageAnalysisResult = buildMappingModelCoverageAnalysisResult(mappingModelCoverageAnalysisResult, excResult, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, generatorExtensions, entitlementServiceExtensions, false);
+            excResult.mappingModelCoverageAnalysisResult = buildMappingModelCoverageAnalysisResult(mappingModelCoverageAnalysisResult, excResult, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, generatorExtensions, entitlementServiceExtensions, false, returnLightGraph);
             return excResult;
         });
         result.defaultExecutionContext = dataSpace._defaultExecutionContext()._name();
@@ -304,11 +308,12 @@ public static DataSpaceAnalysisResult analyzeDataSpaceCoverage(Root_meta_pure_me
         return result;
     }
 
-    public static DataSpaceAnalysisResult analyzeDataSpace(Root_meta_pure_metamodel_dataSpace_DataSpace dataSpace, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions)
+    public static DataSpaceAnalysisResult analyzeDataSpace(Root_meta_pure_metamodel_dataSpace_DataSpace dataSpace, PureModel pureModel, DataSpace dataSpaceProtocol, PureModelContextData pureModelContextData, String clientVersion, MutableList<PlanGeneratorExtension> generatorExtensions, List<EntitlementServiceExtension> entitlementServiceExtensions, Boolean returnLightGraph)
     {
-        Root_meta_pure_metamodel_dataSpace_analytics_DataSpaceAnalysisResult analysisResult = core_data_space_analytics_analytics.Root_meta_pure_metamodel_dataSpace_analytics_analyzeDataSpace_DataSpace_1__PackageableRuntime_MANY__DataSpaceAnalysisResult_1_(
+        Root_meta_pure_metamodel_dataSpace_analytics_DataSpaceAnalysisResult analysisResult = core_data_space_analytics_analytics.Root_meta_pure_metamodel_dataSpace_analytics_analyzeDataSpace_DataSpace_1__PackageableRuntime_MANY__Boolean_1__DataSpaceAnalysisResult_1_(
             dataSpace,
             ListIterate.selectInstancesOf(pureModelContextData.getElements(), PackageableRuntime.class).collect(runtime -> pureModel.getPackageableRuntime(runtime.getPath(), runtime.sourceInformation)),
+            returnLightGraph,
             pureModel.getExecutionSupport()
         );
 
@@ -347,7 +352,7 @@ public static DataSpaceAnalysisResult analyzeDataSpace(Root_meta_pure_metamodel_
         excResult.defaultRuntime = HelperModelBuilder.getElementFullPath(executionContext._defaultRuntime(), pureModel.getExecutionSupport());
excResult.compatibleRuntimes = ListIterate.collect(executionContextAnalysisResult._compatibleRuntimes().toList(), runtime -> HelperModelBuilder.getElementFullPath(runtime, pureModel.getExecutionSupport())); Root_meta_analytics_mapping_modelCoverage_MappingModelCoverageAnalysisResult mappingModelCoverageAnalysisResult = executionContextAnalysisResult._mappingCoverage(); - excResult.mappingModelCoverageAnalysisResult = buildMappingModelCoverageAnalysisResult(mappingModelCoverageAnalysisResult, excResult, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, generatorExtensions, entitlementServiceExtensions, true); + excResult.mappingModelCoverageAnalysisResult = buildMappingModelCoverageAnalysisResult(mappingModelCoverageAnalysisResult, excResult, pureModel, dataSpaceProtocol, pureModelContextData, clientVersion, generatorExtensions, entitlementServiceExtensions, true, returnLightGraph); return excResult; }); result.defaultExecutionContext = dataSpace._defaultExecutionContext()._name(); diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/test/java/org/finos/legend/engine/generation/TestDataSpaceAnalyticsArtifactGenerationExtension.java b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/test/java/org/finos/legend/engine/generation/TestDataSpaceAnalyticsArtifactGenerationExtension.java index 007a95a9377..c47ecbc97c4 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/test/java/org/finos/legend/engine/generation/TestDataSpaceAnalyticsArtifactGenerationExtension.java +++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-generation/src/test/java/org/finos/legend/engine/generation/TestDataSpaceAnalyticsArtifactGenerationExtension.java @@ -82,50 +82,50 @@ private void testDataSpaceAnalyticsArtifactGenerationExtension(String modelFileP @Test public void testAnalyticsForBasicDataSpace() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS_Old", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS_Old\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum2\"},{\"tag\":{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"deprecationNotice\"},\"value\":\"Please use AnimalDS dataspace instead - link provided\"}]}]}},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS_Old\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum2\"},{\"tag\":{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"deprecationNotice\"},\"value\":\"Please use AnimalDS dataspace instead - link provided\"}]}]}},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS_Old\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum2\"},{\"tag\":{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"deprecationNotice\"},\"value\":\"Please use AnimalDS dataspace instead - link provided\"}]}]}},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\
"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS_Old\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS_Old\",\"stereotypes\":[{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"profile\":\"meta::pure::profiles::enterprise\",\"tag\":\"taxonomyNodes\",\"value\":\"abcdxyz005\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum2\"},{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"tag\":\"deprecationNotice\",\"value\":\"Please use AnimalDS dataspace instead - link provided\"}]}"); - 
testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"description\":\"Some diagram description\",\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"Diag 1\"},{\"description\":\"Some more diagram description\",\"diagram\":\"model::GeneralDiagram\",\"title\":\"Diag 2\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"description\":\"Some diagram description\",\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"Diag 1\"},{\"description\":\"Some more diagram description\",\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"Diag 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\",\"title\":\"Haha Nice\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext\",\"title\":\"Haha Nice\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. 
Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"description\":\"Some diagram description\",\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"Diag 1\"},{\"description\":\"Some more diagram description\",\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"Diag 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\",\"title\":\"Haha Nice\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"description\":\"Some diagram description\",\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"Diag 1\"},{\"description\":\"Some more diagram description\",\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"Diag 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\",\"title\":\"Haha Nice\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\"
:[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}"); - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS2", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS2\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS2\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS2\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}]}},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b
5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS2\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS2\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}");
+        testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS_Old", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264
e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS_Old\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS_Old\",\"stereotypes\":[{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"profile\":\"meta::pure::profiles::enterprise\",\"tag\":\"taxonomyNodes\",\"value\":\"abcdxyz005\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum2\"},{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"tag\":\"deprecationNotice\",\"value\":\"Please use AnimalDS dataspace instead - link provided\"}]}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"description\":\"Some diagram description\",\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"Diag 1\"},{\"description\":\"Some more diagram description\",\"diagram\":\"model::GeneralDiagram\",\"title\":\"Diag 2\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext\",\"title\":\"Haha Nice\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericA
nimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceBasic.pure", "model::animal::AnimalDS2", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264
e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS2\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS2\",\"stereotypes\":[{\"profile\":\"meta::pure::metamodel::dataSpace::profiles::DataSpaceInfo\",\"value\":\"Verified\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[]}"); } @Test public void testAnalyticsForDataSpaceWithAssociation() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithAssociation.pure", "domain::COVIDDatapace", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Not over yet?\",\"diagrams\":[{\"diagram\":\"domain::COVIDDataDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::MyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"runtime::MyRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Not over 
yet?\",\"diagrams\":[{\"diagram\":{\"path\":\"domain::COVIDDataDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"runtime::MyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"runtime::MyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"}],\"name\":\"COVIDDatapace\",\"package\":\"domain\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}]}},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"runtime::MyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"runtime::MyRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Not over yet?\",\"diagrams\":[{\"diagram\":{\"path\":\"domain::COVIDDataDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"runtime::MyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"runtime::MyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"}],\"name\":\"COVIDDatapace\",\"package\":\"domain\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}]}},\"name\":\"dummyContext2\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"COVIDData\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"id\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Integer\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"date\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"StrictDate\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"caseType\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"cases\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Float\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"lastReportedFlag\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"demographics\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Demographics\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"domain::COVIDData\",\"id\":\"6b69f44b-f729-46aa-b244-ec5ee8164142\",\"position\":{\"x\":280.0,\"y\":159.0},\"rectangle\":{\"height\":128.0,\"width\":205.8291015625}},{\"class\":\"domain::Demographics\",\"id\":\"159e797e-ae75-437d-ba9c-253f99a48826\",\"position\":{\"x\":698.0,\"y\":238.0},\"rectangle\":{\"height\":58.0,\"width\":111.68994140625}},{\"class\":\"domain::Class1\",\"id\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\",\"position\":{\"x\":360.844970703125,\"y\":49.0},\"rectangle\":{\"height\":58.0,\"width\":137.390625}},{\"class\":\"domain::Class2\",\"id\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\",\"position\":{\"x\":696.844970703125,\"y\":95.0},\"rectangle\":{\"he
ight\":44.0,\"width\":133.68994140625}}],\"generalizationViews\":[],\"name\":\"COVIDDataDiagram\",\"package\":\"domain\",\"propertyViews\":[{\"line\":{\"points\":[{\"x\":382.91455078125,\"y\":223.0},{\"x\":753.844970703125,\"y\":267.0}]},\"property\":{\"class\":\"domain::COVIDData\",\"property\":\"demographics\"},\"sourceView\":\"6b69f44b-f729-46aa-b244-ec5ee8164142\",\"targetView\":\"159e797e-ae75-437d-ba9c-253f99a48826\"},{\"line\":{\"points\":[{\"x\":763.68994140625,\"y\":117.0},{\"x\":429.540283203125,\"y\":78.0}]},\"property\":{\"class\":\"domain::Class2\",\"property\":\"fromClass1\"},\"sourceView\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\",\"targetView\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\"},{\"line\":{\"points\":[{\"x\":429.540283203125,\"y\":78.0},{\"x\":763.68994140625,\"y\":117.0}]},\"property\":{\"class\":\"domain::Class1\",\"property\":\"fromClass2\"},\"sourceView\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\",\"targetView\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Class1\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"propClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[{\"profile\":\"meta::pure::profiles::temporal\",\"value\":\"businesstemporal\"}],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"association\",\"name\":\"Class12Assoc\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Class2\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"propClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[{\"profile\":\"meta::pure::profiles::temporal\",\"value\":\"processingtemporal\"}],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Demographics\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"state\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"BusinessDateMilestoning\",\"originalMilestonedProperties\":[],\"package
\":\"meta::pure::milestoning\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"ProcessingDateMilestoning\",\"originalMilestonedProperties\":[],\"package\":\"meta::pure::milestoning\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"profile\",\"name\":\"temporal\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"bitemporal\",\"businesstemporal\",\"processingtemporal\"],\"tags\":[]}]},\"name\":\"COVIDDatapace\",\"package\":\"domain\",\"path\":\"domain::COVIDDatapace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithAssociation.pure", "domain::COVIDDatapace", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Not over yet?\",\"diagrams\":[{\"diagram\":\"domain::COVIDDataDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::MyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"runtime::MyRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"runtime::MyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"runtime::MyRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[]},\"name\":\"dummyContext2\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"COVIDData\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"id\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Integer\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"date\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"StrictDate\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"caseType\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"cases\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Float\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"lastReportedFlag\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"demographics\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Demographics\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"domain::COVIDData\",\"id\":\"6b69f44b-f729-46aa-b244-ec5ee8164142\",\"position\":{\"x\":280.0,\"y\":159.0},\"rectangle\":{\"height\":128.0,\"width\":205.8291015625}},{\"class\":\"domain::Demographics\",\"id\":\"159e797e-ae75-437d-ba9c-253f99a48826\",\"position\":{\"x\":698.0,\"y\":238.0},\"rectangle\":{\"height\":58.0,\"width\":111.68994140625}},{\"class\":\"domain::Class1\",\"id\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\",\"position\":{\"x\":360.844970703125,\"y\":49.0},\"rectangle\":{\"height\":58.0,\"width\":137.390625}},{\"class\":\"domain::Class2\",\"id\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\",\"position\":{\"x\":696.844970703125,\"y\":95.0},\"rectangle\":{\"height\":44.0,\"width\":133.68994140625}}]
,\"generalizationViews\":[],\"name\":\"COVIDDataDiagram\",\"package\":\"domain\",\"propertyViews\":[{\"line\":{\"points\":[{\"x\":382.91455078125,\"y\":223.0},{\"x\":753.844970703125,\"y\":267.0}]},\"property\":{\"class\":\"domain::COVIDData\",\"property\":\"demographics\"},\"sourceView\":\"6b69f44b-f729-46aa-b244-ec5ee8164142\",\"targetView\":\"159e797e-ae75-437d-ba9c-253f99a48826\"},{\"line\":{\"points\":[{\"x\":763.68994140625,\"y\":117.0},{\"x\":429.540283203125,\"y\":78.0}]},\"property\":{\"class\":\"domain::Class2\",\"property\":\"fromClass1\"},\"sourceView\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\",\"targetView\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\"},{\"line\":{\"points\":[{\"x\":429.540283203125,\"y\":78.0},{\"x\":763.68994140625,\"y\":117.0}]},\"property\":{\"class\":\"domain::Class1\",\"property\":\"fromClass2\"},\"sourceView\":\"f6bd8a50-8d18-4bd9-9a8d-7fad88d02b07\",\"targetView\":\"690e89d4-23e9-46e8-8543-c89c22cc9e15\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Class1\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"propClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[{\"profile\":\"meta::pure::profiles::temporal\",\"value\":\"businesstemporal\"}],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"association\",\"name\":\"Class12Assoc\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class2\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Class2\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fromClass1\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Class1\"}],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"propClass2\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[{\"profile\":\"meta::pure::profiles::temporal\",\"value\":\"processingtemporal\"}],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Demographics\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"state\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"BusinessDateMilestoning\",\"originalMilestonedProperties\":[],\"package\":\"meta::pure::milestoning\",\"propert
ies\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"ProcessingDateMilestoning\",\"originalMilestonedProperties\":[],\"package\":\"meta::pure::milestoning\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"profile\",\"name\":\"temporal\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"bitemporal\",\"businesstemporal\",\"processingtemporal\"],\"tags\":[]}]},\"name\":\"COVIDDatapace\",\"package\":\"domain\",\"path\":\"domain::COVIDDatapace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); } @Test public void testAnalyticsForDataSpaceWithSubstantialMapping() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithSubstantialMapping.pure", "model::animal::AnimalDS", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"model::animal::Animal\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::Animal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"entity\",\"entityPath\":\"model::animal::GenericAnimal\",\"name\":\"something2\"}]},{\"info\":{\"classPath\":\"model::animal::Animal2\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::Animal2\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"}]},{\"info\":{\"classPath\":\"model::animal::GenericAnimal\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::GenericAnimal\",\"properties\":[]},{\"info\":{\"classPath\":\"model::animal::GenericAnimal2\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::GenericAnimal2\",\"properties\":[]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\
"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"string\",\"value\":\"\"}],\"name\":\"name2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"String\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem 
ipsum2\"}]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]}]}},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"model::animal::mammal::Mammal\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::mammal::Mammal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"}]},{\"info\":{\"classPath\":\"model::animal::reptile::Reptile\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::reptile::Reptile\",\"properties\":[{\"_type\":\"enum\",\"enumPath\":\"model::animal::Family\",\"name\":\"family\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"MappedProperty\",\"name\":\"hasFin\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"stereotypes\":[],\"taggedValues\":[],\"type
\":\"String\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"string\",\"value\":\"\"}],\"name\":\"name2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"String\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem 
ipsum2\"}]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal2\",\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]}},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"model::animal::mammal::Mammal\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::mammal::Mammal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"}]},{\"info\":{\"classPath\":\"model::animal::reptile::Reptile\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::animal::reptile::Reptile\",\"properties\":[{\"_type\":\"enum\",\"enumPath\":\"model::animal::Family\",\"name\":\"family\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"MappedProperty\",\"name\":\"hasFin\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_typ
e\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"string\",\"value\":\"\"}],\"name\":\"name2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"String\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":{\"path\":\"model::animal::AnimalDiagram\"},\"title\":\"\"},{\"diagram\":{\"path\":\"model::GeneralDiagram\"},\"title\":\"\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"description\":\"An important execution context\",\"mapping\":{\"path\":\"model::dummyMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext2\"},{\"defaultRuntime\":{\"path\":\"model::dummyRuntime2\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"model::dummyMapping2\",\"type\":\"MAPPING\"},\"name\":\"dummyContext3\"}],\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"stereotypes\":[{\"profile\":\"doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::enterprise\",\"value\":\"taxonomyNodes\"},\"value\":\"abcdxyz005\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem ipsum\"},{\"tag\":{\"profile\":\"doc\",\"value\":\"doc\"},\"value\":\"Lorem 
ipsum2\"}]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal2\",\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]}},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},
\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"profile\":\"meta::pure::profiles::enterprise\",\"tag\":\"taxonomyNodes\",\"value\":\"abcdxyz005\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum2\"}]}"); + 
testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithSubstantialMapping.pure", "model::animal::AnimalDS", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.\",\"diagrams\":[{\"diagram\":\"model::animal::AnimalDiagram\",\"title\":\"\"},{\"diagram\":\"model::GeneralDiagram\",\"title\":\"\"}],\"elementDocs\":[],\"elements\":[],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"model::dummyRuntime\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"description\":\"An important execution context\",\"mapping\":\"model::dummyMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"model::animal::Animal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"entity\",\"entityPath\":\"model::animal::GenericAnimal\",\"name\":\"something2\"}]},{\"path\":\"model::animal::Animal2\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"}]},{\"path\":\"model::animal::GenericAnimal\",\"properties\":[]},{\"path\":\"model::animal::GenericAnimal2\",\"properties\":[]}]},\"name\":\"dummyContext\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"model::animal::mammal::Mammal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"}]},{\"path\":\"model::animal::reptile::Reptile\",\"properties\":[{\"_type\":\"enum\",\"enumPath\":\"model::animal::Family\",\"name\":\"family\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"MappedProperty\",\"name\":\"hasFin\"}]}]},\"name\":\"dummyContext2\"},{\"compatibleRuntimes\":[\"model::dummyRuntime\",\"model::dummyRuntime2\"],\"datasets\":[],\"defaultRuntime\":\"model::dummyRuntime2\",\"mapping\":\"model::dummyMapping2\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"model::animal::mammal::Mammal\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"noOfLegs\"}]},{\"path\":\"model::animal::reptile::Reptile\",\"properties\":[{\"_type\":\"enum\",\"enumPath\":\"model::animal::Family\",\"name\":\"family\"},{\"_type\":\"MappedProperty\",\"name\":\"name2\"},{\"_type\":\"MappedProperty\",\"name\":\"something\"},{\"_type\":\"MappedProperty\",\"name\":\"hasFin\"}]}]},\"name\":\"dummyContext3\"}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"position\":{\"x\":568.0,\"y\":404.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\",\"position\":{\"x\":809.0,\"y\":187.0},\
"rectangle\":{\"height\":44.0,\"width\":108.64453125}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":628.423828125,\"y\":433.0},{\"x\":863.322265625,\"y\":209.0}]},\"sourceView\":\"4cec85f9-9b66-450a-bdcb-c855aa0314e1\",\"targetView\":\"902bf14e-e7ff-40e7-92e4-8780f91bfa29\"}],\"name\":\"GeneralDiagram\",\"package\":\"model\",\"propertyViews\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Animal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"family\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::Family\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Number\"},{\"multiplicity\":{\"lowerBound\":0},\"name\":\"children\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::animal::GenericAnimal2\"}],\"qualifiedProperties\":[{\"body\":[{\"_type\":\"func\",\"fControl\":\"greaterThan_Number_1__Number_1__Boolean_1_\",\"function\":\"greaterThan\",\"parameters\":[{\"_type\":\"property\",\"parameters\":[{\"_type\":\"var\",\"name\":\"this\"}],\"property\":\"noOfLegs\"},{\"_type\":\"integer\",\"value\":4}]}],\"name\":\"something\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"returnType\":\"Boolean\",\"stereotypes\":[],\"taggedValues\":[]},{\"body\":[{\"_type\":\"collection\",\"multiplicity\":{\"lowerBound\":0,\"upperBound\":0},\"values\":[]}],\"name\":\"something2\",\"parameters\":[],\"returnMultiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"returnType\":\"model::animal::GenericAnimal\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"diagram\",\"classViews\":[{\"class\":\"model::animal::mammal::Mammal\",\"id\":\"641a0336-d4b5-418c-b656-2f52461264e2\",\"position\":{\"x\":427.0,\"y\":210.0},\"rectangle\":{\"height\":44.0,\"width\":125.1123046875}},{\"class\":\"model::animal::reptile::Reptile\",\"id\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"position\":{\"x\":787.0,\"y\":216.0},\"rectangle\":{\"height\":58.0,\"width\":120.84765625}},{\"class\":\"model::animal::Animal\",\"id\":\"7a992cfc-c888-4091-aa00-ab430915aced\",\"position\":{\"x\":515.423828125,\"y\":-7.5},\"rectangle\":{\"height\":100.0,\"width\":199.716796875}}],\"generalizationViews\":[{\"line\":{\"points\":[{\"x\":847.423828125,\"y\":245.0},{\"x\":615.2822265625,\"y\":42.5}]},\"sourceView\":\"b92253d8-0389-4c7d-b5d2-3cdc3bb1ad98\",\"targetView\":\"7a992cfc-c888-4091-aa00-ab430915aced\"}],\"name\":\"AnimalDiagram\",\"package\":\"model::animal\",\"propertyViews\":[]},{\"_type\":\"Enumeration\",\"name\":\"Family\",\"package\":\"model::animal\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"UO\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"OP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"GenericAnimal2\",\"originalMilestonedProperties\":[],\"package\":\"model::animal\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Mammal\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::mammal\",\"properties\":[{\"multip
licity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"noOfLegs\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Reptile\",\"originalMilestonedProperties\":[],\"package\":\"model::animal::reptile\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"hasFin\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"model::animal::Animal\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"\"}]}]},\"name\":\"AnimalDS\",\"package\":\"model::animal\",\"path\":\"model::animal::AnimalDS\",\"stereotypes\":[{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"deprecated\"}],\"supportInfo\":{\"_type\":\"email\",\"address\":\"someEmail@test.org\"},\"taggedValues\":[{\"profile\":\"meta::pure::profiles::enterprise\",\"tag\":\"taxonomyNodes\",\"value\":\"abcdxyz005\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum\"},{\"profile\":\"meta::pure::profiles::doc\",\"tag\":\"doc\",\"value\":\"Lorem ipsum2\"}]}"); } @Test public void testAnalyticsForDataSpaceWithElements() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithElements.pure", "dataSpace::_FirmDataSpace", "{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[{\"_type\":\"class\",\"docs\":[\"Animal class\"],\"name\":\"Animal\",\"path\":\"model::Animal\",\"properties\":[{\"docs\":[\"age\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"age\",\"type\":\"Integer\"}]},{\"_type\":\"class\",\"docs\":[\"The Firm concept\"],\"name\":\"Firm\",\"path\":\"model::Firm\",\"properties\":[{\"docs\":[\"type of firm: e.g. 
CORP, LTD\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"String\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"association\",\"docs\":[],\"name\":\"Firm_Person\",\"path\":\"model::Firm_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"LegalEntity\",\"path\":\"model::LegalEntity\",\"properties\":[{\"docs\":[\"name of the entity\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"legalName\",\"type\":\"String\"}]},{\"_type\":\"class\",\"docs\":[\"Homo Sapien\"],\"name\":\"Person\",\"path\":\"model::Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firstName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"lastName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"enumeration\",\"docs\":[\"Types of company\"],\"enumValues\":[{\"docs\":[\"Limited\"],\"name\":\"LLC\"},{\"docs\":[],\"name\":\"CORP\"}],\"name\":\"IncType\",\"path\":\"model::target::IncType\"},{\"_type\":\"class\",\"docs\":[],\"name\":\"_Person\",\"path\":\"model::target::_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"type\":\"String\"}]}],\"elements\":[\"model::Animal\",\"model::Firm\",\"model::Firm_Person\",\"model::LegalEntity\",\"model::Person\",\"model::target::IncType\",\"model::target::_Person\"],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"mapping::ModelToModelRuntime\"],\"datasets\":[],\"defaultRuntime\":\"mapping::ModelToModelRuntime\",\"mapping\":\"mapping::ModelToModelMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"model::target::_Firm\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::target::_Firm\",\"properties\":[{\"_type\":\"entity\",\"entityPath\":\"model::target::_Person\",\"name\":\"employees\"},{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"enum\",\"enumPath\":\"model::target::IncType\",\"name\":\"type\"}]},{\"info\":{\"classPath\":\"model::target::_Person\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::target::_Person\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fullName\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"elements\":[{\"path\":\"model\"},{\"exclude\":true,\"path\":\"model::experiment\"},{\"exclude\":true,\"path\":\"model::target::_Firm\"},{\"exclude\":true,\"path\":\"model::NotRelevant\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"mapping::ModelToModelRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::ModelToModelMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm 
Dataspace\"},{\"_type\":\"Enumeration\",\"name\":\"IncType\",\"package\":\"model::target\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"LLC\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"CORP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"_Firm\",\"originalMilestonedProperties\":[],\"package\":\"model::target\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::target::_Person\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::target::IncType\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"_Person\",\"originalMilestonedProperties\":[],\"package\":\"model::target\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"path\":\"dataSpace::_FirmDataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm Dataspace\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithElements.pure", "dataSpace::_FirmDataSpace", "{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[{\"_type\":\"class\",\"docs\":[\"Animal class\"],\"name\":\"Animal\",\"path\":\"model::Animal\",\"properties\":[{\"docs\":[\"age\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"age\",\"type\":\"Integer\"}]},{\"_type\":\"class\",\"docs\":[\"The Firm concept\"],\"name\":\"Firm\",\"path\":\"model::Firm\",\"properties\":[{\"docs\":[\"type of firm: e.g. 
CORP, LTD\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"String\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"association\",\"docs\":[],\"name\":\"Firm_Person\",\"path\":\"model::Firm_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"LegalEntity\",\"path\":\"model::LegalEntity\",\"properties\":[{\"docs\":[\"name of the entity\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"legalName\",\"type\":\"String\"}]},{\"_type\":\"class\",\"docs\":[\"Homo Sapien\"],\"name\":\"Person\",\"path\":\"model::Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firstName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"lastName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"enumeration\",\"docs\":[\"Types of company\"],\"enumValues\":[{\"docs\":[\"Limited\"],\"name\":\"LLC\"},{\"docs\":[],\"name\":\"CORP\"}],\"name\":\"IncType\",\"path\":\"model::target::IncType\"},{\"_type\":\"class\",\"docs\":[],\"name\":\"_Person\",\"path\":\"model::target::_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"type\":\"String\"}]}],\"elements\":[\"model::Animal\",\"model::Firm\",\"model::Firm_Person\",\"model::LegalEntity\",\"model::Person\",\"model::target::IncType\",\"model::target::_Person\"],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"mapping::ModelToModelRuntime\"],\"datasets\":[],\"defaultRuntime\":\"mapping::ModelToModelRuntime\",\"mapping\":\"mapping::ModelToModelMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"model::target::_Firm\",\"properties\":[{\"_type\":\"entity\",\"entityPath\":\"model::target::_Person\",\"name\":\"employees\"},{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"enum\",\"enumPath\":\"model::target::IncType\",\"name\":\"type\"}]},{\"path\":\"model::target::_Person\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fullName\"}]}]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"path\":\"dataSpace::_FirmDataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm Dataspace\"}"); } @Test public void testAnalyticsForDataSpaceWithMilestoningElements() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithMilestoningElements.pure", "dataSpace::_FirmDataSpace", "{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[{\"_type\":\"class\",\"docs\":[\"Animal class\"],\"name\":\"Animal\",\"path\":\"model::Animal\",\"properties\":[{\"docs\":[\"age\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"age\",\"type\":\"Integer\"}]},{\"_type\":\"class\",\"docs\":[\"The Firm concept\"],\"milestoning\":\"processingtemporal\",\"name\":\"Firm\",\"path\":\"model::Firm\",\"properties\":[{\"docs\":[\"type of firm: e.g. 
CORP, LTD\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"String\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"association\",\"docs\":[],\"name\":\"Firm_Person\",\"path\":\"model::Firm_Person\",\"properties\":[{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"},{\"docs\":[],\"milestoning\":\"processingtemporal\",\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"LegalEntity\",\"path\":\"model::LegalEntity\",\"properties\":[{\"docs\":[\"name of the entity\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"legalName\",\"type\":\"String\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"NotRelevant\",\"path\":\"model::NotRelevant\",\"properties\":[]},{\"_type\":\"class\",\"docs\":[\"Homo Sapien\"],\"name\":\"Person\",\"path\":\"model::Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firstName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"lastName\",\"type\":\"String\"},{\"docs\":[],\"milestoning\":\"processingtemporal\",\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"businesstemporal\",\"name\":\"Animal\",\"path\":\"model::experiment::Animal\",\"properties\":[]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"businesstemporal\",\"name\":\"Human\",\"path\":\"model::experiment::Human\",\"properties\":[]},{\"_type\":\"enumeration\",\"docs\":[\"Types of 
company\"],\"enumValues\":[{\"docs\":[\"Limited\"],\"name\":\"LLC\"},{\"docs\":[],\"name\":\"CORP\"}],\"name\":\"IncType\",\"path\":\"model::target::IncType\"},{\"_type\":\"class\",\"docs\":[],\"name\":\"_Firm\",\"path\":\"model::target::_Firm\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"model::target::IncType\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"type\":\"String\"},{\"docs\":[],\"milestoning\":\"bitemporal\",\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::target::_Person\"}]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"bitemporal\",\"name\":\"_Person\",\"path\":\"model::target::_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"type\":\"String\"}]}],\"elements\":[\"model::Animal\",\"model::Firm\",\"model::Firm_Person\",\"model::LegalEntity\",\"model::NotRelevant\",\"model::Person\",\"model::experiment::Animal\",\"model::experiment::Human\",\"model::target::IncType\",\"model::target::_Firm\",\"model::target::_Person\"],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"mapping::ModelToModelRuntime\"],\"datasets\":[],\"defaultRuntime\":\"mapping::ModelToModelRuntime\",\"mapping\":\"mapping::ModelToModelMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"model::target::_Firm\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::target::_Firm\",\"properties\":[{\"_type\":\"entity\",\"entityPath\":\"model::target::_Person\",\"name\":\"employees\"},{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"enum\",\"enumPath\":\"model::target::IncType\",\"name\":\"type\"}]},{\"info\":{\"classPath\":\"model::target::_Person\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"model::target::_Person\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fullName\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"elements\":[{\"path\":\"model\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"mapping::ModelToModelRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::ModelToModelMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm 
Dataspace\"},{\"_type\":\"class\",\"constraints\":[],\"name\":\"BiTemporalMilestoning\",\"originalMilestonedProperties\":[],\"package\":\"meta::pure::milestoning\",\"properties\":[],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[],\"taggedValues\":[]},{\"_type\":\"profile\",\"name\":\"temporal\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"bitemporal\",\"businesstemporal\",\"processingtemporal\"],\"tags\":[]},{\"_type\":\"Enumeration\",\"name\":\"IncType\",\"package\":\"model::target\",\"stereotypes\":[],\"taggedValues\":[],\"values\":[{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"LLC\"},{\"stereotypes\":[],\"taggedValues\":[],\"value\":\"CORP\"}]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"_Firm\",\"originalMilestonedProperties\":[{\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::target::_Person\"}],\"package\":\"model::target\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::target::IncType\"},{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"model::target::_Person\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]},{\"_type\":\"class\",\"constraints\":[],\"name\":\"_Person\",\"originalMilestonedProperties\":[],\"package\":\"model::target\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[{\"profile\":\"meta::pure::profiles::temporal\",\"value\":\"bitemporal\"}],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"path\":\"dataSpace::_FirmDataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm Dataspace\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithMilestoningElements.pure", "dataSpace::_FirmDataSpace", "{\"defaultExecutionContext\":\"dummyContext\",\"diagrams\":[],\"elementDocs\":[{\"_type\":\"class\",\"docs\":[\"Animal class\"],\"name\":\"Animal\",\"path\":\"model::Animal\",\"properties\":[{\"docs\":[\"age\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"age\",\"type\":\"Integer\"}]},{\"_type\":\"class\",\"docs\":[\"The Firm concept\"],\"milestoning\":\"processingtemporal\",\"name\":\"Firm\",\"path\":\"model::Firm\",\"properties\":[{\"docs\":[\"type of firm: e.g. 
CORP, LTD\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"String\"},{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"}]},{\"_type\":\"association\",\"docs\":[],\"name\":\"Firm_Person\",\"path\":\"model::Firm_Person\",\"properties\":[{\"docs\":[\"some doc for employees\"],\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::Person\"},{\"docs\":[],\"milestoning\":\"processingtemporal\",\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"LegalEntity\",\"path\":\"model::LegalEntity\",\"properties\":[{\"docs\":[\"name of the entity\"],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"legalName\",\"type\":\"String\"}]},{\"_type\":\"class\",\"docs\":[],\"name\":\"NotRelevant\",\"path\":\"model::NotRelevant\",\"properties\":[]},{\"_type\":\"class\",\"docs\":[\"Homo Sapien\"],\"name\":\"Person\",\"path\":\"model::Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firstName\",\"type\":\"String\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"lastName\",\"type\":\"String\"},{\"docs\":[],\"milestoning\":\"processingtemporal\",\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"firm\",\"type\":\"model::Firm\"}]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"businesstemporal\",\"name\":\"Animal\",\"path\":\"model::experiment::Animal\",\"properties\":[]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"businesstemporal\",\"name\":\"Human\",\"path\":\"model::experiment::Human\",\"properties\":[]},{\"_type\":\"enumeration\",\"docs\":[\"Types of 
company\"],\"enumValues\":[{\"docs\":[\"Limited\"],\"name\":\"LLC\"},{\"docs\":[],\"name\":\"CORP\"}],\"name\":\"IncType\",\"path\":\"model::target::IncType\"},{\"_type\":\"class\",\"docs\":[],\"name\":\"_Firm\",\"path\":\"model::target::_Firm\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"type\",\"type\":\"model::target::IncType\"},{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"name\",\"type\":\"String\"},{\"docs\":[],\"milestoning\":\"bitemporal\",\"multiplicity\":{\"lowerBound\":1},\"name\":\"employees\",\"type\":\"model::target::_Person\"}]},{\"_type\":\"class\",\"docs\":[],\"milestoning\":\"bitemporal\",\"name\":\"_Person\",\"path\":\"model::target::_Person\",\"properties\":[{\"docs\":[],\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"fullName\",\"type\":\"String\"}]}],\"elements\":[\"model::Animal\",\"model::Firm\",\"model::Firm_Person\",\"model::LegalEntity\",\"model::NotRelevant\",\"model::Person\",\"model::experiment::Animal\",\"model::experiment::Human\",\"model::target::IncType\",\"model::target::_Firm\",\"model::target::_Person\"],\"executables\":[],\"executionContexts\":[{\"compatibleRuntimes\":[\"mapping::ModelToModelRuntime\"],\"datasets\":[],\"defaultRuntime\":\"mapping::ModelToModelRuntime\",\"mapping\":\"mapping::ModelToModelMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"model::target::_Firm\",\"properties\":[{\"_type\":\"entity\",\"entityPath\":\"model::target::_Person\",\"name\":\"employees\"},{\"_type\":\"MappedProperty\",\"name\":\"name\"},{\"_type\":\"enum\",\"enumPath\":\"model::target::IncType\",\"name\":\"type\"}]},{\"path\":\"model::target::_Person\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fullName\"}]}]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"_FirmDataSpace\",\"package\":\"dataSpace\",\"path\":\"dataSpace::_FirmDataSpace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"Firm Dataspace\"}"); } @Test public void testAnalyticsForDataSpaceWithExecutables() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithExecutables.pure", "domain::COVIDDataspace", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. 
Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 
2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"defaultRuntime\":\"runtime::H2Runtime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"domain::COVIDData\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"info\":{\"classPath\":\"domain::Demographics\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"COVIDData\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"id\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Integer\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"date\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"StrictDate\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"caseType\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"cases\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Float\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"lastReportedFlag\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"demographics\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Demographics\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data report consisting of case statistics details and basic information on demographics\"}]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. 
Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"executables\":[{\"description\":\"Some exec description\",\"executable\":{\"path\":\"service::CovidDataMulti\"},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":{\"path\":\"service::CovidDataSingle\"},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"runtime::H2Runtime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Demographics\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"state\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data demographics consisting of geolocation information\"}]},{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataSpaceWithExecutables.pure", "domain::COVIDDataspace", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia 
fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? 
O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"defaultRuntime\":\"runtime::H2Runtime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); } @Test public void testAnalyticsForDataSpaceWithAccessEntitlementsToH2() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataspaceAccessAnalytics.pure", "domain::COVIDDataspace", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. 
Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 
2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\",\"runtime::SnowflakeRuntime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"defaultRuntime\":\"runtime::H2Runtime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"domain::COVIDData\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"info\":{\"classPath\":\"domain::Demographics\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"COVIDData\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"id\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Integer\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"date\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"StrictDate\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"caseType\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"cases\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Float\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"lastReportedFlag\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"demographics\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Demographics\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data report consisting of case statistics details and basic information on demographics\"}]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. 
Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"executables\":[{\"description\":\"Some exec description\",\"executable\":{\"path\":\"service::CovidDataMulti\"},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":{\"path\":\"service::CovidDataSingle\"},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"runtime::H2Runtime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Demographics\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"state\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data demographics consisting of geolocation information\"}]},{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataspaceAccessAnalytics.pure", "domain::COVIDDataspace", 
"{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? 
O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\",\"runtime::SnowflakeRuntime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"defaultRuntime\":\"runtime::H2Runtime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspace\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspace\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); } @Test public void testAnalyticsForDataSpaceWithAccessEntitlementsToSnowflake() throws Exception { - testDataSpaceAnalyticsArtifactGenerationExtension("models/dataspaceAccessAnalytics.pure", "domain::COVIDDataspaceSnowflake", "{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. 
Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 
2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\",\"runtime::SnowflakeRuntime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"Snowflake\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"Snowflake\"}],\"defaultRuntime\":\"runtime::SnowflakeRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"info\":{\"classPath\":\"domain::COVIDData\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"info\":{\"classPath\":\"domain::Demographics\",\"isRootEntity\":true,\"subClasses\":[]},\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}],\"model\":{\"_type\":\"data\",\"elements\":[{\"_type\":\"class\",\"constraints\":[],\"name\":\"COVIDData\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":1,\"upperBound\":1},\"name\":\"id\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Integer\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"date\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"StrictDate\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"caseType\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"cases\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Float\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"lastReportedFlag\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"Boolean\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"demographics\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"domain::Demographics\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data report consisting of case statistics details and basic information on demographics\"}]},{\"_type\":\"dataSpace\",\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. 
Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"executables\":[{\"description\":\"Some exec description\",\"executable\":{\"path\":\"service::CovidDataMulti\"},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":{\"path\":\"service::CovidDataSingle\"},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"defaultRuntime\":{\"path\":\"runtime::SnowflakeRuntime\",\"type\":\"RUNTIME\"},\"mapping\":{\"path\":\"mapping::CovidDataMapping\",\"type\":\"MAPPING\"},\"name\":\"dummyContext\"}],\"name\":\"COVIDDataspaceSnowflake\",\"package\":\"domain\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"},{\"_type\":\"class\",\"constraints\":[],\"name\":\"Demographics\",\"originalMilestonedProperties\":[],\"package\":\"domain\",\"properties\":[{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"fips\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"},{\"multiplicity\":{\"lowerBound\":0,\"upperBound\":1},\"name\":\"state\",\"stereotypes\":[],\"taggedValues\":[],\"type\":\"String\"}],\"qualifiedProperties\":[],\"stereotypes\":[],\"superTypes\":[\"meta::pure::metamodel::type::Any\"],\"taggedValues\":[{\"tag\":{\"profile\":\"meta::pure::profiles::doc\",\"value\":\"doc\"},\"value\":\"COVID-19 data demographics consisting of geolocation information\"}]},{\"_type\":\"profile\",\"name\":\"doc\",\"package\":\"meta::pure::profiles\",\"stereotypes\":[\"deprecated\"],\"tags\":[\"doc\",\"todo\"]}]}},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspaceSnowflake\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspaceSnowflake\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); + testDataSpaceAnalyticsArtifactGenerationExtension("models/dataspaceAccessAnalytics.pure", "domain::COVIDDataspaceSnowflake", 
"{\"defaultExecutionContext\":\"dummyContext\",\"description\":\"# Peleus rupit\\n\\n## Sum super gerens paterque\\n\\nLorem markdownum presso, et tamen cogitis, Taenarius lactantia fluxerunt\\nterrita, vota. Tempore flumina ferrumque bella.\\n\\n- Per dixit\\n- Truces tellusque indignata ducem\\n- Cervice venitis cavernis minus\\n\\n## Tum ausus fovebam incursus magis dici extemplo\\n\\nPiscator degenerat desertaque quid scelus tyranni feror ipsa mortis nec silva\\nsparsus neci cum? Est patulas meam decorem, dat demit corpora exuritque Ulixes,\\ngenitore. Captare certa amore pressos, Diamque\\n[traxit](http://istecondar.net/ministropudoris) devorat duritia ecce, capillos\\nfuerint progenitore curva relictas. Iubae pectus et quateret, non vires tibi\\ncacumina figuram Antigonen rursus verti.\\n\\n## Dicta nec Thestiadae tristi exempla sed suoque\\n\\nFlumina quae loricaeque meruique defensae *me terram* tamen attollere totum\\nneque nullos. Quem plus, stratum.\\n\\n## Quaeque si reddite summoque vultu Teleboasque vincere\\n\\nIsmariae me munus umbram. Usum pedem multis quotiensque mirantum Cephenum et\\namori Procne locutum auctor Volturnus pavent virgineas.\\n\\n if (edi + sidebarTooltip < aiffDisk) {\\n drive_key_firewire += bank(searchHardBoot(bus, packet_click));\\n }\\n var adRow = dlc_rootkit(rdramMegabit) - hertzBanner * 2 +\\n memory_adc.horizontal(class_box_rte, disk, lte_grep);\\n if (grayscale) {\\n spool_windows_metal.zif_firewire *= 3;\\n emoticon_mp = user.thunderboltIcqBus.installer_remote(4, searchCable) *\\n kibibyteYoutubeRaster.simm(-3, nosqlCharacter, sip);\\n }\\n var blob = -2;\\n\\n## Est magis interdum in luctus\\n\\nPrimus illa sub bis infregit saepe agrestem Cyllare lumen cultrosque **Cnosia**.\\nSuis est fero durisque satis.\\n\\n- Nos quas est maesta aliquis se unum\\n- Tu ossa Cupido sagitta hanc inflati profuso\\n- Modo est proles pavor\\n- Stillabant pallada invitaque et tantum dictaque in\\n- Generum coegi tum edaci\\n\\nSuo nec cerae me omnem Famemque, passi si auditque ullo, praebita. Gravi annos\\npudore formidabilis erat pectora perpetuo qua oscula cum ad sed Nabataeus\\nRomethiumque deum Erectheus? 
O Victoria rostro utque terras vitisque classe.\\nTibi [miserrima hirta](http://decentia-qui.net/docta-petentem), eratis saepius\\ntuus.\",\"diagrams\":[],\"elementDocs\":[],\"elements\":[],\"executables\":[{\"description\":\"Some exec description\",\"executable\":\"service::CovidDataMulti\",\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 1\"},{\"description\":\"Some more exec description\",\"executable\":\"service::CovidDataSingle\",\"info\":{\"_type\":\"service\",\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"H2\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"H2\"}],\"mapping\":\"mapping::CovidDataMapping\",\"pattern\":\"/9566f101-2108-408f-863f-6d7e154dc17b\",\"query\":\"|domain::COVIDData.all()->project(\\n [\\n x: domain::COVIDData[1]|$x.cases\\n ],\\n ['Cases']\\n)\",\"runtime\":\"runtime::H2Runtime\"},\"result\":{\"_type\":\"tds\",\"columns\":[{\"name\":\"Cases\",\"relationalType\":\"INTEGER\",\"type\":\"Float\"}]},\"title\":\"Exec 2\"}],\"executionContexts\":[{\"compatibleRuntimes\":[\"runtime::H2Runtime\",\"runtime::SnowflakeRuntime\"],\"datasets\":[{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.DEMOGRAPHICS\",\"schema\":\"default\",\"table\":\"DEMOGRAPHICS\",\"type\":\"Snowflake\"},{\"_type\":\"relationalDatabaseTable\",\"database\":\"CovidDataStore\",\"name\":\"default.COVID_DATA\",\"schema\":\"default\",\"table\":\"COVID_DATA\",\"type\":\"Snowflake\"}],\"defaultRuntime\":\"runtime::SnowflakeRuntime\",\"mapping\":\"mapping::CovidDataMapping\",\"mappingModelCoverageAnalysisResult\":{\"mappedEntities\":[{\"path\":\"domain::COVIDData\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"caseType\"},{\"_type\":\"MappedProperty\",\"name\":\"cases\"},{\"_type\":\"MappedProperty\",\"name\":\"date\"},{\"_type\":\"entity\",\"entityPath\":\"domain::Demographics\",\"name\":\"demographics\"},{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"id\"},{\"_type\":\"MappedProperty\",\"name\":\"lastReportedFlag\"}]},{\"path\":\"domain::Demographics\",\"properties\":[{\"_type\":\"MappedProperty\",\"name\":\"fips\"},{\"_type\":\"MappedProperty\",\"name\":\"state\"}]}]},\"name\":\"dummyContext\"}],\"model\":{\"_type\":\"data\",\"elements\":[]},\"name\":\"COVIDDataspaceSnowflake\",\"package\":\"domain\",\"path\":\"domain::COVIDDataspaceSnowflake\",\"stereotypes\":[],\"taggedValues\":[],\"title\":\"COVID Sample Data\"}"); } } diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-grammar/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-grammar/pom.xml index edb773f3c75..0004ac4d9ca 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-grammar/pom.xml +++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-grammar/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-data-space - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-protocol/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-protocol/pom.xml index b4deaaee9be..b5ba3b55ec6 100644 --- a/legend-engine-xts-data-space/legend-engine-xt-data-space-protocol/pom.xml +++ 
b/legend-engine-xts-data-space/legend-engine-xt-data-space-protocol/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-data-space
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure-metamodel/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure-metamodel/pom.xml
index e49d93e638e..ac7f2d481fa 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure-metamodel/pom.xml
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure-metamodel/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-data-space
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/pom.xml b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/pom.xml
index d8be092ea06..ffd270c924e 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/pom.xml
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-data-space
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/src/main/resources/core_data_space/analytics/analytics.pure b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/src/main/resources/core_data_space/analytics/analytics.pure
index 4639fdd86c3..3bd116b8589 100644
--- a/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/src/main/resources/core_data_space/analytics/analytics.pure
+++ b/legend-engine-xts-data-space/legend-engine-xt-data-space-pure/src/main/resources/core_data_space/analytics/analytics.pure
@@ -32,21 +32,21 @@ Class meta::pure::metamodel::dataSpace::analytics::DataSpaceCoverageAnalysisResu
   executionContexts: DataSpaceExecutionContextAnalysisResult[*];
 }

-function meta::pure::metamodel::dataSpace::analytics::analyzeDataSpace(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*]): DataSpaceAnalysisResult[1]
+function meta::pure::metamodel::dataSpace::analytics::analyzeDataSpace(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*], returnLightGraph: Boolean[1]): DataSpaceAnalysisResult[1]
 {
   ^DataSpaceAnalysisResult
   (
     diagramModels = meta::pure::metamodel::diagram::analytics::modelCoverage::getDiagramModelCoverage($dataSpace.diagrams.diagram),
-    executionContexts = analyzeDataSpaceExecutionContexts($dataSpace, $allAvailableRuntimes),
+    executionContexts = analyzeDataSpaceExecutionContexts($dataSpace, $allAvailableRuntimes, $returnLightGraph),
     elementDocs = getModelsDoc($dataSpace)
   );
 }

-function meta::pure::metamodel::dataSpace::analytics::analyzeDataSpaceCoverage(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*]): DataSpaceCoverageAnalysisResult[1]
+function meta::pure::metamodel::dataSpace::analytics::analyzeDataSpaceCoverage(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*], returnLightGraph: Boolean[1]): DataSpaceCoverageAnalysisResult[1]
 {
   ^DataSpaceCoverageAnalysisResult
   (
-    executionContexts = analyzeDataSpaceExecutionContexts($dataSpace, $allAvailableRuntimes)
+    executionContexts = analyzeDataSpaceExecutionContexts($dataSpace, $allAvailableRuntimes, $returnLightGraph)
   );
 }
@@ -60,12 +60,12 @@ Class meta::pure::metamodel::dataSpace::analytics::DataSpaceExecutionContextAnal
   mappingCoverage: meta::analytics::mapping::modelCoverage::MappingModelCoverageAnalysisResult[1];
 }

-function <<access.private>> meta::pure::metamodel::dataSpace::analytics::analyzeDataSpaceExecutionContexts(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*]): DataSpaceExecutionContextAnalysisResult[*]
+function <<access.private>> meta::pure::metamodel::dataSpace::analytics::analyzeDataSpaceExecutionContexts(dataSpace: DataSpace[1], allAvailableRuntimes: PackageableRuntime[*], returnLightGraph: Boolean[1]): DataSpaceExecutionContextAnalysisResult[*]
 {
   $dataSpace.executionContexts->map(context|^DataSpaceExecutionContextAnalysisResult(
     name = $context.name,
     compatibleRuntimes = getMappingCompatibleRuntimes($context.mapping, $allAvailableRuntimes),
-    mappingCoverage = meta::analytics::mapping::modelCoverage::analyze($context.mapping, true, false, true)
+    mappingCoverage = meta::analytics::mapping::modelCoverage::analyze($context.mapping, $returnLightGraph, false, $returnLightGraph)
   ));
 }
diff --git a/legend-engine-xts-data-space/pom.xml b/legend-engine-xts-data-space/pom.xml
index be7c0003a17..874e4086d1f 100644
--- a/legend-engine-xts-data-space/pom.xml
+++ b/legend-engine-xts-data-space/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-api/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-api/pom.xml
index 7c1c7f9f9f6..4bae4126130 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-api/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-api/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-diagram
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-compiler/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-compiler/pom.xml
index a95dccd3e25..eeb374821b3 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-compiler/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-compiler/pom.xml
@@ -18,7 +18,7 @@
 legend-engine-xts-diagram
 org.finos.legend.engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-grammar/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-grammar/pom.xml
index 472d0806b10..67936906248 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-grammar/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-grammar/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-diagram
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-protocol/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-protocol/pom.xml
index a1d12eb2859..89613a599f7 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-protocol/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-protocol/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-diagram
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-pure-metamodel/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-pure-metamodel/pom.xml
index 38858e9b4b1..de2de27311e 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-pure-metamodel/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-pure-metamodel/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-diagram
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
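// The analytics.pure hunk above threads a new returnLightGraph flag from the analyzeDataSpace and
// analyzeDataSpaceCoverage entry points down to analyzeDataSpaceExecutionContexts, which previously
// hard-coded meta::analytics::mapping::modelCoverage::analyze($context.mapping, true, false, true).
// That lines up with the reworked test expectations earlier in this diff, whose
// mappingModelCoverageAnalysisResult no longer embeds the per-entity "info" blocks or the "model"
// element list. A minimal Java sketch of the same pass-the-flag-through refactoring, using
// hypothetical names (CoverageResult, DataSpaceAnalyzerSketch) rather than the engine's real API:

import java.util.List;
import java.util.stream.Collectors;

final class CoverageResult
{
    final String mapping;
    final boolean hasModelGraph; // mirrors whether the analysis result embeds the model graph

    CoverageResult(String mapping, boolean hasModelGraph)
    {
        this.mapping = mapping;
        this.hasModelGraph = hasModelGraph;
    }
}

final class DataSpaceAnalyzerSketch
{
    // The caller's choice is forwarded to every per-context analysis instead of being
    // fixed at the innermost call site, which is the shape of the Pure change above.
    static List<CoverageResult> analyzeExecutionContexts(List<String> contextMappings, boolean returnLightGraph)
    {
        return contextMappings.stream()
                .map(mapping -> new CoverageResult(mapping, returnLightGraph))
                .collect(Collectors.toList());
    }

    public static void main(String[] args)
    {
        // Callers that only need entity/property coverage can now ask for the cheaper payload.
        List<CoverageResult> results = analyzeExecutionContexts(List.of("mapping::CovidDataMapping"), false);
        System.out.println(results.get(0).mapping + " hasModelGraph=" + results.get(0).hasModelGraph);
    }
}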
diff --git a/legend-engine-xts-diagram/legend-engine-xt-diagram-pure/pom.xml b/legend-engine-xts-diagram/legend-engine-xt-diagram-pure/pom.xml
index f7d11554ee3..7321c380b38 100644
--- a/legend-engine-xts-diagram/legend-engine-xt-diagram-pure/pom.xml
+++ b/legend-engine-xts-diagram/legend-engine-xt-diagram-pure/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-diagram
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-diagram/pom.xml b/legend-engine-xts-diagram/pom.xml
index 30b4dad81c6..4909a290dfc 100644
--- a/legend-engine-xts-diagram/pom.xml
+++ b/legend-engine-xts-diagram/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-executionPlan/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-executionPlan/pom.xml
index 95eb5b30e0d..04786a77495 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-executionPlan/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-executionPlan/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-grammar/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-grammar/pom.xml
index 9c3afc01ca7..92bc3062a6c 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-grammar/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-grammar/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-protocol/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-protocol/pom.xml
index b71e66b3195..298212689cb 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-protocol/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-protocol/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-pure-metamodel/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-pure-metamodel/pom.xml
index f5e1fff78db..c47506b2850 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-pure-metamodel/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-V7-pure-metamodel/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-executionPlan-test/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-executionPlan-test/pom.xml
index 92317a1d9fa..a078bda2d13 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-executionPlan-test/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-executionPlan-test/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-protocol-utils/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-protocol-utils/pom.xml
index 1a9c94f58ed..a43de61a6fb 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-protocol-utils/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-protocol-utils/pom.xml
@@ -4,7 +4,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 legend-engine-xt-elasticsearch-protocol-utils
diff --git a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-pure-specification-metamodel/pom.xml b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-pure-specification-metamodel/pom.xml
index 9132e7e2ace..3fc963d8d10 100644
--- a/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-pure-specification-metamodel/pom.xml
+++ b/legend-engine-xts-elasticsearch/legend-engine-xt-elasticsearch-pure-specification-metamodel/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine-xts-elasticsearch
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-elasticsearch/pom.xml b/legend-engine-xts-elasticsearch/pom.xml
index 71811023ad8..5bc940d0bf0 100644
--- a/legend-engine-xts-elasticsearch/pom.xml
+++ b/legend-engine-xts-elasticsearch/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-driver-bloomberg/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-driver-bloomberg/pom.xml
index ca6d92e88b7..1c15c4e64d7 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-driver-bloomberg/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-driver-bloomberg/pom.xml
@@ -19,7 +19,7 @@
 legend-engine-xts-flatdata
 org.finos.legend.engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-pure/pom.xml
index 2ce3ee6188a..532bacb63cd 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-pure/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-pure/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-test/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-test/pom.xml
index 15743a9a12a..846a1db9e1a 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-test/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-javaPlatformBinding-test/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-model/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-model/pom.xml
index 2608aad24b6..a22390fcea4 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-model/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-model/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-pure/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-pure/pom.xml
index 0ae45f6c61e..773ce856600 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-pure/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-pure/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-runtime/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-runtime/pom.xml
index 7a11ca9f632..aa563eef360 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-runtime/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-runtime/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-shared/pom.xml b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-shared/pom.xml
index 0b4c1bc13dc..ddfde02af48 100644
--- a/legend-engine-xts-flatdata/legend-engine-xt-flatdata-shared/pom.xml
+++ b/legend-engine-xts-flatdata/legend-engine-xt-flatdata-shared/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-flatdata
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-flatdata/pom.xml b/legend-engine-xts-flatdata/pom.xml
index 6e1065054e1..ab00161e972 100644
--- a/legend-engine-xts-flatdata/pom.xml
+++ b/legend-engine-xts-flatdata/pom.xml
@@ -18,7 +18,7 @@
 org.finos.legend.engine
 legend-engine
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/pom.xml b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/pom.xml
index 8cd5d554c3e..fbf5a212946 100644
--- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/pom.xml
+++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/pom.xml
@@ -19,7 +19,7 @@
 org.finos.legend.engine
 legend-engine-xts-functionActivator
- 4.32.1-SNAPSHOT
+ 4.35.4-SNAPSHOT
 4.0.0
@@ -126,6 +126,7 @@
 org.finos.legend.engine
 legend-engine-xt-functionActivator-protocol
+
diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/api/FunctionActivatorAPI.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/api/FunctionActivatorAPI.java
index 7e314bd1645..922b8069b00 100644
--- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/api/FunctionActivatorAPI.java
+++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/api/FunctionActivatorAPI.java
@@ -24,20 +24,21 @@
 import org.eclipse.collections.api.list.MutableList;
 import org.finos.legend.engine.functionActivator.api.input.FunctionActivatorInput;
 import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo;
-import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
+import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
 import org.finos.legend.engine.functionActivator.service.FunctionActivatorLoader;
 import org.finos.legend.engine.functionActivator.service.FunctionActivatorService;
 import org.finos.legend.engine.language.pure.compiler.Compiler;
 import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel;
 import org.finos.legend.engine.language.pure.modelManager.ModelManager;
 //import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration;
-import org.finos.legend.engine.functionActivator.deployment.DeploymentResult;
-import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentStage;
+import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult;
 import org.finos.legend.engine.protocol.pure.PureClientVersions;
 import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
 import org.finos.legend.engine.shared.core.ObjectMapperFactory;
 import org.finos.legend.engine.shared.core.api.result.ManageConstantResult;
 import org.finos.legend.engine.shared.core.deployment.DeploymentMode;
+import org.finos.legend.engine.shared.core.identity.factory.DefaultIdentityFactory;
+import org.finos.legend.engine.shared.core.identity.factory.IdentityFactory;
 import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper;
 import org.finos.legend.engine.shared.core.operational.errorManagement.ExceptionTool;
 import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType;
@@ -65,18 +66,22 @@ public class FunctionActivatorAPI
     private final PureModel emptyModel;
     private final Function> routerExtensions;
     private List runtimeDeploymentConfig = Lists.mutable.empty();
+    private MutableList> availableActivatorServices = Lists.mutable.empty();
+    private IdentityFactory identityFactory;

     public FunctionActivatorAPI(ModelManager modelManager, Function> routerExtensions)
     {
         this.modelManager = modelManager;
         this.routerExtensions = routerExtensions;
         this.emptyModel = Compiler.compile(PureModelContextData.newPureModelContextData(), DeploymentMode.PROD, null);
+        this.identityFactory = new DefaultIdentityFactory();
     }

-    public FunctionActivatorAPI(ModelManager modelManager, List activatorConfigurations, Function> routerExtensions)
+    public FunctionActivatorAPI(ModelManager modelManager, List activatorConfigurations, MutableList> availableActivatorServices, Function> routerExtensions)
     {
         this(modelManager, routerExtensions);
         this.runtimeDeploymentConfig = activatorConfigurations;
+        this.availableActivatorServices = availableActivatorServices;
     }

     @GET
FunctionActivatorLoader.extensions().collect(x -> x.info(emptyModel, "vX_X_X")) : availableActivatorServices.collect(c -> c.info(emptyModel,"vX_X_X")); return ManageConstantResult.manageResult(profiles, values, objectMapper); } @@ -105,7 +110,7 @@ public Response validate(FunctionActivatorInput input, @ApiParam(hidden = true) PureModel pureModel = modelManager.loadModel(input.model, clientVersion, profiles, null); Root_meta_external_function_activator_FunctionActivator activator = (Root_meta_external_function_activator_FunctionActivator) pureModel.getPackageableElement(input.functionActivator); FunctionActivatorService service = getActivatorService(activator, pureModel); - return Response.ok(objectMapper.writeValueAsString(service.validate(profiles, pureModel, activator, input.model, routerExtensions))).type(MediaType.APPLICATION_JSON_TYPE).build(); + return Response.ok(objectMapper.writeValueAsString(service.validate(identityFactory.makeIdentity(profiles), pureModel, activator, input.model, routerExtensions))).type(MediaType.APPLICATION_JSON_TYPE).build(); } catch (Exception ex) { @@ -128,7 +133,7 @@ public Response publishToSandbox(FunctionActivatorInput input, @ApiParam(hidden PureModel pureModel = modelManager.loadModel(input.model, clientVersion, profiles, null); Root_meta_external_function_activator_FunctionActivator activator = (Root_meta_external_function_activator_FunctionActivator) pureModel.getPackageableElement(input.functionActivator); FunctionActivatorService service = getActivatorService(activator,pureModel); - return Response.ok(objectMapper.writeValueAsString(service.publishToSandbox(profiles, pureModel, activator, input.model, service.selectConfig(this.runtimeDeploymentConfig), routerExtensions))).type(MediaType.APPLICATION_JSON_TYPE).build(); + return Response.ok(objectMapper.writeValueAsString(service.publishToSandbox(this.identityFactory.makeIdentity(profiles), pureModel, activator, input.model, service.selectConfig(this.runtimeDeploymentConfig), routerExtensions))).type(MediaType.APPLICATION_JSON_TYPE).build(); } catch (Exception ex) { @@ -162,7 +167,11 @@ public Response renderArtifact(FunctionActivatorInput input, @ApiParam(hidden = public FunctionActivatorService getActivatorService(Root_meta_external_function_activator_FunctionActivator activator, PureModel pureModel) { - FunctionActivatorService service = FunctionActivatorLoader.extensions().select(c -> c.supports(activator)).getFirst(); + FunctionActivatorService service = (FunctionActivatorService)this.availableActivatorServices.select(c -> c.supports(activator)).getFirst(); + if (service == null) + { + service = FunctionActivatorLoader.extensions().select(c -> c.supports(activator)).getFirst(); + } if (service == null) { throw new RuntimeException(activator.getClass().getSimpleName() + " is not supported!"); diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/service/FunctionActivatorService.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/service/FunctionActivatorService.java index 7086ba02aa6..537095a0c46 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/service/FunctionActivatorService.java +++
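The lookup order in the reworked getActivatorService above deserves a note: activator services handed to the constructor take precedence, and the ServiceLoader-backed FunctionActivatorLoader is consulted only when the explicit list has no match, so embedders can inject services without relying on classpath discovery. A minimal sketch of that registry-with-fallback pattern, with generics simplified and ServiceRegistry a purely illustrative type, not one from this patch:

    import java.util.List;
    import java.util.Optional;
    import java.util.ServiceLoader;
    import java.util.function.Predicate;
    import java.util.stream.StreamSupport;

    // Illustrative only: mirrors the "explicit registrations first,
    // ServiceLoader discovery second" order of getActivatorService.
    final class ServiceRegistry<S>
    {
        private final List<S> registered;
        private final Class<S> serviceType;

        ServiceRegistry(List<S> registered, Class<S> serviceType)
        {
            this.registered = registered;
            this.serviceType = serviceType;
        }

        S find(Predicate<S> supports)
        {
            Optional<S> explicit = this.registered.stream().filter(supports).findFirst();
            return explicit.orElseGet(() ->
                    StreamSupport.stream(ServiceLoader.load(this.serviceType).spliterator(), false)
                            .filter(supports)
                            .findFirst()
                            .orElseThrow(() -> new RuntimeException("No registered or discovered service supports this activator")));
        }
    }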
b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/service/FunctionActivatorService.java @@ -18,16 +18,14 @@ import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.list.MutableList; import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorArtifact; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; -import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentStage; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; -import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; -import org.finos.legend.engine.functionActivator.deployment.DeploymentResult; +import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_external_function_activator_FunctionActivator; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; -import org.pac4j.core.profile.CommonProfile; import java.util.List; @@ -37,9 +35,9 @@ public interface FunctionActivatorService validate(MutableList profiles, PureModel pureModel, T functionActivator, PureModelContext inputModel, Function> routerExtensions); + MutableList validate(Identity identity, PureModel pureModel, T functionActivator, PureModelContext inputModel, Function> routerExtensions); - V publishToSandbox(MutableList profiles, PureModel pureModel, T functionActivator, PureModelContext inputModel, List runtimeConfigurations, Function> routerExtensions); + V publishToSandbox(Identity identity, PureModel pureModel, T functionActivator, PureModelContext inputModel, List runtimeConfigurations, Function> routerExtensions); FunctionActivatorArtifact renderArtifact(PureModel pureModel, T functionActivator, PureModelContext inputModel, String clientVersion, Function> routerExtensions); diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/pom.xml b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/pom.xml new file mode 100644 index 00000000000..a96c9ea83cb --- /dev/null +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/pom.xml @@ -0,0 +1,58 @@ + + + + + + org.finos.legend.engine + legend-engine-xts-functionActivator + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-functionActivator-deployment + jar + Legend Engine - XT - Function Activator - Deployment + + + + + + org.finos.legend.engine + legend-engine-shared-core + + + + + + + junit + junit + test + + + log4j + log4j + test + + + org.finos.legend.engine + legend-engine-xt-functionActivator-protocol + + + + + diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java 
b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java similarity index 64% rename from legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java rename to legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java index 302270e5c49..713b3b97a96 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-deployment/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentManager.java @@ -14,18 +14,19 @@ package org.finos.legend.engine.functionActivator.deployment; -import org.eclipse.collections.api.list.MutableList; -import org.pac4j.core.profile.CommonProfile; +import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.shared.core.identity.Identity; import java.util.List; public interface DeploymentManager { - public V deploy(MutableList profiles, U artifact); - - public V deploy(MutableList profiles, U artifact, List availableRuntimeConfigurations); + public V deploy(Identity identity, U artifact); + public V deploy(Identity identity, U artifact, List availableRuntimeConfigurations); public boolean canDeploy(FunctionActivatorArtifact activatorArtifact); } diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/pom.xml b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/pom.xml index ce19c12348c..547861e3478 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/pom.xml +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-functionActivator - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentResult.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/DeploymentResult.java similarity index 90% rename from legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentResult.java rename to legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/DeploymentResult.java index 4f487838f45..f63aa8f8b95 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/DeploymentResult.java +++ 
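With this move and rewrite, DeploymentManager is keyed on Identity rather than a raw MutableList<CommonProfile>, matching the FunctionActivatorService changes above: the resource layer calls identityFactory.makeIdentity(profiles) once and everything downstream authenticates against a single Identity. A rough sketch of an implementation under the new contract; ExampleArtifact and ExampleResult are hypothetical placeholders, and the interface's type parameters are assumed here to be <U extends FunctionActivatorArtifact, V extends DeploymentResult>, as the deploy signatures suggest:

    import java.util.Collections;
    import java.util.List;
    import org.finos.legend.engine.functionActivator.deployment.DeploymentManager;
    import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult;
    import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact;
    import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration;
    import org.finos.legend.engine.shared.core.identity.Identity;

    // Hypothetical concrete protocol types, defined only so the sketch is self-contained
    class ExampleArtifact extends FunctionActivatorArtifact
    {
    }

    class ExampleResult extends DeploymentResult
    {
        public boolean successful;
    }

    class ExampleDeploymentManager implements DeploymentManager<ExampleArtifact, ExampleResult>
    {
        @Override
        public ExampleResult deploy(Identity identity, ExampleArtifact artifact)
        {
            return this.deploy(identity, artifact, Collections.emptyList());
        }

        @Override
        public ExampleResult deploy(Identity identity, ExampleArtifact artifact, List<FunctionActivatorDeploymentConfiguration> configurations)
        {
            // Credentials now come off the Identity instead of pac4j profiles
            ExampleResult result = new ExampleResult();
            result.successful = true;
            return result;
        }

        @Override
        public boolean canDeploy(FunctionActivatorArtifact artifact)
        {
            return artifact instanceof ExampleArtifact;
        }
    }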
b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/DeploymentResult.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.functionActivator.deployment; +package org.finos.legend.engine.protocol.functionActivator.deployment; public class DeploymentResult { diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorArtifact.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorArtifact.java similarity index 80% rename from legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorArtifact.java rename to legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorArtifact.java index 5a29cfb0043..c16fee7c2c2 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorArtifact.java +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorArtifact.java @@ -12,8 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.functionActivator.deployment; +package org.finos.legend.engine.protocol.functionActivator.deployment; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") public class FunctionActivatorArtifact { public FunctionActivatorDeploymentContent content; diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java similarity index 77% rename from legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java rename to legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java index 963ec5e2587..7e21ffa46b9 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentConfiguration.java @@ -12,8 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package org.finos.legend.engine.functionActivator.deployment; +package org.finos.legend.engine.protocol.functionActivator.deployment; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") public class FunctionActivatorDeploymentConfiguration { } diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentContent.java b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentContent.java similarity index 77% rename from legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentContent.java rename to legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentContent.java index 47ecf7745ee..59a64291936 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-api/src/main/java/org/finos/legend/engine/functionActivator/deployment/FunctionActivatorDeploymentContent.java +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-protocol/src/main/java/org/finos/legend/engine/protocol/functionActivator/deployment/FunctionActivatorDeploymentContent.java @@ -12,8 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.functionActivator.deployment; +package org.finos.legend.engine.protocol.functionActivator.deployment; +import com.fasterxml.jackson.annotation.JsonTypeInfo; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type") public class FunctionActivatorDeploymentContent { } diff --git a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-pure/pom.xml b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-pure/pom.xml index 9390cc74c2a..4976cc4b767 100644 --- a/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-pure/pom.xml +++ b/legend-engine-xts-functionActivator/legend-engine-xt-functionActivator-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-functionActivator - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-functionActivator/pom.xml b/legend-engine-xts-functionActivator/pom.xml index 55c5943d834..37ba63f8306 100644 --- a/legend-engine-xts-functionActivator/pom.xml +++ b/legend-engine-xts-functionActivator/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -30,6 +30,7 @@ legend-engine-xt-functionActivator-api + legend-engine-xt-functionActivator-deployment legend-engine-xt-functionActivator-protocol legend-engine-xt-functionActivator-pure diff --git a/legend-engine-xts-generation/legend-engine-external-shared/pom.xml b/legend-engine-xts-generation/legend-engine-external-shared/pom.xml index fa7cbd3981e..6baac4e4596 100644 --- a/legend-engine-xts-generation/legend-engine-external-shared/pom.xml +++ b/legend-engine-xts-generation/legend-engine-external-shared/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
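The @JsonTypeInfo annotations added to FunctionActivatorArtifact, FunctionActivatorDeploymentConfiguration and FunctionActivatorDeploymentContent make Jackson write a "_type" discriminator so that concrete subtypes survive a serialization round trip across the new protocol-module boundary. The annotation only names the property; the mapping from "_type" values to classes must still be registered (via @JsonSubTypes or ObjectMapper.registerSubtypes). The no-arg constructor added to Artifact.java further below serves the same machinery: absent a @JsonCreator, Jackson instantiates through the default constructor. A self-contained illustration, with SnowflakeArtifact a hypothetical subtype:

    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.jsontype.NamedType;

    public class TypeInfoDemo
    {
        @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type")
        public static class Artifact
        {
        }

        public static class SnowflakeArtifact extends Artifact // hypothetical subtype
        {
            public String deployedLocation;
        }

        public static void main(String[] args) throws Exception
        {
            ObjectMapper mapper = new ObjectMapper();
            // Register the subtype under the name to be written into "_type"
            mapper.registerSubtypes(new NamedType(SnowflakeArtifact.class, "snowflakeArtifact"));

            SnowflakeArtifact in = new SnowflakeArtifact();
            in.deployedLocation = "db.schema.fn";
            String json = mapper.writeValueAsString(in);
            // json: {"_type":"snowflakeArtifact","deployedLocation":"db.schema.fn"}

            // Deserializing through the base class still yields the subtype
            Artifact out = mapper.readValue(json, Artifact.class);
            System.out.println(out instanceof SnowflakeArtifact); // true
        }
    }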
a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation-pure/pom.xml b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation-pure/pom.xml index b9c46315d9d..244c5a169fe 100644 --- a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation-pure/pom.xml +++ b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/pom.xml b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/pom.xml index fcc6340f2c1..bc9b3b28adb 100644 --- a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/pom.xml +++ b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-dsl-generation @@ -146,6 +146,10 @@ com.fasterxml.jackson.core jackson-databind + + com.fasterxml.jackson.core + jackson-annotations + diff --git a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/src/main/java/org/finos/legend/engine/language/pure/dsl/generation/extension/Artifact.java b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/src/main/java/org/finos/legend/engine/language/pure/dsl/generation/extension/Artifact.java index e3af2dd29ab..aa9e28eefc0 100644 --- a/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/src/main/java/org/finos/legend/engine/language/pure/dsl/generation/extension/Artifact.java +++ b/legend-engine-xts-generation/legend-engine-language-pure-dsl-generation/src/main/java/org/finos/legend/engine/language/pure/dsl/generation/extension/Artifact.java @@ -14,12 +14,19 @@ package org.finos.legend.engine.language.pure.dsl.generation.extension; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) public class Artifact { public String content; public String path; public String format; + public Artifact() + { + //Empty constructor for Jackson + } public Artifact(String content, String path, String format) { diff --git a/legend-engine-xts-generation/legend-engine-xt-artifact-generation-api/pom.xml b/legend-engine-xts-generation/legend-engine-xt-artifact-generation-api/pom.xml index cde9e7c2f37..51363f114ff 100644 --- a/legend-engine-xts-generation/legend-engine-xt-artifact-generation-api/pom.xml +++ b/legend-engine-xts-generation/legend-engine-xt-artifact-generation-api/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-generation org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-generation/pom.xml b/legend-engine-xts-generation/pom.xml index 4b8f2f5b637..0b8732b3da0 100644 --- a/legend-engine-xts-generation/pom.xml +++ b/legend-engine-xts-generation/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-compiler/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-compiler/pom.xml index d34644934c3..8b9847512a6 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-compiler/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-compiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ 
-73,7 +73,7 @@ org.finos.legend.engine legend-engine-protocol-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT org.finos.legend.pure diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar-integration/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar-integration/pom.xml index 9d90f50016d..0b5cb5828b9 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar-integration/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar-integration/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/pom.xml index 54f1645b576..6e9544de553 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/antlr4/org/finos/legend/engine/language/graphQL/grammar/from/antlr4/GraphQL.g4 b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/antlr4/org/finos/legend/engine/language/graphQL/grammar/from/antlr4/GraphQL.g4 index 754ba57845f..1d735021fb4 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/antlr4/org/finos/legend/engine/language/graphQL/grammar/from/antlr4/GraphQL.g4 +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/antlr4/org/finos/legend/engine/language/graphQL/grammar/from/antlr4/GraphQL.g4 @@ -223,7 +223,7 @@ objectTypeExtension: ; //https://spec.graphql.org/June2018/#sec-Interfaces -interfaceTypeDefinition: description? INTERFACE name directives? fieldsDefinition?; +interfaceTypeDefinition: description? INTERFACE name implementsInterfaces? directives? fieldsDefinition?; //https://spec.graphql.org/June2018/#sec-Interface-Extensions interfaceTypeExtension: EXTEND INTERFACE name directives? 
fieldsDefinition diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/from/GraphQLGrammarParser.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/from/GraphQLGrammarParser.java index ca070583638..7c3f3407a71 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/from/GraphQLGrammarParser.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/from/GraphQLGrammarParser.java @@ -374,6 +374,7 @@ private InterfaceTypeDefinition visitInterfaceTypeDefinition(GraphQLParser.Inter InterfaceTypeDefinition interfaceTypeDefinition = new InterfaceTypeDefinition(); interfaceTypeDefinition.name = interfaceTypeDefinitionContext.name().getText(); interfaceTypeDefinition.fields = ListIterate.collect(interfaceTypeDefinitionContext.fieldsDefinition().fieldDefinition(), this::visitFieldsDefinitionContext); + interfaceTypeDefinition._implements = visitImplementInterface(interfaceTypeDefinitionContext.implementsInterfaces(), Lists.mutable.empty()).reverseThis(); return interfaceTypeDefinition; } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/to/GraphQLGrammarComposer.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/to/GraphQLGrammarComposer.java index 60fde75a856..c029af7f07d 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/to/GraphQLGrammarComposer.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/main/java/org/finos/legend/engine/language/graphQL/grammar/to/GraphQLGrammarComposer.java @@ -103,7 +103,7 @@ public String visit(UnionTypeDefinition unionTypeDefinition) @Override public String visit(InterfaceTypeDefinition interfaceTypeDefinition) { - return "interface " + interfaceTypeDefinition.name + " {\n" + + return "interface " + interfaceTypeDefinition.name + (interfaceTypeDefinition._implements.isEmpty() ? 
"" : " implements " + ListAdapter.adapt(interfaceTypeDefinition._implements).makeString(" & ")) + " {\n" + ListIterate.collect(interfaceTypeDefinition.fields, f -> " " + renderField(f)).makeString("\n") + "\n}"; } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/test/java/org/finos/legend/engine/language/graphQL/grammar/test/roundtrip/TestGraphQLRoundtrip.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/test/java/org/finos/legend/engine/language/graphQL/grammar/test/roundtrip/TestGraphQLRoundtrip.java index 8a87f430cdc..5d5c6e706e7 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/test/java/org/finos/legend/engine/language/graphQL/grammar/test/roundtrip/TestGraphQLRoundtrip.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-grammar/src/test/java/org/finos/legend/engine/language/graphQL/grammar/test/roundtrip/TestGraphQLRoundtrip.java @@ -83,7 +83,7 @@ public void testParsingError() @Test public void testInterfaceRoundtrip() { - check("interface Car {\n" + + check("interface Car implements Automobile {\n" + " id: ID!\n" + " name: String!\n" + " values: [String]\n" + diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-protocol/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-protocol/pom.xml index b61c227ba4e..6aebde8f1ef 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-protocol/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/pom.xml index 8c951cada22..e2ebdadf8f3 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/src/main/resources/core_external_query_graphql_metamodel/sdl/metamodel.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/src/main/resources/core_external_query_graphql_metamodel/sdl/metamodel.pure index b5fc7db1371..a9b5bfa34d1 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/src/main/resources/core_external_query_graphql_metamodel/sdl/metamodel.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure-metamodel/src/main/resources/core_external_query_graphql_metamodel/sdl/metamodel.pure @@ -179,6 +179,7 @@ Class meta::external::query::graphQL::metamodel::sdl::typeSystem::InterfaceTypeD name: String[1]; directives: meta::external::query::graphQL::metamodel::sdl::Directive[*]; fields: meta::external::query::graphQL::metamodel::sdl::typeSystem::FieldDefinition[*]; + implements: String[*]; } Class meta::external::query::graphQL::metamodel::sdl::value::ListValue extends meta::external::query::graphQL::metamodel::sdl::value::Value diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/pom.xml index 7974680f5b0..90eac1ecb6b 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL 
- 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/bindingCommon.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/bindingCommon.pure index 868793bfa66..7986292093f 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/bindingCommon.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/bindingCommon.pure @@ -26,6 +26,17 @@ function meta::external::query::graphQL::binding::fromInputTypeName(inputTypeNam if($inputTypeName->endsWith('Input'), | $inputTypeName->substring(0, $inputTypeName->length() - 5), | '') } +// These two functions must roundtrip +function meta::external::query::graphQL::binding::toInterfaceTypeName(pureClassName: String[1]) : String[1] +{ + $pureClassName + 'Interface' +} + +function meta::external::query::graphQL::binding::fromInterfaceTypeName(interfaceTypeName: String[1]) : String[1] +{ + if($interfaceTypeName->endsWith('Interface'), | $interfaceTypeName->substring(0, $interfaceTypeName->length() - 9), | '') +} + function meta::external::query::graphQL::binding::purePrimitivesToGraphQLScalarTypes(): Pair[*] { [ @@ -61,4 +72,9 @@ function meta::external::query::graphQL::binding::temporalityToDirectives(): Map pair('businesstemporal', ^DirectiveDefinition(name = 'businesstemporal', typeSystemLocation = [TypeSystemDirectiveLocation.OBJECT, TypeSystemDirectiveLocation.INPUT_OBJECT])), pair('processingtemporal', ^DirectiveDefinition(name = 'processingtemporal', typeSystemLocation = [TypeSystemDirectiveLocation.OBJECT, TypeSystemDirectiveLocation.INPUT_OBJECT])) ]->newMap(); +} + +function meta::external::query::graphQL::binding::hierarchyDirective(): DirectiveDefinition[1] +{ + ^DirectiveDefinition(name = 'extends', typeSystemLocation = [TypeSystemDirectiveLocation.OBJECT, TypeSystemDirectiveLocation.INPUT_OBJECT]); } \ No newline at end of file diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/introspection/fromPure_Introspection.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/introspection/fromPure_Introspection.pure index 6d3e70c1dde..220a722795e 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/introspection/fromPure_Introspection.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/introspection/fromPure_Introspection.pure @@ -86,7 +86,7 @@ function <> meta::external::query::graphQL::binding::fromPure::i $t->match( [ c:Class[1] | - let fields = $c->allProperties()->map(p| + let fields = $c->allProperties()->filter(p|$p.genericType.rawType != Any)->map(p| ^__Field ( name = $p.name->toOne(), diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/fromPure_sdl.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/fromPure_sdl.pure index 5134624a040..fe18b826712 100644 --- 
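Taken together, the grammar, parser, composer, and metamodel changes above let an interface declare the interfaces it implements and survive a parse/render round trip, which is what the updated testInterfaceRoundtrip exercises. A sketch of that round trip in the style of the test class; the newInstance/parseDocument/renderDocument entry points and the Document import reflect how the tests drive the parser and composer, and should be read as assumptions rather than quotations from this patch:

    import org.finos.legend.engine.language.graphQL.grammar.from.GraphQLGrammarParser;
    import org.finos.legend.engine.language.graphQL.grammar.to.GraphQLGrammarComposer;
    import org.finos.legend.engine.protocol.graphQL.metamodel.Document;

    public class InterfaceImplementsRoundtrip
    {
        public static void main(String[] args)
        {
            String sdl = "interface Car implements Automobile {\n" +
                    "  id: ID!\n" +
                    "  name: String!\n" +
                    "}";

            // visitInterfaceTypeDefinition now records the implements clause...
            Document document = GraphQLGrammarParser.newInstance().parseDocument(sdl);

            // ...and the composer re-emits "implements A & B" when _implements is non-empty
            String rendered = GraphQLGrammarComposer.newInstance().renderDocument(document);
            System.out.println(rendered);
        }
    }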
a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/fromPure_sdl.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/fromPure_sdl.pure @@ -2,6 +2,7 @@ import meta::pure::model::unit::*; import meta::external::query::graphQL::binding::*; import meta::external::query::graphQL::binding::fromPure::sdl::*; import meta::external::query::graphQL::metamodel::sdl::*; +import meta::external::query::graphQL::metamodel::sdl::value::*; import meta::external::query::graphQL::metamodel::sdl::typeSystem::*; import meta::external::query::graphQL::metamodel::sdl::executable::*; @@ -92,6 +93,8 @@ function meta::external::query::graphQL::binding::fromPure::sdl::transformPureTo ->distinct() ->filter(t | $t->instanceOf(Class)); + let extendedClasses = $allTypes->map(t | $t->validGeneralizations()); + // Build types let graphQLTypes = $allTypes->map(c | $c->match( @@ -102,13 +105,13 @@ function meta::external::query::graphQL::binding::fromPure::sdl::transformPureTo buildInputObjectTypeDefinition($c)->concatenate( if ($classReturnTypes->contains($c), // Used as both -> needs both input and output types - | buildObjectTypeDefinition($c), + | buildObjectTypeDefinition($c, $extendedClasses->contains($c)), // Used as input -> needs input | [] ) ), // Not used as input --> needs output (unused types will result in output types) - | buildObjectTypeDefinition($c) + | buildObjectTypeDefinition($c, $extendedClasses->contains($c)) ), e:Enumeration[1] | ^EnumTypeDefinition( @@ -130,15 +133,20 @@ function meta::external::query::graphQL::binding::fromPure::sdl::transformPureTo ->distinct() ->map(s | temporalityToDirectives()->get($s)->toOne()); + let interfaces = $extendedClasses->map(c | $c->buildInterfaceTypeDefinition()); + $partitioned.second.values // Remove duplicated scalar definitions ->concatenate($partitioned.first.values->cast(@ScalarTypeDefinition)->removeDuplicatesBy(s | $s.name->toOne())) ->concatenate($processingDirectives) + ->concatenate($interfaces) + ->concatenate(if($interfaces->isNotEmpty(), | hierarchyDirective(), | [])) ->meta::pure::functions::collection::sortBy(t | $t->match( [ o : ObjectTypeDefinition[1] | $o.name, i : InputObjectTypeDefinition[1] | $i.name, + i : InterfaceTypeDefinition[1] | $i.name, e : EnumTypeDefinition[1] | $e.name, s : ScalarTypeDefinition[1] | $s.name, d : DirectiveDefinition[1] | $d.name @@ -165,7 +173,6 @@ function <> meta::external::query::graphQL::binding::fromPure::s ->map(s | $s.value) } - function <> meta::external::query::graphQL::binding::fromPure::sdl::isValidPropertyForGraphQL(p: AbstractProperty[1]): Boolean[1] { !$p.name->in(['processingDate', 'businessDate', 'milestoning']); @@ -186,7 +193,7 @@ function <> meta::external::query::graphQL::binding::fromPure::s function <> meta::external::query::graphQL::binding::fromPure::sdl::buildInputObjectTypeDefinition(c: Class[1]): TypeSystemDefinition[*] { - let props = $c->allProperties()->filter(p | $p->isValidPropertyForGraphQL()); + let props = $c->hierarchicalAllProperties()->filter(p | $p->isValidPropertyForGraphQL()); let nonBuiltInScalars = $props->buildNonBuiltInGraphQLScalars(); let temporalStereotypes = $c->getTemporalStereotypes(); @@ -194,7 +201,7 @@ function <> meta::external::query::graphQL::binding::fromPure::s ->concatenate( ^InputObjectTypeDefinition( name = toInputTypeName($c.name->toOne()), - directives = if($temporalStereotypes->isEmpty(), | [], | 
$temporalStereotypes->map(s | temporalityToDirectives()->get($s)->toOne())->map(def | ^Directive(name = $def.name))), + directives = $temporalStereotypes->map(s | temporalityToDirectives()->get($s)->toOne())->map(def | ^Directive(name = $def.name))->concatenate(buildHierarchyDirective($c, true)), fields = $props ->map(p | ^InputValueDefinition @@ -205,9 +212,9 @@ function <> meta::external::query::graphQL::binding::fromPure::s )); } -function <> meta::external::query::graphQL::binding::fromPure::sdl::buildObjectTypeDefinition(c: Class[1]): TypeSystemDefinition[*] +function <> meta::external::query::graphQL::binding::fromPure::sdl::buildObjectTypeDefinition(c: Class[1], isExtended: Boolean[1]): TypeSystemDefinition[*] { - let props = $c->allProperties()->filter(p | $p->isValidPropertyForGraphQL()); + let props = $c->hierarchicalAllProperties()->filter(p | $p->isValidPropertyForGraphQL()); let nonBuiltInScalars = $props->buildNonBuiltInGraphQLScalars(); let temporalStereotypes = $c->getTemporalStereotypes(); @@ -215,9 +222,9 @@ function <> meta::external::query::graphQL::binding::fromPure::s ->concatenate( ^ObjectTypeDefinition( name = $c.name->toOne(), - directives = if($temporalStereotypes->isEmpty(), | [], | $temporalStereotypes->map(s | temporalityToDirectives()->get($s)->toOne())->map(def | ^Directive(name = $def.name))), - fields = $c->allProperties() - ->filter(p | $p->isValidPropertyForGraphQL()) + implements = if($isExtended, | $c.name, | $c->validGeneralizations()->map(g | $g.name))->map(n | $n->toInterfaceTypeName()), + directives = $temporalStereotypes->map(s | temporalityToDirectives()->get($s)->toOne())->map(def | ^Directive(name = $def.name))->concatenate(buildHierarchyDirective($c, false)), + fields = $props ->map(p | ^FieldDefinition ( @@ -241,6 +248,27 @@ function <> meta::external::query::graphQL::binding::fromPure::s )); } +function <> meta::external::query::graphQL::binding::fromPure::sdl::buildInterfaceTypeDefinition(c: Class[1]): TypeSystemDefinition[*] +{ + let props = $c->hierarchicalAllProperties()->filter(p | $p->isValidPropertyForGraphQL()); + + let temporalStereotypes = $c->getTemporalStereotypes(); + ^InterfaceTypeDefinition( + name = $c.name->toOne()->toInterfaceTypeName(), + implements = $c->validGeneralizations()->map(g | $g.name)->map(n | $n->toInterfaceTypeName()), + directives = $temporalStereotypes->map(s | temporalityToDirectives()->get($s)->toOne())->map(def | ^Directive(name = $def.name))->concatenate(buildHierarchyDirective($c, false)), + fields = $props + ->map(p | + ^FieldDefinition + ( + name = $p.name->toOne(), + type = buildObjectTypeCompatibleTypeReference($p->functionReturnType(), $p->functionReturnMultiplicity()), + argumentDefinitions = [] + ) + ) + ); +} + function <> meta::external::query::graphQL::binding::fromPure::sdl::buildObjectTypeCompatibleTypeReference(type:meta::pure::metamodel::type::generics::GenericType[1], mul:Multiplicity[1]): TypeReference[1] { buildTypeReference($type, $mul, false) @@ -273,6 +301,11 @@ function <> meta::external::query::graphQL::binding::fromPure::s ); } +function meta::external::query::graphQL::binding::fromPure::sdl::buildHierarchyDirective(c: Class[1], forInput: Boolean[1]):Directive[*] +{ + $c->validGeneralizations()->map(g | ^Directive(name = meta::external::query::graphQL::binding::hierarchyDirective().name, arguments = ^Argument(name = 'class', value = ^StringValue(value = if($forInput, |$g.name->toOne()->toInputTypeName(), | $g.name->toOne()))))); +} + function 
meta::external::query::graphQL::binding::fromPure::sdl::findTypes(pElems:meta::pure::metamodel::PackageableElement[*]):meta::pure::metamodel::type::Type[*] { $pElems @@ -280,4 +313,3 @@ function meta::external::query::graphQL::binding::fromPure::sdl::findTypes(pElem ->cast(@meta::pure::metamodel::type::Type) ->removeDuplicates() } - diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/tests/simpleTest.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/tests/simpleTest.pure index 1584f315000..44d469792c8 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/tests/simpleTest.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/fromPure/sdl/tests/simpleTest.pure @@ -21,6 +21,23 @@ Class meta::external::query::graphQL::binding::fromPure::sdl::tests::model::Pers age : Integer[1]; } +Class meta::external::query::graphQL::binding::fromPure::sdl::tests::model::Employee extends meta::external::query::graphQL::binding::fromPure::sdl::tests::model::Person +{ + title: String[1]; + startDate: StrictDate[1]; +} + +Class meta::external::query::graphQL::binding::fromPure::sdl::tests::model::GSEmployee extends meta::external::query::graphQL::binding::fromPure::sdl::tests::model::Employee +{ + division: String[1]; +} + +Class meta::external::query::graphQL::binding::fromPure::sdl::tests::model::Team +{ + name: String[1]; + members: Employee[1..*]; +} + Class meta::external::query::graphQL::binding::fromPure::sdl::tests::model::ClassWithPrimitiveTypes { string: String[1]; @@ -54,6 +71,19 @@ Class <> meta::external::query::graphQL::binding::fromPure::sd upsertFirm(firm:Firm[1]){Firm.all()->first()}:Firm[0..1]; } +Class <> meta::external::query::graphQL::binding::fromPure::sdl::tests::model::QueryHierarchy +{ + teamByName(n:String[1]){Team.all()->filter(z|$z.name == $n)->first()}:Team[0..1]; + employeeByFirstName(n:String[1]){Employee.all()->filter(z|$z.firstName == $n)->first()}:Employee[0..1]; +} + +Class <> meta::external::query::graphQL::binding::fromPure::sdl::tests::model::MutationHierarchy +{ + // Dummy implementation + upsertTeam(team:Team[1]){Team.all()->first()}:Team[0..1]; + upsertEmployee(employee:Employee[1]){Employee.all()->first()}:Employee[0..1]; +} + function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::testQuery():Boolean[1] { let res = typesToGraphQLString([Query, Firm, Person, IncType]->cast(@PackageableElement)); @@ -208,6 +238,191 @@ function <> meta::external::query::graphQL::binding::fromPure::sdl::t $res); } +function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::testClassWithInheritance():Boolean[1] +{ + let res = typesToGraphQLString([Employee, Person]); + + assertEquals( + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'directive @extends on OBJECT | 
INPUT_OBJECT', + $res); +} + +function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::testClassWithDeepInheritance():Boolean[1] +{ + let res = typesToGraphQLString([GSEmployee, Employee, Person]); + + assertEquals( + 'type Employee implements EmployeeInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface EmployeeInterface implements PersonInterface {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type GSEmployee implements EmployeeInterface @extends(class: "Employee") {\n' + + ' division: String!\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT', + $res); +} + +function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::testPropertyWithInheritance():Boolean[1] +{ + let res = typesToGraphQLString([Team, Employee, Person]); + + assertEquals( + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'type Team {\n' + + ' name: String!\n' + + ' members: [Employee!]!\n' + + '}\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT', + $res); +} + +function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::testQueryAndMutationWithHierarchy():Boolean[1] +{ + let res = typesToGraphQLString([QueryHierarchy, MutationHierarchy, Team, Employee, Person]); + + assertEquals( + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'input EmployeeInput @extends(class: "PersonInput") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type MutationHierarchy {\n' + + ' upsertTeam(team: TeamInput!): Team\n' + + ' upsertEmployee(employee: EmployeeInput!): Employee\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'input PersonInput {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type QueryHierarchy {\n' + + ' teamByName(n: String!): Team\n' + + ' employeeByFirstName(n: String!): Employee\n' + + '}\n' + + '\n' + + 'scalar 
StrictDate\n' + + '\n' + + 'type Team {\n' + + ' name: String!\n' + + ' members: [Employee!]!\n' + + '}\n' + + '\n' + + 'input TeamInput {\n' + + ' name: String!\n' + + ' members: [EmployeeInput!]!\n' + + '}\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT\n' + + '\n' + + 'schema {\n' + + ' query : QueryHierarchy\n' + + ' mutation : MutationHierarchy\n' + + '}', + $res); +} + function <> meta::external::query::graphQL::binding::fromPure::sdl::tests::typesToGraphQLString(types: PackageableElement[*]): String[1] { meta::external::query::graphQL::binding::fromPure::sdl::transformPureToGraphQLSDL($types) diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/inputTypeNameTest.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/bindingCommonTest.pure similarity index 78% rename from legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/inputTypeNameTest.pure rename to legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/bindingCommonTest.pure index da3ee40d510..61989c85d21 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/inputTypeNameTest.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/tests/bindingCommonTest.pure @@ -1,21 +1,27 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import meta::external::query::graphQL::binding::*; - -function <> meta::external::query::graphQL::binding::tests::testInputTypeNameRoundTrip() : Boolean[1] -{ - let testTypeName = 'test'; - assertEquals($testTypeName, fromInputTypeName(toInputTypeName($testTypeName))); +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
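toInterfaceTypeName and fromInterfaceTypeName, added to bindingCommon.pure above, mirror the existing toInputTypeName/fromInputTypeName pair and must compose to the identity; that is exactly what testInterfaceTypeNameRoundTrip below asserts. The convention, restated as a minimal Java sketch:

    // The naming convention from bindingCommon.pure, restated in Java
    public final class GraphQLTypeNames
    {
        static String toInterfaceTypeName(String pureClassName)
        {
            return pureClassName + "Interface";
        }

        static String fromInterfaceTypeName(String interfaceTypeName)
        {
            // 'Interface' is 9 characters long; non-matching names map to the empty string
            return interfaceTypeName.endsWith("Interface")
                    ? interfaceTypeName.substring(0, interfaceTypeName.length() - 9)
                    : "";
        }

        public static void main(String[] args)
        {
            System.out.println("Person".equals(fromInterfaceTypeName(toInterfaceTypeName("Person")))); // true
        }
    }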
+ +import meta::external::query::graphQL::binding::*; + +function <> meta::external::query::graphQL::binding::tests::testInputTypeNameRoundTrip() : Boolean[1] +{ + let testTypeName = 'test'; + assertEquals($testTypeName, fromInputTypeName(toInputTypeName($testTypeName))); +} + +function <> meta::external::query::graphQL::binding::tests::testInterfaceTypeNameRoundTrip() : Boolean[1] +{ + let testTypeName = 'test'; + assertEquals($testTypeName, fromInterfaceTypeName(toInterfaceTypeName($testTypeName))); } \ No newline at end of file diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/simpleTest.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/simpleTest.pure index 1d2c60232d1..1f34e75abd9 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/simpleTest.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/simpleTest.pure @@ -219,6 +219,289 @@ function <> meta::external::query::graphQL::binding::toPure::sdl::tes $pureTypes); } +function <> meta::external::query::graphQL::binding::toPure::sdl::tests::testClassWithInheritance():Boolean[1] +{ + let pureTypes = + graphQLToPure( + '#GQL{' + + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT\n' + + '}#', + 'meta::external::query::graphQL::metamodel::sql' + ); + + assertEquals( + 'Class meta::external::query::graphQL::metamodel::sql::Employee extends meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' title : String[1];\n' + + ' startDate : StrictDate[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' firstName : String[0..1];\n' + + ' lastName : String[1];\n' + + ' age : Integer[1];\n' + + '}', + $pureTypes); +} + +function <> meta::external::query::graphQL::binding::toPure::sdl::tests::testPropertyWithInheritance():Boolean[1] +{ + let pureTypes = + graphQLToPure( + '#GQL{' + + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'type Team {\n' + + ' name: String!\n' + + ' members: [Employee!]!\n' + + '}\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT\n' + + '}#', + 'meta::external::query::graphQL::metamodel::sql' + ); + + assertEquals( + 'Class meta::external::query::graphQL::metamodel::sql::Employee extends 
meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' title : String[1];\n' + + ' startDate : StrictDate[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' firstName : String[0..1];\n' + + ' lastName : String[1];\n' + + ' age : Integer[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Team\n' + + '{\n' + + ' name : String[1];\n' + + ' members : meta::external::query::graphQL::metamodel::sql::Employee[*];\n' + + '}', + $pureTypes); +} + + +function <> meta::external::query::graphQL::binding::toPure::sdl::tests::testClassWithDeepInheritance():Boolean[1] +{ + let pureTypes = + graphQLToPure( + '#GQL{' + + 'type Employee implements EmployeeInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface EmployeeInterface implements PersonInterface {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type GSEmployee implements EmployeeInterface @extends(class: "Employee") {\n' + + ' division: String!\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT\n' + + '}#', + 'meta::external::query::graphQL::metamodel::sql' + ); + + assertEquals( + 'Class meta::external::query::graphQL::metamodel::sql::Employee extends meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' title : String[1];\n' + + ' startDate : StrictDate[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::GSEmployee extends meta::external::query::graphQL::metamodel::sql::Employee\n' + + '{\n' + + ' division : String[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' firstName : String[0..1];\n' + + ' lastName : String[1];\n' + + ' age : Integer[1];\n' + + '}', + $pureTypes); +} + + +function <> meta::external::query::graphQL::binding::toPure::sdl::tests::testQueryAndMutationWithHierarchy():Boolean[1] +{ + let pureTypes = + graphQLToPure( + '#GQL{' + + 'type Employee implements PersonInterface @extends(class: "Person") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'input EmployeeInput @extends(class: "PersonInput") {\n' + + ' title: String!\n' + + ' startDate: StrictDate!\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type MutationHierarchy {\n' + + ' upsertTeam(team: TeamInput!): Team\n' + + ' upsertEmployee(employee: EmployeeInput!): Employee\n' + + '}\n' + + '\n' + + 'type Person implements PersonInterface {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'input PersonInput {\n' + + ' firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'interface PersonInterface {\n' + + ' 
firstName: String\n' + + ' lastName: String!\n' + + ' age: Int!\n' + + '}\n' + + '\n' + + 'type QueryHierarchy {\n' + + ' teamByName(n: String!): Team\n' + + ' employeeByFirstName(n: String!): Employee\n' + + '}\n' + + '\n' + + 'scalar StrictDate\n' + + '\n' + + 'type Team {\n' + + ' name: String!\n' + + ' members: [Employee!]!\n' + + '}\n' + + '\n' + + 'input TeamInput {\n' + + ' name: String!\n' + + ' members: [EmployeeInput!]!\n' + + '}\n' + + '\n' + + 'directive @extends on OBJECT | INPUT_OBJECT\n' + + '\n' + + 'schema {\n' + + ' query : QueryHierarchy\n' + + ' mutation : MutationHierarchy\n' + + '}\n' + + '}#', + 'meta::external::query::graphQL::metamodel::sql' + ); + + assertEquals( + 'Class meta::external::query::graphQL::metamodel::sql::Employee extends meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' title : String[1];\n' + + ' startDate : StrictDate[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::MutationHierarchy\n' + + '{\n' + + ' upsertTeam(team: meta::external::query::graphQL::metamodel::sql::Team[1]): meta::external::query::graphQL::metamodel::sql::Team[0..1];\n' + + ' upsertEmployee(employee: meta::external::query::graphQL::metamodel::sql::Employee[1]): meta::external::query::graphQL::metamodel::sql::Employee[0..1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Person\n' + + '{\n' + + ' firstName : String[0..1];\n' + + ' lastName : String[1];\n' + + ' age : Integer[1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::QueryHierarchy\n' + + '{\n' + + ' teamByName(n: String[1]): meta::external::query::graphQL::metamodel::sql::Team[0..1];\n' + + ' employeeByFirstName(n: String[1]): meta::external::query::graphQL::metamodel::sql::Employee[0..1];\n' + + '}\n' + + '\n' + + 'Class meta::external::query::graphQL::metamodel::sql::Team\n' + + '{\n' + + ' name : String[1];\n' + + ' members : meta::external::query::graphQL::metamodel::sql::Employee[*];\n' + + '}', + $pureTypes); +} function <<access.private>> meta::external::query::graphQL::binding::toPure::sdl::tests::graphQLToPure(graphQLDocument: String[1], purePackage: String[1]): String[1] { diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/typeCompatibility.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/typeCompatibility.pure index 528ee0917ae..e8d14098aa3 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/typeCompatibility.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/tests/typeCompatibility.pure @@ -26,14 +26,19 @@ function <<test.Test>> meta::external::query::graphQL::binding::toPure::sdl::tes assert(isTypeCompatible(Employee, Employee_2)); assert(isTypeCompatible(Address, Address_2)); assert(isTypeCompatible(Country, Country_2)); + assert(isTypeCompatible(Child, Child_2)); + assert(isTypeCompatible(Parent, Parent_2)); assertFalse(isTypeCompatible(Firm, Employee_2)); assertFalse(isTypeCompatible(Address, Country_2)); + assertFalse(isTypeCompatible(Child, Parent)); assertFalse(isTypeCompatible(Firm, Firm_False)); assertFalse(isTypeCompatible(Employee, Employee_False)); assertFalse(isTypeCompatible(Address, Address_False)); assertFalse(isTypeCompatible(Country, Country_False)); + 
assertFalse(isTypeCompatible(Child, Child_False)); + assertFalse(isTypeCompatible(Child, Child_Parent_False)); } @@ -138,3 +143,37 @@ Class meta::external::query::graphQL::binding::toPure::sdl::tests::Country_False iso2Code : CountryCode[1]; } + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Child extends meta::external::query::graphQL::binding::toPure::sdl::tests::Parent +{ + child: String[1]; +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Parent +{ + parent: String[1]; +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Child_2 extends meta::external::query::graphQL::binding::toPure::sdl::tests::Parent_2 +{ + child: String[1]; +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Parent_2 +{ + parent: String[1]; +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Child_False +{ + child: String[1]; +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Parent_False +{ +} + +Class meta::external::query::graphQL::binding::toPure::sdl::tests::Child_Parent_False extends meta::external::query::graphQL::binding::toPure::sdl::tests::Parent_False +{ + child: String[1]; +} \ No newline at end of file diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/toPure_sdl.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/toPure_sdl.pure index 87f5f55494b..fd8eb1cc201 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/toPure_sdl.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/binding/toPure/sdl/toPure_sdl.pure @@ -1,6 +1,7 @@ ###Pure import meta::external::query::graphQL::binding::*; import meta::external::query::graphQL::metamodel::sdl::*; +import meta::external::query::graphQL::metamodel::sdl::value::*; import meta::external::query::graphQL::metamodel::sdl::typeSystem::*; import meta::external::query::graphQL::binding::toPure::*; import meta::external::query::graphQL::binding::toPure::sdl::*; @@ -11,38 +12,60 @@ function meta::external::query::graphQL::binding::toPure::sdl::graphQLTypeSystem let operationTypes = $doc.definitions->map(def | if($def->instanceOf(SchemaDefinition), | $def->cast(@SchemaDefinition).rootOperationTypeDefinitions.type, | [])); - // Pass 1 -- shell types used as type references + // Pass 1a -- shell types used as type references let allShellTypesByName = $doc.definitions->map(def| $def->match( [ o : ObjectTypeDefinition[1] | let no = newClass($o.name); let temporality = $o.directives.name->intersection(temporalityToDirectives()->keys())->map(td | ^Stereotype(profile = meta::pure::profiles::temporal, value = $td)); - let cp = ^$no(package = $pack, stereotypes = $temporality); + let cp = ^$no(package = $pack, stereotypes = $temporality, generalizations = []); pair($o.name, $cp);, i : InputObjectTypeDefinition[1] | let no = newClass($i.name); let temporality = $i.directives.name->intersection(temporalityToDirectives()->keys())->map(td | ^Stereotype(profile = meta::pure::profiles::temporal, value = $td)); - let cp = ^$no(package = $pack, stereotypes = $temporality); + let cp = ^$no(package = $pack, stereotypes = $temporality, generalizations = []); pair($i.name, $cp);, e : EnumTypeDefinition[1] | let ne = newEnumeration($e.name, $e.values.value); let ep = 
^$ne(package = $pack); pair($e.name, $ep);, s : SchemaDefinition[1] | [], d : DirectiveDefinition[1] | [], - s : ScalarTypeDefinition[1] | [] + s : ScalarTypeDefinition[1] | [], + i : InterfaceTypeDefinition[1] | [] ] ) )->concatenate(graphQLScalarTypesToPurePrimitives())->newMap(); + // Pass 1b -- add hierarchy + $doc.definitions->map(def| + $def->match( + [ + o : ObjectTypeDefinition[1] | buildHierarchy($o.name, $o.directives, $allShellTypesByName), + i : InputObjectTypeDefinition[1] | buildHierarchy($i.name, $i.directives, $allShellTypesByName), + a : Any[1] | [] + ] + ) + ); + + let graphQLTypesByName = $doc.definitions->map(def| + $def->match( + [ + o : ObjectTypeDefinition[1] | pair($o.name, $o), + i : InputObjectTypeDefinition[1] | pair($i.name, $i), + a : Any[1] | [] + ] + ))->newMap(); + // Pass 2a -- build non-operation object and enum types let builtObjectTypesByName = $doc.definitions->map(def | $def->match( [ - o : ObjectTypeDefinition[1] | if($o.name->in($operationTypes), | [], | $o->buildPureType($allShellTypesByName)), + o : ObjectTypeDefinition[1] | if($o.name->in($operationTypes), | [], | $o->buildPureType($allShellTypesByName, $graphQLTypesByName)), i : InputObjectTypeDefinition[1] | [], e : EnumTypeDefinition[1] | $allShellTypesByName->get($e.name)->toOne(), s : SchemaDefinition[1] | [], d : DirectiveDefinition[1] | [], - s : ScalarTypeDefinition[1] | [] + s : ScalarTypeDefinition[1] | [], + i : InterfaceTypeDefinition[1] | [] ] ) )->map(t | pair($t.name->toOne(), $t))->newMap(); @@ -51,12 +74,13 @@ function meta::external::query::graphQL::binding::toPure::sdl::graphQLTypeSystem let builtInputAndOperationTypesByName = $doc.definitions->map(def | $def->match( [ - o : ObjectTypeDefinition[1] | if($o.name->in($operationTypes), | $o->buildPureType($allShellTypesByName), | []), - i : InputObjectTypeDefinition[1] | $i->buildPureType($allShellTypesByName), + o : ObjectTypeDefinition[1] | if($o.name->in($operationTypes), | $o->buildPureType($allShellTypesByName, $graphQLTypesByName), | []), + i : InputObjectTypeDefinition[1] | $i->buildPureType($allShellTypesByName, $graphQLTypesByName), e : EnumTypeDefinition[1] | [], s : SchemaDefinition[1] | [], d : DirectiveDefinition[1] | [], - s : ScalarTypeDefinition[1] | [] + s : ScalarTypeDefinition[1] | [], + i : InterfaceTypeDefinition[1] | [] ] ) )->map(t | pair($t.name->toOne(), $t))->newMap(); @@ -110,45 +134,73 @@ function meta::external::query::graphQL::binding::toPure::buildTransientPackageF $package->split('::')->fold({a,b|^Package(name = $a, package=$b)}, ^Package()); } -function <> meta::external::query::graphQL::binding::toPure::sdl::buildPureType(graphQLType: meta::external::query::graphQL::metamodel::sdl::typeSystem::Type[1], shellTypesByName: Map[1]) : meta::pure::metamodel::type::Type[1] +function <> meta::external::query::graphQL::binding::toPure::sdl::buildPureType(graphQLType: meta::external::query::graphQL::metamodel::sdl::typeSystem::Type[1], shellTypesByName: Map[1], graphQLTypesByName: Map[1]) : meta::pure::metamodel::type::Type[*] { $graphQLType->match( [ o : ObjectTypeDefinition[1] | let shellType = $shellTypesByName->get($o.name)->toOne(); - let properties = $o.fields->map(f | - let fieldTypeMultiplicityAndName = $f.type->extractPureMultiplicityAndTypeName(); - let fieldType = $shellTypesByName->get($fieldTypeMultiplicityAndName.second); - assert($fieldType->isNotEmpty(), 'Unable to find type ' + $fieldTypeMultiplicityAndName.second); - // Has arguments -- wil be qualified property - let 
fieldVariableExpressions = $f.argumentDefinitions->map(a | - let argumentMultiplicityAndName = $a.type->extractPureMultiplicityAndTypeName(); - let argumentType = $shellTypesByName->get($argumentMultiplicityAndName.second); - assert($argumentType->isNotEmpty(), 'Unable to find type ' + $argumentMultiplicityAndName.second); - ^VariableExpression(name = $a.name, multiplicity = $argumentMultiplicityAndName.first, genericType = ^GenericType(rawType = $argumentType->toOne())); - ); - if ($fieldVariableExpressions->isEmpty(), - | - meta::pure::functions::meta::newProperty($f.name, ^GenericType(rawType = $shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first), - | - newQualifiedProperty($f.name, ^GenericType(rawType = $shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first, $fieldVariableExpressions); - ); - ); - $shellType->mutateAdd('properties', $properties->filter(p | $p->instanceOf(Property))); - $shellType->mutateAdd('qualifiedProperties', $properties->filter(p | $p->instanceOf(QualifiedProperty)));, + if ($shellType->cast(@Class).properties->isNotEmpty(), + | [], + | let allGeneralizations = $shellType->hierarchicalAllGeneralizations(); + let builtGeneralizations = $allGeneralizations->map(c | buildPureType($graphQLTypesByName->get($c.name->toOne())->toOne(), $shellTypesByName, $graphQLTypesByName)); + let generalizationProps = $allGeneralizations->filter(t | $t->instanceOf(Class))->map(t | $t->cast(@Class)).properties.name; + let properties = $o.fields->map(f | + if ($generalizationProps->contains($f.name), + | [], + | let fieldTypeMultiplicityAndName = $f.type->extractPureMultiplicityAndTypeName(); + let fieldType = $shellTypesByName->get($fieldTypeMultiplicityAndName.second); + assert($fieldType->isNotEmpty(), 'Unable to find type ' + $fieldTypeMultiplicityAndName.second); + // Has arguments -- will be qualified property + let fieldVariableExpressions = $f.argumentDefinitions->map(a | + let argumentMultiplicityAndName = $a.type->extractPureMultiplicityAndTypeName(); + let argumentType = $shellTypesByName->get($argumentMultiplicityAndName.second); + assert($argumentType->isNotEmpty(), 'Unable to find type ' + $argumentMultiplicityAndName.second); + ^VariableExpression(name = $a.name, multiplicity = $argumentMultiplicityAndName.first, genericType = ^GenericType(rawType = $argumentType->toOne())); + ); + if ($fieldVariableExpressions->isEmpty(), + | + meta::pure::functions::meta::newProperty($f.name, ^GenericType(rawType = $shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first), + | + newQualifiedProperty($f.name, ^GenericType(rawType = $shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first, $fieldVariableExpressions); + ); + ); + ); + $shellType->mutateAdd('properties', $properties->filter(p | $p->instanceOf(Property))); + $shellType->mutateAdd('qualifiedProperties', $properties->filter(p | $p->instanceOf(QualifiedProperty)))->concatenate($builtGeneralizations); + );, i : InputObjectTypeDefinition[1] | let shellType = $shellTypesByName->get($i.name)->toOne(); - let properties = $i.fields->map(f | - let fieldTypeMultiplicityAndName = $f.type->extractPureMultiplicityAndTypeName(); - let fieldType = $shellTypesByName->get($fieldTypeMultiplicityAndName.second); - assert($fieldType->isNotEmpty(), 'Unable to find type ' + $fieldTypeMultiplicityAndName.second); - meta::pure::functions::meta::newProperty($f.name, ^GenericType(rawType = 
$shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first); + if ($shellType->cast(@Class).properties->isNotEmpty(), + | [], + | let allGeneralizations = $shellType->hierarchicalAllGeneralizations(); + let builtGeneralizations = $allGeneralizations->map(c | buildPureType($graphQLTypesByName->get($c.name->toOne())->toOne(), $shellTypesByName, $graphQLTypesByName)); + let generalizationProps = $allGeneralizations->filter(t | $t->instanceOf(Class))->map(t | $t->cast(@Class)).properties.name; + let properties = $i.fields->map(f | + if ($generalizationProps->contains($f.name), + | [], + | let fieldTypeMultiplicityAndName = $f.type->extractPureMultiplicityAndTypeName(); + let fieldType = $shellTypesByName->get($fieldTypeMultiplicityAndName.second); + assert($fieldType->isNotEmpty(), 'Unable to find type ' + $fieldTypeMultiplicityAndName.second); + meta::pure::functions::meta::newProperty($f.name, ^GenericType(rawType = $shellType), ^GenericType(rawType = $fieldType->toOne()), $fieldTypeMultiplicityAndName.first); + ); + ); + $shellType->mutateAdd('properties', $properties)->concatenate($builtGeneralizations); ); - $shellType->mutateAdd('properties', $properties); ] ) } +function <> meta::external::query::graphQL::binding::toPure::sdl::buildHierarchy(graphQLTypeName: String[1], graphQLTypeDirectives: Directive[*], allShellTypesByName: Map[1]): Nil[0] +{ + let extendedClassNames = $graphQLTypeDirectives->filter(d | $d.name == hierarchyDirective().name)->map(hd | $hd.arguments->filter(a | $a.name == 'class').value->cast(@StringValue).value->toOne()); + let thisClass = $allShellTypesByName->get($graphQLTypeName)->toOne(); + let generalizations = $extendedClassNames->map(n | ^Generalization(specific = $thisClass, general = ^GenericType(rawType = $allShellTypesByName->get($n)->toOne()))); + if ($generalizations->isEmpty(), | $thisClass->mutateAdd('generalizations', ^Generalization(specific = $thisClass, general = ^GenericType(rawType = Any))), | $thisClass->mutateAdd('generalizations', $generalizations)); + $extendedClassNames->map(n | $allShellTypesByName->get($n)->toOne()->mutateAdd('specializations', $generalizations)); + []; +} + function <> meta::external::query::graphQL::binding::toPure::sdl::extractPureMultiplicityAndTypeName(typeReference: TypeReference[1]): Pair[1] { $typeReference @@ -166,11 +218,17 @@ function meta::external::query::graphQL::binding::toPure::isTypeCompatible(one: | if ($one->instanceOf(PrimitiveType) || $one->instanceOf(Enumeration), | $one == $two, - | $one->cast(@Class).properties->sortBy(p | $p.name->toOne()) - ->zip($two->cast(@Class).properties->sortBy(p | $p.name->toOne())) - ->map(pair | $pair.first->isPropertyCompatible($pair.second)) - ->distinct() - ->equal([true]) + | let oneGeneralizations = $one->validGeneralizations()->sort(); + let twoGeneralizations = $two->validGeneralizations()->sort(); + let matchedGeneralizations = $oneGeneralizations->zip($twoGeneralizations)->map(pair | $pair.first->isTypeCompatible($pair.second))->distinct(); + + $oneGeneralizations->size() == $twoGeneralizations->size() + && ($matchedGeneralizations->isEmpty() || $matchedGeneralizations->equal([true])) + && $one->cast(@Class).properties->sortBy(p | $p.name->toOne()) + ->zip($two->cast(@Class).properties->sortBy(p | $p.name->toOne())) + ->map(pair | $pair.first->isPropertyCompatible($pair.second)) + ->distinct() + ->equal([true]); ), | false ) diff --git 
a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/serialization/serialization.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/serialization/serialization.pure index fa5f9d06da6..754b68e5c5c 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/serialization/serialization.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/serialization/serialization.pure @@ -75,7 +75,7 @@ function meta::external::query::graphQL::serialization::graphQLtoString(type: Un function meta::external::query::graphQL::serialization::graphQLtoString(type: ObjectTypeDefinition[1]):String[1] { - 'type ' + $type.name + $type.directives->graphQLtoString() + if ($type.implements->isEmpty(),|'',|' implements '+$type.implements->joinStrings(' & ')) + ' {\n'+ + 'type ' + $type.name + if ($type.implements->isEmpty(),|'',|' implements '+$type.implements->joinStrings(' & ')) + $type.directives->graphQLtoString() + ' {\n'+ $type.fields->map(f|' ' + $f->graphQLtoString())->joinStrings('\n') + '\n}' } @@ -94,7 +94,7 @@ function meta::external::query::graphQL::serialization::graphQLtoString(field: F function meta::external::query::graphQL::serialization::graphQLtoString(type: InterfaceTypeDefinition[1]):String[1] { - 'interface ' + $type.name+' {\n' + + 'interface ' + $type.name+ if($type.implements->isEmpty(),|'',|' implements '+$type.implements->joinStrings(' & ')) + ' {\n' + $type.fields->map(f|' ' + $f->graphQLtoString())->joinStrings('\n') + '\n}' } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/transformation/tests/testIntrospectionQuery.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/transformation/tests/testIntrospectionQuery.pure index e69fdf271c4..efda05f5b24 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/transformation/tests/testIntrospectionQuery.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-pure/src/main/resources/core_external_query_graphql/transformation/tests/testIntrospectionQuery.pure @@ -6,6 +6,8 @@ Class {doc.doc = 'Firm class representing a physical entity of Firm'} meta::exte {doc.doc = 'All employees of the firm'} employees : meta::external::query::graphQL::transformation::introspection::tests::Person[*]; {doc.doc = 'Is firm a public entity?'} isPublicEntity: Boolean[1]; {doc.doc = 'Type of the firm'} firmType: Firm_Type[1]; + anyProperty: Any[1]; + anyQualifiedProperty() {'LegalName: ' + $this.legalName}: Any[1]; } Enum meta::external::query::graphQL::transformation::introspection::tests::Firm_Type diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/pom.xml index f093ec9f128..c99a73129fa 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -132,6 +132,10 @@ org.finos.legend.engine legend-engine-language-pure-dsl-generation + + org.finos.legend.engine + legend-engine-xt-data-space-pure-metamodel + @@ -192,21 +196,6 @@ jackson-databind - - org.apache.httpcomponents - 
httpclient - - - commons-codec - commons-codec - - - - - org.apache.httpcomponents - httpcore - - com.google.guava @@ -304,6 +293,16 @@ legend-engine-xt-relationalStore-protocol test + + org.finos.legend.engine + legend-engine-xt-data-space-grammar + test + + + org.finos.legend.engine + legend-engine-xt-data-space-compiler + test + diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/GraphQL.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/GraphQL.java index fb7c116a500..529a3af8af7 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/GraphQL.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/GraphQL.java @@ -14,51 +14,30 @@ package org.finos.legend.engine.query.graphQL.api; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.http.client.ClientProtocolException; -import org.apache.http.client.CookieStore; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.impl.client.BasicCookieStore; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.util.EntityUtils; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import javax.security.auth.Subject; +import javax.servlet.http.HttpServletRequest; import org.eclipse.collections.api.list.MutableList; -import org.eclipse.collections.impl.utility.ArrayIterate; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.language.pure.modelManager.ModelManager; import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration; -import org.finos.legend.engine.plan.execution.result.ConstantResult; -import org.finos.legend.engine.plan.execution.result.Result; import org.finos.legend.engine.protocol.graphQL.metamodel.Document; import org.finos.legend.engine.protocol.graphQL.metamodel.ProtocolToMetamodelTranslator; -import org.finos.legend.engine.protocol.graphQL.metamodel.executable.OperationDefinition; import org.finos.legend.engine.protocol.pure.PureClientVersions; import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC; -import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer; -import org.finos.legend.engine.shared.core.ObjectMapperFactory; -import org.finos.legend.engine.shared.core.kerberos.HttpClientBuilder; +import org.finos.legend.engine.protocol.pure.v1.model.context.WorkspaceSDLC; import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper; -import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; import org.pac4j.core.profile.CommonProfile; -import javax.security.auth.Subject; -import javax.servlet.http.HttpServletRequest; -import java.security.PrivilegedActionException; -import java.security.PrivilegedExceptionAction; -import java.util.List; - public abstract class GraphQL { protected ModelManager modelManager; - protected MetaDataServerConfiguration metadataserver; public GraphQL(ModelManager modelManager, 
MetaDataServerConfiguration metadataserver) { this.modelManager = modelManager; - this.metadataserver = metadataserver; } public static org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document toPureModel(Document document, PureModel pureModel) @@ -68,106 +47,21 @@ public static org.finos.legend.pure.generated.Root_meta_external_query_graphQL_m protected PureModel loadSDLCProjectModel(MutableList profiles, HttpServletRequest request, String projectId, String workspaceId, boolean isGroupWorkspace) throws PrivilegedActionException { - Subject subject = ProfileManagerHelper.extractSubject(profiles); - return subject == null ? - getSDLCProjectPureModel(profiles, request, projectId, workspaceId, isGroupWorkspace) : - Subject.doAs(subject, (PrivilegedExceptionAction) () -> getSDLCProjectPureModel(profiles, request, projectId, workspaceId, isGroupWorkspace)); - } - - private static class SDLCProjectDependency - { - public String projectId; - public String versionId; - - public String getGroupId() - { - return projectId.split(":")[0]; - } - - public String getArtifactId() - { - return projectId.split(":")[1]; - } - - public String getVersionId() - { - return versionId; - } - } - - private PureModelContextData getSDLCDependenciesPMCD(MutableList profiles, CookieStore cookieStore, String projectId, String workspaceId, boolean isGroupWorkspace) - { - try (CloseableHttpClient client = (CloseableHttpClient) HttpClientBuilder.getHttpClient(cookieStore)) - { - HttpGet req = new HttpGet("http://" + metadataserver.getSdlc().host + ":" + metadataserver.getSdlc().port + "/api/projects/" + projectId + (isGroupWorkspace ? "/groupWorkspaces/" : "/workspaces/") + workspaceId + "/revisions/" + "HEAD" + "/upstreamProjects"); - try (CloseableHttpResponse res = client.execute(req)) - { - ObjectMapper mapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports(); - List dependencies = mapper.readValue(EntityUtils.toString(res.getEntity()), new TypeReference>() {}); - PureModelContextData.Builder builder = PureModelContextData.newBuilder(); - dependencies.forEach(dependency -> - { - try - { - builder.addPureModelContextData(loadProjectData(profiles, dependency.getGroupId(), dependency.getArtifactId(), dependency.versionId)); - } - catch (Exception e) - { - throw new RuntimeException(e); - } - }); - builder.removeDuplicates(); - return builder.build(); - } - } - catch (Exception e) - { - throw new RuntimeException(e); - } - } - - private PureModel getSDLCProjectPureModel(MutableList profiles, HttpServletRequest request, String projectId, String workspaceId, boolean isGroupWorkspace) - { - CookieStore cookieStore = new BasicCookieStore(); - ArrayIterate.forEach(request.getCookies(), c -> cookieStore.addCookie(new MyCookie(c))); - + WorkspaceSDLC sdlcInfo = new WorkspaceSDLC(); + sdlcInfo.project = projectId; + sdlcInfo.version = workspaceId; + sdlcInfo.isGroupWorkspace = isGroupWorkspace; - try (CloseableHttpClient client = (CloseableHttpClient) HttpClientBuilder.getHttpClient(cookieStore)) - { - if (metadataserver == null || metadataserver.getSdlc() == null) - { - throw new EngineException("Please specify the metadataserver.sdlc information in the server configuration"); - } - HttpGet req = new HttpGet("http://" + metadataserver.getSdlc().host + ":" + metadataserver.getSdlc().port + "/api/projects/" + projectId + (isGroupWorkspace ? 
"/groupWorkspaces/" : "/workspaces/") + workspaceId + "/pureModelContextData"); - try (CloseableHttpResponse res = client.execute(req)) - { - ObjectMapper mapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports(); - PureModelContextData pureModelContextData = mapper.readValue(res.getEntity().getContent(), PureModelContextData.class); - PureModelContextData dependenciesPMCD = getSDLCDependenciesPMCD(profiles, cookieStore, projectId, workspaceId, isGroupWorkspace); - return this.modelManager.loadModel(pureModelContextData.combine(dependenciesPMCD), PureClientVersions.production, profiles, ""); - } - } - catch (Exception e) - { - throw new RuntimeException(e); - } - } - - protected PureModel loadProjectModel(MutableList profiles, String groupId, String artifactId, String versionId) throws PrivilegedActionException - { - Subject subject = ProfileManagerHelper.extractSubject(profiles); PureModelContextPointer pointer = new PureModelContextPointer(); - AlloySDLC sdlcInfo = new AlloySDLC(); - sdlcInfo.groupId = groupId; - sdlcInfo.artifactId = artifactId; - sdlcInfo.version = versionId; pointer.sdlcInfo = sdlcInfo; + + Subject subject = ProfileManagerHelper.extractSubject(profiles); return subject == null ? this.modelManager.loadModel(pointer, PureClientVersions.production, profiles, "") : Subject.doAs(subject, (PrivilegedExceptionAction) () -> this.modelManager.loadModel(pointer, PureClientVersions.production, profiles, "")); } - protected PureModelContextData loadProjectData(MutableList profiles, String groupId, String artifactId, String versionId) throws PrivilegedActionException + protected PureModel loadProjectModel(MutableList profiles, String groupId, String artifactId, String versionId) throws PrivilegedActionException { Subject subject = ProfileManagerHelper.extractSubject(profiles); PureModelContextPointer pointer = new PureModelContextPointer(); @@ -177,7 +71,7 @@ protected PureModelContextData loadProjectData(MutableList profil sdlcInfo.version = versionId; pointer.sdlcInfo = sdlcInfo; return subject == null ? - this.modelManager.loadData(pointer, PureClientVersions.production, profiles) : - Subject.doAs(subject, (PrivilegedExceptionAction) () -> this.modelManager.loadData(pointer, PureClientVersions.production, profiles)); + this.modelManager.loadModel(pointer, PureClientVersions.production, profiles, "") : + Subject.doAs(subject, (PrivilegedExceptionAction) () -> this.modelManager.loadModel(pointer, PureClientVersions.production, profiles, "")); } } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/MyCookie.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/MyCookie.java deleted file mode 100644 index 93152da1e14..00000000000 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/MyCookie.java +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2022 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.engine.query.graphQL.api; - -import org.apache.http.cookie.Cookie; - -import java.util.Date; - -public class MyCookie implements Cookie -{ - private javax.servlet.http.Cookie cookie; - - public MyCookie(javax.servlet.http.Cookie cookie) - { - this.cookie = cookie; - } - - @Override - public String getName() - { - return this.cookie.getName(); - } - - @Override - public String getValue() - { - return this.cookie.getValue(); - } - - @Override - public String getComment() - { - return this.cookie.getComment(); - } - - @Override - public String getCommentURL() - { - return ""; - } - - @Override - public Date getExpiryDate() - { - if (this.cookie.getMaxAge() >= 0) - { - return new Date(System.currentTimeMillis() + this.cookie.getMaxAge() * 1000L); - } - throw new RuntimeException(""); - } - - @Override - public boolean isPersistent() - { - return true; - } - - @Override - public String getDomain() - { - return "localhost"; - } - - @Override - public String getPath() - { - return "/"; - } - - @Override - public int[] getPorts() - { - return new int[]{}; - } - - @Override - public boolean isSecure() - { - return false; - } - - @Override - public int getVersion() - { - return this.cookie.getVersion(); - } - - @Override - public boolean isExpired(Date date) - { - return false; - } -} diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLCacheKey.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLCacheKey.java index 0efc107cfde..950c03c281d 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLCacheKey.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLCacheKey.java @@ -16,4 +16,5 @@ public interface GraphQLCacheKey { + String getQueryClassPath(); } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLDevCacheKey.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLDevCacheKey.java index e21d51dede0..77f3d3e2489 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLDevCacheKey.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLDevCacheKey.java @@ -14,7 +14,7 @@ package org.finos.legend.engine.query.graphQL.api.cache; -import com.google.common.base.Objects; +import java.util.Objects; public class GraphQLDevCacheKey implements GraphQLCacheKey { @@ -47,12 +47,37 @@ public boolean equals(Object o) return false; } GraphQLDevCacheKey that = (GraphQLDevCacheKey) o; - return Objects.equal(projectId, that.projectId) && Objects.equal(workspaceId, that.workspaceId) && Objects.equal(queryClassPath, that.queryClassPath) && Objects.equal(mappingPath, that.mappingPath) && Objects.equal(runtimePath, that.runtimePath) && Objects.equal(query, that.query); + return Objects.equals(projectId, that.projectId) + && Objects.equals(workspaceId, that.workspaceId) + && Objects.equals(queryClassPath, that.queryClassPath) + && 
Objects.equals(mappingPath, that.mappingPath) + && Objects.equals(runtimePath, that.runtimePath) + && Objects.equals(query, that.query); } @Override public int hashCode() { - return Objects.hashCode(projectId, workspaceId, queryClassPath, mappingPath, runtimePath, query); + return Objects.hash(projectId, workspaceId, queryClassPath, mappingPath, runtimePath, query); + } + + public String getQueryClassPath() + { + return queryClassPath; + } + + public String getMappingPath() + { + return mappingPath; + } + + public String getRuntimePath() + { + return runtimePath; + } + + public String getQuery() + { + return query; } } diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdCacheKey.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdCacheKey.java index 409978309f1..226cf132c90 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdCacheKey.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdCacheKey.java @@ -14,25 +14,21 @@ package org.finos.legend.engine.query.graphQL.api.cache; -import com.google.common.base.Objects; +import java.util.Objects; -public class GraphQLProdCacheKey implements GraphQLCacheKey +public abstract class GraphQLProdCacheKey implements GraphQLCacheKey { - private String groupID; - private String artifactId; - private String versionId; - private String mappingPath; - private String runtimePath; - private String queryClassPath; - private String query; - - public GraphQLProdCacheKey(String groupID, String artifactId, String versionId, String mappingPath, String runtimePath, String queryClassPath, String query) + protected String groupID; + protected String artifactId; + protected String versionId; + protected String queryClassPath; + protected String query; + + public GraphQLProdCacheKey(String groupID, String artifactId, String versionId, String queryClassPath, String query) { this.groupID = groupID; this.artifactId = artifactId; this.versionId = versionId; - this.mappingPath = mappingPath; - this.runtimePath = runtimePath; this.queryClassPath = queryClassPath; this.query = query; } @@ -49,12 +45,35 @@ public boolean equals(Object o) return false; } GraphQLProdCacheKey that = (GraphQLProdCacheKey) o; - return Objects.equal(groupID, that.groupID) && Objects.equal(artifactId, that.artifactId) && Objects.equal(versionId, that.versionId) && Objects.equal(mappingPath, that.mappingPath) && Objects.equal(runtimePath, that.runtimePath) && Objects.equal(queryClassPath, that.queryClassPath) && Objects.equal(query, that.query); + return Objects.equals(groupID, that.groupID) + && Objects.equals(artifactId, that.artifactId) + && Objects.equals(versionId, that.versionId) + && Objects.equals(queryClassPath, that.queryClassPath) + && Objects.equals(query, that.query); } - @Override - public int hashCode() + public String getGroupID() + { + return groupID; + } + + public String getArtifactId() + { + return artifactId; + } + + public String getVersionId() + { + return versionId; + } + + public String getQueryClassPath() + { + return queryClassPath; + } + + public String getQuery() { - return Objects.hashCode(groupID, artifactId, versionId, mappingPath, runtimePath, queryClassPath, query); + return query; } } diff --git 
a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdDataspaceCacheKey.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdDataspaceCacheKey.java new file mode 100644 index 00000000000..36ce591b385 --- /dev/null +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdDataspaceCacheKey.java @@ -0,0 +1,63 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.query.graphQL.api.cache; + +import java.util.Objects; + +public class GraphQLProdDataspaceCacheKey extends GraphQLProdCacheKey +{ + private String dataspacePath; + private String executionContext; + + public GraphQLProdDataspaceCacheKey(String groupID, String artifactId, String versionId, String dataspacePath, String executionContext, String queryClassPath, String query) + { + super(groupID, artifactId, versionId, queryClassPath, query); + this.dataspacePath = dataspacePath; + this.executionContext = executionContext; + } + + @Override + public boolean equals(Object o) + { + if (this == o) + { + return true; + } + if (o == null || getClass() != o.getClass()) + { + return false; + } + GraphQLProdDataspaceCacheKey that = (GraphQLProdDataspaceCacheKey) o; + return super.equals(that) + && Objects.equals(dataspacePath, that.dataspacePath) + && Objects.equals(executionContext, that.executionContext); + } + + @Override + public int hashCode() + { + return Objects.hash(groupID, artifactId, versionId, dataspacePath, executionContext, queryClassPath, query); + } + + public String getDataspacePath() + { + return dataspacePath; + } + + public String getExecutionContext() + { + return executionContext; + } +} diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdMappingRuntimeCacheKey.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdMappingRuntimeCacheKey.java new file mode 100644 index 00000000000..1790fcdf981 --- /dev/null +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/cache/GraphQLProdMappingRuntimeCacheKey.java @@ -0,0 +1,63 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.query.graphQL.api.cache; + +import java.util.Objects; + +public class GraphQLProdMappingRuntimeCacheKey extends GraphQLProdCacheKey +{ + private String mappingPath; + private String runtimePath; + + public GraphQLProdMappingRuntimeCacheKey(String groupID, String artifactId, String versionId, String mappingPath, String runtimePath, String queryClassPath, String query) + { + super(groupID, artifactId, versionId, queryClassPath, query); + this.mappingPath = mappingPath; + this.runtimePath = runtimePath; + } + + @Override + public boolean equals(Object o) + { + if (this == o) + { + return true; + } + if (o == null || getClass() != o.getClass()) + { + return false; + } + GraphQLProdMappingRuntimeCacheKey that = (GraphQLProdMappingRuntimeCacheKey) o; + return super.equals(that) + && Objects.equals(mappingPath, that.mappingPath) + && Objects.equals(runtimePath, that.runtimePath); + } + + @Override + public int hashCode() + { + return Objects.hash(groupID, artifactId, versionId, mappingPath, runtimePath, queryClassPath, query); + } + + public String getMappingPath() + { + return mappingPath; + } + + public String getRuntimePath() + { + return runtimePath; + } +} diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/GraphQLExecute.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/GraphQLExecute.java index e152c9e9faf..ea83e92e110 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/GraphQLExecute.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/GraphQLExecute.java @@ -51,12 +51,15 @@ import org.finos.legend.engine.query.graphQL.api.cache.GraphQLDevCacheKey; import org.finos.legend.engine.query.graphQL.api.cache.GraphQLPlanCache; import org.finos.legend.engine.query.graphQL.api.cache.GraphQLProdCacheKey; +import org.finos.legend.engine.query.graphQL.api.cache.GraphQLProdDataspaceCacheKey; +import org.finos.legend.engine.query.graphQL.api.cache.GraphQLProdMappingRuntimeCacheKey; import org.finos.legend.engine.query.graphQL.api.execute.directives.IGraphQLDirectiveExtension; import org.finos.legend.engine.query.graphQL.api.execute.model.PlansResult; import org.finos.legend.engine.query.graphQL.api.execute.model.Query; import org.finos.legend.engine.query.graphQL.api.execute.model.error.GraphQLErrorMain; import org.finos.legend.engine.shared.core.ObjectMapperFactory; import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper; +import org.finos.legend.engine.shared.core.operational.Assert; import org.finos.legend.engine.shared.core.operational.errorManagement.ExceptionTool; import org.finos.legend.engine.shared.core.operational.logs.LogInfo; import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType; @@ -64,11 +67,14 @@ import org.finos.legend.pure.generated.Root_meta_pure_executionPlan_ExecutionPlan; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; import org.finos.legend.pure.generated.Root_meta_core_runtime_Runtime; +import org.finos.legend.pure.generated.Root_meta_pure_metamodel_dataSpace_DataSpace; +import 
org.finos.legend.pure.generated.Root_meta_pure_metamodel_dataSpace_DataSpaceExecutionContext; import org.finos.legend.pure.generated.core_external_query_graphql_transformation_transformation_graphFetch; import org.finos.legend.pure.generated.core_external_query_graphql_transformation_transformation_introspection_query; import org.finos.legend.pure.generated.core_pure_executionPlan_executionPlan_print; import org.finos.legend.pure.m3.coreinstance.meta.pure.functions.collection.Pair; import org.finos.legend.pure.m3.coreinstance.meta.pure.mapping.Mapping; +import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.PackageableElement; import org.pac4j.core.profile.CommonProfile; import org.pac4j.core.profile.ProfileManager; import org.pac4j.jax.rs.annotations.Pac4JProfileManager; @@ -85,10 +91,12 @@ import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; +import javax.ws.rs.DefaultValue; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; @@ -222,7 +230,17 @@ public Response generatePlansProd(@Context HttpServletRequest request, @PathPara } } - private Response executeGraphQLQuery(String queryClassPath, String mappingPath, String runtimePath, Document document, GraphQLCacheKey graphQLCacheKey, MutableList profiles, Callable modelLoader) + private Response executeIntrospection(String queryClassPath, Document document, PureModel pureModel) + { + org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class = pureModel.getClass(queryClassPath); + org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = toPureModel(document, pureModel); + + return Response.ok("{" + + " \"data\":" + core_external_query_graphql_transformation_transformation_introspection_query.Root_meta_external_query_graphQL_transformation_introspection_graphQLIntrospectionQuery_Class_1__Document_1__String_1_(_class, queryDoc, pureModel.getExecutionSupport()) + + "}").type(MediaType.TEXT_HTML_TYPE).build(); + } + + private Response executeGraphQLQuery(Document document, GraphQLCacheKey graphQLCacheKey, MutableList profiles, Callable modelLoader) { List planWithSerialized; OperationDefinition graphQLQuery = GraphQLExecutionHelper.findQuery(document); @@ -232,12 +250,7 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, if (isQueryIntrospection(graphQLQuery)) { pureModel = modelLoader.call(); - org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class = pureModel.getClass(queryClassPath); - org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = toPureModel(document, pureModel); - - return Response.ok("{" + - " \"data\":" + core_external_query_graphql_transformation_transformation_introspection_query.Root_meta_external_query_graphQL_transformation_introspection_graphQLIntrospectionQuery_Class_1__Document_1__String_1_(_class, queryDoc, pureModel.getExecutionSupport()) + - "}").type(MediaType.TEXT_HTML_TYPE).build(); + return executeIntrospection(graphQLCacheKey.getQueryClassPath(), document, pureModel); } else { @@ -248,7 +261,7 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, { LOGGER.debug(new LogInfo(profiles, LoggingEventType.GRAPHQL_EXECUTE, "Cache miss. 
Generating new plan").toString()); pureModel = modelLoader.call(); - planWithSerialized = buildPlanWithParameter(queryClassPath, mappingPath, runtimePath, document, graphQLQuery, pureModel, graphQLCacheKey); + planWithSerialized = getSerializedNamedPlans(document, graphQLCacheKey, graphQLQuery, pureModel); graphQLPlanCache.put(graphQLCacheKey, planWithSerialized); } else @@ -259,7 +272,7 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, else //no cache so we generate the plan { pureModel = modelLoader.call(); - planWithSerialized = buildPlanWithParameter(queryClassPath, mappingPath, runtimePath, document, graphQLQuery, pureModel, graphQLCacheKey); + planWithSerialized = getSerializedNamedPlans(document, graphQLCacheKey, graphQLQuery, pureModel); } } } @@ -267,8 +280,36 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, { return ExceptionTool.exceptionManager(e, LoggingEventType.EXECUTE_INTERACTIVE_ERROR, profiles); } - final PureModel pureModel1 = pureModel; - List finalPlanWithSerialized = planWithSerialized; + return execute(profiles, planWithSerialized, graphQLQuery); + } + + private List getSerializedNamedPlans(Document document, GraphQLCacheKey graphQLCacheKey,OperationDefinition graphQLQuery, PureModel pureModel) + { + List planWithSerialized; + if (graphQLCacheKey instanceof GraphQLDevCacheKey) + { + GraphQLDevCacheKey key = (GraphQLDevCacheKey) graphQLCacheKey; + planWithSerialized = buildPlanWithParameter(key.getQueryClassPath(), key.getMappingPath(), key.getRuntimePath(), document, graphQLQuery, pureModel, graphQLCacheKey); + } + else if (graphQLCacheKey instanceof GraphQLProdMappingRuntimeCacheKey) + { + GraphQLProdMappingRuntimeCacheKey key = (GraphQLProdMappingRuntimeCacheKey) graphQLCacheKey; + planWithSerialized = buildPlanWithParameter(key.getQueryClassPath(), key.getMappingPath(), key.getRuntimePath(), document, graphQLQuery, pureModel, graphQLCacheKey); + } + else if (graphQLCacheKey instanceof GraphQLProdDataspaceCacheKey) + { + GraphQLProdDataspaceCacheKey key = (GraphQLProdDataspaceCacheKey) graphQLCacheKey; + planWithSerialized = buildPlanWithParameterUsingDataspace(key.getQueryClassPath(), key.getDataspacePath(), key.getExecutionContext(), document, graphQLQuery, pureModel, graphQLCacheKey); + } + else + { + throw new UnsupportedOperationException("Invalid graphql cache key"); + } + return planWithSerialized; + } + + private Response execute(MutableList profiles, List planWithSerialized, OperationDefinition graphQLQuery) + { return Response.ok( (StreamingOutput) outputStream -> { @@ -281,7 +322,7 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, generator.writeFieldName("data"); generator.writeStartObject(); - finalPlanWithSerialized.stream().filter(serializedNamedPlans -> GraphQLExecutionHelper.isARootField(serializedNamedPlans.propertyName, graphQLQuery)).forEach(p -> + planWithSerialized.stream().filter(serializedNamedPlans -> GraphQLExecutionHelper.isARootField(serializedNamedPlans.propertyName, graphQLQuery)).forEach(p -> { JsonStreamingResult result = null; try @@ -305,7 +346,7 @@ private Response executeGraphQLQuery(String queryClassPath, String mappingPath, } }); generator.writeEndObject(); - Map extensions = this.computeExtensionsField(graphQLQuery, finalPlanWithSerialized, profiles); + Map extensions = this.computeExtensionsField(graphQLQuery, planWithSerialized, profiles); if (!extensions.isEmpty()) { generator.writeFieldName("extensions"); @@ -351,7 +392,7 @@ 
else if (modifiedListOfExtensions.size() > 1) return modifiedListOfExtensions.get(0); } - private List buildExtensionsPlanWithParameter(String rootFieldName, String queryClassPath, String mappingPath, String runtimePath, Document document, OperationDefinition query, PureModel pureModel, GraphQLCacheKey graphQLCacheKey) + private List buildExtensionsPlanWithParameter(String rootFieldName, org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class, Mapping mapping, Root_meta_core_runtime_Runtime runtime, Document document, OperationDefinition query, PureModel pureModel, GraphQLCacheKey graphQLCacheKey) { List directives = GraphQLExecutionHelper.findDirectives(query); List serializedNamedPlans = Lists.mutable.empty(); @@ -360,9 +401,9 @@ private List buildExtensionsPlanWithParameter(String rootF SingleExecutionPlan plan = (SingleExecutionPlan) getExtensionForDirective(directive).planDirective( document, pureModel, - queryClassPath, - mappingPath, - runtimePath, + _class, + mapping, + runtime, this.extensionsFunc.apply(pureModel), this.transformers ); @@ -371,13 +412,61 @@ private List buildExtensionsPlanWithParameter(String rootF return serializedNamedPlans; } + private Root_meta_pure_metamodel_dataSpace_DataSpaceExecutionContext getDataspaceExecutionContext(String dataspacePath, String executionContext, PureModel pureModel) + { + PackageableElement packageableElement = pureModel.getPackageableElement(dataspacePath); + Assert.assertTrue(packageableElement instanceof Root_meta_pure_metamodel_dataSpace_DataSpace, () -> "Can't find data space '" + dataspacePath + "'"); + if (executionContext.equals("defaultExecutionContext")) + { + return ((Root_meta_pure_metamodel_dataSpace_DataSpace) packageableElement)._executionContexts().select(dataSpaceExecutionContext -> dataSpaceExecutionContext._name().equals(((Root_meta_pure_metamodel_dataSpace_DataSpace) packageableElement)._defaultExecutionContext()._name())).toList().get(0); + } + else + { + try + { + return ((Root_meta_pure_metamodel_dataSpace_DataSpace) packageableElement)._executionContexts().select(dataSpaceExecutionContext -> dataSpaceExecutionContext._name().equals(executionContext)).toList().get(0); + } + catch (Exception e) + { + throw new RuntimeException("Invalid execution context " + executionContext, e); + } + } + } + + private List buildPlanWithParameterUsingDataspace(String queryClassPath, String dataspacePath, String executionContext, Document document, OperationDefinition query, PureModel pureModel, GraphQLCacheKey graphQLCacheKey) + { + RichIterable extensions = this.extensionsFunc.apply(pureModel); + org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class = pureModel.getClass(queryClassPath); + org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = toPureModel(document, pureModel); + + Root_meta_pure_metamodel_dataSpace_DataSpaceExecutionContext executionContextPureElement = getDataspaceExecutionContext(dataspacePath, executionContext, pureModel); + Mapping mapping = executionContextPureElement._mapping(); + Root_meta_core_runtime_Runtime runtime = executionContextPureElement._defaultRuntime()._runtimeValue(); + return getSerializedNamedPlans(pureModel, extensions, _class, mapping, runtime, document, query, queryDoc, graphQLCacheKey); + } + private List buildPlanWithParameter(String queryClassPath, String mappingPath, String runtimePath, Document document, OperationDefinition query, PureModel pureModel, GraphQLCacheKey graphQLCacheKey) { RichIterable 
extensions = this.extensionsFunc.apply(pureModel); org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class = pureModel.getClass(queryClassPath); + org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = toPureModel(document, pureModel); + Mapping mapping = pureModel.getMapping(mappingPath); Root_meta_core_runtime_Runtime runtime = pureModel.getRuntime(runtimePath); - org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = toPureModel(document, pureModel); + return getSerializedNamedPlans(pureModel, extensions, _class, mapping, runtime, document, query, queryDoc, graphQLCacheKey); + } + + private List getSerializedNamedPlans( + PureModel pureModel,RichIterable extensions, + org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class, + Mapping mapping, + Root_meta_core_runtime_Runtime runtime, + Document document, + OperationDefinition query, + org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc, + GraphQLCacheKey graphQLCacheKey + ) + { RichIterable purePlans = core_external_query_graphql_transformation_transformation_graphFetch.Root_meta_external_query_graphQL_transformation_queryToPure_graphQLExecutableToPlansWithParameters_Class_1__Document_1__Mapping_1__Runtime_1__Extension_MANY__NamedExecutionPlan_MANY_(_class, queryDoc, mapping, runtime, extensions, pureModel.getExecutionSupport()); List plans = purePlans.toList().stream().map(p -> { @@ -388,7 +477,7 @@ private List buildPlanWithParameter(String queryClassPath, return serializedPlans; }).collect(Collectors.toList()); List> extensionPlans = plans.stream().map(plan -> - buildExtensionsPlanWithParameter(plan.propertyName, queryClassPath, mappingPath, runtimePath, document, query, pureModel, graphQLCacheKey) + buildExtensionsPlanWithParameter(plan.propertyName, _class, mapping, runtime, document, query, pureModel, graphQLCacheKey) ).collect(Collectors.toList()); extensionPlans.forEach(plans::addAll); return plans; @@ -429,7 +518,7 @@ private Response executeDevImpl(HttpServletRequest request, String projectId, St Document document = GraphQLGrammarParser.newInstance().parseDocument(query.query); Document cachableGraphQLQuery = createCachableGraphQLQuery(document); GraphQLDevCacheKey key = new GraphQLDevCacheKey(projectId, workspaceId, queryClassPath, mappingPath, runtimePath, objectMapper.writeValueAsString(cachableGraphQLQuery)); - return this.executeGraphQLQuery(queryClassPath, mappingPath, runtimePath, document, key, profiles, () -> loadSDLCProjectModel(profiles, request, projectId, workspaceId, isGroupWorkspace)); + return this.executeGraphQLQuery(document, key, profiles, () -> loadSDLCProjectModel(profiles, request, projectId, workspaceId, isGroupWorkspace)); } catch (Exception ex) { @@ -447,9 +536,29 @@ public Response executeProd(@Context HttpServletRequest request, @PathParam("gro try (Scope scope = GlobalTracer.get().buildSpan("GraphQL: Execute").startActive(true)) { Document document = GraphQLGrammarParser.newInstance().parseDocument(query.query); - GraphQLProdCacheKey key = new GraphQLProdCacheKey(groupId, artifactId, versionId, mappingPath, runtimePath, queryClassPath, objectMapper.writeValueAsString(createCachableGraphQLQuery(document))); + GraphQLProdMappingRuntimeCacheKey key = new GraphQLProdMappingRuntimeCacheKey(groupId, artifactId, versionId, mappingPath, runtimePath, queryClassPath, objectMapper.writeValueAsString(createCachableGraphQLQuery(document))); + 
+ return this.executeGraphQLQuery(document, key, profiles, () -> loadProjectModel(profiles, groupId, artifactId, versionId)); + } + catch (Exception ex) + { + return Response.ok(new GraphQLErrorMain(ex.getMessage())).build(); + } + } + + @POST + @ApiOperation(value = "Execute a GraphQL query in the context of a dataspace") + @Path("execute/prod/{groupId}/{artifactId}/{versionId}/query/{queryClassPath}/dataspace/{dataspacePath}") + @Consumes({MediaType.APPLICATION_JSON, APPLICATION_ZLIB}) + public Response executeProdWithDataspace(@Context HttpServletRequest request, @PathParam("groupId") String groupId, @PathParam("artifactId") String artifactId, @PathParam("versionId") String versionId, @PathParam("dataspacePath") String dataspacePath, @QueryParam("executionContext") @DefaultValue("defaultExecutionContext") String executionContext, @PathParam("queryClassPath") String queryClassPath, Query query, @ApiParam(hidden = true) @Pac4JProfileManager ProfileManager pm) + { + MutableList profiles = ProfileManagerHelper.extractProfiles(pm); + try (Scope scope = GlobalTracer.get().buildSpan("GraphQL: Execute").startActive(true)) + { + Document document = GraphQLGrammarParser.newInstance().parseDocument(query.query); + GraphQLProdDataspaceCacheKey key = new GraphQLProdDataspaceCacheKey(groupId, artifactId, versionId, dataspacePath, executionContext, queryClassPath, objectMapper.writeValueAsString(createCachableGraphQLQuery(document))); - return this.executeGraphQLQuery(queryClassPath, mappingPath, runtimePath, document, key, profiles, () -> loadProjectModel(profiles, groupId, artifactId, versionId)); + return this.executeGraphQLQuery(document, key, profiles, () -> loadProjectModel(profiles, groupId, artifactId, versionId)); } catch (Exception ex) { diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/DefaultGraphQLDirectiveExtension.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/DefaultGraphQLDirectiveExtension.java index 4337f2da38c..61a9aca681c 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/DefaultGraphQLDirectiveExtension.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/DefaultGraphQLDirectiveExtension.java @@ -27,6 +27,8 @@ import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.ExecutionPlan; import org.finos.legend.engine.query.graphQL.api.execute.directives.IGraphQLDirectiveExtension; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; +import org.finos.legend.pure.generated.Root_meta_core_runtime_Runtime; +import org.finos.legend.pure.m3.coreinstance.meta.pure.mapping.Mapping; import org.pac4j.core.profile.CommonProfile; import java.util.Map; @@ -40,7 +42,7 @@ public ImmutableList getSupportedDirectives() } @Override - public ExecutionPlan planDirective(Document document, PureModel pureModel, String rootClassPath, String mappingPath, String runtimePath, RichIterable _extensions, Iterable transformers) + public ExecutionPlan planDirective(Document document, PureModel pureModel, org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class, Mapping mapping, Root_meta_core_runtime_Runtime runtime, RichIterable _extensions, Iterable transformers) { return null; } diff
--git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/IGraphQLDirectiveExtension.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/IGraphQLDirectiveExtension.java index 3406ae98e6e..ce642cdc670 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/IGraphQLDirectiveExtension.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/main/java/org/finos/legend/engine/query/graphQL/api/execute/directives/IGraphQLDirectiveExtension.java @@ -25,6 +25,8 @@ import org.finos.legend.engine.protocol.graphQL.metamodel.Document; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.ExecutionPlan; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; +import org.finos.legend.pure.generated.Root_meta_core_runtime_Runtime; +import org.finos.legend.pure.m3.coreinstance.meta.pure.mapping.Mapping; import org.pac4j.core.profile.CommonProfile; import java.util.Map; @@ -36,9 +38,9 @@ public interface IGraphQLDirectiveExtension ExecutionPlan planDirective( Document document, PureModel pureModel, - String rootClassPath, - String mappingPath, - String runtimePath, + org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class, + Mapping mapping, + Root_meta_core_runtime_Runtime runtime, RichIterable _extensions, Iterable transformers); diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/java/org/finos/legend/engine/query/graphQL/api/test/TestGraphQLAPI.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/java/org/finos/legend/engine/query/graphQL/api/test/TestGraphQLAPI.java index 5990eef55c4..6b7c744bbb9 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/java/org/finos/legend/engine/query/graphQL/api/test/TestGraphQLAPI.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/java/org/finos/legend/engine/query/graphQL/api/test/TestGraphQLAPI.java @@ -36,6 +36,7 @@ import org.finos.legend.engine.plan.generation.extension.PlanGeneratorExtension; import org.finos.legend.engine.protocol.Protocol; import org.finos.legend.engine.protocol.pure.PureClientVersions; +import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.SingleExecutionPlan; @@ -94,6 +95,7 @@ public static void beforeClass() throws Exception handlerCollection.setHandlers(new Handler[] { buildPMCDMetadataHandler("/api/projects/Project1/workspaces/Workspace1/pureModelContextData", "/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure"), buildJsonHandler("/api/projects/Project1/workspaces/Workspace1/revisions/HEAD/upstreamProjects", "[]"), + buildPMCDMetadataHandler("/projects/org.finos.legend.graphql/model.one/versions/1.0.0/pureModelContextData","/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure",new Protocol("pure", PureClientVersions.production),new PureModelContextPointer()), buildPMCDMetadataHandler("/api/projects/Project1/workspaces/Workspace2/pureModelContextData", "/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace2.pure"), 
buildJsonHandler("/api/projects/Project1/workspaces/Workspace2/revisions/HEAD/upstreamProjects", "[]"), @@ -128,7 +130,7 @@ private GraphQLExecute getGraphQLExecute() private GraphQLExecute getGraphQLExecuteWithCache(GraphQLPlanCache cache) { - ModelManager modelManager = new ModelManager(DeploymentMode.TEST); + ModelManager modelManager = createModelManager(); PlanExecutor executor = PlanExecutor.newPlanExecutorWithAvailableStoreExecutors(); MutableList generatorExtensions = Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)); GraphQLExecute graphQLExecute = new GraphQLExecute(modelManager, executor, metaDataServerConfiguration, (pm) -> PureCoreExtensionLoader.extensions().flatCollect(g -> g.extraPureCoreExtensions(pm.getExecutionSupport())), generatorExtensions.flatCollect(PlanGeneratorExtension::getExtraPlanTransformers), cache); @@ -165,6 +167,74 @@ public void testGraphQLExecuteDevAPI_Relational() throws Exception Assert.assertEquals(expected, responseAsString(response)); } + @Test + public void testGraphQLExecuteProdAPI_Relational_With_Dataspace() throws Exception + { + GraphQLExecute graphQLExecute = getGraphQLExecute(); + HttpServletRequest mockRequest = Mockito.mock(HttpServletRequest.class); + Mockito.when(mockRequest.getCookies()).thenReturn(new Cookie[0]); + Query query = new Query(); + query.query = "query Query {\n" + + " allFirms {\n" + + " legalName,\n" + + " employees {\n" + + " firstName,\n" + + " lastName\n" + + " }\n" + + " }\n" + + " }"; + Response response = graphQLExecute.executeProdWithDataspace(mockRequest, "org.finos.legend.graphql", "model.one", "1.0.0", "simple::dataspace", "defaultExecutionContext", "simple::model::Query", query, null); + + String expected = "{" + + "\"data\":{" + + "\"allFirms\":[" + + "{\"legalName\":\"Firm X\",\"employees\":[{\"firstName\":\"Peter\",\"lastName\":\"Smith\"},{\"firstName\":\"John\",\"lastName\":\"Johnson\"},{\"firstName\":\"John\",\"lastName\":\"Hill\"},{\"firstName\":\"Anthony\",\"lastName\":\"Allen\"}]}," + + "{\"legalName\":\"Firm A\",\"employees\":[{\"firstName\":\"Fabrice\",\"lastName\":\"Roberts\"}]}," + + "{\"legalName\":\"Firm B\",\"employees\":[{\"firstName\":\"Oliver\",\"lastName\":\"Hill\"},{\"firstName\":\"David\",\"lastName\":\"Harris\"}]}" + + "]" + + "}" + + "}"; + Assert.assertEquals(expected, responseAsString(response)); + } + + @Test + public void testGraphQLExecuteProdAPI_Relational_With_Dataspace_With_Caching() throws Exception + { + GraphQLPlanCache cache = new GraphQLPlanCache(getExecutionCacheInstance()); + GraphQLExecute graphQLExecute = getGraphQLExecuteWithCache(cache); + HttpServletRequest mockRequest = Mockito.mock(HttpServletRequest.class); + Mockito.when(mockRequest.getCookies()).thenReturn(new Cookie[0]); + Query query = new Query(); + query.query = "query Query {\n" + + " allFirms {\n" + + " legalName,\n" + + " employees {\n" + + " firstName,\n" + + " lastName\n" + + " }\n" + + " }\n" + + " }"; + Response response = graphQLExecute.executeProdWithDataspace(mockRequest, "org.finos.legend.graphql", "model.one", "1.0.0", "simple::dataspace", "defaultExecutionContext", "simple::model::Query", query, null); + + String expected = "{" + + "\"data\":{" + + "\"allFirms\":[" + + "{\"legalName\":\"Firm X\",\"employees\":[{\"firstName\":\"Peter\",\"lastName\":\"Smith\"},{\"firstName\":\"John\",\"lastName\":\"Johnson\"},{\"firstName\":\"John\",\"lastName\":\"Hill\"},{\"firstName\":\"Anthony\",\"lastName\":\"Allen\"}]}," + + "{\"legalName\":\"Firm 
A\",\"employees\":[{\"firstName\":\"Fabrice\",\"lastName\":\"Roberts\"}]}," + + "{\"legalName\":\"Firm B\",\"employees\":[{\"firstName\":\"Oliver\",\"lastName\":\"Hill\"},{\"firstName\":\"David\",\"lastName\":\"Harris\"}]}" + + "]" + + "}" + + "}"; + Assert.assertEquals(expected, responseAsString(response)); + Assert.assertEquals(0, cache.getCache().stats().hitCount(), 0); + Assert.assertEquals(1, cache.getCache().stats().missCount(), 0); + + response = graphQLExecute.executeProdWithDataspace(mockRequest, "org.finos.legend.graphql", "model.one", "1.0.0", "simple::dataspace", "defaultExecutionContext", "simple::model::Query", query, null); + Assert.assertEquals(expected, responseAsString(response)); + Assert.assertEquals(1, cache.getCache().stats().hitCount(), 0); + Assert.assertEquals(1, cache.getCache().stats().missCount(), 0); + } + @Test public void testGraphQLExecuteDevAPI_BiTemporalMilestoning_Root() throws Exception { @@ -222,7 +292,7 @@ public void testGraphQLExecuteDevAPI_ProcessingTemporalMilestoning_Root() throws @Test public void testGraphQLExecuteDevAPI_Relational_WithDependencies() throws Exception { - ModelManager modelManager = new ModelManager(DeploymentMode.TEST, new SDLCLoader(metaDataServerConfiguration, null)); + ModelManager modelManager = createModelManager(); PlanExecutor executor = PlanExecutor.newPlanExecutorWithAvailableStoreExecutors(); MutableList generatorExtensions = Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)); GraphQLExecute graphQLExecute = new GraphQLExecute(modelManager, executor, metaDataServerConfiguration, (pm) -> PureCoreExtensionLoader.extensions().flatCollect(g -> g.extraPureCoreExtensions(pm.getExecutionSupport())), generatorExtensions.flatCollect(PlanGeneratorExtension::getExtraPlanTransformers)); @@ -252,6 +322,11 @@ public void testGraphQLExecuteDevAPI_Relational_WithDependencies() throws Except Assert.assertEquals(expected, responseAsString(response)); } + private static ModelManager createModelManager() + { + return new ModelManager(DeploymentMode.TEST, new SDLCLoader(metaDataServerConfiguration, null)); + } + @Test public void testGraphQLExecuteDevAPI_RelationalWithParameter() throws Exception @@ -483,7 +558,7 @@ public void testGraphQLExecuteGeneratePlansDevAPI_Relational() @Test public void testGraphQLDebugGenerateGraphFetchDevAPI() { - ModelManager modelManager = new ModelManager(DeploymentMode.TEST); + ModelManager modelManager = createModelManager(); MutableList generatorExtensions = Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)); GraphQLDebug graphQLDebug = new GraphQLDebug(modelManager, metaDataServerConfiguration, (pm) -> PureCoreExtensionLoader.extensions().flatCollect(g -> g.extraPureCoreExtensions(pm.getExecutionSupport()))); HttpServletRequest mockRequest = Mockito.mock(HttpServletRequest.class); @@ -674,7 +749,7 @@ public void testGraphQLExecuteDevAPI_EchoDirective() throws Exception private static Handler buildPMCDMetadataHandler(String path, String resourcePath) throws Exception { - return buildPMCDMetadataHandler(path, resourcePath, null, null); + return buildPMCDMetadataHandler(path, resourcePath, new Protocol("pure", PureClientVersions.production), new PureModelContextPointer()); } private static Handler buildPMCDMetadataHandler(String path, String resourcePath, Protocol serializer, PureModelContextPointer pointer) throws Exception diff --git 
a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/resources/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/resources/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure index 2635bca3067..c517b763697 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/resources/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-query/src/test/resources/org/finos/legend/engine/query/graphQL/api/test/Project1_Workspace1.pure @@ -119,6 +119,21 @@ Database simple::store::DB Join PERSON_ADDRESS(PERSON_TABLE.ID = ADDRESS_TABLE.PERSON_ID) ) +###DataSpace +DataSpace simple::dataspace +{ + executionContexts: + [ + { + name: 'dummyContext'; + mapping: simple::mapping::Map; + defaultRuntime: simple::runtime::Runtime; + } + ]; + defaultExecutionContext: 'dummyContext'; +} + + ###Mapping import simple::model::*; import simple::store::*; diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/pom.xml b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/pom.xml index e22db607ac2..0685c7f3623 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/pom.xml +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-graphQL - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/main/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TotalCountDirective.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/main/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TotalCountDirective.java index 1ddf7b144f4..31f102157dc 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/main/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TotalCountDirective.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/main/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TotalCountDirective.java @@ -57,13 +57,10 @@ public ImmutableList getSupportedDirectives() } @Override - public ExecutionPlan planDirective(Document document, PureModel pureModel, String rootClassPath, String mappingPath, String runtimePath, RichIterable extensions, Iterable transformers) + public ExecutionPlan planDirective(Document document, PureModel pureModel, org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class, Mapping mapping, Root_meta_core_runtime_Runtime runtime, RichIterable extensions, Iterable transformers) { try { - org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class _class = pureModel.getClass(rootClassPath); - Mapping mapping = pureModel.getMapping(mappingPath); - Root_meta_core_runtime_Runtime runtime = pureModel.getRuntime(runtimePath); org.finos.legend.pure.generated.Root_meta_external_query_graphQL_metamodel_sdl_Document queryDoc = GraphQLExecute.toPureModel(document, pureModel); RichIterable purePlans = core_external_query_graphql_transformation_transformation_graphFetch.Root_meta_external_query_graphQL_transformation_queryToPure_getPlanForTotalCountDirective_Class_1__Mapping_1__Runtime_1__Document_1__Extension_MANY__NamedExecutionPlan_MANY_(_class, mapping, 
runtime, queryDoc, extensions, pureModel.getExecutionSupport()); List plans = purePlans.toList().stream().map(p -> diff --git a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/test/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TestTotalCountDirective.java b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/test/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TestTotalCountDirective.java index 08145f9f25b..f194834514e 100644 --- a/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/test/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TestTotalCountDirective.java +++ b/legend-engine-xts-graphQL/legend-engine-xt-graphQL-relational-extension/src/test/java/org/finos/legend/engine/query/graphQL/extension/relational/directives/TestTotalCountDirective.java @@ -27,6 +27,7 @@ import org.finos.legend.engine.language.graphQL.grammar.from.GraphQLGrammarParser; import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParser; import org.finos.legend.engine.language.pure.modelManager.ModelManager; +import org.finos.legend.engine.language.pure.modelManager.sdlc.SDLCLoader; import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.MetaDataServerConfiguration; import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.ServerConnectionConfiguration; import org.finos.legend.engine.plan.execution.PlanExecutor; @@ -112,7 +113,7 @@ private GraphQLExecute getGraphQLExecute() private GraphQLExecute getGraphQLExecuteWithCache(GraphQLPlanCache cache) { - ModelManager modelManager = new ModelManager(DeploymentMode.TEST); + ModelManager modelManager = new ModelManager(DeploymentMode.TEST, new SDLCLoader(metaDataServerConfiguration, null)); PlanExecutor executor = PlanExecutor.newPlanExecutorWithAvailableStoreExecutors(); MutableList generatorExtensions = Lists.mutable.withAll(ServiceLoader.load(PlanGeneratorExtension.class)); GraphQLExecute graphQLExecute = new GraphQLExecute(modelManager, executor, metaDataServerConfiguration, (pm) -> PureCoreExtensionLoader.extensions().flatCollect(g -> g.extraPureCoreExtensions(pm.getExecutionSupport())), generatorExtensions.flatCollect(PlanGeneratorExtension::getExtraPlanTransformers), cache); @@ -242,7 +243,7 @@ public void testGraphQLExecuteDevAPI_TotalCountDirective_Caching() throws Except private static Handler buildPMCDMetadataHandler(String path, String resourcePath) throws Exception { - return buildPMCDMetadataHandler(path, resourcePath, null, null); + return buildPMCDMetadataHandler(path, resourcePath, new Protocol(), new PureModelContextPointer()); } private static Handler buildPMCDMetadataHandler(String path, String resourcePath, Protocol serializer, PureModelContextPointer pointer) throws Exception diff --git a/legend-engine-xts-graphQL/pom.xml b/legend-engine-xts-graphQL/pom.xml index e448b626519..6e0f2b730fb 100644 --- a/legend-engine-xts-graphQL/pom.xml +++ b/legend-engine-xts-graphQL/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-haskell/legend-engine-xt-haskell-grammar/pom.xml b/legend-engine-xts-haskell/legend-engine-xt-haskell-grammar/pom.xml index 98523bd142f..06839b0d20d 100644 --- a/legend-engine-xts-haskell/legend-engine-xt-haskell-grammar/pom.xml +++ b/legend-engine-xts-haskell/legend-engine-xt-haskell-grammar/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-haskell 
org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-haskell/legend-engine-xt-haskell-protocol/pom.xml b/legend-engine-xts-haskell/legend-engine-xt-haskell-protocol/pom.xml index 2b74a5f133f..84348cf8bce 100644 --- a/legend-engine-xts-haskell/legend-engine-xt-haskell-protocol/pom.xml +++ b/legend-engine-xts-haskell/legend-engine-xt-haskell-protocol/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-haskell org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-haskell/legend-engine-xt-haskell-pure/pom.xml b/legend-engine-xts-haskell/legend-engine-xt-haskell-pure/pom.xml index 72397413fe1..0d8e6dbc449 100644 --- a/legend-engine-xts-haskell/legend-engine-xt-haskell-pure/pom.xml +++ b/legend-engine-xts-haskell/legend-engine-xt-haskell-pure/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-haskell org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-haskell/pom.xml b/legend-engine-xts-haskell/pom.xml index 163105a0bce..324a69c89bb 100644 --- a/legend-engine-xts-haskell/pom.xml +++ b/legend-engine-xts-haskell/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/pom.xml index e951e3b7b30..5e80e38f94f 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -32,10 +32,6 @@ org.finos.legend.engine legend-engine-shared-core - - org.finos.legend.engine - legend-engine-pure-code-core-extension - org.finos.legend.engine legend-engine-language-pure-compiler @@ -45,10 +41,6 @@ org.finos.legend.engine legend-engine-pure-code-compiled-core - - org.finos.legend.engine - legend-engine-executionPlan-generation - org.finos.legend.engine legend-engine-pure-platform-java @@ -65,6 +57,10 @@ org.finos.legend.engine legend-engine-xt-functionActivator-api + + org.finos.legend.engine + legend-engine-xt-functionActivator-protocol + org.finos.legend.engine legend-engine-xt-hostedService-pure @@ -116,10 +112,6 @@ - - com.fasterxml.jackson.core - jackson-databind - @@ -131,10 +123,6 @@ eclipse-collections - - org.pac4j - pac4j-core - @@ -164,10 +152,6 @@ jersey-common test - - org.finos.legend.engine - legend-engine-language-pure-dsl-generation - diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/api/HostedServiceService.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/api/HostedServiceService.java index a35aa36f5a0..b6ed4f1372e 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/api/HostedServiceService.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/api/HostedServiceService.java @@ -19,25 +19,22 @@ import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.impl.factory.Lists; import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo; -import 
org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; -import org.finos.legend.engine.language.hostedService.deployment.HostedServiceArtifact; -import org.finos.legend.engine.language.hostedService.deployment.HostedServiceDeploymentConfiguration; -import org.finos.legend.engine.language.hostedService.generation.model.GenerationInfoData; -import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentStage; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceArtifact; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentConfiguration; import org.finos.legend.engine.functionActivator.service.FunctionActivatorError; import org.finos.legend.engine.functionActivator.service.FunctionActivatorService; -import org.finos.legend.engine.language.hostedService.deployment.HostedServiceDeploymentManager; -import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; +import org.finos.legend.engine.language.hostedService.generation.deployment.HostedServiceDeploymentManager; +import org.finos.legend.engine.protocol.hostedService.deployment.model.GenerationInfoData; import org.finos.legend.engine.protocol.hostedService.metamodel.HostedService; -//import org.finos.legend.engine.protocol.hostedService.metamodel.HostedServiceDeploymentConfiguration; -import org.finos.legend.engine.language.hostedService.deployment.HostedServiceDeploymentResult; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentResult; import org.finos.legend.engine.language.hostedService.generation.HostedServiceArtifactGenerator; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.protocol.hostedService.metamodel.HostedServiceProtocolExtension; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.*; -import org.pac4j.core.profile.CommonProfile; import java.util.List; @@ -73,12 +70,12 @@ public boolean supports(Root_meta_external_function_activator_FunctionActivator } @Override - public MutableList validate(MutableList profiles, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, PureModelContext inputModel, Function> routerExtensions) + public MutableList validate(Identity identity, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, PureModelContext inputModel, Function> routerExtensions) { MutableList errors = Lists.mutable.empty(); try { - this.hostedServiceArtifactgenerator.validateOwner(profiles, pureModel, activator, routerExtensions); + this.hostedServiceArtifactgenerator.validateOwner(identity, pureModel, activator, routerExtensions); core_hostedservice_generation_generation.Root_meta_external_function_activator_hostedService_validator_validateService_HostedService_1__Boolean_1_(activator, pureModel.getExecutionSupport()); //returns true or errors out } @@ -104,11 +101,11 @@ public List selectConfig(List profiles, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, PureModelContext inputModel, List runtimeConfigs, Function> 
routerExtensions) + public HostedServiceDeploymentResult publishToSandbox(Identity identity, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, PureModelContext inputModel, List runtimeConfigs, Function> routerExtensions) { GenerationInfoData generation = this.hostedServiceArtifactgenerator.renderArtifact(pureModel, activator, inputModel, "vX_X_X",routerExtensions); HostedServiceArtifact artifact = new HostedServiceArtifact(generation, fetchHostedService(activator, (PureModelContextData)inputModel, pureModel)); - return this.hostedServiceDeploymentManager.deploy(profiles, artifact, runtimeConfigs); + return this.hostedServiceDeploymentManager.deploy(identity, artifact, runtimeConfigs); // return new HostedServiceDeploymentResult(); } diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-compiler/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-compiler/pom.xml index e813b1fd989..ccf0afc1250 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-compiler/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-compiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/pom.xml index 8c7d18a42df..ca33ecc7591 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -29,10 +29,7 @@ - - com.fasterxml.jackson.core - jackson-annotations - + @@ -41,17 +38,21 @@ org.finos.legend.engine - legend-engine-language-pure-compiler + legend-engine-pure-code-core-extension - org.finos.legend.engine - legend-engine-pure-code-compiled-core + legend-engine-language-pure-compiler + org.finos.legend.engine - legend-engine-protocol + legend-engine-pure-code-compiled-core + + + + org.finos.legend.engine legend-engine-protocol-pure @@ -60,30 +61,37 @@ org.finos.legend.engine legend-engine-shared-core - org.finos.legend.engine - legend-engine-xt-hostedService-pure + legend-engine-xt-functionActivator-deployment - org.finos.legend.engine - legend-engine-xt-hostedService-compiler - runtime + legend-engine-xt-functionActivator-protocol org.finos.legend.engine - legend-engine-xt-hostedService-grammar - runtime + legend-engine-xt-hostedService-pure + + + + + + + + + org.finos.legend.engine - legend-engine-executionPlan-generation + legend-engine-xt-hostedService-protocol + org.finos.legend.engine - legend-engine-xt-analytics-lineage-api + legend-engine-executionPlan-generation + @@ -102,14 +110,6 @@ slf4j-api - - - - org.pac4j - pac4j-core - - - junit junit @@ -130,6 +130,13 @@ jersey-common test - + + org.finos.legend.engine + legend-engine-language-pure-dsl-generation + + + com.fasterxml.jackson.core + jackson-databind + diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/HostedServiceArtifactGenerator.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/HostedServiceArtifactGenerator.java index 
4a8697b27b6..8056699e762 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/HostedServiceArtifactGenerator.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/HostedServiceArtifactGenerator.java @@ -18,26 +18,24 @@ import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Maps; -import org.eclipse.collections.api.list.MutableList; import org.finos.legend.engine.language.hostedService.generation.control.HostedServiceOwnerValidationService; import org.finos.legend.engine.language.hostedService.generation.control.HostedServiceOwnerValidator; -import org.finos.legend.engine.language.hostedService.generation.model.GenerationInfoData; +import org.finos.legend.engine.protocol.hostedService.deployment.model.GenerationInfoData; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.plan.generation.PlanGenerator; import org.finos.legend.engine.plan.generation.transformers.LegendPlanTransformers; import org.finos.legend.engine.plan.platform.PlanPlatform; -import org.finos.legend.engine.language.hostedService.generation.model.lineage.Lineage; -import org.finos.legend.engine.protocol.pure.PureClientVersions; +import org.finos.legend.engine.protocol.hostedService.deployment.model.lineage.Lineage; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.CompositeExecutionPlan; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.ExecutionPlan; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.SingleExecutionPlan; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_HostedService; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_Ownership; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; import org.finos.legend.pure.generated.core_hostedservice_generation_generation; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.ConcreteFunctionDefinition; -import org.pac4j.core.profile.CommonProfile; import java.util.Map; @@ -73,10 +71,10 @@ public static ExecutionPlan generatePlan(PureModel pureModel, Root_meta_external } } - public boolean validateOwner(MutableList profiles, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, Function> routerExtensions) + public boolean validateOwner(Identity identity, PureModel pureModel, Root_meta_external_function_activator_hostedService_HostedService activator, Function> routerExtensions) { HostedServiceOwnerValidator service = getOwnerValidatorService(activator,pureModel); - return service.isOwner(profiles, activator._ownership()); + return service.isOwner(identity, activator._ownership()); } public HostedServiceOwnerValidator getOwnerValidatorService(Root_meta_external_function_activator_hostedService_HostedService activator, PureModel pureModel) diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/DeploymentOwnerValidator.java 
b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/DeploymentOwnerValidator.java index d758aa0f497..352f66f0453 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/DeploymentOwnerValidator.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/DeploymentOwnerValidator.java @@ -14,15 +14,14 @@ package org.finos.legend.engine.language.hostedService.generation.control; -import org.eclipse.collections.api.list.MutableList; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_Deployment; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_Ownership; -import org.pac4j.core.profile.CommonProfile; public class DeploymentOwnerValidator implements HostedServiceOwnerValidator { @Override - public boolean isOwner(MutableList profiles, Root_meta_external_function_activator_hostedService_Deployment ownershipModel) + public boolean isOwner(Identity identity, Root_meta_external_function_activator_hostedService_Deployment ownershipModel) { return ownershipModel._id() > 10; } diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/HostedServiceOwnerValidator.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/HostedServiceOwnerValidator.java index ecf5639e90a..7cb98be504f 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/HostedServiceOwnerValidator.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/HostedServiceOwnerValidator.java @@ -14,13 +14,12 @@ package org.finos.legend.engine.language.hostedService.generation.control; -import org.eclipse.collections.api.list.MutableList; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_Ownership; -import org.pac4j.core.profile.CommonProfile; public interface HostedServiceOwnerValidator { - boolean isOwner(MutableList profiles, T ownershipModel); + boolean isOwner(Identity identity, T ownershipModel); public boolean supports(Root_meta_external_function_activator_hostedService_Ownership ownershipModel); } diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/UserListOwnerValidator.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/UserListOwnerValidator.java index 6a680c6bcaf..685c685e16e 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/UserListOwnerValidator.java +++ 
b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/control/UserListOwnerValidator.java @@ -14,19 +14,30 @@ package org.finos.legend.engine.language.hostedService.generation.control; -import org.eclipse.collections.api.list.MutableList; -import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper; +import org.finos.legend.engine.shared.core.identity.Identity; +import org.finos.legend.engine.shared.core.identity.credential.LegendKerberosCredential; import org.finos.legend.engine.shared.core.kerberos.SubjectTools; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_Ownership; import org.finos.legend.pure.generated.Root_meta_external_function_activator_hostedService_UserList; -import org.pac4j.core.profile.CommonProfile; + +import javax.security.auth.Subject; +import java.util.NoSuchElementException; public class UserListOwnerValidator implements HostedServiceOwnerValidator { @Override - public boolean isOwner(MutableList profiles, Root_meta_external_function_activator_hostedService_UserList users) + public boolean isOwner(Identity identity, Root_meta_external_function_activator_hostedService_UserList users) { - return users._users().contains(SubjectTools.getKerberos(ProfileManagerHelper.extractSubject(profiles))); //use profile + Subject subject = null; + try + { + subject = identity.getCredential(LegendKerberosCredential.class).get().getSubject(); + } + catch (NoSuchElementException e) + { + return false; + } + return users._users().contains(SubjectTools.getKerberos(subject)); //use profile } @Override diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifactGenerationExtension.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceArtifactGenerationExtension.java similarity index 95% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifactGenerationExtension.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceArtifactGenerationExtension.java index 2de7d269570..c8217da065b 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifactGenerationExtension.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceArtifactGenerationExtension.java @@ -13,14 +13,14 @@ // limitations under the License. 
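// NOTE: UserListOwnerValidator above now resolves ownership from an Identity rather than pac4j
// profiles, pulling the Kerberos Subject out of a LegendKerberosCredential. The get() /
// NoSuchElementException pairing above implies getCredential returns a java.util.Optional, so the
// same check can be written without exception handling (and without the now-stale "use profile"
// comment). A sketch under that assumption, using only the calls shown in the diff:

@Override
public boolean isOwner(Identity identity, Root_meta_external_function_activator_hostedService_UserList users)
{
    // An absent Kerberos credential yields an empty Optional, so no catch block is needed.
    return identity.getCredential(LegendKerberosCredential.class)
            .map(LegendKerberosCredential::getSubject)
            .map(SubjectTools::getKerberos)
            .map(users._users()::contains)
            .orElse(false);
}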
-package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.language.hostedService.generation.deployment; import org.eclipse.collections.api.RichIterable; import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.list.MutableList; import org.finos.legend.engine.language.hostedService.generation.HostedServiceArtifactGenerator; -import org.finos.legend.engine.language.hostedService.generation.model.GenerationInfoData; +import org.finos.legend.engine.protocol.hostedService.deployment.model.GenerationInfoData; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.language.pure.dsl.generation.extension.Artifact; import org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentManager.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceDeploymentManager.java similarity index 83% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentManager.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceDeploymentManager.java index 8309de32973..2d95cb87283 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentManager.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/deployment/HostedServiceDeploymentManager.java @@ -12,15 +12,17 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.language.hostedService.generation.deployment; import com.fasterxml.jackson.databind.ObjectMapper; -import org.eclipse.collections.api.list.MutableList; import org.finos.legend.engine.functionActivator.deployment.DeploymentManager; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceArtifact; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentConfiguration; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentResult; import org.finos.legend.engine.shared.core.ObjectMapperFactory; -import org.pac4j.core.profile.CommonProfile; -import org.finos.legend.engine.language.hostedService.deployment.HostedServiceDeploymentConfiguration; +import org.finos.legend.engine.shared.core.identity.Identity; + import java.util.List; public class HostedServiceDeploymentManager implements DeploymentManager @@ -33,13 +35,13 @@ public boolean canDeploy(FunctionActivatorArtifact element) return element instanceof HostedServiceArtifact; } - public HostedServiceDeploymentResult deploy(MutableList profiles, HostedServiceArtifact artifact) + public HostedServiceDeploymentResult deploy(Identity identity, HostedServiceArtifact artifact) { return new HostedServiceDeploymentResult(); } - public HostedServiceDeploymentResult deploy(MutableList profiles, HostedServiceArtifact artifact, List availableRuntimeConfigurations) + public HostedServiceDeploymentResult deploy(Identity identity, HostedServiceArtifact artifact, List availableRuntimeConfigurations) { String host; String path; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-grammar/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-grammar/pom.xml index d56a41cd2c0..25fe6e4a6f9 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-grammar/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/pom.xml index b04ec33d513..8e008f4afb4 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -35,7 +35,14 @@ org.finos.legend.engine legend-engine-xt-functionActivator-protocol - ${project.version} + + + org.finos.legend.engine + legend-engine-xt-analytics-lineage-api + + + org.finos.legend.engine + legend-engine-protocol diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifact.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceArtifact.java similarity index 78% rename from 
legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifact.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceArtifact.java index 1bfe026b012..6d02619b4f6 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceArtifact.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceArtifact.java @@ -12,10 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.protocol.hostedService.deployment; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorArtifact; -import org.finos.legend.engine.language.hostedService.generation.model.GenerationInfo; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.hostedService.deployment.model.GenerationInfo; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; public class HostedServiceArtifact extends FunctionActivatorArtifact diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceContent.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceContent.java similarity index 82% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceContent.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceContent.java index 36de3185647..6a753ecef9c 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceContent.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceContent.java @@ -12,10 +12,10 @@ // See the License for the specific language governing permissions and // limitations under the License.
-package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.protocol.hostedService.deployment; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentContent; -import org.finos.legend.engine.language.hostedService.generation.model.GenerationInfo; +import org.finos.legend.engine.protocol.hostedService.deployment.model.GenerationInfo; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; public class HostedServiceContent extends FunctionActivatorDeploymentContent diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentConfiguration.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentConfiguration.java similarity index 80% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentConfiguration.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentConfiguration.java index f8627b853e7..91f156d3408 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentConfiguration.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentConfiguration.java @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.protocol.hostedService.deployment; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; public class HostedServiceDeploymentConfiguration extends FunctionActivatorDeploymentConfiguration { diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentResult.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentResult.java similarity index 81% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentResult.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentResult.java index 8917f5a219e..3ab1600de5e 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-api/src/main/java/org/finos/legend/engine/language/hostedService/deployment/HostedServiceDeploymentResult.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/HostedServiceDeploymentResult.java @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.language.hostedService.deployment; +package org.finos.legend.engine.protocol.hostedService.deployment; -import org.finos.legend.engine.functionActivator.deployment.DeploymentResult; +import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult; public class HostedServiceDeploymentResult extends DeploymentResult { diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfo.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfo.java similarity index 92% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfo.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfo.java index 642a287efe7..71b35a1b7ca 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfo.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfo.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
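// NOTE: this PR extracts the hosted-service deployment POJOs into the -protocol module so they can
// be (de)serialized and shared without pulling in generation-time dependencies; the moves above
// establish one subtype per function-activator concept. The same four-way pattern, sketched for a
// hypothetical activator -- every "MyActivator" name is invented, and accessible no-arg
// constructors in the protocol base classes are assumed:

class MyActivatorArtifact extends FunctionActivatorArtifact                                  // what gets shipped
{
}

class MyActivatorContent extends FunctionActivatorDeploymentContent                          // payload carried by the artifact
{
}

class MyActivatorDeploymentConfiguration extends FunctionActivatorDeploymentConfiguration    // where/how to deploy
{
}

class MyActivatorDeploymentResult extends DeploymentResult                                   // outcome reported back
{
}

class MyActivatorDeploymentManagerSketch
{
    // Gate on the artifact subtype, as HostedServiceDeploymentManager.canDeploy does above.
    boolean canDeploy(FunctionActivatorArtifact element)
    {
        return element instanceof MyActivatorArtifact;
    }

    // Identity replaces MutableList<CommonProfile> throughout this PR.
    MyActivatorDeploymentResult deploy(Identity identity, MyActivatorArtifact artifact, java.util.List<MyActivatorDeploymentConfiguration> availableRuntimeConfigurations)
    {
        // Resolve the target from the available configurations, authenticate as 'identity', deploy.
        return new MyActivatorDeploymentResult();
    }
}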
-package org.finos.legend.engine.language.hostedService.generation.model; +package org.finos.legend.engine.protocol.hostedService.deployment.model; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfoData.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfoData.java similarity index 88% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfoData.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfoData.java index ebee8649f10..39c3b489fc8 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/GenerationInfoData.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/GenerationInfoData.java @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.language.hostedService.generation.model; +package org.finos.legend.engine.protocol.hostedService.deployment.model; -import org.finos.legend.engine.language.hostedService.generation.model.lineage.Lineage; +import org.finos.legend.engine.protocol.hostedService.deployment.model.lineage.Lineage; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.ExecutionPlan; public class GenerationInfoData extends GenerationInfo diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/CompositeLineage.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/CompositeLineage.java similarity index 92% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/CompositeLineage.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/CompositeLineage.java index b58d93e6044..138f94ce329 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/CompositeLineage.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/CompositeLineage.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
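// NOTE: GenerationInfoData, relocated above, is what HostedServiceArtifactGenerator.renderArtifact
// returns and what a HostedServiceArtifact wraps: the rendered execution plan together with its
// lineage. Only the ExecutionPlan and Lineage imports are visible in this diff, so the field names
// below are assumptions for illustration:

public class GenerationInfoDataSketch extends GenerationInfo
{
    public ExecutionPlan plan;  // assumed: the plan rendered for the activator's function
    public Lineage lineage;     // assumed: lineage captured during generation
}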
-package org.finos.legend.engine.language.hostedService.generation.model.lineage; +package org.finos.legend.engine.protocol.hostedService.deployment.model.lineage; import org.eclipse.collections.api.factory.Maps; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/Lineage.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/Lineage.java similarity index 92% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/Lineage.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/Lineage.java index 5b1e67464a3..74875d948df 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/Lineage.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/Lineage.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.language.hostedService.generation.model.lineage; +package org.finos.legend.engine.protocol.hostedService.deployment.model.lineage; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/SingleLineage.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/SingleLineage.java similarity index 97% rename from legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/SingleLineage.java rename to legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/SingleLineage.java index 131e3f772cf..b2315f6adfe 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-generation/src/main/java/org/finos/legend/engine/language/hostedService/generation/model/lineage/SingleLineage.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/deployment/model/lineage/SingleLineage.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package org.finos.legend.engine.language.hostedService.generation.model.lineage; +package org.finos.legend.engine.protocol.hostedService.deployment.model.lineage; import com.fasterxml.jackson.annotation.JsonFormat; import org.eclipse.collections.api.factory.Lists; diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/metamodel/HostedServiceProtocolExtension.java b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/metamodel/HostedServiceProtocolExtension.java index 81fc05ea515..7185ae8d9be 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/metamodel/HostedServiceProtocolExtension.java +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/java/org/finos/legend/engine/protocol/hostedService/metamodel/HostedServiceProtocolExtension.java @@ -17,6 +17,12 @@ import org.eclipse.collections.api.block.function.Function0; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Maps; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent; +import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceArtifact; +import org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceContent; import org.finos.legend.engine.protocol.hostedService.metamodel.control.Deployment; import org.finos.legend.engine.protocol.hostedService.metamodel.control.Ownership; import org.finos.legend.engine.protocol.hostedService.metamodel.control.UserList; @@ -41,6 +47,18 @@ public List<Function0<List<ProtocolSubTypeInfo<?>>>> getExtraProtocolSubTypeInfo ProtocolSubTypeInfo.newBuilder(Ownership.class) .withSubtype(UserList.class, "userList") .withSubtype(Deployment.class, "deployment") + .build(), + ProtocolSubTypeInfo.newBuilder(DeploymentConfiguration.class) + .withSubtype(HostedServiceDeploymentConfiguration.class, "hostedServiceDeploymentConfiguration") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentConfiguration.class) + .withSubtype(org.finos.legend.engine.protocol.hostedService.deployment.HostedServiceDeploymentConfiguration.class, "hostedServiceDeploymentConfig") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorArtifact.class) + .withSubtype(HostedServiceArtifact.class, "hostedServiceArtifact") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentContent.class) + .withSubtype(HostedServiceContent.class, "hostedServiceDeploymentContent") .build() )); } diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator new file mode 100644 index 00000000000..85fed0b96b3 --- /dev/null +++ 
b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator @@ -0,0 +1 @@ +org.finos.legend.engine.protocol.hostedService.metamodel.HostedService \ No newline at end of file diff --git a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-pure/pom.xml b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-pure/pom.xml index df59aa04e05..906addfd4db 100644 --- a/legend-engine-xts-hostedService/legend-engine-xt-hostedService-pure/pom.xml +++ b/legend-engine-xts-hostedService/legend-engine-xt-hostedService-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-hostedService - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-hostedService/pom.xml b/legend-engine-xts-hostedService/pom.xml index bd432585b6b..290586cc5ee 100644 --- a/legend-engine-xts-hostedService/pom.xml +++ b/legend-engine-xts-hostedService/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-iceberg/legend-engine-xt-iceberg-pure/pom.xml b/legend-engine-xts-iceberg/legend-engine-xt-iceberg-pure/pom.xml index 3b8a9040b9e..37354fadfbb 100644 --- a/legend-engine-xts-iceberg/legend-engine-xt-iceberg-pure/pom.xml +++ b/legend-engine-xts-iceberg/legend-engine-xt-iceberg-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-iceberg - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-iceberg/legend-engine-xt-iceberg-test-support/pom.xml b/legend-engine-xts-iceberg/legend-engine-xt-iceberg-test-support/pom.xml index 3e981c3c934..6ef693b35e1 100644 --- a/legend-engine-xts-iceberg/legend-engine-xt-iceberg-test-support/pom.xml +++ b/legend-engine-xts-iceberg/legend-engine-xt-iceberg-test-support/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-iceberg - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -122,13 +122,6 @@ io.minio minio - 8.5.5 - - - org.jetbrains - * - - diff --git a/legend-engine-xts-iceberg/pom.xml b/legend-engine-xts-iceberg/pom.xml index 5a0a130aedd..978185473c0 100644 --- a/legend-engine-xts-iceberg/pom.xml +++ b/legend-engine-xts-iceberg/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-java/legend-engine-external-language-java/pom.xml b/legend-engine-xts-java/legend-engine-external-language-java/pom.xml index 13312566e13..458b750eea1 100644 --- a/legend-engine-xts-java/legend-engine-external-language-java/pom.xml +++ b/legend-engine-xts-java/legend-engine-external-language-java/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-java - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-java/legend-engine-xt-javaGeneration-featureBased-pure/pom.xml b/legend-engine-xts-java/legend-engine-xt-javaGeneration-featureBased-pure/pom.xml index 50c295c9cbc..2b836572060 100644 --- a/legend-engine-xts-java/legend-engine-xt-javaGeneration-featureBased-pure/pom.xml +++ b/legend-engine-xts-java/legend-engine-xt-javaGeneration-featureBased-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-java - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/pom.xml b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/pom.xml index c398240c537..e7035bfbe86 100644 --- 
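The new META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator resource above registers HostedService via the standard java.util.ServiceLoader mechanism. A generic sketch of how such a registration is consumed; the interface is a hypothetical stand-in, and legend-engine's real extension loading wraps more around this:

```java
import java.util.ServiceLoader;

public class ServiceLoaderSketch
{
    // Hypothetical stand-in for the FunctionActivator service interface.
    public interface FunctionActivator
    {
    }

    public static void main(String[] args)
    {
        // Each line of META-INF/services/<fully.qualified.InterfaceName> names one
        // implementation class; ServiceLoader finds and instantiates them reflectively.
        for (FunctionActivator activator : ServiceLoader.load(FunctionActivator.class))
        {
            System.out.println(activator.getClass().getName());
        }
    }
}
```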
a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/pom.xml +++ b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-java - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/generation/conventions.pure b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/generation/conventions.pure index cd9b5d2d9b8..099831f1ae1 100644 --- a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/generation/conventions.pure +++ b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/generation/conventions.pure @@ -1038,13 +1038,18 @@ function meta::external::language::java::transform::newConventions(extensions : function <> meta::external::language::java::transform::buildIdentifierFactory():Function<{String[1]->String[1]}>[1] { - let keywordMap = buildJavaKeywordReplacementMap(); + let keywordMap = buildReservedKeywordReplacementMap(); {name:String[1] | $name->sanitizeIdentifier($keywordMap)}; } -function <> meta::external::language::java::transform::buildJavaKeywordReplacementMap():Map[1] +function <> meta::external::language::java::transform::buildReservedKeywordReplacementMap():Map[1] { - javaKeywords()->map(kw | pair($kw, '_' + $kw))->newMap() + reservedKeywords()->buildKeywordReplacementMap() +} + +function meta::external::language::java::transform::buildKeywordReplacementMap(Keywords: String[*]):Map[1] +{ + $Keywords->map(kw | pair($kw, '_' + $kw))->newMap() } function meta::external::language::java::transform::setBasePackageName(conventions:Conventions[1], name:String[1]): Conventions[1] @@ -1265,7 +1270,7 @@ function meta::external::language::java::transform::sanitizeIdentifier(name: Str function meta::external::language::java::transform::sanitizeIdentifier(name: String[1]): String[1] { - $name->sanitizeIdentifier(buildJavaKeywordReplacementMap()) + $name->sanitizeIdentifier(buildReservedKeywordReplacementMap()) } function <> meta::external::language::java::transform::replaceIllegalSymbols(symbol: String[1]): String[1] @@ -1293,7 +1298,7 @@ function meta::external::language::java::transform::sanitizeJavaKeywords(name: S function meta::external::language::java::transform::sanitizeJavaKeywords(name: String[1]): String[1] { - $name->sanitizeJavaKeywords(buildJavaKeywordReplacementMap()); + $name->sanitizeJavaKeywords(buildReservedKeywordReplacementMap()); } function <> meta::external::language::java::transform::startsWithNumber(s: String[1]): Boolean[1] diff --git a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/metamodel.pure b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/metamodel.pure index d41a89a0d0e..9ce339c3451 100644 --- a/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/metamodel.pure +++ b/legend-engine-xts-java/legend-engine-xt-javaGeneration-pure/src/main/resources/core_external_language_java/metamodel.pure @@ -799,6 +799,16 @@ function meta::external::language::java::metamodel::javaKeywords(): String[*] ]; } +function meta::external::language::java::metamodel::modelPropertyKeywords(): String[*] +{ + ['com', 'org']; +} + +function 
meta::external::language::java::metamodel::reservedKeywords(): String[*] +{ + javaKeywords()->concatenate(modelPropertyKeywords()); +} + function meta::external::language::java::metamodel::isJavaLang(pkg: meta::external::language::java::metamodel::Package[1]):Boolean[1] { $pkg.name == 'lang' diff --git a/legend-engine-xts-java/legend-engine-xt-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-java/legend-engine-xt-javaPlatformBinding-pure/pom.xml index 7db3c8bb05f..6b56285d9a5 100644 --- a/legend-engine-xts-java/legend-engine-xt-javaPlatformBinding-pure/pom.xml +++ b/legend-engine-xts-java/legend-engine-xt-javaPlatformBinding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-java - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-java/pom.xml b/legend-engine-xts-java/pom.xml index f4ecc30945a..1f3a4424ac9 100644 --- a/legend-engine-xts-java/pom.xml +++ b/legend-engine-xts-java/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-external-format-jsonSchema/pom.xml b/legend-engine-xts-json/legend-engine-external-format-jsonSchema/pom.xml index 66b7c3bd864..daa7a0e0b30 100644 --- a/legend-engine-xts-json/legend-engine-external-format-jsonSchema/pom.xml +++ b/legend-engine-xts-json/legend-engine-external-format-jsonSchema/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-pure/pom.xml index 9bdf6b11d7c..0790e451370 100644 --- a/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-pure/pom.xml +++ b/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-test/pom.xml b/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-test/pom.xml index c00f6bbe83b..13c02c5c7bb 100644 --- a/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-test/pom.xml +++ b/legend-engine-xts-json/legend-engine-xt-json-javaPlatformBinding-test/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-xt-json-model/pom.xml b/legend-engine-xts-json/legend-engine-xt-json-model/pom.xml index da634985f6d..43fe25f72ce 100644 --- a/legend-engine-xts-json/legend-engine-xt-json-model/pom.xml +++ b/legend-engine-xts-json/legend-engine-xt-json-model/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-xt-json-pure/pom.xml b/legend-engine-xts-json/legend-engine-xt-json-pure/pom.xml index 6ab776881cd..0a7ce85ce10 100644 --- a/legend-engine-xts-json/legend-engine-xt-json-pure/pom.xml +++ b/legend-engine-xts-json/legend-engine-xt-json-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/legend-engine-xt-json-runtime/pom.xml b/legend-engine-xts-json/legend-engine-xt-json-runtime/pom.xml index 53b9699d5a2..cf4b2df04af 100644 --- a/legend-engine-xts-json/legend-engine-xt-json-runtime/pom.xml +++ 
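The conventions.pure and metamodel.pure changes above generalize identifier sanitization: the replacement map is now built from reservedKeywords(), i.e. javaKeywords() plus the new modelPropertyKeywords() list ('com', 'org'), with each reserved word mapped to itself prefixed by '_'. A Java rendering of the same logic, with the keyword lists truncated for brevity:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class KeywordReplacementSketch
{
    // Mirrors buildKeywordReplacementMap: each reserved word maps to '_' + word.
    static Map<String, String> buildKeywordReplacementMap(List<String> keywords)
    {
        Map<String, String> map = new HashMap<>();
        keywords.forEach(kw -> map.put(kw, "_" + kw));
        return map;
    }

    public static void main(String[] args)
    {
        List<String> reserved = new ArrayList<>(List.of("class", "int", "package")); // javaKeywords(), truncated
        reserved.addAll(List.of("com", "org"));                                      // modelPropertyKeywords()
        Map<String, String> replacements = buildKeywordReplacementMap(reserved);

        // A property named 'org' would collide with package roots in generated
        // Java, so it is rewritten; ordinary names pass through unchanged.
        System.out.println(replacements.getOrDefault("org", "org"));       // _org
        System.out.println(replacements.getOrDefault("widget", "widget")); // widget
    }
}
```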
b/legend-engine-xts-json/legend-engine-xt-json-runtime/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-json - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-json/pom.xml b/legend-engine-xts-json/pom.xml index f7f05218ab6..e5a5bf73e61 100644 --- a/legend-engine-xts-json/pom.xml +++ b/legend-engine-xts-json/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/pom.xml b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/pom.xml index c06b7dd9ebb..d427cc6d85d 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/pom.xml +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-mastery org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/MasteryParserGrammar.g4 b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/MasteryParserGrammar.g4 index b039d6becae..ababf38b27f 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/MasteryParserGrammar.g4 +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/MasteryParserGrammar.g4 @@ -363,7 +363,7 @@ pathExtension: subPath filter? ; subPath: '.' validString ; -filter: BRACE_OPEN '$' '.' combinedExpression BRACE_CLOSE +filter: BRACE_OPEN combinedExpression BRACE_CLOSE ; predicate: PREDICATE COLON lambdaFunction diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperAcquisitionBuilder.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperAcquisitionBuilder.java index 885389da4a0..d341a969673 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperAcquisitionBuilder.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperAcquisitionBuilder.java @@ -21,6 +21,7 @@ import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.DESDecryption; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.Decryption; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.FileAcquisitionProtocol; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.FileType; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.KafkaAcquisitionProtocol; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.LegendServiceAcquisitionProtocol; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.PGPDecryption; @@ -42,6 +43,7 @@ import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.PackageableElement; import static java.lang.String.format; +import static 
org.apache.commons.lang3.StringUtils.isEmpty; public class HelperAcquisitionBuilder { @@ -75,6 +77,7 @@ public static Root_meta_pure_mastery_metamodel_acquisition_AcquisitionProtocol b public static Root_meta_pure_mastery_metamodel_acquisition_FileAcquisitionProtocol buildFileAcquisitionProtocol(FileAcquisitionProtocol acquisitionProtocol, CompileContext context) { + validateFileAcquisitionProtocol(acquisitionProtocol); Root_meta_pure_mastery_metamodel_connection_FileConnection fileConnection; PackageableElement packageableElement = context.resolvePackageableElement(acquisitionProtocol.connection, acquisitionProtocol.sourceInformation); if (packageableElement instanceof Root_meta_pure_mastery_metamodel_connection_FileConnection) @@ -98,6 +101,15 @@ public static Root_meta_pure_mastery_metamodel_acquisition_FileAcquisitionProtoc ._decryption(acquisitionProtocol.decryption == null ? null : buildDecryption(acquisitionProtocol.decryption, context)); } + private static void validateFileAcquisitionProtocol(FileAcquisitionProtocol fileAcquisitionProtocol) + { + if (fileAcquisitionProtocol.fileType == FileType.JSON && isEmpty(fileAcquisitionProtocol.recordsKey)) + { + throw new EngineException("'recordsKey' must be specified when file type is JSON", fileAcquisitionProtocol.sourceInformation, EngineErrorType.COMPILATION); + } + } + + public static Root_meta_pure_mastery_metamodel_acquisition_file_Decryption buildDecryption(Decryption decryption, CompileContext context) { if (decryption instanceof PGPDecryption) diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperMasterRecordDefinitionBuilder.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperMasterRecordDefinitionBuilder.java index db5a9ebe315..bf54d74e517 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperMasterRecordDefinitionBuilder.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperMasterRecordDefinitionBuilder.java @@ -26,10 +26,12 @@ import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.Multiplicity; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.MasterRecordDefinition; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.Profile; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.RecordService; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.RecordSource; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.RecordSourceVisitor; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.AcquisitionProtocol; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition.KafkaAcquisitionProtocol; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.authorization.Authorization; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.identity.CollectionEquality; import 
org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.identity.IdentityResolution; @@ -45,13 +47,19 @@ import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.LambdaFunction; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.property.Property; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Class; +import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Enumeration; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.type.Type; +import org.finos.legend.pure.m4.coreinstance.CoreInstance; import java.util.*; import java.util.stream.Collectors; +import static com.google.common.collect.Iterables.isEmpty; import static com.google.common.collect.Sets.newHashSet; import static java.lang.String.format; +import static java.util.Objects.nonNull; +import static org.apache.commons.lang3.BooleanUtils.isFalse; +import static org.apache.commons.lang3.BooleanUtils.isTrue; public class HelperMasterRecordDefinitionBuilder { @@ -167,6 +175,7 @@ public Root_meta_pure_mastery_metamodel_precedence_PrecedenceRule visit(Preceden } else if (precedenceRule instanceof DeleteRule) { + validateNoDataProviderScope((DeleteRule) precedenceRule); purePrecedenceRule = new Root_meta_pure_mastery_metamodel_precedence_DeleteRule_Impl(""); } else if (precedenceRule instanceof CreateRule) @@ -175,7 +184,7 @@ else if (precedenceRule instanceof CreateRule) } else if (precedenceRule instanceof ConditionalRule) { - purePrecedenceRule = visitConditionalRule(precedenceRule); + purePrecedenceRule = visitConditionalRule((ConditionalRule) precedenceRule); } else { @@ -218,10 +227,10 @@ private Root_meta_pure_mastery_metamodel_precedence_SourcePrecedenceRule visitSo return pureSourcePrecedenceRule; } - private Root_meta_pure_mastery_metamodel_precedence_ConditionalRule visitConditionalRule(PrecedenceRule precedenceRule) + private Root_meta_pure_mastery_metamodel_precedence_ConditionalRule visitConditionalRule(ConditionalRule conditionalRule) { + validateNoScopeSet(conditionalRule); Root_meta_pure_mastery_metamodel_precedence_ConditionalRule pureConditionalRule = new Root_meta_pure_mastery_metamodel_precedence_ConditionalRule_Impl(""); - ConditionalRule conditionalRule = (ConditionalRule) precedenceRule; validatePredicateInput(conditionalRule.predicate); pureConditionalRule._predicate(HelperValueSpecificationBuilder.buildLambda(conditionalRule.predicate, context)); return pureConditionalRule; @@ -234,6 +243,26 @@ private void validatePredicateInput(Lambda predicate) validateInputMultiplicity(predicate); } + private void validateNoScopeSet(ConditionalRule conditionalRule) + { + if (!isEmpty(conditionalRule.scopes)) + { + throw new EngineException( + "ConditionalRule with ruleScope is currently unsupported", conditionalRule.sourceInformation, + EngineErrorType.COMPILATION); + } + } + + private void validateNoDataProviderScope(DeleteRule deleteRule) + { + if (!isEmpty(deleteRule.scopes) && deleteRule.scopes.stream().anyMatch(scope -> scope instanceof DataProviderTypeScope)) + { + throw new EngineException( + "DataProviderTypeScope is not allowed on DeleteRule", deleteRule.sourceInformation, + EngineErrorType.COMPILATION); + } + } + private void validateInputVariableNames(Lambda predicate) { Set actualNames = predicate.parameters.stream().map(variable -> variable.name).collect(Collectors.toSet()); @@ -276,7 +305,7 @@ private Root_meta_pure_mastery_metamodel_precedence_PropertyPath visitPath(Prope Property property = (Property) 
context.resolveProperty(determineFullPath(parentClass), propertyPath.property); Type propertyClass = property._genericType()._rawType(); String propertyClassName; - if (propertyClass instanceof Class) + if ((propertyClass instanceof Class) || (propertyClass instanceof Enumeration)) { propertyClassName = determineFullPath(propertyClass); } @@ -359,6 +388,7 @@ public RecordSourceBuilder(CompileContext context) @Override public Root_meta_pure_mastery_metamodel_RecordSource visit(RecordSource protocolSource) { + validateRecordSource(protocolSource); List extensions = IMasteryCompilerExtension.getExtensions(); List> processors = ListIterate.flatCollect(extensions, IMasteryCompilerExtension::getExtraAuthorizationProcessors); List> triggerProcessors = ListIterate.flatCollect(extensions, IMasteryCompilerExtension::getExtraTriggerProcessors); @@ -415,6 +445,25 @@ private static RichIterable + private static void validateRecordSource(RecordSource recordSource) + { + if (recordSource.id.length() > 31) + { + throw new EngineException(format("Invalid record source id '%s'; id must not be longer than 31 characters.", recordSource.id), recordSource.sourceInformation, EngineErrorType.COMPILATION); + } + + boolean kafkaSource = nonNull(recordSource.recordService) && nonNull(recordSource.recordService.acquisitionProtocol) && recordSource.recordService.acquisitionProtocol.isKafkaAcquisitionProtocol(); + + if (isTrue(recordSource.sequentialData) && kafkaSource && nonNull(recordSource.runProfile) && recordSource.runProfile != Profile.ExtraSmall) + { + throw new EngineException("'runProfile' can only be set to ExtraSmall for Delta kafka sources", recordSource.sourceInformation, EngineErrorType.COMPILATION); + } + if (isFalse(recordSource.sequentialData) && kafkaSource && nonNull(recordSource.runProfile) && recordSource.runProfile != Profile.Small) + { + throw new EngineException("'runProfile' can only be set to Small for Full Universe kafka sources", recordSource.sourceInformation, EngineErrorType.COMPILATION); + } + } } private static Root_meta_pure_mastery_metamodel_DataProvider getAndValidateDataProvider(String path, SourceInformation sourceInformation, CompileContext context) diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperTriggerBuilder.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperTriggerBuilder.java index fc3172ab889..9f68f4c0b67 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperTriggerBuilder.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/toPureGraph/HelperTriggerBuilder.java @@ -16,14 +16,19 @@ import org.eclipse.collections.impl.utility.ListIterate; import org.finos.legend.engine.language.pure.compiler.toPureGraph.CompileContext; +import org.finos.legend.engine.protocol.pure.v1.model.context.EngineErrorType; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.trigger.CronTrigger; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.trigger.Frequency; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.trigger.ManualTrigger; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.trigger.Trigger; +import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; import 
org.finos.legend.pure.generated.Root_meta_pure_mastery_metamodel_trigger_CronTrigger; import org.finos.legend.pure.generated.Root_meta_pure_mastery_metamodel_trigger_CronTrigger_Impl; import org.finos.legend.pure.generated.Root_meta_pure_mastery_metamodel_trigger_ManualTrigger_Impl; import org.finos.legend.pure.generated.Root_meta_pure_mastery_metamodel_trigger_Trigger; +import static org.eclipse.collections.impl.utility.Iterate.isEmpty; + public class HelperTriggerBuilder { @@ -45,6 +50,7 @@ public static Root_meta_pure_mastery_metamodel_trigger_Trigger buildTrigger(Trig private static Root_meta_pure_mastery_metamodel_trigger_CronTrigger buildCronTrigger(CronTrigger cronTrigger, CompileContext context) { + validateCronTrigger(cronTrigger); return new Root_meta_pure_mastery_metamodel_trigger_CronTrigger_Impl("") ._minute(cronTrigger.minute) ._hour(cronTrigger.hour) @@ -55,4 +61,32 @@ private static Root_meta_pure_mastery_metamodel_trigger_CronTrigger buildCronTri ._month(cronTrigger.year == null ? null : context.resolveEnumValue("meta::pure::mastery::metamodel::trigger::Month", cronTrigger.month.name())) ._days(ListIterate.collect(cronTrigger.days, day -> context.resolveEnumValue("meta::pure::mastery::metamodel::trigger::Day", day.name()))); } + + private static void validateCronTrigger(CronTrigger cronTrigger) + { + if ((cronTrigger.frequency == Frequency.Intraday || cronTrigger.frequency == Frequency.Daily) && isEmpty(cronTrigger.days)) + { + throw new EngineException("'days' must not be empty when trigger frequency is Daily or Intraday", cronTrigger.sourceInformation, EngineErrorType.COMPILATION); + } + + if (cronTrigger.frequency == Frequency.Weekly && cronTrigger.days.size() != 1) + { + throw new EngineException("'days' specified must be exactly one when trigger frequency is Weekly", cronTrigger.sourceInformation, EngineErrorType.COMPILATION); + } + + if (!isInHourRange(cronTrigger.hour) || !isInMinuteRange(cronTrigger.minute)) + { + throw new EngineException("'hour' must be a number between 0 and 23 (both inclusive), and 'minute' must be a number between 0 and 59 (both inclusive)", cronTrigger.sourceInformation, EngineErrorType.COMPILATION); + } + } + + private static boolean isInHourRange(Integer hour) + { + return 0 <= hour && hour < 24; + } + + private static boolean isInMinuteRange(Integer minute) + { + return 0 <= minute && minute < 60; + } } diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/MasteryParseTreeWalker.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/MasteryParseTreeWalker.java index 8a252c6b062..9fad84905db 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/MasteryParseTreeWalker.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/MasteryParseTreeWalker.java @@ -73,7 +73,9 @@ public class MasteryParseTreeWalker private static final String SIMPLE_PRECEDENCE_LAMBDA = "{input: %s[1]| true}"; - private static final String PRECEDENCE_LAMBDA_WITH_FILTER = "{input: %s[1]| $input.%s}"; + private static final String INPUT = "\\$input"; + private static final String DOLLAR_SIGN = "\\$"; + private static final String PRECEDENCE_LAMBDA_WITH_FILTER = "{input: %s[1]| %s}"; private static final String 
DATA_PROVIDER_STRING = "DataProvider"; public MasteryParseTreeWalker(ParseTreeWalkerSourceInformation walkerSourceInformation, @@ -373,8 +375,9 @@ private PropertyPath visitPathExtension(MasteryParserGrammar.PathExtensionContex private Lambda visitLambdaWithFilter(String propertyName, MasteryParserGrammar.CombinedExpressionContext ctx) { + String inputFilter = ctx.getText().replaceAll(DOLLAR_SIGN, INPUT); return domainParser.parseLambda( - format(PRECEDENCE_LAMBDA_WITH_FILTER, propertyName, ctx.getText()), + format(PRECEDENCE_LAMBDA_WITH_FILTER, propertyName, inputFilter), "", 0, 0, true); } diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/trigger/TriggerParseTreeWalker.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/trigger/TriggerParseTreeWalker.java index 3d39052465e..506b8efca39 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/trigger/TriggerParseTreeWalker.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/from/trigger/TriggerParseTreeWalker.java @@ -93,7 +93,7 @@ private Trigger visitCronTrigger(TriggerParserGrammar.CronTriggerContext ctx) // frequency - TriggerParserGrammar.FrequencyContext frequencyContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.frequency(), "frequency", sourceInformation); + TriggerParserGrammar.FrequencyContext frequencyContext = PureGrammarParserUtility.validateAndExtractRequiredField(ctx.frequency(), "frequency", sourceInformation); if (frequencyContext != null) { String frequencyString = frequencyContext.frequencyValue().getText(); @@ -101,7 +101,7 @@ private Trigger visitCronTrigger(TriggerParserGrammar.CronTriggerContext ctx) } // days - TriggerParserGrammar.DaysContext daysContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.days(), "days", sourceInformation); + TriggerParserGrammar.DaysContext daysContext = PureGrammarParserUtility.validateAndExtractRequiredField(ctx.days(), "days", sourceInformation); if (daysContext != null) { cronTrigger.days = ListIterate.collect(daysContext.dayValue(), this::visitRunDay); diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/HelperMasteryGrammarComposer.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/HelperMasteryGrammarComposer.java index 38aace1908c..39f33ca4252 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/HelperMasteryGrammarComposer.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/HelperMasteryGrammarComposer.java @@ -48,6 +48,8 @@ public class HelperMasteryGrammarComposer { private static final String PRECEDENCE_LAMBDA_WITH_FILTER_PREFIX = "\\{?input: .*\\[1]\\|\\$input\\."; + private static final String INPUT = "input"; + private static final String BRACKETS = "\\(|\\)"; private static final String PRECEDENCE_LAMBDA_WITH_FILTER_SUFFIX = ".*"; private HelperMasteryGrammarComposer() @@ -402,10 +404,10 @@ private String 
visitPath(Lambda masterRecordFilter, List propertyP private String visitLambda(Lambda lambda) { StringBuilder builder = new StringBuilder(); - String lambdaStr = lambda.accept(DEPRECATED_PureGrammarComposerCore.Builder.newInstance(context).build()); + String lambdaStr = lambda.accept(DEPRECATED_PureGrammarComposerCore.Builder.newInstance(context).build()).replaceAll(BRACKETS, ""); if (lambdaStr.matches(PRECEDENCE_LAMBDA_WITH_FILTER_PREFIX + PRECEDENCE_LAMBDA_WITH_FILTER_SUFFIX)) { - String filterPath = lambdaStr.replaceAll(PRECEDENCE_LAMBDA_WITH_FILTER_PREFIX, ""); + String filterPath = lambdaStr.replaceAll(PRECEDENCE_LAMBDA_WITH_FILTER_PREFIX, "").replace(INPUT, ""); builder.append("{$.").append(filterPath).append("}"); } return builder.toString(); diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/MasteryGrammarComposerExtension.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/MasteryGrammarComposerExtension.java index 2cd206d3410..b59e2bc814b 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/MasteryGrammarComposerExtension.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/main/java/org/finos/legend/engine/language/pure/dsl/mastery/grammar/to/MasteryGrammarComposerExtension.java @@ -75,6 +75,7 @@ public List, PureGrammarComposerContext, List composableElements.addAll(ListIterate.selectInstancesOf(elements, MasterRecordDefinition.class)); composableElements.addAll(ListIterate.selectInstancesOf(elements, Connection.class)); composableElements.addAll(ListIterate.selectInstancesOf(elements, DataProvider.class)); + composableElements.addAll(ListIterate.selectInstancesOf(elements, MasteryRuntime.class)); return composableElements.isEmpty() ? 
null @@ -93,6 +94,10 @@ else if (element instanceof Connection) { return MasteryGrammarComposerExtension.renderConnection((Connection) element, context); } + else if (element instanceof MasteryRuntime) + { + return MasteryGrammarComposerExtension.renderMasteryRuntime((MasteryRuntime) element, context); + } throw new UnsupportedOperationException("Unsupported type " + element.getClass().getName()); }) .makeString("###" + MasteryParserExtension.NAME + "\n", "\n\n", ""), composableElements); diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/test/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/test/TestMasteryCompilationFromGrammar.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/test/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/test/TestMasteryCompilationFromGrammar.java index b9953070433..f93c109d262 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/test/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/test/TestMasteryCompilationFromGrammar.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-grammar/src/test/java/org/finos/legend/engine/language/pure/dsl/mastery/compiler/test/TestMasteryCompilationFromGrammar.java @@ -79,9 +79,15 @@ public class TestMasteryCompilationFromGrammar extends TestCompilationFromGramma "{\n" + " widgetId: String[0..1];\n" + " trigger: String[0..1];\n" + + " modelType: org::dataeng::ModelType[0..1];\n" + " runProfile: org::dataeng::Medium[0..1];\n" + " identifiers: org::dataeng::MilestonedIdentifier[*];\n" + "}\n\n" + + "Enum org::dataeng::ModelType\n" + + "{\n" + + " modelA,\n" + + " modelB\n" + + "}\n\n" + "Class org::dataeng::Medium\n" + "{\n" + " authorization: String[0..1];\n" + @@ -134,7 +140,7 @@ public class TestMasteryCompilationFromGrammar extends TestCompilationFromGramma " }\n" + " precedenceRules: [\n" + " DeleteRule: {\n" + - " path: org::dataeng::Widget.identifiers;\n" + + " path: org::dataeng::Widget.modelType;\n" + " ruleScope: [\n" + " RecordSourceScope {widget-rest-source}\n" + " ];\n" + @@ -168,7 +174,7 @@ public class TestMasteryCompilationFromGrammar extends TestCompilationFromGramma " path: org::dataeng::Widget.runProfile.authorization;\n" + " },\n" + " SourcePrecedenceRule: {\n" + - " path: org::dataeng::Widget.identifiers{$.identifier == 'XLON'};\n" + + " path: org::dataeng::Widget.identifiers{$.identifier == 'XLON' || $.identifier == 'LSE'};\n" + " action: Overwrite;\n" + " ruleScope: [\n" + " RecordSourceScope {widget-file-source-sftp, precedence: 1},\n" + @@ -575,7 +581,7 @@ public void testMasteryFullModel() Root_meta_pure_mastery_metamodel_precedence_PropertyPath propertyPath = paths.get(0); //path property - assertEquals("identifiers", propertyPath._property()._name()); + assertEquals("modelType", propertyPath._property()._name()); assertEquals("Widget", propertyPath._property()._owner()._name()); //path filter assertEquals("true", getSimpleLambdaValue(propertyPath._filter())); @@ -670,10 +676,25 @@ else if (i == 6) Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl complexLambda = getComplexLambda(firstPropertyPath._filter()); List lambdaParameters = complexLambda._parametersValues().toList(); - assertEquals("MilestonedIdentifier", getFunctionProperty(lambdaParameters.get(0))._owner()._name()); - assertEquals("identifier", getFunctionProperty(lambdaParameters.get(0))._name()); - assertEquals("equal", complexLambda._functionName()); - assertEquals("XLON", 
getInstanceValue(lambdaParameters.get(1))); + assertEquals("or", complexLambda._functionName()); + + // first Part of filter + Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl firstFilter = (Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl) lambdaParameters.get(0); + List firstFilterLambdaParameters = firstFilter._parametersValues().toList(); + + assertEquals("MilestonedIdentifier", getFunctionProperty(firstFilterLambdaParameters.get(0))._owner()._name()); + assertEquals("identifier", getFunctionProperty(firstFilterLambdaParameters.get(0))._name()); + assertEquals("equal", firstFilter._functionName()); + assertEquals("XLON", getInstanceValue(firstFilterLambdaParameters.get(1))); + + // second Part of filter + Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl secondFilter = (Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl) lambdaParameters.get(1); + List secondFilterLambdaParameters = secondFilter._parametersValues().toList(); + + assertEquals("MilestonedIdentifier", getFunctionProperty(secondFilterLambdaParameters.get(0))._owner()._name()); + assertEquals("identifier", getFunctionProperty(secondFilterLambdaParameters.get(0))._name()); + assertEquals("equal", secondFilter._functionName()); + assertEquals("LSE", getInstanceValue(secondFilterLambdaParameters.get(1))); //masterRecordFilter assertEquals("true", getSimpleLambdaValue(source._masterRecordFilter())); @@ -702,10 +723,25 @@ else if (i == 7) Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl complexLambda = getComplexLambda(firstPropertyPath._filter()); List lambdaParameters = complexLambda._parametersValues().toList(); - assertEquals("MilestonedIdentifier", getFunctionProperty(lambdaParameters.get(0))._owner()._name()); - assertEquals("identifier", getFunctionProperty(lambdaParameters.get(0))._name()); - assertEquals("equal", complexLambda._functionName()); - assertEquals("XLON", getInstanceValue(lambdaParameters.get(1))); + assertEquals("or", complexLambda._functionName()); + + // first Part of filter + Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl firstFilter = (Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl) lambdaParameters.get(0); + List firstFilterLambdaParameters = firstFilter._parametersValues().toList(); + + assertEquals("MilestonedIdentifier", getFunctionProperty(firstFilterLambdaParameters.get(0))._owner()._name()); + assertEquals("identifier", getFunctionProperty(firstFilterLambdaParameters.get(0))._name()); + assertEquals("equal", firstFilter._functionName()); + assertEquals("XLON", getInstanceValue(firstFilterLambdaParameters.get(1))); + + // second Part of filter + Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl secondFilter = (Root_meta_pure_metamodel_valuespecification_SimpleFunctionExpression_Impl) lambdaParameters.get(1); + List secondFilterLambdaParameters = secondFilter._parametersValues().toList(); + + assertEquals("MilestonedIdentifier", getFunctionProperty(secondFilterLambdaParameters.get(0))._owner()._name()); + assertEquals("identifier", getFunctionProperty(secondFilterLambdaParameters.get(0))._name()); + assertEquals("equal", secondFilter._functionName()); + assertEquals("LSE", getInstanceValue(secondFilterLambdaParameters.get(1))); //masterRecordFilter assertEquals("true", getSimpleLambdaValue(source._masterRecordFilter())); @@ -903,6 +939,398 @@ public void testMasteryDeprecatedModelCanStillCompile() 
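The updated expectations above (e.g. {$.identifier == 'XLON' || $.identifier == 'LSE'}) exercise the grammar change that lets filter carry a full combinedExpression: MasteryParseTreeWalker.visitLambdaWithFilter now rewrites every '$' in the filter text to '$input' before parsing the lambda. A standalone sketch of that rewrite; the property class name is illustrative:

```java
public class FilterRewriteSketch
{
    private static final String PRECEDENCE_LAMBDA_WITH_FILTER = "{input: %s[1]| %s}";

    public static void main(String[] args)
    {
        String filterText = "$.identifier == 'XLON' || $.identifier == 'LSE'";

        // "\\$" is the regex for a literal '$'; in the replacement string,
        // "\\$" escapes '$' (normally a group reference), so every '$'
        // becomes '$input'.
        String body = filterText.replaceAll("\\$", "\\$input");

        System.out.println(String.format(PRECEDENCE_LAMBDA_WITH_FILTER, "org::dataeng::MilestonedIdentifier", body));
        // {input: org::dataeng::MilestonedIdentifier[1]| $input.identifier == 'XLON' || $input.identifier == 'LSE'}
    }
}
```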
assertEquals("Widget", masterRecordDefinition._modelClass()._name()); } + @Test + public void testCompilationErrorWhenInvalidTriggerDefinition() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-producer: {\n" + + " description: 'REST Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: REST;\n" + + " };\n" + + " trigger: Cron #{\n" + + " minute: 70;\n" + + " hour: 25;\n" + + " timezone: 'UTC';\n" + + " frequency: Daily;\n" + + " days: [ Monday, Tuesday, Wednesday, Thursday, Friday ];\n" + + " }#;\n" + + " }\n" + + " ]\n" + + "}\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [8:1-39:1]: Error in 'alloy::mastery::WidgetMasterRecord': 'hour' must be a number between 0 and 23 (both inclusive), and 'minute' must be a number between 0 and 59 (both inclusive)"); + } + + @Test + public void testCompilationErrorWhenWeeklyFrequencyButMoreThanOneRunDaySpecified() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-producer: {\n" + + " description: 'REST Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: REST;\n" + + " };\n" + + " trigger: Cron #{\n" + + " minute: 45;\n" + + " hour: 2;\n" + + " timezone: 'UTC';\n" + + " frequency: Weekly;\n" + + " days: [ Monday, Tuesday ];\n" + + " }#;\n" + + " }\n" + + " ]\n" + + "}\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [8:1-39:1]: Error in 'alloy::mastery::WidgetMasterRecord': 'days' specified must be exactly one when trigger frequency is Weekly"); + } + + @Test + public void testCompilationErrorWhenRecordSourceIdExceedThirtyOne() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-producer-alloy-mastery-exceed-allowed-length: {\n" + + " description: 'REST Acquisition source.';\n" + + " status: 
Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: REST;\n" + + " };\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [24:5-31:5]: Invalid record source id 'widget-producer-alloy-mastery-exceed-allowed-length'; id must not be longer than 31 characters."); + } + + @Test + public void testCompilationErrorWhenDeltaKafkaSourceHasRunProfileOtherThanExtraSmall() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-kafka: {\n" + + " description: 'Kafka Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: Kafka #{\n" + + " dataType: JSON;\n" + + " connection: alloy::mastery::connection::KafkaConnection;\n" + + " }#;\n" + + " };\n" + + " sequentialData: true;\n" + + " runProfile: Medium;\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n\n" + + + "MasteryConnection alloy::mastery::connection::KafkaConnection\n" + + "{\n" + + " specification: Kafka #{\n" + + " topicName: 'my-topic-name';\n" + + " topicUrls: [\n" + + " 'some.url.com:2100',\n" + + " 'another.url.com:2100'\n" + + " ];\n" + + " }#;\n" + + "}"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [24:5-36:5]: 'runProfile' can only be set to ExtraSmall for Delta kafka sources"); + } + + @Test + public void testCompilationErrorWhenFullUniverseKafkaSourceHasRunProfileOtherThanSmall() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-kafka: {\n" + + " description: 'Kafka Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: Kafka #{\n" + + " dataType: JSON;\n" + + " connection: alloy::mastery::connection::KafkaConnection;\n" + + " }#;\n" + + " };\n" + + " runProfile: Medium;\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n\n" + + + "MasteryConnection alloy::mastery::connection::KafkaConnection\n" + + "{\n" + + " specification: Kafka #{\n" + + " topicName: 'my-topic-name';\n" + + " topicUrls: [\n" + + " 'some.url.com:2100',\n" + + " 'another.url.com:2100'\n" + + " ];\n" + + " }#;\n" + + "}"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [24:5-35:5]: 'runProfile' can only be set to Small for Full Universe kafka sources"); + } + + @Test + public void 
testCompilationErrorWhenJsonFileAcquisitionHasRecordsKey() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " recordSources:\n" + + " [\n" + + " widget-kafka: {\n" + + " description: 'Kafka Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: File #{\n" + + " fileType: JSON;\n" + + " filePath: '/download/day-file.json';\n" + + " headerLines: 0;\n" + + " connection: alloy::mastery::connection::HTTPConnection;\n" + + " }#;\n" + + " };\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n\n" + + + "MasteryConnection alloy::mastery::connection::HTTPConnection\n" + + "{\n" + + " specification: HTTP #{\n" + + " url: 'https://some.url.com';\n" + + " proxy: {\n" + + " host: 'proxy.url.com';\n" + + " port: 85;\n" + + " };\n" + + " }#;\n" + + "}\n\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [8:1-38:1]: Error in 'alloy::mastery::WidgetMasterRecord': 'recordsKey' must be specified when file type is JSON"); + } + + @Test + public void testCompilationErrorWhenDeleteRuleHasDataProviderScope() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " precedenceRules: [\n" + + " DeleteRule: {\n" + + " path: org::dataeng::Widget.widgetId;\n" + + " ruleScope: [\n" + + " DataProviderTypeScope {Exchange}\n" + + " ];\n" + + " }\n" + + "]\n" + + " recordSources:\n" + + " [\n" + + " widget-producer: {\n" + + " description: 'REST Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: REST;\n" + + " };\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n\n" + + + "ExchangeDataProvider alloy::mastery::dataprovider::LSE;\n\n\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [23:5-28:5]: DataProviderTypeScope is not allowed on DeleteRule"); + } + + @Test + public void testCompilationErrorWhenConditionalRuleHasScopeDefined() + { + String model = "###Pure\n" + + "Class org::dataeng::Widget\n" + + "{\n" + + " widgetId: String[0..1];\n" + + "}\n\n" + + "###Mastery\n" + "MasterRecordDefinition alloy::mastery::WidgetMasterRecord" + + "\n" + + "{\n" + + " modelClass: org::dataeng::Widget;\n" + + " identityResolution: \n" + + " {\n" + + " resolutionQueries:\n" + + " [\n" + + " {\n" + + " queries: [ {input: org::dataeng::Widget[1]|org::dataeng::Widget.all()->filter(widget|$widget.widgetId == $input.widgetId)}\n" + + " ];\n" + + " precedence: 1;\n" + + " }\n" + + " ]\n" + + " }\n" + + " 
precedenceRules: [\n" + + " ConditionalRule: {\n" + + " predicate: {incoming: org::dataeng::Widget[1],current: org::dataeng::Widget[1]|$incoming.widgetId == $current.widgetId};\n" + + " path: org::dataeng::Widget.widgetId;\n" + + " ruleScope: [\n" + + " DataProviderTypeScope {Exchange}\n" + + " ];\n" + + " }\n" + + "]\n" + + " recordSources:\n" + + " [\n" + + " widget-producer: {\n" + + " description: 'REST Acquisition source.';\n" + + " status: Development;\n" + + " recordService: {\n" + + " acquisitionProtocol: REST;\n" + + " };\n" + + " trigger: Manual;\n" + + " }\n" + + " ]\n" + + "}\n\n" + + + "ExchangeDataProvider alloy::mastery::dataprovider::LSE;\n\n\n"; + + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test(model, "COMPILATION error at [23:5-29:5]: ConditionalRule with ruleScope is currently unsupported"); + } + private void assertDataProviders(PureModel model) { PackageableElement lseDataProvider = model.getPackageableElement("alloy::mastery::dataprovider::LSE"); diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/pom.xml b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/pom.xml index c05f13ee744..68114e95f16 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/pom.xml +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mastery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/AcquisitionProtocol.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/AcquisitionProtocol.java index 9648ef04e15..8dc98ee33d4 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/AcquisitionProtocol.java +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/AcquisitionProtocol.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonTypeInfo; import org.finos.legend.engine.protocol.pure.v1.model.SourceInformation; @@ -26,4 +27,10 @@ public <T> T accept(AcquisitionProtocolVisitor<T> visitor) { return visitor.visit(this); } + + @JsonIgnore + public boolean isKafkaAcquisitionProtocol() + { + return false; + } }
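The isKafkaAcquisitionProtocol() flag introduced above is a small polymorphic shortcut: the base class answers false, KafkaAcquisitionProtocol overrides it to true (see the next hunk), and @JsonIgnore keeps the flag out of the serialized protocol JSON. A minimal caller sketch, with hypothetical names not taken from this diff:

    // Hypothetical sketch: branch on the protocol kind without an instanceof
    // check; @JsonIgnore ensures the flag never appears in serialized output.
    AcquisitionProtocol protocol = recordService.acquisitionProtocol;
    if (protocol.isKafkaAcquisitionProtocol())
    {
        // apply Kafka-only rules, e.g. the runProfile restrictions the tests above expect
    }

diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/KafkaAcquisitionProtocol.java b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/KafkaAcquisitionProtocol.java index 9238731b48c..ea3cfa86d62 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/KafkaAcquisitionProtocol.java +++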
b/legend-engine-xts-mastery/legend-engine-xt-mastery-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/packageableElement/mastery/acquisition/KafkaAcquisitionProtocol.java @@ -14,12 +14,15 @@ package org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.acquisition; -import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.mastery.connection.KafkaConnection; - public class KafkaAcquisitionProtocol extends AcquisitionProtocol { public String recordTag; public KafkaDataType kafkaDataType; public String connection; + @Override + public boolean isKafkaAcquisitionProtocol() + { + return true; + } } diff --git a/legend-engine-xts-mastery/legend-engine-xt-mastery-pure/pom.xml b/legend-engine-xts-mastery/legend-engine-xt-mastery-pure/pom.xml index 1f113512bde..bebd99bdaa5 100644 --- a/legend-engine-xts-mastery/legend-engine-xt-mastery-pure/pom.xml +++ b/legend-engine-xts-mastery/legend-engine-xt-mastery-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mastery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mastery/pom.xml b/legend-engine-xts-mastery/pom.xml index a9ed29d1635..74bace7d5d6 100644 --- a/legend-engine-xts-mastery/pom.xml +++ b/legend-engine-xts-mastery/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan-test/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan-test/pom.xml index 2f10d87be78..e82229078f7 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan-test/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan-test/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan/pom.xml index 7a78503c94c..f60850adf41 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-executionPlan/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-executionPlan diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar-integration/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar-integration/pom.xml index 7aad8f265c9..32246d51982 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar-integration/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar-integration/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-grammar-integration diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar/pom.xml index 13796b13157..639a081093e 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar/pom.xml +++ 
b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-grammar/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-grammar diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/pom.xml index 6fb4d43b4ab..c7f7cee884e 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/src/main/resources/core_nonrelational_mongodb_java_platform_binding/mongodbStoreLegendJavaPlatformBindingExtension.pure b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/src/main/resources/core_nonrelational_mongodb_java_platform_binding/mongodbStoreLegendJavaPlatformBindingExtension.pure index 9a3211907d3..0bde751a8c5 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/src/main/resources/core_nonrelational_mongodb_java_platform_binding/mongodbStoreLegendJavaPlatformBindingExtension.pure +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-javaPlatformBinding-pure/src/main/resources/core_nonrelational_mongodb_java_platform_binding/mongodbStoreLegendJavaPlatformBindingExtension.pure @@ -237,7 +237,7 @@ function meta::external::store::mongodb::executionPlan::platformBinding::legendJ let contextWithNodeInfo = $context->addNodeInfo($nodeInfo); let resolvedModel = $node.binding.modelUnit->resolve(); - let updatedTypeInfo = $context.typeInfos->addForClassWithAllProperties(meta::external::store::model::JsonDataRecord) + let updatedTypeInfo = $context.typeInfos->addForClassWithAllProperties(meta::pure::mapping::modelToModel::JsonDataRecord) ->map(ti| if($node.tree->isEmpty(),| $ti->addForClass($class)->addConstraints($class),| $ti->addForGraphFetchTree($node.tree->toOne())->addConstraintsForGraphFetchTree($node.tree->toOne()))); let newTypeInfos = if($externalFormatJavaBindingDescriptor.internalizeDescriptor.internalizePreparator->isNotEmpty(), @@ -390,7 +390,7 @@ function <> meta::external::store::mongodb::executionPlan::platf let sourceRead = $pureClass->readMethodName($conv); let ioEx = j_parameter(javaIOException(), 'e'); let jThis = j_this($proto); - let recordType = $conv->className(meta::external::store::model::JsonDataRecord); + let recordType = $conv->className(meta::pure::mapping::modelToModel::JsonDataRecord); let json = j_variable(javaString(), 'json'); let recordNumber = j_variable(javaLong(), 'recordNumber'); let source = j_variable($recordType, 'source'); diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-protocol/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-protocol/pom.xml index ac178f01934..6629054858c 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-protocol/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-protocol/pom.xml @@ -19,7 +19,7 
@@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-protocol diff --git a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-pure/pom.xml b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-pure/pom.xml index fb363441515..4de0709c3f6 100644 --- a/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-pure/pom.xml +++ b/legend-engine-xts-mongodb/legend-engine-xt-nonrelationalStore-mongodb-pure/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-xts-mongodb - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-nonrelationalStore-mongodb-pure diff --git a/legend-engine-xts-mongodb/pom.xml b/legend-engine-xts-mongodb/pom.xml index 9e098bf5b0d..46f11a713f2 100644 --- a/legend-engine-xts-mongodb/pom.xml +++ b/legend-engine-xts-mongodb/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-morphir/legend-engine-xt-morphir-pure/pom.xml b/legend-engine-xts-morphir/legend-engine-xt-morphir-pure/pom.xml index 8619cf357fe..416882d766a 100644 --- a/legend-engine-xts-morphir/legend-engine-xt-morphir-pure/pom.xml +++ b/legend-engine-xts-morphir/legend-engine-xt-morphir-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-morphir - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-morphir/legend-engine-xt-morphir/pom.xml b/legend-engine-xts-morphir/legend-engine-xt-morphir/pom.xml index 6aeaf7a15e1..9daa9369904 100644 --- a/legend-engine-xts-morphir/legend-engine-xt-morphir/pom.xml +++ b/legend-engine-xts-morphir/legend-engine-xt-morphir/pom.xml @@ -19,7 +19,7 @@ legend-engine-xts-morphir org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-morphir/pom.xml b/legend-engine-xts-morphir/pom.xml index 1e554be7668..d7813f68987 100644 --- a/legend-engine-xts-morphir/pom.xml +++ b/legend-engine-xts-morphir/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-openapi/legend-engine-xt-openapi-generation/pom.xml b/legend-engine-xts-openapi/legend-engine-xt-openapi-generation/pom.xml index f3ee40368da..460aff0b2f9 100644 --- a/legend-engine-xts-openapi/legend-engine-xt-openapi-generation/pom.xml +++ b/legend-engine-xts-openapi/legend-engine-xt-openapi-generation/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-xts-openapi - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-openapi-generation diff --git a/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/pom.xml b/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/pom.xml index 89b2ca01ded..fcf2f9ce59f 100644 --- a/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/pom.xml +++ b/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-openapi - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-openapi-pure diff --git a/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/src/main/resources/core_external_format_openapi/transformation/fromPure/pureToOpenApi.pure b/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/src/main/resources/core_external_format_openapi/transformation/fromPure/pureToOpenApi.pure index 173940126fd..28e283f2836 100644 --- 
a/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/src/main/resources/core_external_format_openapi/transformation/fromPure/pureToOpenApi.pure +++ b/legend-engine-xts-openapi/legend-engine-xt-openapi-pure/src/main/resources/core_external_format_openapi/transformation/fromPure/pureToOpenApi.pure @@ -294,8 +294,10 @@ function <> meta::external::function::description::openapi::tran pair(Boolean, 'boolean'), pair(Integer, 'integer'), pair(Float, 'float'), + pair(Decimal, 'float'), + pair(Byte, 'string'), pair(Number, 'float') - ]->filter(t|$t.first==$type).second->toOne(); + ]->filter(t|$t.first==$type).second->toOne('Cannot find type match for primitive type: '+ $type.name->toOne()); } diff --git a/legend-engine-xts-openapi/pom.xml b/legend-engine-xts-openapi/pom.xml index 2f9cffd4dc1..a3b67f5230c 100644 --- a/legend-engine-xts-openapi/pom.xml +++ b/legend-engine-xts-openapi/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-api/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-api/pom.xml index c19a4c7ec0b..ccbff963eae 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-api/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-grammar/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-grammar/pom.xml index 47b492dbe0c..a3821cff2c2 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-grammar/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-grammar/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-protocol/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-protocol/pom.xml index 5426cab9210..025a9f59796 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-protocol/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-protocol/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-pure/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-pure/pom.xml index bba93ab206e..a5b17478509 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-pure/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-cloud-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/pom.xml index 0cdec007df2..2410fcd8b8a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/pom.xml @@ -18,7 
+18,7 @@ org.finos.legend.engine legend-engine-xt-persistence-component - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/DedupAndVersionErrorStatistics.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/DedupAndVersionErrorStatistics.java new file mode 100644 index 00000000000..60c193d7938 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/DedupAndVersionErrorStatistics.java @@ -0,0 +1,21 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.common; + +public enum DedupAndVersionErrorStatistics +{ + MAX_DUPLICATES, + MAX_DATA_ERRORS; +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/FileFormat.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/FileFormat.java index c73082518ed..75cf32a3a55 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/FileFormat.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/FileFormat.java @@ -19,5 +19,5 @@ public enum FileFormat CSV, JSON, AVRO, - PARQUET + PARQUET; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/BulkLoadBatchIdValueAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/LoadOptionsAbstract.java similarity index 62% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/BulkLoadBatchIdValueAbstract.java rename to 
legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/LoadOptionsAbstract.java index 9e7b5001aad..c299b0b7aa5 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/BulkLoadBatchIdValueAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/common/LoadOptionsAbstract.java @@ -12,20 +12,33 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.persistence.components.logicalplan.values; +package org.finos.legend.engine.persistence.components.common; -import static org.immutables.value.Value.Immutable; -import static org.immutables.value.Value.Style; +import org.immutables.value.Value; -@Immutable -@Style( +import java.util.Optional; + +@Value.Immutable +@Value.Style( typeAbstract = "*Abstract", typeImmutable = "*", jdkOnly = true, optionalAcceptNullable = true, strictBuilder = true ) -public interface BulkLoadBatchIdValueAbstract extends Value +public interface LoadOptionsAbstract { - BulkLoadBatchIdValue INSTANCE = BulkLoadBatchIdValue.builder().build(); + Optional<String> fieldDelimiter(); + + Optional<String> encoding(); + + Optional<String> nullMarker(); + + Optional<String> quote(); + + Optional<Long> skipLeadingRows(); + + Optional<Long> maxBadRecords(); + + Optional<String> compression(); }
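For readers new to the Immutables processor used throughout this module: with the @Value.Style settings above (typeAbstract = "*Abstract", typeImmutable = "*"), the annotation processor generates a concrete LoadOptions type with a strict builder from LoadOptionsAbstract. A hypothetical usage sketch, assuming the String and Long option types shown above:

    // Hypothetical sketch: every load option is Optional, so a caller sets only
    // the knobs a particular bulk load needs and leaves the rest empty.
    LoadOptions csvOptions = LoadOptions.builder()
        .fieldDelimiter(",")
        .encoding("UTF-8")
        .skipLeadingRows(1L)
        .maxBadRecords(0L)
        .build();

diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyAbstract.java index 11958ce425e..bfc0302b75e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyAbstract.java @@ -15,15 +15,10 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.ingestmode.audit.Auditing; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; +import org.immutables.value.Value; import java.util.Optional; -import static org.immutables.value.Value.Check; import static org.immutables.value.Value.Immutable; import static org.immutables.value.Value.Style; @@ -39,43 +34,12 @@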
public interface AppendOnlyAbstract extends IngestMode { Optional<String> digestField(); - Optional<String> dataSplitField(); - Auditing auditing(); - DeduplicationStrategy deduplicationStrategy(); - - @Check - default void validate() + @Value.Default + default boolean filterExistingRecords() { - deduplicationStrategy().accept(new DeduplicationStrategyVisitor<Void>() - { - @Override - public Void visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) - { - return null; - } - - @Override - public Void visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) - { - if (!digestField().isPresent()) - { - throw new IllegalStateException("Cannot build AppendOnly, [digestField] must be specified since [deduplicationStrategy] is set to filter duplicates"); - } - return null; - } - - @Override - public Void visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) - { - if (dataSplitField().isPresent()) - { - throw new IllegalStateException("Cannot build AppendOnly, DataSplits not supported for failOnDuplicates mode"); - } - return null; - } - }); + return false; } @Override diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaAbstract.java index ac72586ff14..48d793c3792 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaAbstract.java @@ -14,14 +14,19 @@ package org.finos.legend.engine.persistence.components.ingestmode; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.merge.NoDeletesMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionMilestoning; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.ValidityMilestoning; - -import java.util.Optional; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import
org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; +import org.immutables.value.Value; import static org.immutables.value.Value.Default; import static org.immutables.value.Value.Immutable; @@ -37,9 +42,13 @@ ) public interface BitemporalDeltaAbstract extends IngestMode, BitemporalMilestoned { - String digestField(); + @Value.Default + default boolean filterExistingRecords() + { + return false; + } - Optional<String> dataSplitField(); + String digestField(); @Override TransactionMilestoning transactionMilestoning(); @@ -53,15 +62,59 @@ default MergeStrategy mergeStrategy() { return NoDeletesMergeStrategy.builder().build(); } - @Default - default DeduplicationStrategy deduplicationStrategy() - { - return AllowDuplicates.builder().build(); - } - @Override default <T> T accept(IngestModeVisitor<T> visitor) { return visitor.visitBitemporalDelta(this); } + + @Value.Check + default void validate() + { + versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + throw new IllegalStateException("Cannot build BitemporalDelta, max version is not supported"); + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (allVersionsStrategyAbstract.performStageVersioning()) + { + throw new IllegalStateException("Cannot build BitemporalDelta, perform versioning not allowed"); + } + return null; + } + }); + + deduplicationStrategy().accept(new DeduplicationStrategyVisitor<Void>() + { + @Override + public Void visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return null; + } + + @Override + public Void visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + throw new IllegalStateException("Cannot build BitemporalDelta, filter duplicates is not supported"); + } + + @Override + public Void visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + throw new IllegalStateException("Cannot build BitemporalDelta, fail on duplicates is not supported"); + } + }); + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadAbstract.java index 800ca9c7a6c..5099b17e229 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadAbstract.java @@ -15,7 +15,15 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.ingestmode.audit.Auditing; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; import org.finos.legend.engine.persistence.components.ingestmode.digest.DigestGenStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; import org.immutables.value.Value; @Value.Immutable @@ -39,4 +47,50 @@ default <T> T accept(IngestModeVisitor<T> visitor) { return visitor.visitBulkLoad(this); } + + @Value.Check + default void validate() + { + deduplicationStrategy().accept(new DeduplicationStrategyVisitor<Void>() + { + @Override + public Void visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return null; + } + + @Override + public Void visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + throw new IllegalStateException("Cannot build BulkLoad, filter duplicates is not supported"); + } + + @Override + public Void visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + throw new IllegalStateException("Cannot build BulkLoad, fail on duplicates is not supported"); + } + }); + + versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + throw new IllegalStateException("Cannot build BulkLoad, max version is not supported"); + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + throw new IllegalStateException("Cannot build BulkLoad, all version is not supported"); + } + }); + } }
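The @Value.Check methods added to these ingest modes run when the Immutables-generated builder completes, so an unsupported strategy combination fails fast at construction time rather than mid-ingestion. A hypothetical sketch of the BulkLoad rule above (the other builder arguments are illustrative only):

    // Hypothetical sketch: validate() rejects this build with
    // IllegalStateException("Cannot build BulkLoad, fail on duplicates is not supported").
    BulkLoad.builder()
        .batchIdField("batch_id")
        .digestGenStrategy(NoDigestGenStrategy.builder().build())
        .auditing(NoAuditing.builder().build())
        .deduplicationStrategy(FailOnDuplicates.builder().build())
        .build();

diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/DeriveMainDatasetSchemaFromStaging.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/DeriveMainDatasetSchemaFromStaging.java index cf333ca3b41..b6e12b71adf 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/DeriveMainDatasetSchemaFromStaging.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/DeriveMainDatasetSchemaFromStaging.java @@ -17,10 +17,6 @@ import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitor; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditingAbstract; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditingAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; -import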
org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategyAbstract; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategyVisitor; import org.finos.legend.engine.persistence.components.ingestmode.merge.NoDeletesMergeStrategyAbstract; @@ -72,13 +68,13 @@ public DeriveMainDatasetSchemaFromStaging(Dataset mainDataset, Dataset stagingDa @Override public Dataset visitAppendOnly(AppendOnlyAbstract appendOnly) { + boolean isAuditingFieldPK = doesDatasetContainsAnyPK(mainSchemaFields); + appendOnly.auditing().accept(new EnrichSchemaWithAuditing(mainSchemaFields, isAuditingFieldPK)); if (appendOnly.digestField().isPresent()) { addDigestField(mainSchemaFields, appendOnly.digestField().get()); } removeDataSplitField(appendOnly.dataSplitField()); - boolean isAuditingFieldPK = appendOnly.deduplicationStrategy().accept(new DeriveAuditingFieldPKForAppendOnly(appendOnly.dataSplitField().isPresent())); - appendOnly.auditing().accept(new EnrichSchemaWithAuditing(mainSchemaFields, isAuditingFieldPK)); return mainDatasetDefinitionBuilder.schema(mainSchemaDefinitionBuilder.addAllFields(mainSchemaFields).build()).build(); } @@ -149,7 +145,7 @@ public Dataset visitBulkLoad(BulkLoadAbstract bulkLoad) } Field batchIdField = Field.builder() .name(bulkLoad.batchIdField()) - .type(FieldType.of(DataType.VARCHAR, Optional.empty(), Optional.empty())) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) .primaryKey(false) .build(); mainSchemaFields.add(batchIdField); @@ -180,37 +176,9 @@ public static void addDigestField(List<Field> schemaFields, String digestFieldNa private boolean doesDatasetContainsAnyPK(List<Field> mainSchemaFields) { - return mainSchemaFields.stream().anyMatch(field -> field.primaryKey()); + return mainSchemaFields.stream().anyMatch(Field::primaryKey); } - public static class DeriveAuditingFieldPKForAppendOnly implements DeduplicationStrategyVisitor<Boolean> - { - - private boolean isDataSplitEnabled; - - public DeriveAuditingFieldPKForAppendOnly(boolean isDataSplitEnabled) - { - this.isDataSplitEnabled = isDataSplitEnabled; - } - - @Override - public Boolean visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) - { - return isDataSplitEnabled; - } - - @Override - public Boolean visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) - { - return true; - } - - @Override - public Boolean visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) - { - return false; - } - } public static class EnrichSchemaWithMergeStrategy implements MergeStrategyVisitor<Void> { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestMode.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestMode.java index c6e239e3522..78c91752437 100644 ---
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestMode.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestMode.java @@ -14,7 +14,34 @@ package org.finos.legend.engine.persistence.components.ingestmode; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningVisitors; +import org.immutables.value.Value; + +import java.util.Optional; + public interface IngestMode { + @Value.Derived + default Optional<String> dataSplitField() + { + return this.versioningStrategy().accept(VersioningVisitors.EXTRACT_DATA_SPLIT_FIELD); + } + + @Value.Default + default DeduplicationStrategy deduplicationStrategy() + { + return AllowDuplicates.builder().build(); + } + + @Value.Default + default VersioningStrategy versioningStrategy() + { + return NoVersioningStrategy.builder().build(); + } + <T> T accept(IngestModeVisitor<T> visitor); }
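Hoisting deduplicationStrategy() and versioningStrategy() into IngestMode with @Value.Default gives every ingest mode the same fallbacks (AllowDuplicates, NoVersioningStrategy) without each *Abstract interface re-declaring them, and dataSplitField() is now derived from the versioning strategy instead of being set per mode. A hypothetical sketch of the resulting behavior:

    // Hypothetical sketch: a mode built without explicit strategies inherits the
    // defaults declared on IngestMode.
    AppendOnly mode = AppendOnly.builder()
        .auditing(NoAuditing.builder().build())
        .build();
    // mode.deduplicationStrategy() -> AllowDuplicates
    // mode.versioningStrategy()   -> NoVersioningStrategy
    // mode.dataSplitField()       -> Optional.empty(), since NoVersioningStrategy carries no data-split field

diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeCaseConverter.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeCaseConverter.java index fcdca2edae4..220b55532bb 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeCaseConverter.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeCaseConverter.java @@ -20,11 +20,7 @@ import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditingAbstract; import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategyVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategyAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; import org.finos.legend.engine.persistence.components.ingestmode.digest.DigestGenStrategy; import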
org.finos.legend.engine.persistence.components.ingestmode.digest.NoDigestGenStrategyAbstract; import org.finos.legend.engine.persistence.components.ingestmode.digest.UDFBasedDigestGenStrategy; @@ -53,6 +49,8 @@ import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.ValidityDerivation; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.ValidityDerivationVisitor; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromDateTimeAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; import java.util.Optional; import java.util.List; @@ -77,10 +75,11 @@ public IngestMode visitAppendOnly(AppendOnlyAbstract appendOnly) { return AppendOnly .builder() - .dataSplitField(applyCase(appendOnly.dataSplitField())) .digestField(applyCase(appendOnly.digestField())) .auditing(appendOnly.auditing().accept(new AuditingCaseConverter())) .deduplicationStrategy(appendOnly.deduplicationStrategy()) + .versioningStrategy(appendOnly.versioningStrategy().accept(new VersionStrategyCaseConverter())) + .filterExistingRecords(appendOnly.filterExistingRecords()) .build(); } @@ -89,8 +88,9 @@ public IngestMode visitNontemporalSnapshot(NontemporalSnapshotAbstract nontempor { return NontemporalSnapshot .builder() - .dataSplitField(applyCase(nontemporalSnapshot.dataSplitField())) .auditing(nontemporalSnapshot.auditing().accept(new AuditingCaseConverter())) + .deduplicationStrategy(nontemporalSnapshot.deduplicationStrategy()) + .versioningStrategy(nontemporalSnapshot.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -100,9 +100,9 @@ public IngestMode visitNontemporalDelta(NontemporalDeltaAbstract nontemporalDelt return NontemporalDelta .builder() .digestField(applyCase(nontemporalDelta.digestField())) - .dataSplitField(applyCase(nontemporalDelta.dataSplitField())) .mergeStrategy(nontemporalDelta.mergeStrategy().accept(new MergeStrategyCaseConverter())) .auditing(nontemporalDelta.auditing().accept(new AuditingCaseConverter())) + .deduplicationStrategy(nontemporalDelta.deduplicationStrategy()) .versioningStrategy(nontemporalDelta.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -117,6 +117,8 @@ public IngestMode visitUnitemporalSnapshot(UnitemporalSnapshotAbstract unitempor .addAllPartitionFields(applyCase(unitemporalSnapshot.partitionFields())) .putAllPartitionValuesByField(applyCase(unitemporalSnapshot.partitionValuesByField())) .emptyDatasetHandling(unitemporalSnapshot.emptyDatasetHandling()) + .deduplicationStrategy(unitemporalSnapshot.deduplicationStrategy()) + .versioningStrategy(unitemporalSnapshot.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -126,10 +128,10 @@ public IngestMode visitUnitemporalDelta(UnitemporalDeltaAbstract unitemporalDelt return UnitemporalDelta .builder() .digestField(applyCase(unitemporalDelta.digestField())) - .dataSplitField(applyCase(unitemporalDelta.dataSplitField())) .addAllOptimizationFilters(unitemporalDelta.optimizationFilters().stream().map(filter -> applyCase(filter)).collect(Collectors.toList())) .transactionMilestoning(unitemporalDelta.transactionMilestoning().accept(new TransactionMilestoningCaseConverter())) .mergeStrategy(unitemporalDelta.mergeStrategy().accept(new MergeStrategyCaseConverter())) + 
.deduplicationStrategy(unitemporalDelta.deduplicationStrategy()) .versioningStrategy(unitemporalDelta.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -144,6 +146,8 @@ public IngestMode visitBitemporalSnapshot(BitemporalSnapshotAbstract bitemporalS .validityMilestoning(bitemporalSnapshot.validityMilestoning().accept(new ValidityMilestoningCaseConverter())) .addAllPartitionFields(applyCase(bitemporalSnapshot.partitionFields())) .putAllPartitionValuesByField(applyCase(bitemporalSnapshot.partitionValuesByField())) + .deduplicationStrategy(bitemporalSnapshot.deduplicationStrategy()) + .versioningStrategy(bitemporalSnapshot.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -153,11 +157,12 @@ public IngestMode visitBitemporalDelta(BitemporalDeltaAbstract bitemporalDelta) return BitemporalDelta .builder() .digestField(applyCase(bitemporalDelta.digestField())) - .dataSplitField(applyCase(bitemporalDelta.dataSplitField())) .transactionMilestoning(bitemporalDelta.transactionMilestoning().accept(new TransactionMilestoningCaseConverter())) .validityMilestoning(bitemporalDelta.validityMilestoning().accept(new ValidityMilestoningCaseConverter())) .deduplicationStrategy(bitemporalDelta.deduplicationStrategy()) .mergeStrategy(bitemporalDelta.mergeStrategy().accept(new MergeStrategyCaseConverter())) + .versioningStrategy(bitemporalDelta.versioningStrategy().accept(new VersionStrategyCaseConverter())) + .filterExistingRecords(bitemporalDelta.filterExistingRecords()) .build(); } @@ -168,6 +173,8 @@ public IngestMode visitBulkLoad(BulkLoadAbstract bulkLoad) .batchIdField(applyCase(bulkLoad.batchIdField())) .digestGenStrategy(bulkLoad.digestGenStrategy().accept(new DigestGenStrategyCaseConverter())) .auditing(bulkLoad.auditing().accept(new AuditingCaseConverter())) + .deduplicationStrategy(bulkLoad.deduplicationStrategy()) + .versioningStrategy(bulkLoad.versioningStrategy().accept(new VersionStrategyCaseConverter())) .build(); } @@ -341,11 +348,23 @@ public VersioningStrategy visitMaxVersionStrategy(MaxVersionStrategyAbstract max { return MaxVersionStrategy .builder() - .versioningComparator(maxVersionStrategy.versioningComparator()) + .mergeDataVersionResolver(maxVersionStrategy.mergeDataVersionResolver()) .versioningField(strategy.apply(maxVersionStrategy.versioningField())) - .performDeduplication(maxVersionStrategy.performDeduplication()) + .performStageVersioning(maxVersionStrategy.performStageVersioning()) .build(); } - } + @Override + public VersioningStrategy visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + return AllVersionsStrategy + .builder() + .mergeDataVersionResolver(allVersionsStrategyAbstract.mergeDataVersionResolver()) + .versioningField(strategy.apply(allVersionsStrategyAbstract.versioningField())) + .versioningOrder(allVersionsStrategyAbstract.versioningOrder()) + .dataSplitFieldName(strategy.apply(allVersionsStrategyAbstract.dataSplitFieldName())) + .performStageVersioning(allVersionsStrategyAbstract.performStageVersioning()) + .build(); + } + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeOptimizationColumnHandler.java 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeOptimizationColumnHandler.java index b24cabc68ab..381980a1c5c 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeOptimizationColumnHandler.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeOptimizationColumnHandler.java @@ -64,11 +64,11 @@ public IngestMode visitUnitemporalDelta(UnitemporalDeltaAbstract unitemporalDelt return UnitemporalDelta .builder() .digestField(unitemporalDelta.digestField()) - .dataSplitField(unitemporalDelta.dataSplitField()) .addAllOptimizationFilters(deriveOptimizationFilters(unitemporalDelta)) .transactionMilestoning(unitemporalDelta.transactionMilestoning()) .mergeStrategy(unitemporalDelta.mergeStrategy()) .versioningStrategy(unitemporalDelta.versioningStrategy()) + .deduplicationStrategy(unitemporalDelta.deduplicationStrategy()) .build(); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeVisitors.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeVisitors.java index 3cb54406d79..0a255978ace 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeVisitors.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeVisitors.java @@ -15,10 +15,6 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.common.OptimizationFilter; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; import org.finos.legend.engine.persistence.components.ingestmode.digest.DigestGenStrategyVisitor; import org.finos.legend.engine.persistence.components.ingestmode.digest.NoDigestGenStrategyAbstract; import org.finos.legend.engine.persistence.components.ingestmode.digest.UDFBasedDigestGenStrategyAbstract; @@ -41,7 +37,7 @@ private IngestModeVisitors() @Override public Boolean visitAppendOnly(AppendOnlyAbstract appendOnly) { - return appendOnly.deduplicationStrategy().accept(DEDUPLICATION_STRATEGY_DIGEST_REQUIRED); + return appendOnly.filterExistingRecords(); } @Override @@ -158,7 +154,12 @@ public Set<String> visitNontemporalSnapshot(NontemporalSnapshotAbstract nontempo @Override public Set<String>
visitNontemporalDelta(NontemporalDeltaAbstract nontemporalDelta) { - return Collections.singleton(nontemporalDelta.digestField()); + Set<String> metaFields = new HashSet<>(); + + metaFields.add(nontemporalDelta.digestField()); + nontemporalDelta.dataSplitField().ifPresent(metaFields::add); + + return metaFields; } @Override @@ -360,27 +361,6 @@ public List<String> visitBulkLoad(BulkLoadAbstract bulkLoad) } }; - private static final DeduplicationStrategyVisitor<Boolean> DEDUPLICATION_STRATEGY_DIGEST_REQUIRED = new DeduplicationStrategyVisitor<Boolean>() - { - @Override - public Boolean visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) - { - return false; - } - - @Override - public Boolean visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) - { - return true; - } - - @Override - public Boolean visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) - { - return false; - } - }; - private static final DigestGenStrategyVisitor<Boolean> DIGEST_GEN_STRATEGY_DIGEST_REQUIRED = new DigestGenStrategyVisitor<Boolean>() { @Override diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaAbstract.java index 7e5953f6ad1..962a2e3258f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaAbstract.java @@ -15,12 +15,13 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.ingestmode.audit.Auditing; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.NoVersioningStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategy; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.merge.NoDeletesMergeStrategy; - -import java.util.Optional; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; +import org.immutables.value.Value; import static org.immutables.value.Value.Default; import static org.immutables.value.Value.Immutable; @@ -40,14 +41,6 @@ public interface NontemporalDeltaAbstract extends IngestMode Auditing auditing(); - Optional<String> dataSplitField(); - - @Default - default VersioningStrategy versioningStrategy() - { - return NoVersioningStrategy.builder().build(); - } - @Default default MergeStrategy mergeStrategy() { @@ -59,4 +52,38 @@ default <T> T accept(IngestModeVisitor<T> visitor) { return visitor.visitNontemporalDelta(this); } + + @Value.Check + default
void validate() + { + versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + if (!maxVersionStrategy.mergeDataVersionResolver().isPresent()) + { + throw new IllegalStateException("Cannot build NontemporalDelta, VersioningResolver is mandatory for MaxVersionStrategy"); + } + return null; + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (!allVersionsStrategyAbstract.mergeDataVersionResolver().isPresent()) + { + throw new IllegalStateException("Cannot build NontemporalDelta, VersioningResolver is mandatory for AllVersionsStrategy"); + } + return null; + } + }); + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotAbstract.java index 4b01c4c9c52..7b3984bd871 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotAbstract.java @@ -15,8 +15,11 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.ingestmode.audit.Auditing; - -import java.util.Optional; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; +import org.immutables.value.Value; import static org.immutables.value.Value.Immutable; import static org.immutables.value.Value.Style; @@ -33,11 +36,35 @@ public interface NontemporalSnapshotAbstract extends IngestMode { Auditing auditing(); - Optional<String> dataSplitField(); - @Override default <T> T accept(IngestModeVisitor<T> visitor) { return visitor.visitNontemporalSnapshot(this); } + + @Value.Check + default void validate() + { + // Allowed Versioning Strategy - NoVersioning, MaxVersioning + this.versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + return null; + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + throw new IllegalStateException("Cannot build NontemporalSnapshot, AllVersionsStrategy not supported"); + } + }); + } } diff --git
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaAbstract.java index 1a6cbc3cb29..c363a0dbcc7 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaAbstract.java @@ -15,12 +15,15 @@ package org.finos.legend.engine.persistence.components.ingestmode; import org.finos.legend.engine.persistence.components.common.OptimizationFilter; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.NoVersioningStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategy; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.merge.NoDeletesMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionMilestoned; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionMilestoning; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; +import org.immutables.value.Value; import java.util.List; import java.util.Optional; @@ -41,19 +44,11 @@ public interface UnitemporalDeltaAbstract extends IngestMode, TransactionMilestoned { String digestField(); - Optional<String> dataSplitField(); - List<OptimizationFilter> optimizationFilters(); @Override TransactionMilestoning transactionMilestoning(); - @Default - default VersioningStrategy versioningStrategy() - { - return NoVersioningStrategy.builder().build(); - } - @Default default MergeStrategy mergeStrategy() { @@ -65,4 +60,39 @@ default <T> T accept(IngestModeVisitor<T> visitor) { return visitor.visitUnitemporalDelta(this); } -} + + @Value.Check + default void validate() + { + versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + if (!maxVersionStrategy.mergeDataVersionResolver().isPresent()) + { + throw new IllegalStateException("Cannot build UnitemporalDelta, MergeDataVersionResolver is mandatory for MaxVersionStrategy"); + } + return null; + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (!allVersionsStrategyAbstract.mergeDataVersionResolver().isPresent()) + { + throw new
IllegalStateException("Cannot build UnitemporalDelta, MergeDataVersionResolver is mandatory for AllVersionsStrategy"); + } + return null; + } + }); + } + +} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotAbstract.java index 0ed6847395f..3ad65815215 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotAbstract.java @@ -18,10 +18,17 @@ import org.finos.legend.engine.persistence.components.ingestmode.emptyhandling.EmptyDatasetHandling; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionMilestoned; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionMilestoning; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategyVisitor; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategyAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MergeDataVersionResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolverAbstract; import org.immutables.value.Value; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import static org.immutables.value.Value.Derived; @@ -83,5 +90,37 @@ default void validate() } } } + + // Allowed Versioning Strategy - NoVersioning, MaxVersioning + this.versioningStrategy().accept(new VersioningStrategyVisitor<Void>() + { + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + Optional<MergeDataVersionResolver> versionResolver = maxVersionStrategy.mergeDataVersionResolver(); + if (!versionResolver.isPresent()) + { + throw new IllegalStateException("Cannot build UnitemporalSnapshot, MergeDataVersionResolver is mandatory for MaxVersionStrategy"); + } + if (!(versionResolver.orElseThrow(IllegalStateException::new) instanceof DigestBasedResolverAbstract)) + { + throw new IllegalStateException("Cannot build UnitemporalSnapshot, Only DIGEST_BASED VersioningResolver allowed for this ingest mode"); + } + return null; + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + throw new IllegalStateException("Cannot build UnitemporalSnapshot, AllVersionsStrategy not supported"); + } + }); + } } diff --git
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicationHandler.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicationHandler.java new file mode 100644 index 00000000000..8f2217604d0 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicationHandler.java @@ -0,0 +1,69 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.ingestmode.deduplication; + +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.logicalplan.values.*; + +import java.util.ArrayList; +import java.util.List; + +public class DatasetDeduplicationHandler implements DeduplicationStrategyVisitor<Dataset> +{ + + public static final String COUNT = "legend_persistence_count"; + + Dataset stagingDataset; + + public DatasetDeduplicationHandler(Dataset stagingDataset) + { + this.stagingDataset = stagingDataset; + } + + @Override + public Dataset visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return stagingDataset; + } + + @Override + public Dataset visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + return selectionWithGroupByAllColumns(); + } + + @Override + public Dataset visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + return selectionWithGroupByAllColumns(); + } + + private Dataset selectionWithGroupByAllColumns() + { + List<Value> allColumns = new ArrayList<>(stagingDataset.schemaReference().fieldValues()); + List<Value> allColumnsWithCount = new ArrayList<>(stagingDataset.schemaReference().fieldValues()); + + Value count = FunctionImpl.builder().functionName(FunctionName.COUNT).addValue(All.INSTANCE).alias(COUNT).build(); + allColumnsWithCount.add(count); + Selection selectionWithGroupByAllColumns = Selection.builder() + .source(stagingDataset) + .addAllFields(allColumnsWithCount) + .groupByFields(allColumns) + .build(); + return selectionWithGroupByAllColumns; + } + +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicator.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicator.java deleted file mode 100644 index 275515bb448..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DatasetDeduplicator.java +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.engine.persistence.components.ingestmode.deduplication; - -import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.Equals; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; -import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; -import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionName; -import org.finos.legend.engine.persistence.components.logicalplan.values.ObjectValue; -import org.finos.legend.engine.persistence.components.logicalplan.values.Order; -import org.finos.legend.engine.persistence.components.logicalplan.values.OrderedField; -import org.finos.legend.engine.persistence.components.logicalplan.values.Value; -import org.finos.legend.engine.persistence.components.logicalplan.values.WindowFunction; - -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -public class DatasetDeduplicator implements VersioningStrategyVisitor -{ - - Dataset stagingDataset; - List primaryKeys; - - private static final String ROW_NUMBER = "legend_persistence_row_num"; - - public DatasetDeduplicator(Dataset stagingDataset, List primaryKeys) - { - this.stagingDataset = stagingDataset; - this.primaryKeys = primaryKeys; - } - - @Override - public Dataset visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) - { - return this.stagingDataset; - } - - @Override - public Dataset visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) - { - Dataset enrichedStagingDataset = this.stagingDataset; - if (maxVersionStrategy.performDeduplication()) - { - OrderedField orderByField = OrderedField.builder() - .fieldName(maxVersionStrategy.versioningField()) - .datasetRef(stagingDataset.datasetReference()) - .order(Order.DESC).build(); - List allColumns = new ArrayList<>(stagingDataset.schemaReference().fieldValues()); - List allColumnsWithRowNumber = new ArrayList<>(stagingDataset.schemaReference().fieldValues()); - List partitionFields = primaryKeys.stream() - .map(field -> 
FieldValue.builder().fieldName(field).datasetRef(stagingDataset.datasetReference()).build()) - .collect(Collectors.toList()); - Value rowNumber = WindowFunction.builder() - .windowFunction(FunctionImpl.builder().functionName(FunctionName.ROW_NUMBER).build()) - .addAllPartitionByFields(partitionFields) - .addOrderByFields(orderByField) - .alias(ROW_NUMBER) - .build(); - allColumnsWithRowNumber.add(rowNumber); - Selection selectionWithRowNumber = Selection.builder() - .source(stagingDataset) - .addAllFields(allColumnsWithRowNumber) - .alias(stagingDataset.datasetReference().alias()) - .build(); - - Condition rowNumberFilterCondition = Equals.of(FieldValue.builder().fieldName(ROW_NUMBER).datasetRefAlias(stagingDataset.datasetReference().alias()).build(), ObjectValue.of(1)); - - enrichedStagingDataset = Selection.builder() - .source(selectionWithRowNumber) - .addAllFields(allColumns) - .condition(rowNumberFilterCondition) - .alias(stagingDataset.datasetReference().alias()) - .build(); - } - return enrichedStagingDataset; - } -} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DeduplicationVisitors.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DeduplicationVisitors.java new file mode 100644 index 00000000000..27f2986faa0 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/DeduplicationVisitors.java @@ -0,0 +1,67 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
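For orientation before DeduplicationVisitors below: the deleted DatasetDeduplicator above collapsed each primary key to its maximum version with a ROW_NUMBER window, while the new DatasetVersioningHandler introduced later in this diff does the same max-version selection with DENSE_RANK, so exact ties on the versioning field share rank 1 and flow on to the separate deduplication step. A minimal sketch of the window the deleted code built, reusing only builder calls that appear verbatim in this diff (stagingDataset, primaryKeys and the "version" field name are placeholders):

// Rank rows within each primary key by version, highest first.
OrderedField byVersionDesc = OrderedField.builder()
    .fieldName("version")                                  // placeholder versioning field
    .datasetRef(stagingDataset.datasetReference())
    .order(Order.DESC)
    .build();
List<FieldValue> partitionFields = primaryKeys.stream()
    .map(pk -> FieldValue.builder().fieldName(pk).datasetRef(stagingDataset.datasetReference()).build())
    .collect(Collectors.toList());
Value rowNumber = WindowFunction.builder()
    .windowFunction(FunctionImpl.builder().functionName(FunctionName.ROW_NUMBER).build())
    .addAllPartitionByFields(partitionFields)
    .addOrderByFields(byVersionDesc)
    .alias("legend_persistence_row_num")
    .build();
// The surrounding Selection then filters legend_persistence_row_num = 1.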
+ +package org.finos.legend.engine.persistence.components.ingestmode.deduplication; + +import java.util.Optional; + +import static org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicationHandler.COUNT; + +public class DeduplicationVisitors +{ + + public static final DeduplicationStrategyVisitor<Optional<String>> EXTRACT_DEDUP_FIELD = new DeduplicationStrategyVisitor<Optional<String>>() + { + + @Override + public Optional<String> visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return Optional.empty(); + } + + @Override + public Optional<String> visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + return Optional.of(COUNT); + } + + @Override + public Optional<String> visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + return Optional.of(COUNT); + } + }; + + public static final DeduplicationStrategyVisitor<Boolean> IS_TEMP_TABLE_NEEDED = new DeduplicationStrategyVisitor<Boolean>() + { + + @Override + public Boolean visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return false; + } + + @Override + public Boolean visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + return true; + } + + @Override + public Boolean visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + return true; + } + }; +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/AllVersionsStrategyAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/AllVersionsStrategyAbstract.java new file mode 100644 index 00000000000..cf17b66861f --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/AllVersionsStrategyAbstract.java @@ -0,0 +1,100 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
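A short usage sketch of the two stateless visitors above, assuming the Immutables-generated FilterDuplicates builder implied by the @Style conventions elsewhere in this diff:

DeduplicationStrategy dedupStrategy = FilterDuplicates.builder().build();   // assumed builder
Optional<String> dedupField = dedupStrategy.accept(DeduplicationVisitors.EXTRACT_DEDUP_FIELD);
// Optional.of("legend_persistence_count") for FilterDuplicates and FailOnDuplicates,
// Optional.empty() for AllowDuplicates
boolean tempTableNeeded = dedupStrategy.accept(DeduplicationVisitors.IS_TEMP_TABLE_NEEDED);
// true whenever the strategy must materialize the group-by-with-count selection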
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.immutables.value.Value; + +import java.util.Optional; + +import static org.immutables.value.Value.Immutable; +import static org.immutables.value.Value.Style; + +@Immutable +@Style( + typeAbstract = "*Abstract", + typeImmutable = "*", + jdkOnly = true, + optionalAcceptNullable = true, + strictBuilder = true +) +public interface AllVersionsStrategyAbstract extends VersioningStrategy +{ + String DATA_SPLIT = "legend_persistence_data_split"; + + String versioningField(); + + @Value.Default + default VersioningOrder versioningOrder() + { + return VersioningOrder.ASC; + } + + Optional<MergeDataVersionResolver> mergeDataVersionResolver(); + + @Value.Default + default boolean performStageVersioning() + { + return true; + } + + @Value.Default + default String dataSplitFieldName() + { + return DATA_SPLIT; + } + + @Override + default <T> T accept(VersioningStrategyVisitor<T> visitor) + { + return visitor.visitAllVersionsStrategy(this); + } + + @Value.Check + default void validate() + { + // For VersionColumnBasedResolver, + // Versioning Order ASC: allowed comparators: > , >= + // Versioning Order DESC: allowed comparators: < , <= + mergeDataVersionResolver().ifPresent(mergeDataVersionResolver -> mergeDataVersionResolver.accept(new MergeDataVersionResolverVisitor<Void>() + { + @Override + public Void visitDigestBasedResolver(DigestBasedResolverAbstract digestBasedResolver) + { + return null; + } + + @Override + public Void visitVersionColumnBasedResolver(VersionColumnBasedResolverAbstract versionColumnBasedResolver) + { + if (versioningOrder().equals(VersioningOrder.ASC) && + (versionColumnBasedResolver.versionComparator().equals(VersionComparator.LESS_THAN) || + versionColumnBasedResolver.versionComparator().equals(VersionComparator.LESS_THAN_EQUAL_TO))) + { + throw new IllegalStateException("Cannot build AllVersionsStrategy, Invalid comparator :" + + versionColumnBasedResolver.versionComparator()); + } + + if (versioningOrder().equals(VersioningOrder.DESC) && + (versionColumnBasedResolver.versionComparator().equals(VersionComparator.GREATER_THAN) || + versionColumnBasedResolver.versionComparator().equals(VersionComparator.GREATER_THAN_EQUAL_TO))) + { + throw new IllegalStateException("Cannot build AllVersionsStrategy, Invalid comparator :" + + versionColumnBasedResolver.versionComparator()); + } + return null; + } + })); + } +} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DatasetVersioningHandler.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DatasetVersioningHandler.java new file mode 100644 index 00000000000..a6179bca580 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DatasetVersioningHandler.java @@ -0,0 +1,116 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; +import org.finos.legend.engine.persistence.components.logicalplan.conditions.Equals; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.logicalplan.values.*; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public class DatasetVersioningHandler implements VersioningStrategyVisitor<Dataset> +{ + + Dataset dataset; + List<String> primaryKeys; + + private static final String RANK = "legend_persistence_rank"; + + public DatasetVersioningHandler(Dataset dataset, List<String> primaryKeys) + { + this.dataset = dataset; + this.primaryKeys = primaryKeys; + } + + @Override + public Dataset visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return this.dataset; + } + + @Override + public Dataset visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + if (!maxVersionStrategy.performStageVersioning()) + { + return this.dataset; + } + OrderedField orderByField = OrderedField.builder() + .fieldName(maxVersionStrategy.versioningField()) + .datasetRef(dataset.datasetReference()) + .order(Order.DESC).build(); + List<Value> allColumns = new ArrayList<>(dataset.schemaReference().fieldValues()); + List<Value> allColumnsWithRank = new ArrayList<>(dataset.schemaReference().fieldValues()); + List<FieldValue> partitionFields = primaryKeys.stream() + .map(field -> FieldValue.builder().fieldName(field).datasetRef(dataset.datasetReference()).build()) + .collect(Collectors.toList()); + Value rank = WindowFunction.builder() + .windowFunction(FunctionImpl.builder().functionName(FunctionName.DENSE_RANK).build()) + .addAllPartitionByFields(partitionFields) + .addOrderByFields(orderByField) + .alias(RANK) + .build(); + allColumnsWithRank.add(rank); + Selection selectionWithRank = Selection.builder() + .source(dataset) + .addAllFields(allColumnsWithRank) + .alias(dataset.datasetReference().alias()) + .build(); + + Condition rankFilterCondition = Equals.of(FieldValue.builder().fieldName(RANK).datasetRefAlias(dataset.datasetReference().alias()).build(), ObjectValue.of(1)); + + Dataset enrichedStagingDataset = Selection.builder() + .source(selectionWithRank) + .addAllFields(allColumns) + .condition(rankFilterCondition) + .build(); + + return enrichedStagingDataset; + } + + @Override + public Dataset visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (!allVersionsStrategyAbstract.performStageVersioning()) + { + return this.dataset; + } + OrderedField orderByField = OrderedField.builder() + .fieldName(allVersionsStrategyAbstract.versioningField()) + .datasetRef(dataset.datasetReference()) + .order(Order.ASC).build(); + List<FieldValue> partitionFields = primaryKeys.stream() + .map(field -> FieldValue.builder().fieldName(field).datasetRef(dataset.datasetReference()).build()) +
.collect(Collectors.toList()); + Value rank = WindowFunction.builder() + .windowFunction(FunctionImpl.builder().functionName(FunctionName.DENSE_RANK).build()) + .addAllPartitionByFields(partitionFields) + .addOrderByFields(orderByField) + .alias(allVersionsStrategyAbstract.dataSplitFieldName()) + .build(); + List<Value> allColumnsWithRank = new ArrayList<>(dataset.schemaReference().fieldValues()); + + allColumnsWithRank.add(rank); + Selection selectionWithRank = Selection.builder() + .source(dataset) + .addAllFields(allColumnsWithRank) + .build(); + return selectionWithRank; + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveDataErrorCheckLogicalPlan.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveDataErrorCheckLogicalPlan.java new file mode 100644 index 00000000000..86cd4c59e4e --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveDataErrorCheckLogicalPlan.java @@ -0,0 +1,115 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
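To make the all-versions path above concrete: DENSE_RANK over the primary keys, ordered ascending by the versioning field, becomes the data-split index, so every version of a key survives and is numbered for batch-by-batch ingestion. A hedged sketch with placeholder column names:

// Staging rows (id, version):       (1, 10), (1, 12), (2, 7)
// After visitAllVersionsStrategy:   (1, 10, legend_persistence_data_split = 1)
//                                   (1, 12, legend_persistence_data_split = 2)
//                                   (2, 7,  legend_persistence_data_split = 1)
Dataset enriched = versioningStrategy.accept(
    new DatasetVersioningHandler(stagingDataset, Collections.singletonList("id")));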
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.*; +import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionName; +import org.finos.legend.engine.persistence.components.logicalplan.values.Value; + +import java.util.ArrayList; +import java.util.List; + +public class DeriveDataErrorCheckLogicalPlan implements VersioningStrategyVisitor +{ + + List primaryKeys; + List remainingColumns; + Dataset tempStagingDataset; + + public DeriveDataErrorCheckLogicalPlan(List primaryKeys, List remainingColumns, Dataset tempStagingDataset) + { + this.primaryKeys = primaryKeys; + this.remainingColumns = remainingColumns; + this.tempStagingDataset = tempStagingDataset; + } + + @Override + public LogicalPlan visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public LogicalPlan visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + if (maxVersionStrategy.performStageVersioning()) + { + return getLogicalPlanForDataErrorCheck(maxVersionStrategy.versioningField()); + } + else + { + return null; + } + } + + @Override + public LogicalPlan visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (allVersionsStrategyAbstract.performStageVersioning()) + { + return getLogicalPlanForDataErrorCheck(allVersionsStrategyAbstract.versioningField()); + } + else + { + return null; + } + } + + private LogicalPlan getLogicalPlanForDataErrorCheck(String versionField) + { + String maxDataErrorAlias = DedupAndVersionErrorStatistics.MAX_DATA_ERRORS.name(); + String distinctRowCount = "legend_persistence_distinct_rows"; + List pKsAndVersion = new ArrayList<>(); + for (String pk: primaryKeys) + { + pKsAndVersion.add(FieldValue.builder().fieldName(pk).build()); + } + pKsAndVersion.add(FieldValue.builder().fieldName(versionField).build()); + + List distinctValueFields = new ArrayList<>(); + for (String field: remainingColumns) + { + distinctValueFields.add(FieldValue.builder().fieldName(field).build()); + } + + FunctionImpl countDistinct = FunctionImpl.builder() + .functionName(FunctionName.COUNT) + .addValue(FunctionImpl.builder().functionName(FunctionName.DISTINCT).addAllValue(distinctValueFields).build()) + .alias(distinctRowCount) + .build(); + + Selection selectCountDataError = Selection.builder() + .source(tempStagingDataset) + .groupByFields(pKsAndVersion) + .addFields(countDistinct) + .alias(tempStagingDataset.datasetReference().alias()) + .build(); + FunctionImpl maxCount = FunctionImpl.builder() + .functionName(FunctionName.MAX) + .addValue(FieldValue.builder().fieldName(distinctRowCount).build()) + .alias(maxDataErrorAlias) + .build(); + Selection maxDataErrorCount = Selection.builder() + .source(selectCountDataError) + .addFields(maxCount) + .build(); + return LogicalPlan.builder().addOps(maxDataErrorCount).build(); + } + +} \ No newline at end of file diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveTempStagingSchemaDefinition.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveTempStagingSchemaDefinition.java new file mode 100644 index 00000000000..b7892f3e7ef --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DeriveTempStagingSchemaDefinition.java @@ -0,0 +1,107 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.*; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DataType; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import static org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicationHandler.COUNT; + +public class DeriveTempStagingSchemaDefinition implements VersioningStrategyVisitor +{ + DeduplicationStrategy deduplicationStrategy; + private SchemaDefinition.Builder schemaDefBuilder; + private List schemaFields; + + boolean anyPKInStaging; + + public DeriveTempStagingSchemaDefinition(SchemaDefinition stagingSchema, DeduplicationStrategy deduplicationStrategy) + { + this.deduplicationStrategy = deduplicationStrategy; + this.schemaDefBuilder = SchemaDefinition.builder() + .addAllIndexes(stagingSchema.indexes()) + .shardSpecification(stagingSchema.shardSpecification()) + .columnStoreSpecification(stagingSchema.columnStoreSpecification()); + anyPKInStaging = stagingSchema.fields().stream().anyMatch(field -> field.primaryKey()); + this.schemaFields = new ArrayList<>(stagingSchema.fields()); + Optional fieldToAddForDedup = deduplicationStrategy.accept(GET_FIELD_NEEDED_FOR_DEDUPLICATION); + fieldToAddForDedup.ifPresent(this.schemaFields::add); + } + + @Override + public SchemaDefinition visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + + return schemaDefBuilder.addAllFields(schemaFields).build(); + } + + @Override + public SchemaDefinition visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + return schemaDefBuilder.addAllFields(schemaFields).build(); + } + + @Override + public SchemaDefinition 
visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + if (allVersionsStrategyAbstract.performStageVersioning()) + { + Field dataSplit = Field.builder().name(allVersionsStrategyAbstract.dataSplitFieldName()) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .primaryKey(anyPKInStaging) + .build(); + schemaFields.add(dataSplit); + } + return schemaDefBuilder.addAllFields(schemaFields).build(); + } + + public static final DeduplicationStrategyVisitor> GET_FIELD_NEEDED_FOR_DEDUPLICATION = new DeduplicationStrategyVisitor>() + { + @Override + public Optional visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + { + return Optional.empty(); + } + + @Override + public Optional visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + { + return getDedupField(); + } + + @Override + public Optional visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + { + return getDedupField(); + } + + private Optional getDedupField() + { + Field count = Field.builder().name(COUNT) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .primaryKey(false) + .build(); + return Optional.of(count); + } + + }; +} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DigestBasedResolverAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DigestBasedResolverAbstract.java new file mode 100644 index 00000000000..6f5f549e87d --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/DigestBasedResolverAbstract.java @@ -0,0 +1,37 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
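A usage sketch of the schema derivation above, assuming a staging SchemaDefinition and a deduplication strategy are already in hand; everything else mirrors the constructor shown:

SchemaDefinition tempStagingSchema = versioningStrategy.accept(
    new DeriveTempStagingSchemaDefinition(stagingSchema, deduplicationStrategy));
// FilterDuplicates/FailOnDuplicates contribute an INT legend_persistence_count column;
// AllVersionsStrategy with performStageVersioning() additionally contributes the
// data-split column, marked primary-key whenever any staging column is a primary key.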
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.immutables.value.Value; + +@Value.Immutable +@Value.Style( + typeAbstract = "*Abstract", + typeImmutable = "*", + jdkOnly = true, + optionalAcceptNullable = true, + strictBuilder = true +) +public interface DigestBasedResolverAbstract extends MergeDataVersionResolver +{ + + org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver INSTANCE = org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver.builder().build(); + + @Override + default <T> T accept(MergeDataVersionResolverVisitor<T> visitor) + { + return visitor.visitDigestBasedResolver(this); + } +} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MaxVersionStrategyAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MaxVersionStrategyAbstract.java new file mode 100644 index 00000000000..53c9cbe5863 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MaxVersionStrategyAbstract.java @@ -0,0 +1,77 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
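As a usage sketch tying the resolver above to the strategy defined next, assuming the Immutables-generated builders implied by the @Value.Style annotations in this diff:

VersioningStrategy strategy = MaxVersionStrategy.builder()
    .versioningField("version")                          // placeholder column name
    .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
    .build();
// DigestBasedResolver compares digests rather than the version column when merging,
// and is the only resolver UnitemporalSnapshot's validate() accepts.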
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +import org.immutables.value.Value; + +import java.util.Optional; + +import static org.immutables.value.Value.Immutable; +import static org.immutables.value.Value.Style; + +@Immutable +@Style( + typeAbstract = "*Abstract", + typeImmutable = "*", + jdkOnly = true, + optionalAcceptNullable = true, + strictBuilder = true +) +public interface MaxVersionStrategyAbstract extends VersioningStrategy +{ + @Value.Parameter(order = 0) + String versioningField(); + + Optional<MergeDataVersionResolver> mergeDataVersionResolver(); + + @Value.Default + default boolean performStageVersioning() + { + return true; + } + + @Override + default <T> T accept(VersioningStrategyVisitor<T> visitor) + { + return visitor.visitMaxVersionStrategy(this); + } + + + @Value.Check + default void validate() + { + // For VersionColumnBasedResolver, allowed comparators: > , >= + mergeDataVersionResolver().ifPresent(mergeDataVersionResolver -> mergeDataVersionResolver.accept(new MergeDataVersionResolverVisitor<Void>() + { + @Override + public Void visitDigestBasedResolver(DigestBasedResolverAbstract digestBasedResolver) + { + return null; + } + + @Override + public Void visitVersionColumnBasedResolver(VersionColumnBasedResolverAbstract versionColumnBasedResolver) + { + if (versionColumnBasedResolver.versionComparator().equals(VersionComparator.LESS_THAN) || + versionColumnBasedResolver.versionComparator().equals(VersionComparator.LESS_THAN_EQUAL_TO)) + { + throw new IllegalStateException("Cannot build MaxVersionStrategy, Invalid comparator :" + + versionColumnBasedResolver.versionComparator()); + } + return null; + } + })); + } +} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolver.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolver.java new file mode 100644 index 00000000000..c647c2b71db --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolver.java @@ -0,0 +1,20 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
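The @Value.Check above rejects descending comparators, since a max-version merge only ever advances the version. A sketch of the failure mode, with factory names assumed from the Immutables conventions in this diff:

// Expected to throw IllegalStateException:
// "Cannot build MaxVersionStrategy, Invalid comparator :LESS_THAN"
MaxVersionStrategy.builder()
    .versioningField("version")
    .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.LESS_THAN))
    .build();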
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +public interface MergeDataVersionResolver +{ + <T> T accept(MergeDataVersionResolverVisitor<T> visitor); +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolverVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolverVisitor.java new file mode 100644 index 00000000000..9c57571c4a7 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/MergeDataVersionResolverVisitor.java @@ -0,0 +1,22 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +public interface MergeDataVersionResolverVisitor<T> +{ + T visitDigestBasedResolver(DigestBasedResolverAbstract digestBasedResolver); + + T visitVersionColumnBasedResolver(VersionColumnBasedResolverAbstract versionColumnBasedResolver); +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/NoVersioningStrategyAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/NoVersioningStrategyAbstract.java similarity index 88% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/NoVersioningStrategyAbstract.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/NoVersioningStrategyAbstract.java index 910c79836c0..ec3ae32ae8e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/NoVersioningStrategyAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/NoVersioningStrategyAbstract.java @@ -12,13 +12,12 @@ // See the License for the specific language governing
permissions and // limitations under the License. -package org.finos.legend.engine.persistence.components.ingestmode.deduplication; +package org.finos.legend.engine.persistence.components.ingestmode.versioning; -import static org.immutables.value.Value.Immutable; -import static org.immutables.value.Value.Style; +import org.immutables.value.Value; -@Immutable -@Style( +@Value.Immutable +@Value.Style( typeAbstract = "*Abstract", typeImmutable = "*", jdkOnly = true, diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/MaxVersionStrategyAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionColumnBasedResolverAbstract.java similarity index 60% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/MaxVersionStrategyAbstract.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionColumnBasedResolverAbstract.java index 1f0763c5d8b..a64f1d6f97b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/MaxVersionStrategyAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionColumnBasedResolverAbstract.java @@ -12,41 +12,26 @@ // See the License for the specific language governing permissions and // limitations under the License. 
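The MergeDataVersionResolver/MergeDataVersionResolverVisitor pair above uses the same double dispatch as the versioning strategies. A minimal hypothetical visitor that renders the resolution rule as text:

MergeDataVersionResolverVisitor<String> describeRule = new MergeDataVersionResolverVisitor<String>()
{
    @Override
    public String visitDigestBasedResolver(DigestBasedResolverAbstract resolver)
    {
        return "merge when digests differ";
    }

    @Override
    public String visitVersionColumnBasedResolver(VersionColumnBasedResolverAbstract resolver)
    {
        return "merge when staging version " + resolver.versionComparator() + " main version";
    }
};
String rule = someResolver.accept(describeRule);   // someResolver: any MergeDataVersionResolver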
-package org.finos.legend.engine.persistence.components.ingestmode.deduplication; +package org.finos.legend.engine.persistence.components.ingestmode.versioning; import org.immutables.value.Value; -import static org.immutables.value.Value.Immutable; -import static org.immutables.value.Value.Style; - -@Immutable -@Style( +@Value.Immutable +@Value.Style( typeAbstract = "*Abstract", typeImmutable = "*", jdkOnly = true, optionalAcceptNullable = true, strictBuilder = true ) -public interface MaxVersionStrategyAbstract extends VersioningStrategy +public interface VersionColumnBasedResolverAbstract extends MergeDataVersionResolver { @Value.Parameter(order = 0) - String versioningField(); - - @Value.Default - default VersioningComparator versioningComparator() - { - return VersioningComparator.GREATER_THAN; - } - - @Value.Default - default boolean performDeduplication() - { - return true; - } + VersionComparator versionComparator(); @Override - default T accept(VersioningStrategyVisitor visitor) + default T accept(MergeDataVersionResolverVisitor visitor) { - return visitor.visitMaxVersionStrategy(this); + return visitor.visitVersionColumnBasedResolver(this); } } \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionComparator.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionComparator.java new file mode 100644 index 00000000000..f53db8d3248 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersionComparator.java @@ -0,0 +1,23 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
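A construction sketch for the comparator-based resolver above, assuming @Value.Parameter generates the usual Immutables of() factory:

MergeDataVersionResolver resolver = VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN);
// GREATER_THAN / GREATER_THAN_EQUAL_TO suit ascending version columns (and MaxVersionStrategy);
// LESS_THAN / LESS_THAN_EQUAL_TO are only legal on AllVersionsStrategy with DESC ordering.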
+ +package org.finos.legend.engine.persistence.components.ingestmode.versioning; + +public enum VersionComparator +{ + GREATER_THAN, + GREATER_THAN_EQUAL_TO, + LESS_THAN, + LESS_THAN_EQUAL_TO; +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningConditionVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningConditionVisitor.java similarity index 61% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningConditionVisitor.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningConditionVisitor.java index c560799045d..28514fad3fa 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningConditionVisitor.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningConditionVisitor.java @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
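The renamed VersioningConditionVisitor below turns each comparator into a logical-plan condition between the main and staging version columns, inverting the test when the caller asks for the negation. Roughly, for the non-inverted case (condition factory names assumed by analogy with Equals.of in this diff):

// Sketch of getVersioningCondition for the non-inverted case:
switch (versionComparator)
{
    case GREATER_THAN:
        return GreaterThan.of(stagingVersioningField, mainVersioningField);
    case GREATER_THAN_EQUAL_TO:
        return GreaterThanEqualTo.of(stagingVersioningField, mainVersioningField);
    default:
        throw new IllegalStateException("Unsupported versioning comparator type");
}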
-package org.finos.legend.engine.persistence.components.ingestmode.deduplication; +package org.finos.legend.engine.persistence.components.ingestmode.versioning; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; import org.finos.legend.engine.persistence.components.logicalplan.conditions.GreaterThan; @@ -43,23 +43,50 @@ public VersioningConditionVisitor(Dataset mainDataset, Dataset stagingDataset, b @Override public Condition visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) { - if (invertComparison) + return getDigestBasedVersioningCondition(); + } + + @Override + public Condition visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + MergeDataVersionResolver versionResolver = maxVersionStrategy.mergeDataVersionResolver().orElseThrow(IllegalStateException::new); + return versionResolver.accept(new VersioningCondition(maxVersionStrategy.versioningField())); + } + + @Override + public Condition visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategy) + { + MergeDataVersionResolver versionResolver = allVersionsStrategy.mergeDataVersionResolver().orElseThrow(IllegalStateException::new); + return versionResolver.accept(new VersioningCondition(allVersionsStrategy.versioningField())); + } + + private class VersioningCondition implements MergeDataVersionResolverVisitor + { + private String versioningField; + + public VersioningCondition(String versioningField) { - return LogicalPlanUtils.getDigestMatchCondition(mainDataset, stagingDataset, digestField); + this.versioningField = versioningField; } - else + + @Override + public Condition visitDigestBasedResolver(DigestBasedResolverAbstract digestBasedResolver) { - return LogicalPlanUtils.getDigestDoesNotMatchCondition(mainDataset, stagingDataset, digestField); + return getDigestBasedVersioningCondition(); + } + + @Override + public Condition visitVersionColumnBasedResolver(VersionColumnBasedResolverAbstract versionColumnBasedResolver) + { + FieldValue mainVersioningField = FieldValue.builder().datasetRef(mainDataset.datasetReference()).fieldName(versioningField).build(); + FieldValue stagingVersioningField = FieldValue.builder().datasetRef(stagingDataset.datasetReference()).fieldName(versioningField).build(); + return getVersioningCondition(mainVersioningField, stagingVersioningField, versionColumnBasedResolver.versionComparator()); } } - @Override - public Condition visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + private Condition getVersioningCondition(FieldValue mainVersioningField, FieldValue stagingVersioningField, VersionComparator versionComparator) { - FieldValue mainVersioningField = FieldValue.builder().datasetRef(mainDataset.datasetReference()).fieldName(maxVersionStrategy.versioningField()).build(); - FieldValue stagingVersioningField = FieldValue.builder().datasetRef(stagingDataset.datasetReference()).fieldName(maxVersionStrategy.versioningField()).build(); - - switch (maxVersionStrategy.versioningComparator()) + switch (versionComparator) { case GREATER_THAN: if (invertComparison) @@ -83,4 +110,16 @@ public Condition visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionSt throw new IllegalStateException("Unsupported versioning comparator type"); } } + + private Condition getDigestBasedVersioningCondition() + { + if (invertComparison) + { + return LogicalPlanUtils.getDigestMatchCondition(mainDataset, stagingDataset, digestField); + } + else + { + return 
LogicalPlanUtils.getDigestDoesNotMatchCondition(mainDataset, stagingDataset, digestField); + } + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningComparator.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningOrder.java similarity index 87% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningComparator.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningOrder.java index fe1aee3c611..20d70cb8262 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningComparator.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningOrder.java @@ -12,10 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.persistence.components.ingestmode.deduplication; +package org.finos.legend.engine.persistence.components.ingestmode.versioning; -public enum VersioningComparator +public enum VersioningOrder { - GREATER_THAN, - GREATER_THAN_EQUAL_TO + ASC, DESC } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java similarity index 97% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java index dc1ce0e2ada..fd503e4ec87 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java @@ -12,7 +12,7 @@ // See the License for the specific language governing 
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java
similarity index 97%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java
index dc1ce0e2ada..fd503e4ec87 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategy.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategy.java
@@ -12,7 +12,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package org.finos.legend.engine.persistence.components.ingestmode.deduplication;
+package org.finos.legend.engine.persistence.components.ingestmode.versioning;

 public interface VersioningStrategy
 {
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategyVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategyVisitor.java
similarity index 89%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategyVisitor.java
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategyVisitor.java
index 8aa8af545b7..c3e38ce1bd3 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/deduplication/VersioningStrategyVisitor.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningStrategyVisitor.java
@@ -12,11 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package org.finos.legend.engine.persistence.components.ingestmode.deduplication;
+package org.finos.legend.engine.persistence.components.ingestmode.versioning;

 public interface VersioningStrategyVisitor<T>
 {
     T visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy);

     T visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy);
+
+    T visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract);
 }
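// Reviewer note: the widened visitor above now forces every consumer to handle the
// AllVersions case. A compilable toy analogue, with illustrative stand-in types
// rather than the real immutables-generated strategy classes:
class VersioningStrategyVisitorSketch
{
    interface Visitor<T>
    {
        T visitNoVersioning();

        T visitMaxVersion(String versioningField);

        T visitAllVersions(String versioningField, String dataSplitFieldName);
    }

    // Example visitor: summarise a strategy as text.
    static final Visitor<String> DESCRIBE = new Visitor<String>()
    {
        public String visitNoVersioning()
        {
            return "no versioning";
        }

        public String visitMaxVersion(String versioningField)
        {
            return "keep only the max value of " + versioningField;
        }

        public String visitAllVersions(String versioningField, String dataSplitFieldName)
        {
            return "replay every " + versioningField + " version, batched via " + dataSplitFieldName;
        }
    };
}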
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningVisitors.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningVisitors.java
new file mode 100644
index 00000000000..9276d441340
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/ingestmode/versioning/VersioningVisitors.java
@@ -0,0 +1,88 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.persistence.components.ingestmode.versioning;
+
+import java.util.Optional;
+
+public class VersioningVisitors
+{
+
+    public static final VersioningStrategyVisitor<Optional<String>> EXTRACT_DATA_SPLIT_FIELD = new VersioningStrategyVisitor<Optional<String>>()
+    {
+        @Override
+        public Optional<String> visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy)
+        {
+            return Optional.empty();
+        }
+
+        @Override
+        public Optional<String> visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy)
+        {
+            return Optional.empty();
+        }
+
+        @Override
+        public Optional<String> visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract)
+        {
+            return Optional.of(allVersionsStrategyAbstract.dataSplitFieldName());
+        }
+    };
+
+    public static final VersioningStrategyVisitor<Boolean> IS_TEMP_TABLE_NEEDED = new VersioningStrategyVisitor<Boolean>()
+    {
+
+        @Override
+        public Boolean visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy)
+        {
+            return false;
+        }
+
+        @Override
+        public Boolean visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy)
+        {
+            return maxVersionStrategy.performStageVersioning();
+        }
+
+        @Override
+        public Boolean visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract)
+        {
+            return allVersionsStrategyAbstract.performStageVersioning();
+        }
+    };
+
+    public static final VersioningStrategyVisitor<Optional<String>> EXTRACT_VERSIONING_FIELD = new VersioningStrategyVisitor<Optional<String>>()
+    {
+        @Override
+        public Optional<String> visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy)
+        {
+            return Optional.empty();
+        }
+
+        @Override
+        public Optional<String> visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy)
+        {
+            return Optional.of(maxVersionStrategy.versioningField());
+        }
+
+        @Override
+        public Optional<String> visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract)
+        {
+            return Optional.of(allVersionsStrategyAbstract.versioningField());
+        }
+    };
+
+
+
+}
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/LogicalPlanFactory.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/LogicalPlanFactory.java
index 33ae2fcad34..f841210323d 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/LogicalPlanFactory.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/LogicalPlanFactory.java
@@ -15,7 +15,8 @@ package org.finos.legend.engine.persistence.components.logicalplan;
 
 import org.finos.legend.engine.persistence.components.common.Datasets;
-import 
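// Reviewer note: caller-side sketch for the static visitors defined above. The
// accept(...) dispatch is the one the planners in this diff use; 'strategy' is
// any configured versioning strategy instance supplied by the caller.
import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategy;
import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningVisitors;

import java.util.Optional;

class VersioningIntrospectionSketch
{
    static void describe(VersioningStrategy strategy)
    {
        Optional<String> versionField = strategy.accept(VersioningVisitors.EXTRACT_VERSIONING_FIELD);   // empty for NoVersioning
        Optional<String> dataSplitField = strategy.accept(VersioningVisitors.EXTRACT_DATA_SPLIT_FIELD); // present only for AllVersions
        Boolean tempStagingTable = strategy.accept(VersioningVisitors.IS_TEMP_TABLE_NEEDED);            // the performStageVersioning flag
        System.out.println(versionField + " / " + dataSplitField + " / " + tempStagingTable);
    }
}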
org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; +import org.finos.legend.engine.persistence.components.ingestmode.BulkLoad; +import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; import org.finos.legend.engine.persistence.components.logicalplan.datasets.CsvExternalDatasetReference; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; @@ -32,12 +33,13 @@ import org.finos.legend.engine.persistence.components.logicalplan.values.StringValue; import org.finos.legend.engine.persistence.components.logicalplan.values.TabularValues; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; +import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; +import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataUtils; import org.finos.legend.engine.persistence.components.util.LogicalPlanUtils; import org.finos.legend.engine.persistence.components.util.MetadataDataset; import org.finos.legend.engine.persistence.components.util.MetadataUtils; import java.util.List; -import java.util.Optional; public class LogicalPlanFactory { @@ -91,14 +93,23 @@ public static LogicalPlan getLogicalPlanForConstantStats(String stats, Long valu .build(); } - public static LogicalPlan getLogicalPlanForNextBatchId(Datasets datasets) + public static LogicalPlan getLogicalPlanForNextBatchId(Datasets datasets, IngestMode ingestMode) { StringValue mainTable = StringValue.of(datasets.mainDataset().datasetReference().name().orElseThrow(IllegalStateException::new)); - MetadataDataset metadataDataset = datasets.metadataDataset().isPresent() - ? datasets.metadataDataset().get() - : MetadataDataset.builder().build(); - MetadataUtils metadataUtils = new MetadataUtils(metadataDataset); - Selection selection = metadataUtils.getBatchId(mainTable).selection(); + Selection selection; + if (ingestMode instanceof BulkLoad) + { + BulkLoadMetadataDataset bulkLoadMetadataDataset = datasets.bulkLoadMetadataDataset().orElse(BulkLoadMetadataDataset.builder().build()); + BulkLoadMetadataUtils bulkLoadMetadataUtils = new BulkLoadMetadataUtils(bulkLoadMetadataDataset); + selection = bulkLoadMetadataUtils.getBatchId(mainTable).selection(); + } + else + { + MetadataDataset metadataDataset = datasets.metadataDataset().orElse(MetadataDataset.builder().build()); + MetadataUtils metadataUtils = new MetadataUtils(metadataDataset); + selection = metadataUtils.getBatchId(mainTable).selection(); + } + return LogicalPlan.builder().addOps(selection).build(); } @@ -111,4 +122,13 @@ public static LogicalPlan getLogicalPlanForMinAndMaxForField(Dataset dataset, St .source(dataset).build(); return LogicalPlan.builder().addOps(selection).build(); } + + public static LogicalPlan getLogicalPlanForMaxOfField(Dataset dataset, String fieldName) + { + FieldValue field = FieldValue.builder().datasetRef(dataset.datasetReference()).fieldName(fieldName).build(); + Selection selection = Selection.builder() + .addFields(FunctionImpl.builder().functionName(FunctionName.MAX).addValue(field).alias(MAX_OF_FIELD).build()) + .source(dataset).build(); + return LogicalPlan.builder().addOps(selection).build(); + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/datasets/SelectionAbstract.java 
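// Reviewer note: getLogicalPlanForMaxOfField(...) above compiles down to a single
// aggregate selection. A stand-alone rendering of the equivalent SQL; the helper
// and the alias literal are illustrative (MAX_OF_FIELD is the alias constant the
// factory method references, its exact rendered value is not shown in this diff):
class MaxOfFieldSketch
{
    static String maxOfFieldSql(String table, String field)
    {
        return "SELECT MAX(" + field + ") AS \"MAX_OF_FIELD\" FROM " + table;
    }

    public static void main(String[] args)
    {
        // prints: SELECT MAX(batch_id) AS "MAX_OF_FIELD" FROM main_table
        System.out.println(maxOfFieldSql("main_table", "batch_id"));
    }
}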
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/datasets/SelectionAbstract.java index 03c778e940d..917525b2fc8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/datasets/SelectionAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/datasets/SelectionAbstract.java @@ -17,11 +17,13 @@ import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; import org.finos.legend.engine.persistence.components.logicalplan.quantifiers.Quantifier; +import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; import org.immutables.value.Value.Derived; import org.immutables.value.Value.Immutable; import org.immutables.value.Value.Style; +import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -56,4 +58,22 @@ default DatasetReference datasetReference() .alias(alias()) .build(); } + + @Derived + default SchemaReference schemaReference() + { + List list = new ArrayList<>(); + for (Value value: fields()) + { + if (value instanceof FieldValue) + { + list.add((FieldValue) value); + } + else if (value.alias().isPresent()) + { + list.add(FieldValue.builder().fieldName(value.alias().get()).alias(value.alias()).datasetRef(datasetReference()).build()); + } + } + return SchemaReference.builder().addAllFieldValues(list).build(); + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/DigestUdfAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/DigestUdfAbstract.java index 06f153770df..e6c35ff9c7b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/DigestUdfAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/DigestUdfAbstract.java @@ -14,7 +14,10 @@ package org.finos.legend.engine.persistence.components.logicalplan.values; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; + import java.util.List; +import java.util.Optional; @org.immutables.value.Value.Immutable @org.immutables.value.Value.Style( @@ -32,4 +35,6 @@ public interface DigestUdfAbstract extends Value List fieldNames(); List values(); + + Optional dataset(); } diff --git 
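// Reviewer note: self-contained sketch of the schemaReference() derivation added
// above. Plain field values pass through, while computed values only surface as
// columns when they carry an alias; Value/FieldValue here are simplified stand-ins
// for the logical-plan types.
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

class SchemaReferenceSketch
{
    interface Value
    {
        Optional<String> alias();
    }

    static final class FieldValue implements Value
    {
        final String fieldName;
        final Optional<String> alias;

        FieldValue(String fieldName, Optional<String> alias)
        {
            this.fieldName = fieldName;
            this.alias = alias;
        }

        public Optional<String> alias()
        {
            return alias;
        }
    }

    static List<String> derivedColumns(List<Value> selectedFields)
    {
        List<String> columns = new ArrayList<>();
        for (Value value : selectedFields)
        {
            if (value instanceof FieldValue)
            {
                columns.add(((FieldValue) value).fieldName); // plain column: keep its own name
            }
            else if (value.alias().isPresent())
            {
                columns.add(value.alias().get());            // expression: visible only via its alias
            }
            // un-aliased computed values contribute no schema column
        }
        return columns;
    }
}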
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/FunctionName.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/FunctionName.java index 37e611e7ee9..ac3a3e047fe 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/FunctionName.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/logicalplan/values/FunctionName.java @@ -20,6 +20,7 @@ public enum FunctionName MAX, MIN, COUNT, + DISTINCT, COALESCE, CURRENT_TIME, CURRENT_DATE, @@ -27,6 +28,7 @@ public enum FunctionName CURRENT_TIMESTAMP, UPPER, ROW_NUMBER, + DENSE_RANK, SUBSTRING, PARSE_JSON, DATE, @@ -37,5 +39,6 @@ public enum FunctionName GENERATE_ARRAY, PARSE_DATETIME, OBJECT_CONSTRUCT, - TO_VARIANT; + TO_VARIANT, + TO_JSON; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/AppendOnlyPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/AppendOnlyPlanner.java index 21a21628db3..c2227940d49 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/AppendOnlyPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/AppendOnlyPlanner.java @@ -22,10 +22,6 @@ import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitors; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditingAbstract; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditingAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationStrategyVisitor; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicatesAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicatesAbstract; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.conditions.And; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; @@ -34,9 +30,7 @@ import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import 
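// Reviewer note: DENSE_RANK and DISTINCT are added to FunctionName above to support
// stage versioning; the canonical max-version filter they enable looks like the
// following. The helper and the rank alias are hypothetical, not the planner's
// actual generated SQL text:
class MaxVersionFilterSketch
{
    static String maxVersionFilterSql(String stagingTable, String partitionKeys, String versionField)
    {
        return "SELECT * FROM ("
            + "SELECT stage.*, DENSE_RANK() OVER (PARTITION BY " + partitionKeys
            + " ORDER BY " + versionField + " DESC) AS rank_col FROM " + stagingTable + " stage"
            + ") ranked WHERE ranked.rank_col = 1";
    }

    public static void main(String[] args)
    {
        System.out.println(maxVersionFilterSql("staging", "id", "version"));
    }
}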
org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; @@ -44,12 +38,12 @@ import org.finos.legend.engine.persistence.components.util.LogicalPlanUtils; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.function.Consumer; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.ALL_COLUMNS; @@ -60,17 +54,24 @@ class AppendOnlyPlanner extends Planner { private final Optional dataSplitInRangeCondition; - AppendOnlyPlanner(Datasets datasets, AppendOnly ingestMode, PlannerOptions plannerOptions) + AppendOnlyPlanner(Datasets datasets, AppendOnly ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); - // validate - ingestMode.deduplicationStrategy().accept(new ValidatePrimaryKeys(primaryKeys, this::validatePrimaryKeysIsEmpty, - this::validatePrimaryKeysNotEmpty, ingestMode.dataSplitField().isPresent())); - // if data splits are present, then audit Column must be a PK - if (ingestMode.dataSplitField().isPresent()) + // Validation + // 1. If primary keys are present, then auditing must be turned on and the auditing column must be one of the primary keys + if (!primaryKeys.isEmpty()) { - ingestMode.auditing().accept(ValidateAuditingForDataSplits); + ingestMode.auditing().accept(new ValidateAuditingForPrimaryKeys(mainDataset())); + } + + // 2. 
For filterExistingRecords, we must have digest and primary keys to filter them + if (ingestMode.filterExistingRecords()) + { + if (!ingestMode.digestField().isPresent() || primaryKeys.isEmpty()) + { + throw new IllegalStateException("Primary keys and digest are mandatory for filterExistingRecords"); + } } this.dataSplitInRangeCondition = ingestMode.dataSplitField().map(field -> LogicalPlanUtils.getDataSplitInRangeCondition(stagingDataset(), field)); @@ -83,16 +84,11 @@ protected AppendOnly ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) { - List fieldsToSelect = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - List fieldsToInsert = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - - if (ingestMode().dataSplitField().isPresent()) - { - LogicalPlanUtils.removeField(fieldsToSelect, ingestMode().dataSplitField().get()); - LogicalPlanUtils.removeField(fieldsToInsert, ingestMode().dataSplitField().get()); - } + List dataFields = getDataFields(); + List fieldsToSelect = new ArrayList<>(dataFields); + List fieldsToInsert = new ArrayList<>(dataFields); if (ingestMode().auditing().accept(AUDIT_ENABLED)) { @@ -104,237 +100,114 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set getDigestOrRemainingColumns() { - List operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) + List remainingCols = new ArrayList<>(); + if (ingestMode().digestField().isPresent()) { - operations.add(Create.of(true, stagingDataset())); + remainingCols = Arrays.asList(ingestMode().digestField().get()); } - if (options().enableConcurrentSafety()) + else if (!primaryKeys.isEmpty()) { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); + remainingCols = getNonPKNonVersionDataFields(); } - return LogicalPlan.of(operations); - } - - protected void addPostRunStatsForRowsInserted(Map postRunStatisticsResult) - { - Optional dataSplitInRangeCondition = dataSplitExecutionSupported() ? 
getDataSplitInRangeConditionForStatistics() : Optional.empty(); - ingestMode().deduplicationStrategy().accept(new PopulatePostRunStatisticsBreakdown(ingestMode(), mainDataset(), stagingDataset(), postRunStatisticsResult, dataSplitInRangeCondition)); - } - - public Optional getDataSplitInRangeConditionForStatistics() - { - return dataSplitInRangeCondition; + return remainingCols; } - private AuditingVisitor ValidateAuditingForDataSplits = new AuditingVisitor() + private Dataset getSelectStage(List fieldsToSelect) { - @Override - public Void visitNoAuditing(NoAuditingAbstract noAuditing) + if (ingestMode().dataSplitField().isPresent()) { - throw new IllegalStateException("DataSplits not supported for NoAuditing mode"); + return Selection.builder().source(stagingDataset()).condition(dataSplitInRangeCondition).addAllFields(fieldsToSelect).build(); } - - @Override - public Void visitDateTimeAuditing(DateTimeAuditingAbstract dateTimeAuditing) + else { - // For Data splits, audit column must be a PK - Field dateTimeAuditingField = mainDataset().schema().fields().stream() - .filter(field -> field.name().equalsIgnoreCase(dateTimeAuditing.dateTimeField())) - .findFirst().orElseThrow(() -> new IllegalStateException("dateTimeField is mandatory Field for dateTimeAuditing mode")); - if (!dateTimeAuditingField.primaryKey()) - { - throw new IllegalStateException("dateTimeField must be a Primary Key for Data Splits"); - } - return null; + return Selection.builder().source(stagingDataset()).addAllFields(fieldsToSelect).build(); } - }; + } - static class ValidatePrimaryKeys implements DeduplicationStrategyVisitor + private Dataset getSelectStageWithFilterExistingRecords(List fieldsToSelect) { - final List primaryKeys; - final Consumer> validatePrimaryKeysIsEmpty; - final Consumer> validatePrimaryKeysNotEmpty; - final boolean dataSplitsEnabled; - - ValidatePrimaryKeys(List primaryKeys, Consumer> validatePrimaryKeysIsEmpty, Consumer> validatePrimaryKeysNotEmpty, boolean dataSplitsEnabled) - { - this.primaryKeys = primaryKeys; - this.validatePrimaryKeysIsEmpty = validatePrimaryKeysIsEmpty; - this.validatePrimaryKeysNotEmpty = validatePrimaryKeysNotEmpty; - this.dataSplitsEnabled = dataSplitsEnabled; - } - - @Override - public Void visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) + Condition notExistInSinkCondition = Not.of(Exists.of(Selection.builder() + .source(mainDataset()) + .condition(And.builder() + .addConditions( + getPrimaryKeyMatchCondition(mainDataset(), stagingDataset(), primaryKeys.toArray(new String[0])), + getDigestMatchCondition(mainDataset(), stagingDataset(), ingestMode().digestField().orElseThrow(IllegalStateException::new))) + .build()) + .addAllFields(ALL_COLUMNS()) + .build())); + + Condition selectCondition; + if (ingestMode().dataSplitField().isPresent()) { - // If data splits are enabled, then PKs are allowed, Otherwise PKs are not allowed - if (!dataSplitsEnabled) - { - validatePrimaryKeysIsEmpty.accept(primaryKeys); - } - return null; + selectCondition = And.builder().addConditions(dataSplitInRangeCondition.orElseThrow(IllegalStateException::new), notExistInSinkCondition).build(); } - - @Override - public Void visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + else { - validatePrimaryKeysNotEmpty.accept(primaryKeys); - return null; + selectCondition = notExistInSinkCondition; } - @Override - public Void visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) - { - validatePrimaryKeysNotEmpty.accept(primaryKeys); - return null; - } + return 
Selection.builder().source(stagingDataset()).condition(selectCondition).addAllFields(fieldsToSelect).build(); } - static class SelectStageDatasetBuilder implements DeduplicationStrategyVisitor + protected void addPostRunStatsForRowsInserted(Map postRunStatisticsResult) { - final Dataset mainDataset; - final Dataset stagingDataset; - final AppendOnly ingestMode; - final List primaryKeys; - final Optional dataSplitInRangeCondition; - - final List fieldsToSelect; - - SelectStageDatasetBuilder(Dataset mainDataset, Dataset stagingDataset, AppendOnly ingestMode, List primaryKeys, Optional dataSplitInRangeCondition, List fieldsToSelect) - { - this.mainDataset = mainDataset; - this.stagingDataset = stagingDataset; - this.ingestMode = ingestMode; - this.primaryKeys = primaryKeys; - this.dataSplitInRangeCondition = dataSplitInRangeCondition; - this.fieldsToSelect = fieldsToSelect; - } - - @Override - public Dataset visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) - { - return selectStageDatasetWithoutDuplicateFiltering(); - } - - @Override - public Dataset visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + if (ingestMode().auditing().accept(AUDIT_ENABLED)) { - Condition notExistInSinkCondition = Not.of(Exists.of(Selection.builder() - .source(mainDataset) - .condition(And.builder() - .addConditions( - getPrimaryKeyMatchCondition(mainDataset, stagingDataset, primaryKeys.toArray(new String[0])), - getDigestMatchCondition(mainDataset, stagingDataset, ingestMode.digestField().orElseThrow(IllegalStateException::new))) - .build()) - .addAllFields(ALL_COLUMNS()) - .build())); - - Condition selectCondition; - if (ingestMode.dataSplitField().isPresent()) - { - selectCondition = And.builder().addConditions(dataSplitInRangeCondition.orElseThrow(IllegalStateException::new), notExistInSinkCondition).build(); - } - else - { - selectCondition = notExistInSinkCondition; - } - - return Selection.builder().source(stagingDataset).condition(selectCondition).addAllFields(fieldsToSelect).build(); + // Rows inserted = rows in main with audit column equals latest timestamp + String auditField = ingestMode().auditing().accept(AuditingVisitors.EXTRACT_AUDIT_FIELD).orElseThrow(IllegalStateException::new); + postRunStatisticsResult.put(ROWS_INSERTED, LogicalPlan.builder() + .addOps(LogicalPlanUtils.getRowsBasedOnLatestTimestamp(mainDataset(), auditField, ROWS_INSERTED.get())) + .build()); } - - @Override - public Dataset visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + else { - return selectStageDatasetWithoutDuplicateFiltering(); + // Not supported at the moment } + } - private Dataset selectStageDatasetWithoutDuplicateFiltering() - { - if (ingestMode.dataSplitField().isPresent() && !primaryKeys.isEmpty()) - { - return Selection.builder().source(stagingDataset).condition(dataSplitInRangeCondition).addAllFields(fieldsToSelect).build(); - } - else - { - return Selection.builder().source(stagingDataset).addAllFields(fieldsToSelect).build(); - } - } + public Optional getDataSplitInRangeConditionForStatistics() + { + return dataSplitInRangeCondition; } - static class PopulatePostRunStatisticsBreakdown implements DeduplicationStrategyVisitor + static class ValidateAuditingForPrimaryKeys implements AuditingVisitor { - final AppendOnly ingestMode; final Dataset mainDataset; - final Dataset stagingDataset; - final Map postRunStatisticsResult; - Optional dataSplitInRangeCondition; - PopulatePostRunStatisticsBreakdown(AppendOnly ingestMode, Dataset mainDataset, Dataset stagingDataset, Map 
postRunStatisticsResult, Optional dataSplitInRangeCondition) + ValidateAuditingForPrimaryKeys(Dataset mainDataset) { - this.ingestMode = ingestMode; this.mainDataset = mainDataset; - this.stagingDataset = stagingDataset; - this.postRunStatisticsResult = postRunStatisticsResult; - this.dataSplitInRangeCondition = dataSplitInRangeCondition; } @Override - public Void visitAllowDuplicates(AllowDuplicatesAbstract allowDuplicates) - { - return populateInsertedRecordsCountUsingStagingDataset(); - } - - @Override - public Void visitFailOnDuplicates(FailOnDuplicatesAbstract failOnDuplicates) + public Void visitNoAuditing(NoAuditingAbstract noAuditing) { - return populateInsertedRecordsCountUsingStagingDataset(); + throw new IllegalStateException("NoAuditing not allowed when there are primary keys"); } @Override - public Void visitFilterDuplicates(FilterDuplicatesAbstract filterDuplicates) + public Void visitDateTimeAuditing(DateTimeAuditingAbstract dateTimeAuditing) { - if (ingestMode.auditing().accept(AUDIT_ENABLED)) - { - // Rows inserted = rows in main with audit column equals latest timestamp - String auditField = ingestMode.auditing().accept(AuditingVisitors.EXTRACT_AUDIT_FIELD).orElseThrow(IllegalStateException::new); - postRunStatisticsResult.put(ROWS_INSERTED, LogicalPlan.builder() - .addOps(LogicalPlanUtils.getRowsBasedOnLatestTimestamp(mainDataset, auditField, ROWS_INSERTED.get())) - .build()); - } - else + Field dateTimeAuditingField = mainDataset.schema().fields().stream() + .filter(field -> field.name().equalsIgnoreCase(dateTimeAuditing.dateTimeField())) + .findFirst().orElseThrow(() -> new IllegalStateException("dateTimeField is mandatory Field for dateTimeAuditing mode")); + if (!dateTimeAuditingField.primaryKey()) { - // Not supported at the moment + throw new IllegalStateException("auditing dateTimeField must be a primary key when there are other primary keys"); } return null; } - - private Void populateInsertedRecordsCountUsingStagingDataset() - { - LogicalPlan incomingRecordCountPlan = LogicalPlan.builder() - .addOps(LogicalPlanUtils.getRecordCount(stagingDataset, ROWS_INSERTED.get(), dataSplitInRangeCondition)) - .build(); - postRunStatisticsResult.put(ROWS_INSERTED, incomingRecordCountPlan); - return null; - } - } - - @Override - public boolean dataSplitExecutionSupported() - { - return !primaryKeys.isEmpty(); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalDeltaPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalDeltaPlanner.java index d3b06d47a4c..10502d86c7e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalDeltaPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalDeltaPlanner.java @@ -18,7 +18,6 @@ import org.finos.legend.engine.persistence.components.common.Resources; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.BitemporalDelta; 
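// Reviewer note: the two AppendOnly validation rules introduced above, restated as
// plain precondition checks. The boolean flags are hypothetical inputs; the real
// planner derives them from the ingest mode and the main dataset schema via visitors.
class AppendOnlyValidationSketch
{
    static void validate(boolean hasPrimaryKeys, boolean auditingEnabled, boolean auditColumnIsPrimaryKey,
                         boolean filterExistingRecords, boolean hasDigest)
    {
        // 1. With primary keys, auditing is mandatory and the audit column must itself be a PK
        if (hasPrimaryKeys && (!auditingEnabled || !auditColumnIsPrimaryKey))
        {
            throw new IllegalStateException("auditing dateTimeField must be a primary key when there are other primary keys");
        }
        // 2. filterExistingRecords needs both a digest and primary keys to identify existing rows
        if (filterExistingRecords && (!hasDigest || !hasPrimaryKeys))
        {
            throw new IllegalStateException("Primary keys and digest are mandatory for filterExistingRecords");
        }
    }
}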
-import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategyVisitors; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromAndThruDateTime; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromDateTime; @@ -105,11 +104,11 @@ class BitemporalDeltaPlanner extends BitemporalPlanner private List primaryKeyFieldsAndFromFieldForSelection; private List dataFields; - BitemporalDeltaPlanner(Datasets datasets, BitemporalDelta ingestMode, PlannerOptions plannerOptions) + BitemporalDeltaPlanner(Datasets datasets, BitemporalDelta ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); - if (ingestMode().validityMilestoning().validityDerivation() instanceof SourceSpecifiesFromDateTime && ingestMode().deduplicationStrategy() instanceof FilterDuplicates) + if (ingestMode().validityMilestoning().validityDerivation() instanceof SourceSpecifiesFromDateTime && ingestMode().filterExistingRecords()) { this.stagingDataset = getStagingDatasetWithoutDuplicates(datasets); this.stagingDatasetWithoutDuplicates = Optional.of(this.stagingDataset); @@ -201,7 +200,7 @@ protected BitemporalDelta ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) { List operations = new ArrayList<>(); @@ -214,7 +213,7 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) { - super(datasets, bitemporalMilestoned, plannerOptions); + super(datasets, bitemporalMilestoned, plannerOptions, capabilities); // validate String targetValidDateTimeFrom = bitemporalMilestoned.validityMilestoning().accept(EXTRACT_TARGET_VALID_DATE_TIME_FROM); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalSnapshotPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalSnapshotPlanner.java index 854d0cceeb4..f032983ba00 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalSnapshotPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BitemporalSnapshotPlanner.java @@ -45,9 +45,9 @@ class BitemporalSnapshotPlanner extends BitemporalPlanner { - BitemporalSnapshotPlanner(Datasets datasets, BitemporalSnapshot ingestMode, PlannerOptions plannerOptions) + BitemporalSnapshotPlanner(Datasets datasets, BitemporalSnapshot ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); // validate @@ -67,7 +67,7 @@ protected BitemporalSnapshot ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set 
capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) { List> keyValuePairs = keyValuesForMilestoningUpdate(); @@ -87,23 +87,6 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) - { - operations.add(Create.of(true, stagingDataset())); - } - operations.add(Create.of(true, metadataDataset().orElseThrow(IllegalStateException::new).get())); - if (options().enableConcurrentSafety()) - { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); - } - return LogicalPlan.of(operations); - } - /* insert into main_table ( diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BulkLoadPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BulkLoadPlanner.java index 27cc89caa0f..495ca6799f4 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BulkLoadPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/BulkLoadPlanner.java @@ -26,7 +26,11 @@ import org.finos.legend.engine.persistence.components.ingestmode.digest.UDFBasedDigestGenStrategyAbstract; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Equals; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesSelection; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Drop; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionName; import org.finos.legend.engine.persistence.components.logicalplan.values.All; @@ -38,12 +42,10 @@ import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import org.finos.legend.engine.persistence.components.logicalplan.operations.Copy; import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; import org.finos.legend.engine.persistence.components.logicalplan.values.DigestUdf; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; -import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchIdValue; import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; import 
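// Reviewer note: every planner constructor above now receives the sink's capabilities
// up front. A sketch of the branching this enables; the enum and helper are stand-ins,
// with only TRANSFORM_WHILE_COPY taken from the BulkLoadPlanner change that follows.
import java.util.Set;

class CapabilityBranchSketch
{
    enum Capability { TRANSFORM_WHILE_COPY, MERGE }

    static String chooseBulkLoadShape(Set<Capability> capabilities)
    {
        // transform during COPY when the sink supports it, otherwise stage via a temp table
        return capabilities.contains(Capability.TRANSFORM_WHILE_COPY)
            ? "copy-with-transform"
            : "copy-then-transform-via-temp-table";
    }
}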
org.finos.legend.engine.persistence.components.util.BulkLoadMetadataUtils; import org.finos.legend.engine.persistence.components.util.Capability; @@ -53,26 +55,53 @@ import java.util.stream.Collectors; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; +import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.TEMP_DATASET_BASE_NAME; +import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.UNDERSCORE; class BulkLoadPlanner extends Planner { + private boolean transformWhileCopy; + private Dataset tempDataset; private StagedFilesDataset stagedFilesDataset; - private BulkLoadMetadataDataset bulkLoadMetadataDataset; + private Optional bulkLoadTaskIdValue; - BulkLoadPlanner(Datasets datasets, BulkLoad ingestMode, PlannerOptions plannerOptions) + BulkLoadPlanner(Datasets datasets, BulkLoad ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); // validation + validateNoPrimaryKeysInStageAndMain(); if (!(datasets.stagingDataset() instanceof StagedFilesDataset)) { throw new IllegalArgumentException("Only StagedFilesDataset are allowed under Bulk Load"); } + bulkLoadTaskIdValue = plannerOptions.bulkLoadTaskIdValue(); stagedFilesDataset = (StagedFilesDataset) datasets.stagingDataset(); bulkLoadMetadataDataset = bulkLoadMetadataDataset().orElseThrow(IllegalStateException::new); + + transformWhileCopy = capabilities.contains(Capability.TRANSFORM_WHILE_COPY); + if (!transformWhileCopy) + { + tempDataset = DatasetDefinition.builder() + .schema(datasets.stagingDataset().schema()) + .database(datasets.mainDataset().datasetReference().database()) + .group(datasets.mainDataset().datasetReference().group()) + .name(datasets.mainDataset().datasetReference().name().orElseThrow((IllegalStateException::new)) + UNDERSCORE + TEMP_DATASET_BASE_NAME) + .alias(TEMP_DATASET_BASE_NAME) + .build(); + } + } + + private void validateNoPrimaryKeysInStageAndMain() + { + List primaryKeysFromMain = mainDataset().schema().fields().stream().filter(Field::primaryKey).map(Field::name).collect(Collectors.toList()); + validatePrimaryKeysIsEmpty(primaryKeysFromMain); + + List primaryKeysFromStage = stagingDataset().schema().fields().stream().filter(Field::primaryKey).map(Field::name).collect(Collectors.toList()); + validatePrimaryKeysIsEmpty(primaryKeysFromStage); } @Override @@ -82,46 +111,120 @@ protected BulkLoad ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) + { + if (transformWhileCopy) + { + return buildLogicalPlanForTransformWhileCopy(resources); + } + else + { + return buildLogicalPlanForCopyAndTransform(resources); + } + } + + private LogicalPlan buildLogicalPlanForTransformWhileCopy(Resources resources) { List fieldsToSelect = LogicalPlanUtils.extractStagedFilesFieldValues(stagingDataset()); List fieldsToInsert = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - // Digest Generation + // Add digest ingestMode().digestGenStrategy().accept(new DigestGeneration(mainDataset(), stagingDataset(), fieldsToSelect, fieldsToInsert)); // Add batch_id field fieldsToInsert.add(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(ingestMode().batchIdField()).build()); - fieldsToSelect.add(BulkLoadBatchIdValue.INSTANCE); + fieldsToSelect.add(new 
BulkLoadMetadataUtils(bulkLoadMetadataDataset).getBatchId(StringValue.of(mainDataset().datasetReference().name().orElseThrow(IllegalStateException::new)))); + // Add auditing if (ingestMode().auditing().accept(AUDIT_ENABLED)) { - BatchStartTimestamp batchStartTimestamp = BatchStartTimestamp.INSTANCE; - fieldsToSelect.add(batchStartTimestamp); - String auditField = ingestMode().auditing().accept(AuditingVisitors.EXTRACT_AUDIT_FIELD).orElseThrow(IllegalStateException::new); - fieldsToInsert.add(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(auditField).build()); + addAuditing(fieldsToInsert, fieldsToSelect); } Dataset selectStage = StagedFilesSelection.builder().source(stagedFilesDataset).addAllFields(fieldsToSelect).build(); return LogicalPlan.of(Collections.singletonList(Copy.of(mainDataset(), selectStage, fieldsToInsert))); } + private LogicalPlan buildLogicalPlanForCopyAndTransform(Resources resources) + { + List operations = new ArrayList<>(); + + + // Operation 1: Copy into a temp table + List fieldsToSelectFromStage = LogicalPlanUtils.extractStagedFilesFieldValues(stagingDataset()); + Dataset selectStage = StagedFilesSelection.builder().source(stagedFilesDataset).addAllFields(fieldsToSelectFromStage).build(); + operations.add(Copy.of(tempDataset, selectStage, fieldsToSelectFromStage)); + + + // Operation 2: Transfer from temp table into target table, adding extra columns at the same time + List fieldsToSelect = new ArrayList<>(tempDataset.schemaReference().fieldValues()); + List fieldsToInsertIntoMain = new ArrayList<>(tempDataset.schemaReference().fieldValues()); + + // Add digest + ingestMode().digestGenStrategy().accept(new DigestGeneration(mainDataset(), tempDataset, fieldsToSelect, fieldsToInsertIntoMain)); + + // Add batch_id field + fieldsToInsertIntoMain.add(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(ingestMode().batchIdField()).build()); + fieldsToSelect.add(new BulkLoadMetadataUtils(bulkLoadMetadataDataset).getBatchId(StringValue.of(mainDataset().datasetReference().name().orElseThrow(IllegalStateException::new)))); + + // Add auditing + if (ingestMode().auditing().accept(AUDIT_ENABLED)) + { + addAuditing(fieldsToInsertIntoMain, fieldsToSelect); + } + + operations.add(Insert.of(mainDataset(), Selection.builder().source(tempDataset).addAllFields(fieldsToSelect).build(), fieldsToInsertIntoMain)); + + + return LogicalPlan.of(operations); + } + + private void addAuditing(List fieldsToInsert, List fieldsToSelect) + { + BatchStartTimestamp batchStartTimestamp = BatchStartTimestamp.INSTANCE; + String auditField = ingestMode().auditing().accept(AuditingVisitors.EXTRACT_AUDIT_FIELD).orElseThrow(IllegalStateException::new); + fieldsToInsert.add(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(auditField).build()); + fieldsToSelect.add(batchStartTimestamp); + } + @Override public LogicalPlan buildLogicalPlanForPreActions(Resources resources) { List operations = new ArrayList<>(); operations.add(Create.of(true, mainDataset())); operations.add(Create.of(true, bulkLoadMetadataDataset.get())); + if (!transformWhileCopy) + { + operations.add(Create.of(true, tempDataset)); + } return LogicalPlan.of(operations); } @Override public LogicalPlan buildLogicalPlanForPostActions(Resources resources) { + // there is no need to delete from the temp table for big query because we always use "overwrite" when loading List operations = new ArrayList<>(); return LogicalPlan.of(operations); } + @Override + public 
LogicalPlan buildLogicalPlanForPostCleanup(Resources resources) + { + List operations = new ArrayList<>(); + if (!transformWhileCopy) + { + operations.add(Drop.of(true, tempDataset, false)); + } + return LogicalPlan.of(operations); + } + + @Override + List getDigestOrRemainingColumns() + { + return Collections.emptyList(); + } + @Override public LogicalPlan buildLogicalPlanForMetadataIngest(Resources resources) { @@ -162,9 +265,10 @@ private Selection getRowsBasedOnAppendTimestamp(Dataset dataset, String field, S private String jsonifyBatchSourceInfo(StagedFilesDatasetProperties stagedFilesDatasetProperties) { - List files = stagedFilesDatasetProperties.files(); Map batchSourceMap = new HashMap(); + List files = stagedFilesDatasetProperties.files(); batchSourceMap.put("files", files); + bulkLoadTaskIdValue.ifPresent(taskId -> batchSourceMap.put("task_id", taskId)); ObjectMapper objectMapper = new ObjectMapper(); try { @@ -206,6 +310,7 @@ public Void visitUDFBasedDigestGenStrategy(UDFBasedDigestGenStrategyAbstract udf .udfName(udfBasedDigestGenStrategy.digestUdfName()) .addAllFieldNames(stagingDataset.schemaReference().fieldValues().stream().map(fieldValue -> fieldValue.fieldName()).collect(Collectors.toList())) .addAllValues(fieldsToSelect) + .dataset(stagingDataset) .build(); String digestField = udfBasedDigestGenStrategy.digestField(); fieldsToInsert.add(FieldValue.builder().datasetRef(mainDataset.datasetReference()).fieldName(digestField).build()); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalDeltaPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalDeltaPlanner.java index fe4d9638fba..8215ecc7af0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalDeltaPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalDeltaPlanner.java @@ -20,8 +20,7 @@ import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.NontemporalDelta; import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitors; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicator; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningConditionVisitor; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningConditionVisitor; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategyVisitors; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.conditions.And; @@ -30,21 +29,18 @@ import org.finos.legend.engine.persistence.components.logicalplan.conditions.Not; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import 
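// Reviewer note: lifecycle of the copy-and-transform path above, in execution order.
// The Runnable stand-ins abstract over the generated plans; the real planner emits
// Create/Copy/Insert/Drop operations playing exactly these roles.
import java.util.List;

class CopyAndTransformLifecycleSketch
{
    static void run(List<Runnable> preActions, Runnable copyIntoTemp, Runnable insertIntoMain, Runnable dropTemp)
    {
        preActions.forEach(Runnable::run); // pre-actions CREATE main, bulk-load metadata and the temp dataset
        copyIntoTemp.run();                // COPY staged files verbatim into <main>_<TEMP_DATASET_BASE_NAME>
        insertIntoMain.run();              // INSERT..SELECT adds digest, batch_id and audit columns
        dropTemp.run();                    // post-cleanup DROPs the temp dataset
    }
}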
org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import org.finos.legend.engine.persistence.components.logicalplan.operations.Delete; import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; import org.finos.legend.engine.persistence.components.logicalplan.operations.Merge; import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; import org.finos.legend.engine.persistence.components.logicalplan.operations.Update; import org.finos.legend.engine.persistence.components.logicalplan.operations.UpdateAbstract; -import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; -import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; -import org.finos.legend.engine.persistence.components.logicalplan.values.Pair; -import org.finos.legend.engine.persistence.components.logicalplan.values.Value; +import org.finos.legend.engine.persistence.components.logicalplan.values.*; import org.finos.legend.engine.persistence.components.util.Capability; import org.finos.legend.engine.persistence.components.util.LogicalPlanUtils; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Optional; @@ -54,7 +50,6 @@ class NontemporalDeltaPlanner extends Planner { - private final Dataset enrichedStagingDataset; private final Condition pkMatchCondition; private final Condition digestMatchCondition; private final Condition versioningCondition; @@ -67,15 +62,15 @@ class NontemporalDeltaPlanner extends Planner private final BatchStartTimestamp batchStartTimestamp; private final Optional dataSplitInRangeCondition; + private List dataFields; - NontemporalDeltaPlanner(Datasets datasets, NontemporalDelta ingestMode, PlannerOptions plannerOptions) + NontemporalDeltaPlanner(Datasets datasets, NontemporalDelta ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); // validate validatePrimaryKeysNotEmpty(primaryKeys); - // TODO validate interBatchDedup Strategies this.pkMatchCondition = LogicalPlanUtils.getPrimaryKeyMatchCondition(mainDataset(), stagingDataset(), primaryKeys.toArray(new String[0])); this.digestMatchCondition = LogicalPlanUtils.getDigestMatchCondition(mainDataset(), stagingDataset(), ingestMode().digestField()); this.versioningCondition = ingestMode().versioningStrategy() @@ -86,14 +81,9 @@ class NontemporalDeltaPlanner extends Planner this.deleteIndicatorIsNotSetCondition = deleteIndicatorField.map(field -> LogicalPlanUtils.getDeleteIndicatorIsNotSetCondition(stagingDataset(), field, deleteIndicatorValues)); this.deleteIndicatorIsSetCondition = deleteIndicatorField.map(field -> LogicalPlanUtils.getDeleteIndicatorIsSetCondition(stagingDataset(), field, deleteIndicatorValues)); - this.batchStartTimestamp = BatchStartTimestamp.INSTANCE; - this.dataSplitInRangeCondition = ingestMode.dataSplitField().map(field -> LogicalPlanUtils.getDataSplitInRangeCondition(stagingDataset(), field)); - - // Perform Deduplication & Filtering of Staging Dataset - this.enrichedStagingDataset = ingestMode().versioningStrategy() - .accept(new DatasetDeduplicator(stagingDataset(), primaryKeys)); + this.dataFields = getDataFields(); } @Override @@ -103,7 +93,7 @@ protected NontemporalDelta ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan 
buildLogicalPlanForIngest(Resources resources) { List operations = new ArrayList<>(); // Op1: Merge data from staging to main @@ -131,27 +121,21 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set stagingFields = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> this.deleteIndicatorField.isPresent() ? !field.fieldName().equals(this.deleteIndicatorField.get()) : !field.fieldName().isEmpty()) - .collect(Collectors.toList()); - - Delete delete = Delete.builder() + return Delete.builder() .dataset(mainDataset()) .condition(Exists.builder() .source(Selection.builder() - .source(this.enrichedStagingDataset) - .addAllFields(stagingFields) + .source(stagingDataset()) + .addFields(All.INSTANCE) .condition(And.builder().addConditions(this.pkMatchCondition, this.digestMatchCondition, this.deleteIndicatorIsSetCondition.get()).build()) .build()) .build()) .build(); - - return delete; } /* @@ -166,22 +150,12 @@ WHEN MATCHED AND ((DIGEST does not match) or (delete indicator NOT match)) THEN */ private Merge getMergeOperation() { - List> keyValuePairs = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> this.deleteIndicatorField.isPresent() ? !field.fieldName().equals(this.deleteIndicatorField.get()) : !field.fieldName().isEmpty()) - .map(field -> Pair.of( - FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(field.fieldName()).build(), - FieldValue.builder().datasetRef(stagingDataset().datasetReference()).fieldName(field.fieldName()).build())) - .collect(Collectors.toList()); - - Dataset stagingDataset = this.enrichedStagingDataset; + List> keyValuePairs = getKeyValuePairs(); + Dataset stagingDataset = stagingDataset(); if (ingestMode().dataSplitField().isPresent()) { - keyValuePairs.removeIf(field -> field.key().fieldName().equals(ingestMode().dataSplitField().get())); - List fieldsToSelect = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - LogicalPlanUtils.removeField(fieldsToSelect, ingestMode().dataSplitField().get()); - stagingDataset = Selection.builder().source(stagingDataset).condition(this.dataSplitInRangeCondition).addAllFields(fieldsToSelect).alias(stagingDataset().datasetReference().alias()).build(); + stagingDataset = Selection.builder().source(stagingDataset).condition(this.dataSplitInRangeCondition).addAllFields(dataFields).alias(stagingDataset().datasetReference().alias()).build(); } Condition versioningCondition; @@ -224,15 +198,8 @@ private Merge getMergeOperation() private Update getUpdateOperation() { Condition joinCondition = And.builder().addConditions(this.pkMatchCondition, this.versioningCondition).build(); - Dataset stagingDataset = this.enrichedStagingDataset; - - List> keyValuePairs = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> this.deleteIndicatorField.isPresent() ? 
!field.fieldName().equals(this.deleteIndicatorField.get()) : !field.fieldName().isEmpty()) - .map(field -> Pair.of( - FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(field.fieldName()).build(), - FieldValue.builder().datasetRef(stagingDataset().datasetReference()).fieldName(field.fieldName()).build())) - .collect(Collectors.toList()); + Dataset stagingDataset = stagingDataset(); + List> keyValuePairs = getKeyValuePairs(); if (ingestMode().auditing().accept(AUDIT_ENABLED)) { @@ -242,7 +209,6 @@ private Update getUpdateOperation() if (ingestMode().dataSplitField().isPresent()) { - keyValuePairs.removeIf(field -> field.key().fieldName().equals(ingestMode().dataSplitField().get())); stagingDataset = Selection.builder().source(stagingDataset).condition(this.dataSplitInRangeCondition).addAllFields(LogicalPlanUtils.ALL_COLUMNS()).alias(stagingDataset().datasetReference().alias()).build(); } Update update = UpdateAbstract.of(mainDataset(), stagingDataset, keyValuePairs, joinCondition); @@ -250,6 +216,22 @@ private Update getUpdateOperation() return update; } + private List> getKeyValuePairs() + { + List fieldsToSelect = new ArrayList<>(dataFields); + if (deleteIndicatorField.isPresent()) + { + LogicalPlanUtils.removeField(fieldsToSelect, deleteIndicatorField.get()); + } + List> keyValuePairs = fieldsToSelect + .stream() + .map(field -> Pair.of( + FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(((FieldValue) field).fieldName()).build(), + FieldValue.builder().datasetRef(stagingDataset().datasetReference()).fieldName(((FieldValue) field).fieldName()).build())) + .collect(Collectors.toList()); + return keyValuePairs; + } + /* insert into main_table (staging_columns) (select staging_columns from stage_table @@ -258,15 +240,13 @@ insert into main_table (staging_columns) */ private Insert getInsertOperation() { - List fieldsToInsert = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> this.deleteIndicatorField.isPresent() ? !field.fieldName().equals(this.deleteIndicatorField.get()) : !field.fieldName().isEmpty()) - .collect(Collectors.toList()); - - List fieldsToSelect = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> this.deleteIndicatorField.isPresent() ? 
!field.fieldName().equals(this.deleteIndicatorField.get()) : !field.fieldName().isEmpty()) - .collect(Collectors.toList()); + List fieldsToSelect = new ArrayList<>(dataFields); + List fieldsToInsert = new ArrayList<>(dataFields); + if (deleteIndicatorField.isPresent()) + { + LogicalPlanUtils.removeField(fieldsToSelect, deleteIndicatorField.get()); + LogicalPlanUtils.removeField(fieldsToInsert, deleteIndicatorField.get()); + } Condition notExistInSinkCondition = Not.of(Exists.of( Selection.builder() @@ -279,8 +259,6 @@ private Insert getInsertOperation() Condition selectCondition = notExistInSinkCondition; if (ingestMode().dataSplitField().isPresent()) { - LogicalPlanUtils.removeField(fieldsToSelect, ingestMode().dataSplitField().get()); - LogicalPlanUtils.removeField(fieldsToInsert, ingestMode().dataSplitField().get()); selectCondition = And.builder().addConditions(this.dataSplitInRangeCondition.get(), notExistInSinkCondition).build(); } @@ -290,28 +268,9 @@ private Insert getInsertOperation() fieldsToInsert.add(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(auditField).build()); fieldsToSelect.add(this.batchStartTimestamp); } - else if (!ingestMode().dataSplitField().isPresent() && !this.deleteIndicatorField.isPresent()) - { - fieldsToSelect = LogicalPlanUtils.ALL_COLUMNS(); - } - Dataset selectStage = Selection.builder().source(this.enrichedStagingDataset).condition(selectCondition).addAllFields(fieldsToSelect).build(); - return Insert.of(mainDataset(), selectStage, fieldsToInsert); - } - @Override - public LogicalPlan buildLogicalPlanForPreActions(Resources resources) - { - List operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) - { - operations.add(Create.of(true, stagingDataset())); - } - if (options().enableConcurrentSafety()) - { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); - } - return LogicalPlan.of(operations); + Dataset selectStage = Selection.builder().source(stagingDataset()).condition(selectCondition).addAllFields(fieldsToSelect).build(); + return Insert.of(mainDataset(), selectStage, fieldsToInsert); } public Optional getDataSplitInRangeConditionForStatistics() @@ -345,6 +304,12 @@ public Map buildLogicalPlanForPreRunStatistics(Resou return preRunStatisticsResult; } + @Override + List getDigestOrRemainingColumns() + { + return Arrays.asList(ingestMode().digestField()); + } + @Override protected void addPostRunStatsForRowsDeleted(Map postRunStatisticsResult) { @@ -359,11 +324,6 @@ protected void addPreRunStatsForRowsDeleted(Map preR { if (this.deleteIndicatorField.isPresent() && this.deleteIndicatorIsSetCondition.isPresent()) { - List stagingFields = stagingDataset().schemaReference().fieldValues() - .stream() - .filter(field -> !field.fieldName().equals(this.deleteIndicatorField.get())) - .collect(Collectors.toList()); - // Rows Deleted = rows removed (hard-deleted) from sink table LogicalPlan rowsDeletedCountPlan = LogicalPlan.builder().addOps(LogicalPlanUtils .getRecordCount(mainDataset(), @@ -371,7 +331,7 @@ protected void addPreRunStatsForRowsDeleted(Map preR Optional.of(Exists.builder() .source(Selection.builder() .source(stagingDataset()) - .addAllFields(stagingFields) + .addFields(All.INSTANCE) .condition(And.builder().addConditions(this.pkMatchCondition, this.digestMatchCondition, this.deleteIndicatorIsSetCondition.get()).build()) .build()) .build()))).build(); diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalSnapshotPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalSnapshotPlanner.java index a0424f612f7..e8a5f536099 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalSnapshotPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/NontemporalSnapshotPlanner.java @@ -20,15 +20,7 @@ import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot; import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitors; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.And; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.LessThan; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.Not; -import org.finos.legend.engine.persistence.components.logicalplan.conditions.Exists; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetReference; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import org.finos.legend.engine.persistence.components.logicalplan.operations.Delete; import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; @@ -43,21 +35,14 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.Optional; -import static org.finos.legend.engine.persistence.components.common.StatisticName.INCOMING_RECORD_COUNT; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_DELETED; -import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; -import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_TERMINATED; -import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_UPDATED; -import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.ALL_COLUMNS; -import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.getPrimaryKeyMatchCondition; class NontemporalSnapshotPlanner extends Planner { - NontemporalSnapshotPlanner(Datasets datasets, NontemporalSnapshot ingestMode, PlannerOptions plannerOptions) + NontemporalSnapshotPlanner(Datasets datasets, NontemporalSnapshot ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); } @Override @@ -67,35 +52,12 @@ protected NontemporalSnapshot ingestMode() } 
@Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) { - Dataset stagingDataset = stagingDataset(); - List fieldsToSelect = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - List fieldsToInsert = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); - Optional selectCondition = Optional.empty(); + List dataFields = getDataFields(); + List fieldsToSelect = new ArrayList<>(dataFields); + List fieldsToInsert = new ArrayList<>(dataFields); - // If data splits is enabled, add the condition to pick only the latest data split - if (ingestMode().dataSplitField().isPresent()) - { - String dataSplitField = ingestMode().dataSplitField().get(); - LogicalPlanUtils.removeField(fieldsToSelect, dataSplitField); - LogicalPlanUtils.removeField(fieldsToInsert, dataSplitField); - DatasetReference stagingRight = stagingDataset.datasetReference().withAlias("stage_right"); - FieldValue dataSplitLeft = FieldValue.builder() - .fieldName(dataSplitField) - .datasetRef(stagingDataset.datasetReference()) - .build(); - FieldValue dataSplitRight = dataSplitLeft.withDatasetRef(stagingRight.datasetReference()); - selectCondition = Optional.of(Not.of(Exists.of(Selection.builder() - .source(stagingRight) - .condition(And.builder() - .addConditions( - LessThan.of(dataSplitLeft, dataSplitRight), - getPrimaryKeyMatchCondition(stagingDataset, stagingRight, primaryKeys.toArray(new String[0]))) - .build()) - .addAllFields(ALL_COLUMNS()) - .build()))); - } // If audit is enabled, add audit column to select and insert fields if (ingestMode().auditing().accept(AUDIT_ENABLED)) { @@ -103,16 +65,8 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set operations = new ArrayList<>(); // Step 1: Delete all rows from existing table @@ -123,22 +77,6 @@ else if (!ingestMode().dataSplitField().isPresent()) return LogicalPlan.of(operations); } - @Override - public LogicalPlan buildLogicalPlanForPreActions(Resources resources) - { - List operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) - { - operations.add(Create.of(true, stagingDataset())); - } - if (options().enableConcurrentSafety()) - { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); - } - return LogicalPlan.of(operations); - } - @Override public Map buildLogicalPlanForPreRunStatistics(Resources resources) { @@ -156,6 +94,17 @@ protected void addPostRunStatsForRowsDeleted(Map pos { } + @Override + List getDigestOrRemainingColumns() + { + List remainingCols = new ArrayList<>(); + if (!primaryKeys.isEmpty()) + { + remainingCols = getNonPKNonVersionDataFields(); + } + return remainingCols; + } + @Override public boolean dataSplitExecutionSupported() { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planner.java index dbc17287cbd..98b1ecf7a11 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planner.java +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planner.java @@ -14,23 +14,30 @@ package org.finos.legend.engine.persistence.components.planner; +import java.util.function.Consumer; import java.util.stream.Collectors; + import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.common.Resources; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitor; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditingAbstract; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditingAbstract; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationVisitors; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Drop; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Delete; -import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestampAbstract; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.logicalplan.operations.*; +import org.finos.legend.engine.persistence.components.logicalplan.values.*; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; +import org.finos.legend.engine.persistence.components.logicalplan.values.ObjectValue; import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; import org.finos.legend.engine.persistence.components.util.Capability; import org.finos.legend.engine.persistence.components.util.LockInfoDataset; @@ -52,6 +59,8 @@ import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_TERMINATED; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_UPDATED; +import static org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicationHandler.COUNT; +import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.SUPPORTED_DATA_TYPES_FOR_VERSIONING_COLUMNS; import static org.immutables.value.Value.Default; import static org.immutables.value.Value.Immutable; import static org.immutables.value.Value.Style; @@ -97,19 +106,57 @@ default boolean enableConcurrentSafety() { return false; } + + Optional bulkLoadTaskIdValue(); } private final Datasets datasets; private final IngestMode ingestMode; private 
final PlannerOptions plannerOptions; + protected final Set capabilities; protected final List primaryKeys; + private final Optional tempStagingDataset; + private final Optional tempStagingDatasetWithoutPks; + private final Dataset effectiveStagingDataset; + protected final boolean isTempTableNeededForStaging; - Planner(Datasets datasets, IngestMode ingestMode, PlannerOptions plannerOptions) + Planner(Datasets datasets, IngestMode ingestMode, PlannerOptions plannerOptions, Set capabilities) { this.datasets = datasets; this.ingestMode = ingestMode; this.plannerOptions = plannerOptions == null ? PlannerOptions.builder().build() : plannerOptions; + this.isTempTableNeededForStaging = LogicalPlanUtils.isTempTableNeededForStaging(ingestMode); + this.tempStagingDataset = getTempStagingDataset(); + this.tempStagingDatasetWithoutPks = getTempStagingDatasetWithoutPks(); + this.effectiveStagingDataset = isTempTableNeededForStaging ? tempStagingDataset() : originalStagingDataset(); + this.capabilities = capabilities; this.primaryKeys = findCommonPrimaryKeysBetweenMainAndStaging(); + + // Validation + // 1. MaxVersion & AllVersion strategies must have primary keys + ingestMode.versioningStrategy().accept(new ValidatePrimaryKeysForVersioningStrategy(primaryKeys, this::validatePrimaryKeysNotEmpty)); + // 2. Validate if the versioningField is comparable if a versioningStrategy is present + validateVersioningField(ingestMode().versioningStrategy(), stagingDataset()); + } + + private Optional getTempStagingDataset() + { + Optional tempStagingDataset = Optional.empty(); + if (isTempTableNeededForStaging) + { + tempStagingDataset = Optional.of(LogicalPlanUtils.getTempStagingDatasetDefinition(originalStagingDataset(), ingestMode)); + } + return tempStagingDataset; + } + + private Optional getTempStagingDatasetWithoutPks() + { + Optional tempStagingDatasetWithoutPks = Optional.empty(); + if (isTempTableNeededForStaging) + { + tempStagingDatasetWithoutPks = Optional.of(LogicalPlanUtils.getTempStagingDatasetWithoutPks(tempStagingDataset())); + } + return tempStagingDatasetWithoutPks; } private List findCommonPrimaryKeysBetweenMainAndStaging() @@ -123,11 +170,42 @@ protected Dataset mainDataset() return datasets.mainDataset(); } - protected Dataset stagingDataset() + public Dataset stagingDataset() + { + return effectiveStagingDataset; + } + + protected Dataset originalStagingDataset() { return datasets.stagingDataset(); } + protected Dataset tempStagingDataset() + { + return tempStagingDataset.orElseThrow(IllegalStateException::new); + } + + protected Dataset tempStagingDatasetWithoutPks() + { + return tempStagingDatasetWithoutPks.orElseThrow(IllegalStateException::new); + } + + protected List getDataFields() + { + List dataFields = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); + Optional dedupField = ingestMode.deduplicationStrategy().accept(DeduplicationVisitors.EXTRACT_DEDUP_FIELD); + + if (ingestMode().dataSplitField().isPresent()) + { + LogicalPlanUtils.removeField(dataFields, ingestMode().dataSplitField().get()); + } + if (dedupField.isPresent()) + { + LogicalPlanUtils.removeField(dataFields, dedupField.get()); + } + return dataFields; + } + protected Optional metadataDataset() { return datasets.metadataDataset(); @@ -153,7 +231,7 @@ protected PlannerOptions options() return plannerOptions; } - public abstract LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities); + public abstract LogicalPlan buildLogicalPlanForIngest(Resources resources); public LogicalPlan 
buildLogicalPlanForMetadataIngest(Resources resources) { @@ -180,26 +258,62 @@ public LogicalPlan buildLogicalPlanForAcquireLock(Resources resources) return null; } - public abstract LogicalPlan buildLogicalPlanForPreActions(Resources resources); + public LogicalPlan buildLogicalPlanForPreActions(Resources resources) + { + List operations = new ArrayList<>(); + operations.add(Create.of(true, mainDataset())); + if (options().createStagingDataset()) + { + operations.add(Create.of(true, originalStagingDataset())); + } + if (options().enableConcurrentSafety()) + { + operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); + } + if (isTempTableNeededForStaging) + { + operations.add(Create.of(true, tempStagingDatasetWithoutPks())); + } + return LogicalPlan.of(operations); + } - public LogicalPlan buildLogicalPlanForPostActions(Resources resources) + public LogicalPlan buildLogicalPlanForDeduplicationAndVersioning(Resources resources) { List operations = new ArrayList<>(); - // Drop table or clean table based on flags - if (resources.externalDatasetImported()) + if (isTempTableNeededForStaging) { - operations.add(Drop.of(true, stagingDataset(), true)); + operations.add(Delete.builder().dataset(tempStagingDataset()).build()); + Dataset dedupAndVersionedDataset = LogicalPlanUtils.getDedupedAndVersionedDataset(ingestMode.deduplicationStrategy(), ingestMode.versioningStrategy(), originalStagingDataset(), primaryKeys); + List fieldsToInsert = new ArrayList<>(dedupAndVersionedDataset.schemaReference().fieldValues()); + operations.add(Insert.of(tempStagingDataset(), dedupAndVersionedDataset, fieldsToInsert)); } - else if (plannerOptions.cleanupStagingData()) + return LogicalPlan.of(operations); + } + + public LogicalPlan buildLogicalPlanForPostActions(Resources resources) + { + List operations = new ArrayList<>(); + if (plannerOptions.cleanupStagingData()) { - operations.add(Delete.builder().dataset(stagingDataset()).build()); + operations.add(Delete.builder().dataset(originalStagingDataset()).build()); } return LogicalPlan.of(operations); } + // Introduce a flag public LogicalPlan buildLogicalPlanForPostCleanup(Resources resources) { - return null; + List operations = new ArrayList<>(); + // Drop table + if (resources.externalDatasetImported()) + { + operations.add(Drop.of(true, originalStagingDataset(), true)); + } + if (isTempTableNeededForStaging) + { + operations.add(Drop.of(true, tempStagingDataset(), true)); + } + return LogicalPlan.of(operations); } public Map buildLogicalPlanForPreRunStatistics(Resources resources) @@ -227,6 +341,47 @@ public Map buildLogicalPlanForPostRunStatistics(Reso return postRunStatisticsResult; } + public Map buildLogicalPlanForDeduplicationAndVersioningErrorChecks(Resources resources) + { + Map dedupAndVersioningErrorChecks = new HashMap<>(); + addMaxDuplicatesErrorCheck(dedupAndVersioningErrorChecks); + addDataErrorCheck(dedupAndVersioningErrorChecks); + return dedupAndVersioningErrorChecks; + } + + protected void addMaxDuplicatesErrorCheck(Map dedupAndVersioningErrorChecks) + { + if (ingestMode.deduplicationStrategy() instanceof FailOnDuplicates) + { + FunctionImpl maxCount = FunctionImpl.builder() + .functionName(FunctionName.MAX) + .addValue(FieldValue.builder().datasetRef(tempStagingDataset().datasetReference()).fieldName(COUNT).build()) + .alias(DedupAndVersionErrorStatistics.MAX_DUPLICATES.name()) + .build(); + Selection selectMaxDupsCount = Selection.builder() + .source(tempStagingDataset()) + .addFields(maxCount) + 
.build(); + LogicalPlan maxDuplicatesCountPlan = LogicalPlan.builder().addOps(selectMaxDupsCount).build(); + dedupAndVersioningErrorChecks.put(DedupAndVersionErrorStatistics.MAX_DUPLICATES, maxDuplicatesCountPlan); + } + } + + protected void addDataErrorCheck(Map dedupAndVersioningErrorChecks) + { + List remainingColumns = getDigestOrRemainingColumns(); + if (ingestMode.versioningStrategy().accept(VersioningVisitors.IS_TEMP_TABLE_NEEDED)) + { + LogicalPlan logicalPlan = ingestMode.versioningStrategy().accept(new DeriveDataErrorCheckLogicalPlan(primaryKeys, remainingColumns, tempStagingDataset())); + if (logicalPlan != null) + { + dedupAndVersioningErrorChecks.put(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS, logicalPlan); + } + } + } + + abstract List getDigestOrRemainingColumns(); + protected void validatePrimaryKeysNotEmpty(List primaryKeys) { if (primaryKeys.isEmpty()) @@ -262,17 +417,30 @@ protected void addPreRunStatsForRowsDeleted(Map preR protected void addPostRunStatsForIncomingRecords(Map postRunStatisticsResult) { Optional filterCondition = Optional.empty(); - if (dataSplitExecutionSupported()) + Value countIncomingRecords = FunctionImpl.builder().functionName(FunctionName.COUNT).alias(INCOMING_RECORD_COUNT.get()).addValue(All.INSTANCE).build(); + Dataset dataset = originalStagingDataset(); + + // If data splits are present + if (ingestMode.dataSplitField().isPresent()) { - Optional dataSplitInRangeCondition = getDataSplitInRangeConditionForStatistics(); - if (dataSplitInRangeCondition.isPresent()) + dataset = stagingDataset(); + filterCondition = getDataSplitInRangeConditionForStatistics(); + Optional duplicateCountFieldName = ingestMode.deduplicationStrategy().accept(DeduplicationVisitors.EXTRACT_DEDUP_FIELD); + // If deduplication has been performed + if (duplicateCountFieldName.isPresent()) { - filterCondition = Optional.of(dataSplitInRangeCondition.get()); + FieldValue duplicateCountField = FieldValue.builder().fieldName(duplicateCountFieldName.get()).datasetRef(dataset.datasetReference()).build(); + FunctionImpl sumOfDuplicateFieldCount = FunctionImpl.builder().functionName(FunctionName.SUM).addValue(duplicateCountField).build(); + countIncomingRecords = FunctionImpl.builder().functionName(FunctionName.COALESCE).alias(INCOMING_RECORD_COUNT.get()).addValue(sumOfDuplicateFieldCount, ObjectValue.of(0)).build(); } } LogicalPlan incomingRecordCountPlan = LogicalPlan.builder() - .addOps(LogicalPlanUtils.getRecordCount(stagingDataset(), INCOMING_RECORD_COUNT.get(), filterCondition)) + .addOps(Selection.builder() + .source(dataset) + .addFields(countIncomingRecords) + .condition(filterCondition) + .build()) .build(); postRunStatisticsResult.put(INCOMING_RECORD_COUNT, incomingRecordCountPlan); } @@ -301,6 +469,35 @@ protected void addPostRunStatsForRowsDeleted(Map pos postRunStatisticsResult.put(ROWS_DELETED, rowsDeletedCountPlan); } + protected List getNonPKNonVersionDataFields() + { + List nonPkDataFields = stagingDataset().schemaReference().fieldValues().stream() + .map(fieldValue -> fieldValue.fieldName()) + .filter(fieldName -> !primaryKeys.contains(fieldName)) + .collect(Collectors.toList()); + Optional dedupField = ingestMode.deduplicationStrategy().accept(DeduplicationVisitors.EXTRACT_DEDUP_FIELD); + Optional versioningField = ingestMode.versioningStrategy().accept(VersioningVisitors.EXTRACT_VERSIONING_FIELD); + nonPkDataFields.removeIf(field -> ingestMode().dataSplitField().isPresent() && field.equals(ingestMode().dataSplitField().get())); + nonPkDataFields.removeIf(field 
-> dedupField.isPresent() && field.equals(dedupField.get())); + nonPkDataFields.removeIf(field -> versioningField.isPresent() && field.equals(versioningField.get())); + return nonPkDataFields; + } + + protected void validateVersioningField(VersioningStrategy versioningStrategy, Dataset dataset) + { + Optional versioningField = versioningStrategy.accept(VersioningVisitors.EXTRACT_VERSIONING_FIELD); + if (versioningField.isPresent()) + { + Field filterField = dataset.schema().fields().stream() + .filter(field -> field.name().equals(versioningField.get())) + .findFirst().orElseThrow(() -> new IllegalStateException(String.format("Versioning field [%s] not found in Staging Schema", versioningField.get()))); + if (!SUPPORTED_DATA_TYPES_FOR_VERSIONING_COLUMNS.contains(filterField.type().dataType())) + { + throw new IllegalStateException(String.format("Versioning field's data type [%s] is not supported", filterField.type().dataType())); + } + } + } + // auditing visitor protected static final AuditEnabled AUDIT_ENABLED = new AuditEnabled(); @@ -323,4 +520,44 @@ public Boolean visitDateTimeAuditing(DateTimeAuditingAbstract dateTimeAuditing) return true; } } + + static class ValidatePrimaryKeysForVersioningStrategy implements VersioningStrategyVisitor + { + final List primaryKeys; + final Consumer> validatePrimaryKeysNotEmpty; + + ValidatePrimaryKeysForVersioningStrategy(List primaryKeys, Consumer> validatePrimaryKeysNotEmpty) + { + this.primaryKeys = primaryKeys; + this.validatePrimaryKeysNotEmpty = validatePrimaryKeysNotEmpty; + } + + @Override + public Void visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) + { + return null; + } + + @Override + public Void visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) + { + validatePrimaryKeysNotEmpty.accept(primaryKeys); + if (primaryKeys.contains(maxVersionStrategy.versioningField())) + { + throw new IllegalStateException("Versioning field cannot be a primary key"); + } + return null; + } + + @Override + public Void visitAllVersionsStrategy(AllVersionsStrategyAbstract allVersionsStrategyAbstract) + { + validatePrimaryKeysNotEmpty.accept(primaryKeys); + if (primaryKeys.contains(allVersionsStrategyAbstract.versioningField())) + { + throw new IllegalStateException("Versioning field cannot be a primary key"); + } + return null; + } + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planners.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planners.java index c6949395af4..321c480b129 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planners.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/Planners.java @@ -33,6 +33,9 @@ import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshotAbstract; import org.finos.legend.engine.persistence.components.ingestmode.BulkLoadAbstract; import org.finos.legend.engine.persistence.components.ingestmode.BulkLoad; +import 
org.finos.legend.engine.persistence.components.util.Capability; + +import java.util.Set; public class Planners { @@ -40,73 +43,75 @@ private Planners() { } - public static Planner get(Datasets datasets, IngestMode ingestMode) + public static Planner get(Datasets datasets, IngestMode ingestMode, Set capabilities) { - return ingestMode.accept(new PlannerFactory(datasets, PlannerOptions.builder().build())); + return ingestMode.accept(new PlannerFactory(datasets, PlannerOptions.builder().build(), capabilities)); } - public static Planner get(Datasets datasets, IngestMode ingestMode, PlannerOptions plannerOptions) + public static Planner get(Datasets datasets, IngestMode ingestMode, PlannerOptions plannerOptions, Set capabilities) { - return ingestMode.accept(new PlannerFactory(datasets, plannerOptions)); + return ingestMode.accept(new PlannerFactory(datasets, plannerOptions, capabilities)); } static class PlannerFactory implements IngestModeVisitor { private final Datasets datasets; private final PlannerOptions plannerOptions; + private final Set capabilities; - PlannerFactory(Datasets datasets, PlannerOptions plannerOptions) + PlannerFactory(Datasets datasets, PlannerOptions plannerOptions, Set capabilities) { this.datasets = datasets; this.plannerOptions = plannerOptions; + this.capabilities = capabilities; } @Override public Planner visitAppendOnly(AppendOnlyAbstract appendOnly) { - return new AppendOnlyPlanner(datasets, (AppendOnly) appendOnly, plannerOptions); + return new AppendOnlyPlanner(datasets, (AppendOnly) appendOnly, plannerOptions, capabilities); } @Override public Planner visitNontemporalSnapshot(NontemporalSnapshotAbstract nontemporalSnapshot) { - return new NontemporalSnapshotPlanner(datasets, (NontemporalSnapshot) nontemporalSnapshot, plannerOptions); + return new NontemporalSnapshotPlanner(datasets, (NontemporalSnapshot) nontemporalSnapshot, plannerOptions, capabilities); } @Override public Planner visitNontemporalDelta(NontemporalDeltaAbstract nontemporalDelta) { - return new NontemporalDeltaPlanner(datasets, (NontemporalDelta) nontemporalDelta, plannerOptions); + return new NontemporalDeltaPlanner(datasets, (NontemporalDelta) nontemporalDelta, plannerOptions, capabilities); } @Override public Planner visitUnitemporalSnapshot(UnitemporalSnapshotAbstract unitemporalSnapshot) { - return new UnitemporalSnapshotPlanner(datasets, (UnitemporalSnapshot) unitemporalSnapshot, plannerOptions); + return new UnitemporalSnapshotPlanner(datasets, (UnitemporalSnapshot) unitemporalSnapshot, plannerOptions, capabilities); } @Override public Planner visitUnitemporalDelta(UnitemporalDeltaAbstract unitemporalDelta) { - return new UnitemporalDeltaPlanner(datasets, (UnitemporalDelta) unitemporalDelta, plannerOptions); + return new UnitemporalDeltaPlanner(datasets, (UnitemporalDelta) unitemporalDelta, plannerOptions, capabilities); } @Override public Planner visitBitemporalSnapshot(BitemporalSnapshotAbstract bitemporalSnapshot) { - return new BitemporalSnapshotPlanner(datasets, (BitemporalSnapshot) bitemporalSnapshot, plannerOptions); + return new BitemporalSnapshotPlanner(datasets, (BitemporalSnapshot) bitemporalSnapshot, plannerOptions, capabilities); } @Override public Planner visitBitemporalDelta(BitemporalDeltaAbstract bitemporalDelta) { - return new BitemporalDeltaPlanner(datasets, (BitemporalDelta) bitemporalDelta, plannerOptions); + return new BitemporalDeltaPlanner(datasets, (BitemporalDelta) bitemporalDelta, plannerOptions, capabilities); } @Override public Planner 
visitBulkLoad(BulkLoadAbstract bulkLoad) { - return new BulkLoadPlanner(datasets, (BulkLoad) bulkLoad, plannerOptions); + return new BulkLoadPlanner(datasets, (BulkLoad) bulkLoad, plannerOptions, capabilities); } } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalDeltaPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalDeltaPlanner.java index b93ef293767..2431aec2ac8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalDeltaPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalDeltaPlanner.java @@ -18,8 +18,7 @@ import org.finos.legend.engine.persistence.components.common.Resources; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicator; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningConditionVisitor; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningConditionVisitor; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategyVisitors; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory; @@ -30,7 +29,6 @@ import org.finos.legend.engine.persistence.components.logicalplan.conditions.Or; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; import org.finos.legend.engine.persistence.components.logicalplan.operations.Update; @@ -56,25 +54,21 @@ class UnitemporalDeltaPlanner extends UnitemporalPlanner { private final Optional deleteIndicatorField; private final List deleteIndicatorValues; - private final Dataset enrichedStagingDataset; private final Condition versioningCondition; private final Condition inverseVersioningCondition; - private final Optional deleteIndicatorIsNotSetCondition; private final Optional deleteIndicatorIsSetCondition; private final Optional dataSplitInRangeCondition; - UnitemporalDeltaPlanner(Datasets datasets, UnitemporalDelta ingestMode, PlannerOptions plannerOptions) + UnitemporalDeltaPlanner(Datasets datasets, UnitemporalDelta ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); // Validate if the optimizationFilters are comparable if (!ingestMode.optimizationFilters().isEmpty()) { 
validateOptimizationFilters(ingestMode.optimizationFilters(), stagingDataset()); } - // Validate if the versioningField is comparable if a versioningStrategy is present - validateVersioningField(ingestMode().versioningStrategy(), stagingDataset()); this.deleteIndicatorField = ingestMode.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_FIELD); this.deleteIndicatorValues = ingestMode.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_VALUES); @@ -82,9 +76,6 @@ class UnitemporalDeltaPlanner extends UnitemporalPlanner this.deleteIndicatorIsNotSetCondition = deleteIndicatorField.map(field -> LogicalPlanUtils.getDeleteIndicatorIsNotSetCondition(stagingDataset(), field, deleteIndicatorValues)); this.deleteIndicatorIsSetCondition = deleteIndicatorField.map(field -> LogicalPlanUtils.getDeleteIndicatorIsSetCondition(stagingDataset(), field, deleteIndicatorValues)); this.dataSplitInRangeCondition = ingestMode.dataSplitField().map(field -> LogicalPlanUtils.getDataSplitInRangeCondition(stagingDataset(), field)); - // Perform Deduplication & Filtering of Staging Dataset - this.enrichedStagingDataset = ingestMode().versioningStrategy() - .accept(new DatasetDeduplicator(stagingDataset(), primaryKeys)); this.versioningCondition = ingestMode().versioningStrategy() .accept(new VersioningConditionVisitor(mainDataset(), stagingDataset(), false, ingestMode().digestField())); this.inverseVersioningCondition = ingestMode.versioningStrategy() @@ -98,7 +89,7 @@ protected UnitemporalDelta ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources resources) { List operations = new ArrayList<>(); @@ -113,24 +104,6 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) - { - operations.add(Create.of(true, stagingDataset())); - } - operations.add(Create.of(true, metadataDataset().orElseThrow(IllegalStateException::new).get())); - if (options().enableConcurrentSafety()) - { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); - } - return LogicalPlan.of(operations); - } - - /* ------------------ Upsert Logic: @@ -145,10 +118,10 @@ INSERT INTO main_table (staging_columns, special_columns) */ private Insert getUpsertLogic() { - List columnsToInsert = new ArrayList<>(); - List stagingColumns = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); + List dataFields = getDataFields(); + List columnsToInsert = new ArrayList<>(dataFields); + List stagingColumns = new ArrayList<>(dataFields); List milestoneColumns = transactionMilestoningFields(); - columnsToInsert.addAll(stagingColumns); columnsToInsert.addAll(milestoneColumns); List columnsToSelect = new ArrayList<>(stagingColumns); @@ -158,12 +131,6 @@ private Insert getUpsertLogic() LogicalPlanUtils.removeField(columnsToInsert, deleteIndicatorField); }); - if (ingestMode().dataSplitField().isPresent()) - { - LogicalPlanUtils.removeField(columnsToSelect, ingestMode().dataSplitField().get()); - LogicalPlanUtils.removeField(columnsToInsert, ingestMode().dataSplitField().get()); - } - List milestoneUpdateValues = transactionMilestoningFieldValues(); columnsToSelect.addAll(milestoneUpdateValues); @@ -206,7 +173,7 @@ private Insert getUpsertLogic() } } - Dataset selectStage = 
Selection.builder().source(enrichedStagingDataset).condition(selectCondition).addAllFields(columnsToSelect).build(); + Dataset selectStage = Selection.builder().source(stagingDataset()).condition(selectCondition).addAllFields(columnsToSelect).build(); return Insert.of(mainDataset(), selectStage, columnsToInsert); } @@ -241,7 +208,7 @@ private Update getMilestoningLogic() Condition existsCondition = Exists.of( Selection.builder() - .source(enrichedStagingDataset) + .source(stagingDataset()) .condition(selectCondition) .addAllFields(LogicalPlanUtils.ALL_COLUMNS()) .build()); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalPlanner.java index 4b43d78ce4c..d811ed6cfd6 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalPlanner.java @@ -18,10 +18,6 @@ import org.finos.legend.engine.persistence.components.common.OptimizationFilter; import org.finos.legend.engine.persistence.components.common.Resources; import org.finos.legend.engine.persistence.components.common.StatisticName; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategyAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.NoVersioningStrategyAbstract; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategyVisitor; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAbstract; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTimeAbstract; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTimeAbstract; @@ -35,6 +31,8 @@ import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Operation; import org.finos.legend.engine.persistence.components.logicalplan.values.All; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchEndTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; @@ -46,6 +44,7 @@ import org.finos.legend.engine.persistence.components.logicalplan.values.StringValue; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; import org.finos.legend.engine.persistence.components.logicalplan.values.DiffBinaryValueOperator; +import 
org.finos.legend.engine.persistence.components.util.Capability; import org.finos.legend.engine.persistence.components.util.LogicalPlanUtils; import org.finos.legend.engine.persistence.components.util.MetadataDataset; import org.finos.legend.engine.persistence.components.util.MetadataUtils; @@ -56,13 +55,12 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.Set; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_UPDATED; import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_TERMINATED; import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.SUPPORTED_DATA_TYPES_FOR_OPTIMIZATION_COLUMNS; -import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.SUPPORTED_DATA_TYPES_FOR_VERSIONING_COLUMNS; abstract class UnitemporalPlanner extends Planner { @@ -73,16 +71,17 @@ abstract class UnitemporalPlanner extends Planner protected final Condition openRecordCondition; protected final Condition digestMatchCondition; protected final Condition digestDoesNotMatchCondition; - + protected final String digestField; protected Condition primaryKeysMatchCondition; - UnitemporalPlanner(Datasets datasets, TransactionMilestoned transactionMilestoned, PlannerOptions plannerOptions) + UnitemporalPlanner(Datasets datasets, TransactionMilestoned transactionMilestoned, PlannerOptions plannerOptions, Set capabilities) { super(datasets.metadataDataset().isPresent() ? datasets : datasets.withMetadataDataset(MetadataDataset.builder().build()), transactionMilestoned, - plannerOptions); + plannerOptions, + capabilities); // validate validatePrimaryKeysNotEmpty(primaryKeys); @@ -93,6 +92,7 @@ abstract class UnitemporalPlanner extends Planner this.mainTableName = StringValue.of(mainDataset().datasetReference().name().orElseThrow(IllegalStateException::new)); this.batchStartTimestamp = BatchStartTimestamp.INSTANCE; this.batchEndTimestamp = BatchEndTimestamp.INSTANCE; + this.digestField = transactionMilestoned.digestField(); this.openRecordCondition = transactionMilestoned.transactionMilestoning().accept(new DetermineOpenRecordCondition(mainDataset())); this.digestMatchCondition = LogicalPlanUtils.getDigestMatchCondition(mainDataset(), stagingDataset(), transactionMilestoned.digestField()); this.primaryKeysMatchCondition = LogicalPlanUtils.getPrimaryKeyMatchCondition(mainDataset(), stagingDataset(), primaryKeys.toArray(new String[0])); @@ -108,10 +108,37 @@ protected TransactionMilestoned ingestMode() @Override public LogicalPlan buildLogicalPlanForMetadataIngest(Resources resources) { - List stagingFilters = LogicalPlanUtils.getDatasetFilters(stagingDataset()); + List stagingFilters = LogicalPlanUtils.getDatasetFilters(originalStagingDataset()); return LogicalPlan.of(Arrays.asList(metadataUtils.insertMetaData(mainTableName, batchStartTimestamp, batchEndTimestamp, stagingFilters))); } + @Override + List getDigestOrRemainingColumns() + { + return Arrays.asList(digestField); + } + + @Override + public LogicalPlan buildLogicalPlanForPreActions(Resources resources) + { + List operations = new ArrayList<>(); + operations.add(Create.of(true, mainDataset())); + if (options().createStagingDataset()) + { + operations.add(Create.of(true, originalStagingDataset())); + } + operations.add(Create.of(true, 
metadataDataset().orElseThrow(IllegalStateException::new).get())); + if (options().enableConcurrentSafety()) + { + operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); + } + if (isTempTableNeededForStaging) + { + operations.add(Create.of(true, tempStagingDatasetWithoutPks())); + } + return LogicalPlan.of(operations); + } + protected void validatePrimaryKey(List fields, String targetFieldName) { Field targetField = fields.stream().filter(field -> field.name().equals(targetFieldName)).findFirst().orElse(null); @@ -151,35 +178,6 @@ protected void validateOptimizationFilters(List optimization } } - protected void validateVersioningField(VersioningStrategy versioningStrategy, Dataset dataset) - { - Optional versioningField = versioningStrategy.accept(new VersioningStrategyVisitor>() - { - @Override - public Optional visitNoVersioningStrategy(NoVersioningStrategyAbstract noVersioningStrategy) - { - return Optional.empty(); - } - - @Override - public Optional visitMaxVersionStrategy(MaxVersionStrategyAbstract maxVersionStrategy) - { - return Optional.of(maxVersionStrategy.versioningField()); - } - }); - - if (versioningField.isPresent()) - { - Field filterField = dataset.schema().fields().stream() - .filter(field -> field.name().equals(versioningField.get())) - .findFirst().orElseThrow(() -> new IllegalStateException(String.format("Versioning field [%s] not found in Staging Schema", versioningField.get()))); - if (!SUPPORTED_DATA_TYPES_FOR_VERSIONING_COLUMNS.contains(filterField.type().dataType())) - { - throw new IllegalStateException(String.format("Versioning field's data type [%s] is not supported", filterField.type().dataType())); - } - } - } - protected List> keyValuesForMilestoningUpdate() { return ingestMode().transactionMilestoning().accept(new DetermineMilestoningUpdateKeyValues(mainDataset(), metadataUtils, batchStartTimestamp)); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalSnapshotPlanner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalSnapshotPlanner.java index 019adbbffc5..e0cbd780cd0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalSnapshotPlanner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/planner/UnitemporalSnapshotPlanner.java @@ -49,9 +49,9 @@ class UnitemporalSnapshotPlanner extends UnitemporalPlanner { - UnitemporalSnapshotPlanner(Datasets datasets, UnitemporalSnapshot ingestMode, PlannerOptions plannerOptions) + UnitemporalSnapshotPlanner(Datasets datasets, UnitemporalSnapshot ingestMode, PlannerOptions plannerOptions, Set capabilities) { - super(datasets, ingestMode, plannerOptions); + super(datasets, ingestMode, plannerOptions, capabilities); // validate if (ingestMode.partitioned()) @@ -72,7 +72,7 @@ protected UnitemporalSnapshot ingestMode() } @Override - public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set capabilities) + public LogicalPlan buildLogicalPlanForIngest(Resources 
resources) { List> keyValuePairs = keyValuesForMilestoningUpdate(); @@ -92,23 +92,6 @@ public LogicalPlan buildLogicalPlanForIngest(Resources resources, Set operations = new ArrayList<>(); - operations.add(Create.of(true, mainDataset())); - if (options().createStagingDataset()) - { - operations.add(Create.of(true, stagingDataset())); - } - operations.add(Create.of(true, metadataDataset().orElseThrow(IllegalStateException::new).get())); - if (options().enableConcurrentSafety()) - { - operations.add(Create.of(true, lockInfoDataset().orElseThrow(IllegalStateException::new).get())); - } - return LogicalPlan.of(operations); - } - /* insert into main_table ( @@ -156,12 +139,13 @@ protected Insert sqlToUpsertRows() .addFields(FieldValue.builder().datasetRef(mainDataset().datasetReference()).fieldName(ingestMode().digestField()).build()) .build())); - List fieldsToSelect = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); + List dataFields = getDataFields(); + List fieldsToSelect = new ArrayList<>(dataFields); List milestoneUpdateValues = transactionMilestoningFieldValues(); fieldsToSelect.addAll(milestoneUpdateValues); Dataset selectStage = Selection.builder().source(stagingDataset()).condition(notInSinkCondition).addAllFields(fieldsToSelect).build(); - List fieldsToInsert = new ArrayList<>(stagingDataset().schemaReference().fieldValues()); + List fieldsToInsert = new ArrayList<>(dataFields); fieldsToInsert.addAll(transactionMilestoningFields()); return Insert.of(mainDataset(), selectStage, fieldsToInsert); @@ -177,7 +161,7 @@ protected Insert sqlToUpsertRows() sink."batch_id_out" = 999999999 and not exists ( - sink."digest" <> stage."digest" and sink.primaryKeys = stage.primaryKeys + sink."digest" = stage."digest" and sink.primaryKeys = stage.primaryKeys ) Partition : diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataDatasetAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataDatasetAbstract.java index 65054f86e39..3cfea00878f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataDatasetAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataDatasetAbstract.java @@ -95,7 +95,7 @@ default Dataset get() .group(group()) .name(name()) .schema(SchemaDefinition.builder() - .addFields(Field.builder().name(batchIdField()).type(FieldType.of(DataType.VARCHAR, 255, null)).build()) + .addFields(Field.builder().name(batchIdField()).type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())).build()) .addFields(Field.builder().name(tableNameField()).type(FieldType.of(DataType.VARCHAR, 255, null)).build()) .addFields(Field.builder().name(batchStartTimeField()).type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())).build()) .addFields(Field.builder().name(batchEndTimeField()).type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())).build()) diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataUtils.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataUtils.java index 161a25e345b..0ff58bbbcdc 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataUtils.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/BulkLoadMetadataUtils.java @@ -14,17 +14,23 @@ package org.finos.legend.engine.persistence.components.util; +import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; +import org.finos.legend.engine.persistence.components.logicalplan.conditions.Equals; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetReference; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; import org.finos.legend.engine.persistence.components.logicalplan.operations.Insert; +import org.finos.legend.engine.persistence.components.logicalplan.values.BatchIdValue; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionName; +import org.finos.legend.engine.persistence.components.logicalplan.values.NumericalValue; import org.finos.legend.engine.persistence.components.logicalplan.values.StringValue; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchEndTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.FieldValue; +import org.finos.legend.engine.persistence.components.logicalplan.values.SumBinaryValueOperator; import org.finos.legend.engine.persistence.components.logicalplan.values.Value; import org.finos.legend.engine.persistence.components.logicalplan.values.ParseJsonFunction; -import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchIdValue; import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchStatusValue; import java.util.ArrayList; @@ -41,6 +47,27 @@ public BulkLoadMetadataUtils(BulkLoadMetadataDataset bulkLoadMetadataDataset) this.dataset = bulkLoadMetadataDataset.get(); } + /* + SELECT COALESCE(MAX("table_batch_id"),0)+1 FROM batch_metadata WHERE "table_name" = mainTableName + */ + public BatchIdValue getBatchId(StringValue mainTableName) + { + FieldValue tableNameFieldValue = FieldValue.builder().datasetRef(dataset.datasetReference()).fieldName(bulkLoadMetadataDataset.tableNameField()).build(); + FunctionImpl tableNameInUpperCase = FunctionImpl.builder().functionName(FunctionName.UPPER).addValue(tableNameFieldValue).build(); + StringValue mainTableNameInUpperCase = StringValue.builder().value(mainTableName.value().map(field -> field.toUpperCase())) + .alias(mainTableName.alias()).build(); + Condition whereCondition = Equals.of(tableNameInUpperCase, 
mainTableNameInUpperCase); + FieldValue tableBatchIdFieldValue = FieldValue.builder().datasetRef(dataset.datasetReference()).fieldName(bulkLoadMetadataDataset.batchIdField()).build(); + FunctionImpl maxBatchId = FunctionImpl.builder().functionName(FunctionName.MAX).addValue(tableBatchIdFieldValue).build(); + FunctionImpl coalesce = FunctionImpl.builder().functionName(FunctionName.COALESCE).addValue(maxBatchId, NumericalValue.of(0L)).build(); + + return BatchIdValue.of(Selection.builder() + .source(dataset) + .condition(whereCondition) + .addFields(SumBinaryValueOperator.of(coalesce, NumericalValue.of(1L))) + .build()); + } + /* INSERT INTO batch_metadata ("batchIdField", "tableNameField", "batchStartTimeField", "batchEndTimeField", "batchStatusField","batchSourceInfoField") @@ -63,7 +90,7 @@ public Insert insertMetaData(StringValue tableNameValue, StringValue batchSource List metaSelectFields = new ArrayList<>(); metaInsertFields.add(batchId); - metaSelectFields.add(BulkLoadBatchIdValue.INSTANCE); + metaSelectFields.add(getBatchId(tableNameValue)); metaInsertFields.add(tableName); metaSelectFields.add(tableNameValue); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/Capability.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/Capability.java index 16acfd6bd94..f99f9f94a23 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/Capability.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/Capability.java @@ -22,4 +22,5 @@ public enum Capability EXPLICIT_DATA_TYPE_CONVERSION, DATA_TYPE_LENGTH_CHANGE, DATA_TYPE_SCALE_CHANGE, + TRANSFORM_WHILE_COPY; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtils.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtils.java index 7ce63a97f29..bcc67933972 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtils.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-logical-plan/src/main/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtils.java @@ -19,6 +19,9 @@ import org.finos.legend.engine.persistence.components.common.DatasetFilter; import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.common.OptimizationFilter; +import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.*; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; import 
org.finos.legend.engine.persistence.components.logicalplan.conditions.And; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Condition; import org.finos.legend.engine.persistence.components.logicalplan.conditions.Equals; @@ -36,6 +39,7 @@ import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; import org.finos.legend.engine.persistence.components.logicalplan.values.All; import org.finos.legend.engine.persistence.components.logicalplan.values.Array; import org.finos.legend.engine.persistence.components.logicalplan.values.DatetimeValue; @@ -80,6 +84,7 @@ public class LogicalPlanUtils public static final String DATA_SPLIT_UPPER_BOUND_PLACEHOLDER = "{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}"; public static final String UNDERSCORE = "_"; public static final String TEMP_DATASET_BASE_NAME = "legend_persistence_temp"; + public static final String TEMP_STAGING_DATASET_BASE_NAME = "legend_persistence_temp_staging"; public static final String TEMP_DATASET_WITH_DELETE_INDICATOR_BASE_NAME = "legend_persistence_tempWithDeleteIndicator"; private LogicalPlanUtils() @@ -438,6 +443,47 @@ public static Dataset getTempDatasetWithDeleteIndicator(Datasets datasets, Strin } } + public static Dataset getTempStagingDatasetDefinition(Dataset stagingDataset, IngestMode ingestMode) + { + String alias = stagingDataset.datasetReference().alias().orElse(TEMP_STAGING_DATASET_BASE_NAME); + String datasetName = stagingDataset.datasetReference().name().orElseThrow(IllegalStateException::new) + UNDERSCORE + TEMP_STAGING_DATASET_BASE_NAME; + SchemaDefinition tempStagingSchema = ingestMode.versioningStrategy().accept(new DeriveTempStagingSchemaDefinition(stagingDataset.schema(), ingestMode.deduplicationStrategy())); + return DatasetDefinition.builder() + .schema(tempStagingSchema) + .database(stagingDataset.datasetReference().database()) + .group(stagingDataset.datasetReference().group()) + .name(datasetName) + .alias(alias) + .build(); + } + + public static Dataset getTempStagingDatasetWithoutPks(Dataset tempStagingDataset) + { + List<Field> fieldsWithoutPk = tempStagingDataset.schema().fields().stream() + .map(field -> field.withPrimaryKey(false)).collect(Collectors.toList()); + return tempStagingDataset.withSchema(tempStagingDataset.schema().withFields(fieldsWithoutPk)); + } + + public static Dataset getDedupedAndVersionedDataset(DeduplicationStrategy deduplicationStrategy, VersioningStrategy versioningStrategy, Dataset stagingDataset, List<String> primaryKeys) + { + Dataset dedupedDataset = deduplicationStrategy.accept(new DatasetDeduplicationHandler(stagingDataset)); + boolean isTempTableNeededForVersioning = versioningStrategy.accept(VersioningVisitors.IS_TEMP_TABLE_NEEDED); + if (isTempTableNeededForVersioning && dedupedDataset instanceof Selection) + { + Selection selection = (Selection) dedupedDataset; + dedupedDataset = selection.withAlias(stagingDataset.datasetReference().alias()); + } + Dataset versionedDataset = versioningStrategy.accept(new DatasetVersioningHandler(dedupedDataset, primaryKeys)); + return versionedDataset; + } + + public static boolean isTempTableNeededForStaging(IngestMode ingestMode) + { + boolean isTempTableNeededForVersioning =
ingestMode.versioningStrategy().accept(VersioningVisitors.IS_TEMP_TABLE_NEEDED); + boolean isTempTableNeededForDedup = ingestMode.deduplicationStrategy().accept(DeduplicationVisitors.IS_TEMP_TABLE_NEEDED); + return isTempTableNeededForVersioning || isTempTableNeededForDedup; + } + public static Set<DataType> SUPPORTED_DATA_TYPES_FOR_OPTIMIZATION_COLUMNS = new HashSet<>(Arrays.asList(INT, INTEGER, BIGINT, FLOAT, DOUBLE, DECIMAL, DATE)); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/pom.xml index 79909146efd..5763153d243 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/pom.xml @@ -18,7 +18,7 @@ <groupId>org.finos.legend.engine</groupId> <artifactId>legend-engine-xt-persistence-component</artifactId> - <version>4.32.1-SNAPSHOT</version> + <version>4.35.4-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/schemaevolution/SchemaEvolution.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/schemaevolution/SchemaEvolution.java index 15765b38b52..a04839f00bf 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/schemaevolution/SchemaEvolution.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/schemaevolution/SchemaEvolution.java @@ -25,6 +25,7 @@ import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshotAbstract; import org.finos.legend.engine.persistence.components.ingestmode.BulkLoadAbstract; import org.finos.legend.engine.persistence.components.ingestmode.audit.AuditingVisitors; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DeduplicationVisitors; import org.finos.legend.engine.persistence.components.ingestmode.merge.MergeStrategyVisitors; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAbstract; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTimeAbstract; @@ -35,6 +36,7 @@ import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromAndThruDateTimeAbstract; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromDateTimeAbstract; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.ValidityDerivationVisitor; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningVisitors; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; @@ -307,47
+309,54 @@ private SchemaDefinition evolveSchemaDefinition(SchemaDefinition schema, Set @Override public Set<String> visitAppendOnly(AppendOnlyAbstract appendOnly) { - return Collections.emptySet(); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(appendOnly); + return stagingFieldsToIgnore; } @Override public Set<String> visitNontemporalSnapshot(NontemporalSnapshotAbstract nontemporalSnapshot) { - return Collections.emptySet(); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(nontemporalSnapshot); + return stagingFieldsToIgnore; } @Override public Set<String> visitNontemporalDelta(NontemporalDeltaAbstract nontemporalDelta) { - return Collections.emptySet(); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(nontemporalDelta); + return stagingFieldsToIgnore; } @Override public Set<String> visitUnitemporalSnapshot(UnitemporalSnapshotAbstract unitemporalSnapshot) { - return Collections.emptySet(); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(unitemporalSnapshot); + return stagingFieldsToIgnore; } @Override public Set<String> visitUnitemporalDelta(UnitemporalDeltaAbstract unitemporalDelta) { - return unitemporalDelta.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_FIELD) - .map(Collections::singleton) - .orElse(Collections.emptySet()); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(unitemporalDelta); + unitemporalDelta.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_FIELD).ifPresent(stagingFieldsToIgnore::add); + return stagingFieldsToIgnore; } @Override public Set<String> visitBitemporalSnapshot(BitemporalSnapshotAbstract bitemporalSnapshot) { - return bitemporalSnapshot.validityMilestoning().accept(VALIDITY_FIELDS_TO_IGNORE_IN_STAGING); + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(bitemporalSnapshot); + stagingFieldsToIgnore.addAll(bitemporalSnapshot.validityMilestoning().accept(VALIDITY_FIELDS_TO_IGNORE_IN_STAGING)); + return stagingFieldsToIgnore; } @Override public Set<String> visitBitemporalDelta(BitemporalDeltaAbstract bitemporalDelta) { - Set<String> fieldsToIgnore = bitemporalDelta.validityMilestoning().accept(VALIDITY_FIELDS_TO_IGNORE_IN_STAGING); - bitemporalDelta.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_FIELD).ifPresent(fieldsToIgnore::add); - return fieldsToIgnore; + Set<String> stagingFieldsToIgnore = getDedupAndVersioningFields(bitemporalDelta); + stagingFieldsToIgnore.addAll(bitemporalDelta.validityMilestoning().accept(VALIDITY_FIELDS_TO_IGNORE_IN_STAGING)); + bitemporalDelta.mergeStrategy().accept(MergeStrategyVisitors.EXTRACT_DELETE_FIELD).ifPresent(stagingFieldsToIgnore::add); + return stagingFieldsToIgnore; } @Override @@ -355,6 +364,14 @@ public Set<String> visitBulkLoad(BulkLoadAbstract bulkLoad) { return Collections.emptySet(); } + + private Set<String> getDedupAndVersioningFields(IngestMode ingestMode) + { + Set<String> dedupAndVersioningFields = new HashSet<>(); + ingestMode.dataSplitField().ifPresent(dedupAndVersioningFields::add); + ingestMode.deduplicationStrategy().accept(DeduplicationVisitors.EXTRACT_DEDUP_FIELD).ifPresent(dedupAndVersioningFields::add); + return dedupAndVersioningFields; + } }; private static final IngestModeVisitor<Set<String>> MAIN_TABLE_FIELDS_TO_IGNORE = new IngestModeVisitor<Set<String>>() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/AbstractTransformer.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/AbstractTransformer.java index e9cab94d2d7..7f7e667044f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/AbstractTransformer.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/AbstractTransformer.java @@ -69,7 +69,6 @@ protected VisitorContext createContext(TransformOptions options) .batchStartTimestamp(options.batchStartTimestampValue()) .batchIdPattern(options.batchIdPattern()) .infiniteBatchIdValue(options.infiniteBatchIdValue()) - .bulkLoadBatchIdValue(options.bulkLoadBatchIdValue()) .bulkLoadBatchStatusPattern(options.bulkLoadBatchStatusPattern()) .addAllOptimizers(options.optimizers()) .quoteIdentifier(sink.quoteIdentifier()) diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/LogicalPlanVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/LogicalPlanVisitor.java index 7b59312d55f..5801b636aa0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/LogicalPlanVisitor.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/LogicalPlanVisitor.java @@ -51,8 +51,6 @@ interface VisitorContextAbstract Optional<Long> infiniteBatchIdValue(); - Optional<String> bulkLoadBatchIdValue(); - Optional<String> bulkLoadBatchStatusPattern(); List<Optimizer> optimizers(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/Transformer.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/Transformer.java index be6f7a440d6..a76a9babe61 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/Transformer.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-physical-plan/src/main/java/org/finos/legend/engine/persistence/components/transformer/Transformer.java @@ -41,7 +41,7 @@ public interface Transformer public abstract Optional<Long> infiniteBatchIdValue(); - public abstract Optional<String> bulkLoadBatchIdValue(); - public abstract Optional<String> bulkLoadBatchStatusPattern(); public abstract List<Optimizer> optimizers(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/pom.xml
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/pom.xml index 86c331c2263..679941384b5 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/pom.xml @@ -15,7 +15,7 @@ <groupId>org.finos.legend.engine</groupId> <artifactId>legend-engine-xt-persistence-component</artifactId> - <version>4.32.1-SNAPSHOT</version> + <version>4.35.4-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/AnsiSqlSink.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/AnsiSqlSink.java index 63a2d109bb4..f7e3d5e6ac4 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/AnsiSqlSink.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/AnsiSqlSink.java @@ -61,7 +61,6 @@ import org.finos.legend.engine.persistence.components.logicalplan.values.BatchEndTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchIdValue; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; -import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchIdValue; import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchStatusValue; import org.finos.legend.engine.persistence.components.logicalplan.values.Case; import org.finos.legend.engine.persistence.components.logicalplan.values.DatetimeValue; @@ -95,7 +94,6 @@ import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.BatchEndTimestampVisitor; import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.BatchIdValueVisitor; import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.BatchStartTimestampVisitor; -import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.BulkLoadBatchIdValueVisitor; import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.BulkLoadBatchStatusValueVisitor; import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.CaseVisitor; import org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors.DatasetAdditionalPropertiesVisitor; @@ -235,7 +233,6 @@ public class AnsiSqlSink extends RelationalSink logicalPlanVisitorByClass.put(Show.class, new ShowVisitor()); logicalPlanVisitorByClass.put(BatchIdValue.class, new BatchIdValueVisitor()); logicalPlanVisitorByClass.put(InfiniteBatchIdValue.class, new InfiniteBatchIdValueVisitor()); - logicalPlanVisitorByClass.put(BulkLoadBatchIdValue.class, new BulkLoadBatchIdValueVisitor()); logicalPlanVisitorByClass.put(BulkLoadBatchStatusValue.class, new BulkLoadBatchStatusValueVisitor()); LOGICAL_PLAN_VISITOR_BY_CLASS = Collections.unmodifiableMap(logicalPlanVisitorByClass); diff --git
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/AnsiTestArtifacts.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/AnsiTestArtifacts.java index eab7b769251..7cb8a6174a4 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/AnsiTestArtifacts.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/AnsiTestArtifacts.java @@ -149,17 +149,17 @@ public class AnsiTestArtifacts public static String lockInitializedQuery = "INSERT INTO \"mydb\".\"main_legend_persistence_lock\" " + "(\"insert_ts_utc\", \"table_name\") " + - "(SELECT '2000-01-01 00:00:00','main' " + + "(SELECT '2000-01-01 00:00:00.000000','main' " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main_legend_persistence_lock\" as main_legend_persistence_lock)))"; public static String lockInitializedUpperCaseQuery = "INSERT INTO \"MYDB\".\"MAIN_LEGEND_PERSISTENCE_LOCK\" (\"INSERT_TS_UTC\", \"TABLE_NAME\")" + - " (SELECT '2000-01-01 00:00:00','MAIN' WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN_LEGEND_PERSISTENCE_LOCK\" as MAIN_LEGEND_PERSISTENCE_LOCK)))"; + " (SELECT '2000-01-01 00:00:00.000000','MAIN' WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN_LEGEND_PERSISTENCE_LOCK\" as MAIN_LEGEND_PERSISTENCE_LOCK)))"; public static String lockAcquiredQuery = "UPDATE \"mydb\".\"main_legend_persistence_lock\" as main_legend_persistence_lock " + - "SET main_legend_persistence_lock.\"last_used_ts_utc\" = '2000-01-01 00:00:00'"; + "SET main_legend_persistence_lock.\"last_used_ts_utc\" = '2000-01-01 00:00:00.000000'"; public static String lockAcquiredUpperCaseQuery = "UPDATE \"MYDB\".\"MAIN_LEGEND_PERSISTENCE_LOCK\" as MAIN_LEGEND_PERSISTENCE_LOCK " + - "SET MAIN_LEGEND_PERSISTENCE_LOCK.\"LAST_USED_TS_UTC\" = '2000-01-01 00:00:00'"; + "SET MAIN_LEGEND_PERSISTENCE_LOCK.\"LAST_USED_TS_UTC\" = '2000-01-01 00:00:00.000000'"; public static String getDropTempTableQuery(String tableName) { @@ -186,6 +186,15 @@ public static String getDropTempTableQuery(String tableName) "\"batch_update_time\" DATETIME NOT NULL," + "PRIMARY KEY (\"id\", \"name\", \"batch_update_time\"))"; + public static String expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase = "CREATE TABLE IF NOT EXISTS \"MYDB\".\"MAIN\"(" + + "\"ID\" INTEGER NOT NULL," + + "\"NAME\" VARCHAR NOT NULL," + + "\"AMOUNT\" DOUBLE," + + "\"BIZ_DATE\" DATE," + + "\"DIGEST\" VARCHAR," + + "\"BATCH_UPDATE_TIME\" DATETIME NOT NULL," + + "PRIMARY KEY (\"ID\", \"NAME\", \"BATCH_UPDATE_TIME\"))"; + public static String expectedBaseTableWithAuditNotPkCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"(" + "\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + @@ -203,6 +212,80 @@ public static String getDropTempTableQuery(String tableName) "\"batch_update_time\" DATETIME NOT NULL," + "PRIMARY KEY (\"id\", \"name\", \"batch_update_time\"))"; + public static String expectedBaseTempStagingTablePlusDigest = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" 
VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR)"; + + public static String expectedBaseTempStagingTableWithCount = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"legend_persistence_count\" INTEGER)"; + + public static String expectedBaseTempStagingTableWithVersionAndCount = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + + "\"legend_persistence_count\" INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCount = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR," + + "\"legend_persistence_count\" INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCountUpperCase = "CREATE TABLE IF NOT EXISTS \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\"" + + "(\"ID\" INTEGER NOT NULL," + + "\"NAME\" VARCHAR NOT NULL," + + "\"AMOUNT\" DOUBLE," + + "\"BIZ_DATE\" DATE," + + "\"DIGEST\" VARCHAR," + + "\"LEGEND_PERSISTENCE_COUNT\" INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithVersionUpperCase = "CREATE TABLE IF NOT EXISTS \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\"" + + "(\"ID\" INTEGER NOT NULL," + + "\"NAME\" VARCHAR NOT NULL," + + "\"AMOUNT\" DOUBLE," + + "\"BIZ_DATE\" DATE," + + "\"DIGEST\" VARCHAR," + + "\"VERSION\" INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithDataSplit = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"(" + + "\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR," + + "\"data_split\" INTEGER NOT NULL)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR," + + "\"legend_persistence_count\" INTEGER," + + "\"data_split\" INTEGER NOT NULL)"; + + public static String expectedBaseTempStagingTablePlusDigestWithDataSplitAndCount = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging_legend_persistence_temp_staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"biz_date\" DATE," + + "\"digest\" VARCHAR," + + "\"data_split\" BIGINT NOT NULL," + + "\"legend_persistence_count\" INTEGER)"; + + public static String expectedBitemporalMainTableCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + "(\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + @@ -223,11 +306,12 @@ public static String getDropTempTableQuery(String tableName) "\"digest\" VARCHAR," + "PRIMARY KEY (\"id\", \"name\", \"validity_from_reference\"))"; - public static String expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + + public static String expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" 
+ "(\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + "\"amount\" DOUBLE," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"batch_id_in\" INTEGER NOT NULL," + "\"batch_id_out\" INTEGER," + "\"batch_time_in\" DATETIME," + @@ -236,11 +320,12 @@ public static String getDropTempTableQuery(String tableName) "\"validity_through_target\" DATETIME," + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; - public static String expectedBitemporalMainTableWithDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + + public static String expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + "(\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + "\"amount\" DOUBLE," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"batch_time_in\" DATETIME NOT NULL," + "\"batch_time_out\" DATETIME," + "\"validity_from_target\" DATETIME NOT NULL," + @@ -258,6 +343,18 @@ public static String getDropTempTableQuery(String tableName) "\"validity_through_target\" DATETIME," + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; + public static String expectedBitemporalFromOnlyMainTableWithVersionCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + + "\"batch_id_in\" INTEGER NOT NULL," + + "\"batch_id_out\" INTEGER," + + "\"validity_from_target\" DATETIME NOT NULL," + + "\"validity_through_target\" DATETIME," + + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; + public static String expectedBitemporalFromOnlyStagingTableCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"staging\"" + "(\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + @@ -324,6 +421,18 @@ public static String getDropTempTableQuery(String tableName) "\"validity_through_target\" DATETIME," + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; + public static String expectedBitemporalFromOnlyTempTableWithVersionCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"temp\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR NOT NULL," + + "\"amount\" DOUBLE," + + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + + "\"batch_id_in\" INTEGER NOT NULL," + + "\"batch_id_out\" INTEGER," + + "\"validity_from_target\" DATETIME NOT NULL," + + "\"validity_through_target\" DATETIME," + + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; + public static String expectedBitemporalFromOnlyTempTableBatchIdAndTimeBasedCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"temp\"(" + "\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + @@ -368,12 +477,13 @@ public static String getDropTempTableQuery(String tableName) "\"delete_indicator\" VARCHAR," + "PRIMARY KEY (\"id\", \"name\", \"batch_id_in\", \"validity_from_target\"))"; - public static String expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"stagingWithoutDuplicates\"" + + public static String expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"stagingWithoutDuplicates\"" + "(\"id\" INTEGER NOT NULL," + "\"name\" VARCHAR NOT NULL," + "\"amount\" DOUBLE," + "\"validity_from_reference\" DATETIME NOT NULL," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"data_split\" BIGINT 
NOT NULL," + "PRIMARY KEY (\"id\", \"name\", \"validity_from_reference\", \"data_split\"))"; @@ -387,20 +497,109 @@ public static String getDropTempTableQuery(String tableName) "PRIMARY KEY (\"id\", \"name\", \"validity_from_reference\"))"; public static String expectedStagingCleanupQuery = "DELETE FROM \"mydb\".\"staging\" as stage"; - + public static String expectedTempStagingCleanupQuery = "DELETE FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage"; + public static String expectedTempStagingCleanupQueryInUpperCase = "DELETE FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage"; public static String expectedDropTableQuery = "DROP TABLE IF EXISTS \"mydb\".\"staging\" CASCADE"; public static String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata " + "(\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\")" + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + - "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; public static String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA " + "(\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\")" + " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + - "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; public static String expectedMetadataTableIngestQueryWithPlaceHolders = "INSERT INTO batch_metadata " + "(\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\") " + "(SELECT 'main',{BATCH_ID_PATTERN},'{BATCH_START_TS_PATTERN}','{BATCH_END_TS_PATTERN}','DONE')"; + + public static String expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"legend_persistence_count\" as \"legend_persistence_count\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"legend_persistence_count\" as \"legend_persistence_count\",DENSE_RANK() OVER " + + "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" DESC) as \"legend_persistence_rank\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",COUNT(*) as \"legend_persistence_count\" FROM " + + "\"mydb\".\"staging\" as stage GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\") as stage) as stage " + + "WHERE stage.\"legend_persistence_rank\" = 1)"; + + public static String expectedInsertIntoBaseTempStagingWithFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\"," + + "COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", 
stage.\"biz_date\")"; + + public static String expectedInsertIntoBaseTempStagingWithFilterDupsAndMaxVersion = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"legend_persistence_count\" as " + + "\"legend_persistence_count\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "stage.\"legend_persistence_count\" as \"legend_persistence_count\",DENSE_RANK() OVER " + + "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" DESC) as \"legend_persistence_rank\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\") as stage) as stage WHERE stage.\"legend_persistence_rank\" = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\")"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"legend_persistence_count\" as \"legend_persistence_count\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"legend_persistence_count\" as \"legend_persistence_count\",DENSE_RANK() OVER " + + "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" DESC) as \"legend_persistence_rank\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",COUNT(*) as \"legend_persistence_count\" FROM " + + "\"mydb\".\"staging\" as stage GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\") as stage) as stage " + + "WHERE stage.\"legend_persistence_rank\" = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",DENSE_RANK() " + + "OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" DESC) as \"legend_persistence_rank\" " + + "FROM \"mydb\".\"staging\" as stage) as stage WHERE stage.\"legend_persistence_rank\" = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicatesUpperCase = "INSERT INTO " + + "\"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" " + + 
"(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"LEGEND_PERSISTENCE_COUNT\") " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"LEGEND_PERSISTENCE_COUNT\" as \"LEGEND_PERSISTENCE_COUNT\" FROM " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"LEGEND_PERSISTENCE_COUNT\" as \"LEGEND_PERSISTENCE_COUNT\",DENSE_RANK() " + + "OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"BIZ_DATE\" DESC) as \"LEGEND_PERSISTENCE_RANK\" " + + "FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",COUNT(*) as \"LEGEND_PERSISTENCE_COUNT\" FROM \"MYDB\".\"STAGING\" as stage " + + "GROUP BY stage.\"ID\", stage.\"NAME\", stage.\"AMOUNT\", stage.\"BIZ_DATE\", stage.\"DIGEST\") as stage) as stage WHERE stage.\"LEGEND_PERSISTENCE_RANK\" = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"legend_persistence_count\", \"data_split\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"legend_persistence_count\" as \"legend_persistence_count\",DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" ASC) as \"data_split\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\") as stage)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndAllowDups = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"data_split\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" ASC) as \"data_split\" " + + "FROM \"mydb\".\"staging\" as stage)"; + + public static String maxDupsErrorCheckSql = "SELECT MAX(stage.\"legend_persistence_count\") as \"MAX_DUPLICATES\" FROM " + + "\"mydb\".\"staging_legend_persistence_temp_staging\" as stage"; + + public static String dataErrorCheckSqlWithBizDateVersion = "SELECT MAX(\"legend_persistence_distinct_rows\") as \"MAX_DATA_ERRORS\" FROM " + + "(SELECT COUNT(DISTINCT(\"digest\")) as \"legend_persistence_distinct_rows\" FROM " + + "\"mydb\".\"staging_legend_persistence_temp_staging\" as stage GROUP BY \"id\", \"name\", \"biz_date\") as stage"; + + public static String dataErrorCheckSql = "SELECT MAX(\"legend_persistence_distinct_rows\") as \"MAX_DATA_ERRORS\" FROM " + + "(SELECT COUNT(DISTINCT(\"digest\")) as \"legend_persistence_distinct_rows\" FROM " + + "\"mydb\".\"staging_legend_persistence_temp_staging\" as stage GROUP BY \"id\", \"name\", \"version\") as stage"; + + public static String dataErrorCheckSqlUpperCase = "SELECT MAX(\"LEGEND_PERSISTENCE_DISTINCT_ROWS\") as \"MAX_DATA_ERRORS\" FROM" + + " (SELECT COUNT(DISTINCT(\"DIGEST\")) as \"LEGEND_PERSISTENCE_DISTINCT_ROWS\" FROM " + + "\"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage GROUP BY \"ID\", \"NAME\", \"VERSION\") as stage"; + + public static String dataErrorCheckSqlWithBizDateAsVersionUpperCase = "SELECT 
MAX(\"LEGEND_PERSISTENCE_DISTINCT_ROWS\") as \"MAX_DATA_ERRORS\" " + + "FROM (SELECT COUNT(DISTINCT(\"DIGEST\")) as \"LEGEND_PERSISTENCE_DISTINCT_ROWS\" FROM " + + "\"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage GROUP BY \"ID\", \"NAME\", \"BIZ_DATE\") as stage"; + + } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/IngestModeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/IngestModeTest.java index 85e38908429..2aad9ead027 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/IngestModeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/IngestModeTest.java @@ -69,7 +69,7 @@ public class IngestModeTest protected String[] partitionKeys = new String[]{"biz_date"}; protected Map<String, Set<String>> partitionFilter = new HashMap<String, Set<String>>() {{ - put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00", "2000-01-02 00:00:00"))); + put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00.000000", "2000-01-02 00:00:00"))); }}; // Base Columns: Primary keys : id, name @@ -402,10 +402,10 @@ public class IngestModeTest "\"TABLE_BATCH_ID\" INTEGER)"; protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\")" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\")" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.\"TABLE_NAME\" = 'main'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.\"TABLE_NAME\" = 'main'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; protected String expectedMetadataTableIngestQueryWithPlaceHolders = "INSERT INTO batch_metadata (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\") (SELECT 'main',{BATCH_ID_PATTERN},'{BATCH_START_TS_PATTERN}','{BATCH_END_TS_PATTERN}','DONE')"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java index ef02b5b78d1..cfed0b48c69 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java @@ -82,24 +82,24 @@ public void verifyBitemporalDeltaBatchIdDateTimeBasedNoDeleteIndWithDataSplits(L { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET sink.\"batch_id_out\" = " + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + - "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE " + "((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"validity_from_target\" = stage.\"validity_from_reference\") AND (sink.\"digest\" <> stage.\"digest\")))"; String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"validity_from_target\", " + - "\"validity_through_target\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + + "\"validity_through_target\", \"digest\", \"version\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"validity_through_reference\"," + - "stage.\"digest\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + - "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "stage.\"digest\",stage.\"version\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage WHERE (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE " + "(sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"validity_from_target\" = stage.\"validity_from_reference\")))) AND " + "((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0)); + 
Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), operations.get(0).preActionsSql().get(1)); Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); @@ -164,7 +164,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndNoDataSplits(Generator public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND " + @@ -173,10 +173,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) "((stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"validity_from_target\" = stage.\"validity_from_reference\") AND ((sink.\"digest\" <> stage.\"digest\") OR (stage.\"delete_indicator\" IN ('yes','1','true')))))"; String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"validity_from_target\", \"validity_through_target\", \"digest\", " + + "(\"id\", \"name\", \"amount\", \"validity_from_target\", \"validity_through_target\", \"digest\", \"version\", " + "\"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\"," + - "stage.\"validity_through_reference\",stage.\"digest\",'2000-01-01 00:00:00'," + + "stage.\"validity_through_reference\",stage.\"digest\",stage.\"version\",'2000-01-01 00:00:00.000000'," + "'9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage WHERE " + "((NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + "AND (sink.\"digest\" = stage.\"digest\") AND ((sink.\"id\" = stage.\"id\") AND " + @@ -184,7 +184,7 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) "(sink.\"name\" = stage.\"name\")) AND (sink.\"validity_from_target\" = stage.\"validity_from_reference\")))) AND " + "((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) AND " + "(stage.\"delete_indicator\" NOT IN ('yes','1','true')))"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalMainTableWithDatetimeCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), operations.get(0).preActionsSql().get(1)); Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); @@ -196,10 +196,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"data_split\" >= 1) AND (stage.\"data_split\" <= 1)"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01
00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsInserted\""; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsTerminated\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsInserted\""; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\") AND (sink2.\"validity_from_target\" = sink.\"validity_from_target\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsTerminated\""; verifyStats(operations.get(0), incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromTest.java index 777c8bb3a4f..aa9ac70d4c0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromTest.java +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaSourceSpecifiesFromTest.java @@ -123,10 +123,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndNoDataSplits(GeneratorRe public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String expectedStageToTemp = "INSERT INTO \"mydb\".\"temp\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + "FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"version\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " + "LEFT OUTER JOIN " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"legend_persistence_start_date\"),MIN(legend_persistence_x.\"legend_persistence_end_date\")) as \"legend_persistence_end_date\" " + "FROM " + @@ -144,10 +144,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"delete_indicator\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"delete_indicator\" NOT IN ('yes','1','true')) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"version\",stage.\"delete_indicator\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"delete_indicator\" NOT IN ('yes','1','true')) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " + "LEFT OUTER JOIN " + "(SELECT
legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"legend_persistence_start_date\"),MIN(legend_persistence_x.\"legend_persistence_end_date\")) as \"legend_persistence_end_date\" " + "FROM " + @@ -372,10 +374,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_y.\"validity_from_target\" > legend_persistence_x.\"validity_from_target\") AND (legend_persistence_y.\"delete_indicator\" = 0) " + "WHERE legend_persistence_x.\"delete_indicator\" = 0 " + - "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"digest\", legend_persistence_x.\"validity_from_target\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\") as legend_persistence_x " + + "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"version\", legend_persistence_x.\"digest\", legend_persistence_x.\"validity_from_target\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\") as legend_persistence_x " + "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " + "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_y.\"validity_through_target\" > legend_persistence_x.\"legend_persistence_start_date\") AND (legend_persistence_y.\"validity_through_target\" <= legend_persistence_x.\"legend_persistence_end_date\") AND (legend_persistence_y.\"delete_indicator\" <> 0) " + - "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"digest\", legend_persistence_x.\"legend_persistence_start_date\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\")"; + "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"version\", legend_persistence_x.\"digest\", legend_persistence_x.\"legend_persistence_start_date\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\")"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), operations.get(0).preActionsSql().get(1)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3)); @@ -564,15 +566,15 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndNoDataSplitsFilterDuplic public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDuplicates(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String expectedStageToStageWithoutDuplicates = "INSERT INTO \"mydb\".\"stagingWithoutDuplicates\" " + - "(\"id\", \"name\", \"amount\", \"validity_from_reference\",
\"digest\", \"version\", \"data_split\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"version\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"digest\" = stage.\"digest\") AND (sink.\"batch_id_out\" = 999999999))))"; String expectedStageToTemp = "INSERT INTO \"mydb\".\"temp\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + "FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"data_split\" FROM \"mydb\".\"stagingWithoutDuplicates\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"version\",stage.\"data_split\" FROM \"mydb\".\"stagingWithoutDuplicates\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " + "LEFT OUTER JOIN " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"legend_persistence_start_date\"),MIN(legend_persistence_x.\"legend_persistence_end_date\")) as \"legend_persistence_end_date\" " + "FROM " + @@ -590,10 +592,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_x.\"validity_from_reference\" = legend_persistence_y.\"legend_persistence_start_date\"))"; String expectedMainToTemp = "INSERT INTO \"mydb\".\"temp\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT 
COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + "FROM " + - "(SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"batch_id_in\",sink.\"batch_id_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999) as legend_persistence_x " + + "(SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"version\",sink.\"batch_id_in\",sink.\"batch_id_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999) as legend_persistence_x " + "INNER JOIN " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",legend_persistence_x.\"legend_persistence_end_date\" as \"legend_persistence_end_date\" " + "FROM " + @@ -618,13 +620,13 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl "AND (sink.\"batch_id_out\" = 999999999)"; String expectedTempToMain = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"validity_from_target\", \"validity_through_target\") " + - "(SELECT temp.\"id\",temp.\"name\",temp.\"amount\",temp.\"digest\",temp.\"batch_id_in\",temp.\"batch_id_out\",temp.\"validity_from_target\",temp.\"validity_through_target\" FROM \"mydb\".\"temp\" as temp)"; + "(\"id\", \"name\", \"amount\", \"digest\", \"version\", \"batch_id_in\", \"batch_id_out\", \"validity_from_target\", \"validity_through_target\") " + + "(SELECT temp.\"id\",temp.\"name\",temp.\"amount\",temp.\"digest\",temp.\"version\",temp.\"batch_id_in\",temp.\"batch_id_out\",temp.\"validity_from_target\",temp.\"validity_through_target\" FROM \"mydb\".\"temp\" as temp)"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), operations.get(0).preActionsSql().get(1)); - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyTempTableCreateQuery, operations.get(0).preActionsSql().get(2)); - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyTempTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(2)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3)); 
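A note on the '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}' and '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}' markers that appear throughout these expected strings: the generator emits them as literal placeholders, and the assertions that follow resolve them with enrichSqlWithDataSplits(sql, dataSplitRanges.get(i)) before comparing, so each pass of a multi-split ingest is checked against its own concrete bounds. A minimal sketch of what that helper plausibly does (a hypothetical reimplementation for illustration only; the real utility lives in the shared test cases, and a DataSplitRange type with lowerBound()/upperBound() accessors is assumed here):

    // Sketch only: resolve the data-split bound placeholders in an expected/generated SQL string.
    static String enrichSqlWithDataSplits(String sql, DataSplitRange range)
    {
        return sql
            .replace("{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}", String.valueOf(range.lowerBound()))
            .replace("{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}", String.valueOf(range.upperBound()));
    }

With a range of (1, 100), the predicate in the staging SELECTs above would become (stage."data_split" >= '1') AND (stage."data_split" <= '100').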
Assertions.assertEquals(expectedStageToStageWithoutDuplicates, operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(expectedStageToTemp, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1)); @@ -795,6 +797,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "\"name\" VARCHAR NOT NULL," + "\"amount\" DOUBLE," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"batch_id_in\" INTEGER NOT NULL," + "\"batch_id_out\" INTEGER," + "\"validity_from_target\" DATETIME NOT NULL," + @@ -806,6 +809,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "\"name\" VARCHAR NOT NULL," + "\"amount\" DOUBLE," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"batch_id_in\" INTEGER NOT NULL," + "\"batch_id_out\" INTEGER," + "\"validity_from_target\" DATETIME NOT NULL," + @@ -819,20 +823,21 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "\"amount\" DOUBLE," + "\"validity_from_reference\" DATETIME NOT NULL," + "\"digest\" VARCHAR," + + "\"version\" INTEGER," + "\"delete_indicator\" VARCHAR," + "\"data_split\" BIGINT NOT NULL," + "PRIMARY KEY (\"id\", \"name\", \"validity_from_reference\", \"data_split\"))"; String expectedStageToStageWithoutDuplicates = "INSERT INTO " + stageWithoutDuplicatesName + " " + - "(\"id\", \"name\", \"amount\", \"validity_from_reference\", \"digest\", \"delete_indicator\", \"data_split\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"delete_indicator\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage " + + "(\"id\", \"name\", \"amount\", \"validity_from_reference\", \"digest\", \"version\", \"delete_indicator\", \"data_split\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\",stage.\"version\",stage.\"delete_indicator\",stage.\"data_split\" FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"digest\" = stage.\"digest\") AND (sink.\"batch_id_out\" = 999999999))))"; String expectedStageToTemp = "INSERT INTO " + tempName + " " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + "FROM " + - "(SELECT 
legend_persistence_stageWithoutDuplicates.\"id\",legend_persistence_stageWithoutDuplicates.\"name\",legend_persistence_stageWithoutDuplicates.\"amount\",legend_persistence_stageWithoutDuplicates.\"validity_from_reference\",legend_persistence_stageWithoutDuplicates.\"digest\",legend_persistence_stageWithoutDuplicates.\"delete_indicator\",legend_persistence_stageWithoutDuplicates.\"data_split\" FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.\"delete_indicator\" NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " + + "(SELECT legend_persistence_stageWithoutDuplicates.\"id\",legend_persistence_stageWithoutDuplicates.\"name\",legend_persistence_stageWithoutDuplicates.\"amount\",legend_persistence_stageWithoutDuplicates.\"validity_from_reference\",legend_persistence_stageWithoutDuplicates.\"digest\",legend_persistence_stageWithoutDuplicates.\"version\",legend_persistence_stageWithoutDuplicates.\"delete_indicator\",legend_persistence_stageWithoutDuplicates.\"data_split\" FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.\"delete_indicator\" NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " + "LEFT OUTER JOIN " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"legend_persistence_start_date\"),MIN(legend_persistence_x.\"legend_persistence_end_date\")) as \"legend_persistence_end_date\" " + "FROM " + @@ -850,10 +855,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_x.\"validity_from_reference\" = legend_persistence_y.\"legend_persistence_start_date\"))"; String expectedMainToTemp = "INSERT INTO " + tempName + " " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as 
batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + "FROM " + - "(SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"batch_id_in\",sink.\"batch_id_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999) as legend_persistence_x " + + "(SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"version\",sink.\"batch_id_in\",sink.\"batch_id_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999) as legend_persistence_x " + "INNER JOIN " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"legend_persistence_start_date\",legend_persistence_x.\"legend_persistence_end_date\" as \"legend_persistence_end_date\" " + "FROM " + @@ -878,12 +883,12 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "AND (sink.\"batch_id_out\" = 999999999)"; String expectedTempToMain = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"validity_from_target\", \"validity_through_target\") " + - "(SELECT legend_persistence_temp.\"id\",legend_persistence_temp.\"name\",legend_persistence_temp.\"amount\",legend_persistence_temp.\"digest\",legend_persistence_temp.\"batch_id_in\",legend_persistence_temp.\"batch_id_out\",legend_persistence_temp.\"validity_from_target\",legend_persistence_temp.\"validity_through_target\" FROM " + tempName + " as legend_persistence_temp)"; + "(\"id\", \"name\", \"amount\", \"digest\", \"version\", \"batch_id_in\", \"batch_id_out\", \"validity_from_target\", \"validity_through_target\") " + + "(SELECT legend_persistence_temp.\"id\",legend_persistence_temp.\"name\",legend_persistence_temp.\"amount\",legend_persistence_temp.\"digest\",legend_persistence_temp.\"version\",legend_persistence_temp.\"batch_id_in\",legend_persistence_temp.\"batch_id_out\",legend_persistence_temp.\"validity_from_target\",legend_persistence_temp.\"validity_through_target\" FROM " + tempName + " as legend_persistence_temp)"; String expectedMainToTempForDeletion = "INSERT INTO " + tempWithDeleteIndicatorName + " " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\", \"delete_indicator\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_x.\"validity_through_target\" as \"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.\"delete_indicator\" IS NULL THEN 0 ELSE 1 END) " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\", \"delete_indicator\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_x.\"validity_through_target\" as \"legend_persistence_end_date\",(SELECT 
COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.\"delete_indicator\" IS NULL THEN 0 ELSE 1 END) " + "FROM " + "(SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (EXISTS " + @@ -903,19 +908,19 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "AND (sink.\"batch_id_out\" = 999999999)"; String expectedTempToMainForDeletion = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"legend_persistence_start_date\" as \"legend_persistence_start_date\",MAX(legend_persistence_y.\"validity_through_target\") as \"legend_persistence_end_date\",legend_persistence_x.\"batch_id_in\",legend_persistence_x.\"batch_id_out\" FROM " + - "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"validity_from_target\"),'9999-12-31 23:59:59') as \"legend_persistence_end_date\",legend_persistence_x.\"batch_id_in\",legend_persistence_x.\"batch_id_out\" " + + "(\"id\", \"name\", \"amount\", \"version\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"legend_persistence_start_date\" as \"legend_persistence_start_date\",MAX(legend_persistence_y.\"validity_through_target\") as \"legend_persistence_end_date\",legend_persistence_x.\"batch_id_in\",legend_persistence_x.\"batch_id_out\" FROM " + + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"version\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",COALESCE(MIN(legend_persistence_y.\"validity_from_target\"),'9999-12-31 23:59:59') as \"legend_persistence_end_date\",legend_persistence_x.\"batch_id_in\",legend_persistence_x.\"batch_id_out\" " + "FROM " + tempWithDeleteIndicatorName + " as legend_persistence_x " + "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " + "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_y.\"validity_from_target\" > legend_persistence_x.\"validity_from_target\") AND (legend_persistence_y.\"delete_indicator\" = 0) " + "WHERE legend_persistence_x.\"delete_indicator\" = 0 " + - "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"digest\", legend_persistence_x.\"validity_from_target\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\") as legend_persistence_x " + + "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"version\", legend_persistence_x.\"digest\", 
legend_persistence_x.\"validity_from_target\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\") as legend_persistence_x " + "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " + "ON ((legend_persistence_x.\"id\" = legend_persistence_y.\"id\") AND (legend_persistence_x.\"name\" = legend_persistence_y.\"name\")) AND (legend_persistence_y.\"validity_through_target\" > legend_persistence_x.\"legend_persistence_start_date\") AND (legend_persistence_y.\"validity_through_target\" <= legend_persistence_x.\"legend_persistence_end_date\") AND (legend_persistence_y.\"delete_indicator\" <> 0) " + - "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"digest\", legend_persistence_x.\"legend_persistence_start_date\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\")"; + "GROUP BY legend_persistence_x.\"id\", legend_persistence_x.\"name\", legend_persistence_x.\"amount\", legend_persistence_x.\"version\", legend_persistence_x.\"digest\", legend_persistence_x.\"legend_persistence_start_date\", legend_persistence_x.\"batch_id_in\", legend_persistence_x.\"batch_id_out\")"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), operations.get(0).preActionsSql().get(1)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3)); @@ -1041,7 +1046,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedStageToTemp = "INSERT INTO \"mydb\".\"temp\" " + "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\"," + - "legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "legend_persistence_y.\"legend_persistence_end_date\",(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage) as legend_persistence_x " + "LEFT OUTER JOIN " + @@ -1063,7 +1068,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedMainToTemp = "INSERT INTO \"mydb\".\"temp\" " + "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT 
legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"batch_id_in\",sink.\"batch_id_out\",sink.\"batch_time_in\"," + "sink.\"batch_time_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" FROM \"mydb\".\"main\" as sink " + @@ -1087,7 +1092,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedUpdateMain = "UPDATE \"mydb\".\"main\" as sink SET " + "sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (EXISTS " + "(SELECT * FROM \"mydb\".\"temp\" as temp WHERE ((sink.\"id\" = temp.\"id\") AND (sink.\"name\" = temp.\"name\")) " + "AND (sink.\"validity_from_target\" = temp.\"validity_from_target\"))) AND (sink.\"batch_id_out\" = 999999999)"; @@ -1122,7 +1127,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR String expectedStageToTemp = "INSERT INTO \"mydb\".\"temp\" " + "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\",legend_persistence_x.\"validity_from_reference\" as \"legend_persistence_start_date\"," + - "legend_persistence_y.\"legend_persistence_end_date\",'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "legend_persistence_y.\"legend_persistence_end_date\",'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"validity_from_reference\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage) as legend_persistence_x " + "LEFT OUTER JOIN " + @@ -1145,7 +1150,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR "(\"id\", \"name\", \"amount\", \"digest\", \"validity_from_target\", \"validity_through_target\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT legend_persistence_x.\"id\",legend_persistence_x.\"name\",legend_persistence_x.\"amount\",legend_persistence_x.\"digest\"," + "legend_persistence_x.\"validity_from_target\" as \"legend_persistence_start_date\",legend_persistence_y.\"legend_persistence_end_date\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM (SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"batch_time_in\"," + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM (SELECT sink.\"id\",sink.\"name\",sink.\"amount\",sink.\"digest\",sink.\"batch_time_in\"," + "sink.\"batch_time_out\",sink.\"validity_from_target\",sink.\"validity_through_target\" " + "FROM 
\"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '9999-12-31 23:59:59') as legend_persistence_x " + "INNER JOIN " + @@ -1168,7 +1173,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR "AND (legend_persistence_x.\"validity_from_target\" = legend_persistence_y.\"legend_persistence_start_date\"))"; String expectedUpdateMain = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (EXISTS (SELECT * FROM \"mydb\".\"temp\" as temp WHERE " + "((sink.\"id\" = temp.\"id\") AND (sink.\"name\" = temp.\"name\")) AND " + "(sink.\"validity_from_target\" = temp.\"validity_from_target\"))) AND (sink.\"batch_time_out\" = '9999-12-31 23:59:59')"; @@ -1190,8 +1195,8 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00'"; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00') as \"rowsInserted\""; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000'"; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') as \"rowsInserted\""; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java index 69799b67769..305698ea828 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java @@ -35,10 +35,9 @@ public class AppendOnlyTest extends AppendOnlyTestCases String rowsUpdated = "SELECT 0 as \"rowsUpdated\""; String rowsTerminated = "SELECT 0 as \"rowsTerminated\""; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"staging\" as stage"; @Override - public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations) + public void verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(GeneratorResult 
operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -56,45 +55,36 @@ public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations Assertions.assertEquals(lockAcquiredQuery, acquireLockSql.get(0)); // Stats - verifyStats(operations); - } - - @Override - public void verifyAppendOnlyAllowDuplicatesWithAuditing(GeneratorResult operations) - { - List preActionsSqlList = operations.preActionsSql(); - List milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage)"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableCreateQueryWithAuditAndNoPKs, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); + Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); + Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + Assertions.assertNull(operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); } @Override - public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List generatorResults, List dataSplitRanges) + public void verifyAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords(List generatorResults, List dataSplitRanges) { String insertSql = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, generatorResults.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, generatorResults.get(0).preActionsSql().get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, generatorResults.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, generatorResults.get(0).deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), generatorResults.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), generatorResults.get(1).ingestSql().get(0)); Assertions.assertEquals(2, generatorResults.size()); // Stats - String incomingRecordCount = "SELECT COUNT(*) as 
\"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage " + - "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; - String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"staging\" as stage " + + String incomingRecordCount = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_update_time\" = (SELECT MAX(sink.\"batch_update_time\") FROM \"mydb\".\"main\" as sink)"; Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), generatorResults.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(1)), generatorResults.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -105,70 +95,23 @@ public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List preActionsSqlList = operations.preActionsSql(); - List milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage)"; - - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); - } - - @Override - public void verifyAppendOnlyFailOnDuplicatesWithAuditing(GeneratorResult operations) - { - List preActionsSqlList = operations.preActionsSql(); - List milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",'2000-01-01 00:00:00' FROM \"mydb\".\"staging\" as stage)"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableWithAuditNotPkCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); - } - - @Override - public void verifyAppendOnlyFilterDuplicatesNoAuditing(GeneratorResult operations) - { - List preActionsSqlList = operations.preActionsSql(); - List milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + - "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + - "(sink.\"digest\" = stage.\"digest\"))))"; - - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); - Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); - Assertions.assertEquals(rowsTerminated, 
operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); - Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); - } - - @Override - public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries) + public void verifyAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords(GeneratorResult queries) { List preActionsSqlList = queries.preActionsSql(); List milestoningSqlList = queries.ingestSql(); + List deduplicationAndVersioningSql = queries.deduplicationAndVersioningSql(); String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE ((sink.\"id\" = stage.\"id\") AND " + "(sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); List postActionsSql = queries.postActionsSql(); @@ -177,6 +120,7 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries assertIfListsAreSameIgnoringOrder(expectedSQL, postActionsSql); // Stats + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_update_time\" = (SELECT MAX(sink.\"batch_update_time\") FROM \"mydb\".\"main\" as sink)"; Assertions.assertEquals(incomingRecordCount, queries.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsUpdated, queries.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); @@ -186,25 +130,29 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries } @Override - public void verifyAppendOnlyFilterDuplicatesWithAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords(List operations, List dataSplitRanges) { String insertSql = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= 
'{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"digest\" = stage.\"digest\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, operations.get(0).preActionsSql().get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, operations.get(0).deduplicationAndVersioningSql().get(1)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); Assertions.assertEquals(2, operations.size()); // Stats - String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage " + + String incomingRecordCount = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_update_time\" = (SELECT MAX(sink.\"batch_update_time\") FROM \"mydb\".\"main\" as sink)"; @@ -222,14 +170,13 @@ public void verifyAppendOnlyWithUpperCaseOptimizer(GeneratorResult operations) List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); - String insertSql = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\") " + - "(SELECT * FROM \"MYDB\".\"STAGING\" as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink " + - "WHERE ((sink.\"ID\" = stage.\"ID\") " + - "AND (sink.\"NAME\" = stage.\"NAME\")) " + - "AND (sink.\"DIGEST\" = stage.\"DIGEST\"))))"; + String insertSql = "INSERT INTO \"MYDB\".\"MAIN\" " + + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_UPDATE_TIME\") " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",'2000-01-01 00:00:00.000000' FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + + "WHERE NOT (EXISTS " + + "(SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" = stage.\"DIGEST\"))))"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQueryWithUpperCase, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase, preActionsSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); } @@ -239,24 +186,72 @@ public void verifyAppendOnlyWithLessColumnsInStaging(GeneratorResult operations) List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); - String insertSql = "INSERT INTO \"mydb\".\"main\" " + - "(\"id\", \"name\", \"amount\", \"digest\") " + - "(SELECT * FROM 
\"mydb\".\"staging\" as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + - "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + - "(sink.\"digest\" = stage.\"digest\"))))"; + String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"digest\", \"batch_update_time\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"digest\",'2000-01-01 00:00:00.000000' FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE ((sink.\"id\" = stage.\"id\") AND " + + "(sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); } - private void verifyStats(GeneratorResult operations) + @Override + public void verifyAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords(GeneratorResult operations) { + List preActionsSqlList = operations.preActionsSql(); + List milestoningSqlList = operations.ingestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + + String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE ((sink.\"id\" = stage.\"id\") AND " + + "(sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); + + // Stats + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; + String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_update_time\" = (SELECT MAX(sink.\"batch_update_time\") FROM \"mydb\".\"main\" as sink)"; Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); + Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + } + + @Override + public void verifyAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords(GeneratorResult operations) + { + 
List preActionsSqlList = operations.preActionsSql(); + List milestoningSqlList = operations.ingestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + + String insertSql = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage)"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); + + // Stats + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; + String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_update_time\" = (SELECT MAX(sink.\"batch_update_time\") FROM \"mydb\".\"main\" as sink)"; + Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); + Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); } public RelationalSink getRelationalSink() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java index 948a3132866..7bc27355818 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java @@ -15,6 +15,7 @@ package org.finos.legend.engine.persistence.components.ingestmode.nontemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import 
org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; @@ -25,26 +26,36 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; -import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockAcquiredQuery; -import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockInitializedQuery; +import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.*; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DATA_ERRORS; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DUPLICATES; public class NontemporalDeltaTest extends NontemporalDeltaTestCases { protected String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; protected String incomingRecordCountWithSplits = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE " + "(stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + protected String incomingRecordCountWithSplitsTempStagingTable = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE " + + "(stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + protected String incomingRecordCountWithSplitsWithDuplicates = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE " + + "(stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + protected String rowsTerminated = "SELECT 0 as \"rowsTerminated\""; protected String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - protected String rowsDeletedWithDeleteIndicator = "SELECT COUNT(*) as \"rowsDeleted\" FROM \"mydb\".\"main\" as sink WHERE EXISTS (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\") AND (stage.\"delete_indicator\" IN ('yes','1','true')))"; + protected String rowsDeletedWithDeleteIndicator = "SELECT COUNT(*) as \"rowsDeleted\" FROM \"mydb\".\"main\" as sink WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\") AND (stage.\"delete_indicator\" IN ('yes','1','true')))"; @Override - public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); List initializeLockSql = operations.initializeLockSql(); List acquireLockSql = operations.acquireLockSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String updateSql = "UPDATE \"mydb\".\"main\" as sink SET " + "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = 
stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + @@ -57,7 +68,7 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio String insertSql = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; @@ -65,6 +76,9 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio Assertions.assertEquals(AnsiTestArtifacts.expectedStagingTableWithDigestCreateQuery, preActionsSqlList.get(1)); Assertions.assertEquals(AnsiTestArtifacts.expectedLockInfoTableCreateQuery, preActionsSqlList.get(2)); + Assertions.assertTrue(deduplicationAndVersioningSql.isEmpty()); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); + Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); @@ -78,31 +92,37 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio } @Override - public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String updateSql = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + - "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + - "sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + - "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + - "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + - "sink.\"batch_update_time\" = '2000-01-01 00:00:00' " + - "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))"; + "SET sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + + "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + + 
"sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + + "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + + "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))," + + "sink.\"batch_update_time\" = '2000-01-01 00:00:00.000000' " + + "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))"; String insertSql = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); // Stats Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); @@ -110,7 +130,44 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat } @Override - public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(List operations, List dataSplitRanges) + { + String updateSql = "UPDATE \"mydb\".\"main\" as sink SET " + + "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND 
((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) " + + "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE " + + "(((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) " + + "AND ((stage.\"data_split\" >= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))"; + + String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + + "AND (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")))))"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(expectedBaseTempStagingTablePlusDigestWithDataSplit, operations.get(0).preActionsSql().get(1)); + Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndAllowDups, operations.get(0).deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(AnsiTestArtifacts.dataErrorCheckSqlWithBizDateVersion, operations.get(0).deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); + + Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(1)); + + // Stats + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStagingTable, 
dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStagingTable, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsTerminated, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + } + + @Override + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(List operations, List dataSplitRanges) { String updateSql = "UPDATE \"mydb\".\"main\" as sink SET " + "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + @@ -131,9 +188,12 @@ public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1)); + Assertions.assertTrue(operations.get(0).deduplicationAndVersioningSql().isEmpty()); + Assertions.assertTrue(operations.get(0).deduplicationAndVersioningErrorChecksSqlPlan().isEmpty()); + Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(1)); - + // Stats Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -142,19 +202,19 @@ public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List } @Override - public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(List operations, List dataSplitRanges) { String updateSql = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + - "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + - "sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= 
'{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + - "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + - "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + - "sink.\"batch_update_time\" = '2000-01-01 00:00:00' " + - "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))"; + "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))," + + "sink.\"batch_update_time\" = '2000-01-01 00:00:00.000000' " + + "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", 
\"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00' FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000' FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + "AND (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")))))"; @@ -164,15 +224,21 @@ public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -193,8 +259,8 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(Gener "WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; String deleteSql = "DELETE FROM \"mydb\".\"main\" as sink " + - "WHERE EXISTS (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" " + - "FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) " + + "WHERE EXISTS " + + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) " + "AND (sink.\"digest\" = stage.\"digest\") AND (stage.\"delete_indicator\" IN ('yes','1','true')))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); @@ -227,7 +293,7 @@ public void verifyNontemporalDeltaWithUpperCaseOptimizer(GeneratorResult operati "AND (sink.\"DIGEST\" <> stage.\"DIGEST\"))"; String insertSql = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\") " + - "(SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\" FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink " + "WHERE (sink.\"ID\" = stage.\"ID\") " + "AND (sink.\"NAME\" = stage.\"NAME\"))))"; @@ -251,7 +317,7 @@ public void verifyNontemporalDeltaWithLessColumnsInStaging(GeneratorResult opera "((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\"))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"digest\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; @@ -284,7 +350,7 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")) AND ((stage.\"biz_date\" > '2020-01-01') AND (stage.\"biz_date\" < '2020-01-03')))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE (NOT (EXISTS " + + "(SELECT 
stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage WHERE (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND " + "(sink.\"name\" = stage.\"name\")))) AND ((stage.\"biz_date\" > '2020-01-01') AND (stage.\"biz_date\" < '2020-01-03')))"; @@ -300,31 +366,46 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(GeneratorResult operations) + public void verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String updateSql = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"id\" = (SELECT stage.\"id\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))," + - "sink.\"name\" = (SELECT stage.\"name\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))," + - "sink.\"amount\" = (SELECT stage.\"amount\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))," + - "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))," + - "sink.\"digest\" = (SELECT stage.\"digest\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))," + - "sink.\"version\" = (SELECT 
stage.\"version\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1)) " + - "WHERE EXISTS (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"legend_persistence_row_num\" = 1))"; + "sink.\"id\" = (SELECT stage.\"id\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))," + + "sink.\"name\" = (SELECT stage.\"name\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))," + + "sink.\"amount\" = (SELECT stage.\"amount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))," + + "sink.\"biz_date\" = (SELECT stage.\"biz_date\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))," + + "sink.\"digest\" = (SELECT stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))," + + "sink.\"version\" = (SELECT stage.\"version\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) " + + "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\") " + - "(SELECT * FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage " + - "WHERE stage.\"snapshot_id\" > 18972) as stage " + - "WHERE stage.\"legend_persistence_row_num\" = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; + "(SELECT 
stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM " + + "\"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; + + String expectedInsertIntoBaseTempStagingWithMaxVersionFilterDupsWithStagingFilters = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + + "stage.\"legend_persistence_count\" as \"legend_persistence_count\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + + "stage.\"legend_persistence_count\" as \"legend_persistence_count\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_rank\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",COUNT(*) as \"legend_persistence_count\" " + + "FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972 GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\", stage.\"version\") as stage) " + + "as stage WHERE stage.\"legend_persistence_rank\" = 1)"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(expectedBaseTempStagingTableWithVersionAndCount, preActionsSqlList.get(1)); Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(expectedInsertIntoBaseTempStagingWithMaxVersionFilterDupsWithStagingFilters, operations.deduplicationAndVersioningSql().get(1)); + + Assertions.assertEquals(dataErrorCheckSql, operations.deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972"; // Stats Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -333,7 +414,7 @@ public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(Ge } @Override - public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -348,7 +429,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE (((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")) AND (stage.\"snapshot_id\" > 18972))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE (NOT (EXISTS " + + "(SELECT 
stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM \"mydb\".\"staging\" as stage WHERE (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND " + "(sink.\"name\" = stage.\"name\")))) AND (stage.\"snapshot_id\" > 18972))"; @@ -356,6 +437,9 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertTrue(operations.deduplicationAndVersioningSql().isEmpty()); + Assertions.assertTrue(operations.deduplicationAndVersioningErrorChecksSql().isEmpty()); + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972"; // Stats Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -364,7 +448,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene } @Override - public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -379,13 +463,16 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( "WHERE EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\"))"; String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertTrue(operations.deduplicationAndVersioningSql().isEmpty()); + Assertions.assertTrue(operations.deduplicationAndVersioningErrorChecksSql().isEmpty()); + // Stats Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); @@ -393,29 +480,40 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( } @Override - public void verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); - String updateSql = "UPDATE \"MYDB\".\"MAIN\" as sink SET " + - "sink.\"ID\" = (SELECT stage.\"ID\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY 
stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))," + - "sink.\"NAME\" = (SELECT stage.\"NAME\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))," + - "sink.\"AMOUNT\" = (SELECT stage.\"AMOUNT\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))," + - "sink.\"BIZ_DATE\" = (SELECT stage.\"BIZ_DATE\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))," + - "sink.\"DIGEST\" = (SELECT stage.\"DIGEST\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))," + - "sink.\"VERSION\" = (SELECT stage.\"VERSION\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1)) " + - "WHERE EXISTS (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE (((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) AND (stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1))"; + String updateSql = "UPDATE \"MYDB\".\"MAIN\" as sink " + + "SET sink.\"ID\" = (SELECT stage.\"ID\" FROM 
\"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))," + + "sink.\"NAME\" = (SELECT stage.\"NAME\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))," + + "sink.\"AMOUNT\" = (SELECT stage.\"AMOUNT\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))," + + "sink.\"BIZ_DATE\" = (SELECT stage.\"BIZ_DATE\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))," + + "sink.\"DIGEST\" = (SELECT stage.\"DIGEST\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))," + + "sink.\"VERSION\" = (SELECT stage.\"VERSION\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")) " + + "WHERE EXISTS (SELECT * FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\"))"; String insertSql = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"VERSION\") " + - "(SELECT * FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" FROM " + - "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage " + - "WHERE stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1) as stage " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE (sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\"))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQueryUpperCase, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithVersionUpperCase, preActionsSqlList.get(1)); Assertions.assertEquals(updateSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + + String insertTempStagingTable = "INSERT INTO \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" " + + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"VERSION\") " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" FROM " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_RANK\" " + + "FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE stage.\"LEGEND_PERSISTENCE_RANK\" = 1)"; + + 
Assertions.assertEquals(expectedTempStagingCleanupQueryInUpperCase, operations.deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(insertTempStagingTable, operations.deduplicationAndVersioningSql().get(1)); + + Assertions.assertEquals(dataErrorCheckSqlUpperCase, operations.deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); } public RelationalSink getRelationalSink() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java index 3771c827314..d1b1e403e56 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java @@ -15,6 +15,7 @@ package org.finos.legend.engine.persistence.components.ingestmode.nontemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.SqlPlan; @@ -25,30 +26,31 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; -import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockAcquiredQuery; -import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockInitializedQuery; +import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.*; public class NontemporalSnapshotTest extends NontemporalSnapshotTestCases { String cleanUpMainTableSql = "DELETE FROM \"mydb\".\"main\" as sink"; String cleanupMainTableSqlUpperCase = "DELETE FROM \"MYDB\".\"MAIN\" as sink"; String rowsDeleted = "SELECT COUNT(*) as \"rowsDeleted\" FROM \"mydb\".\"main\" as sink"; - String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; String rowsUpdated = "SELECT 0 as \"rowsUpdated\""; String rowsInserted = "SELECT COUNT(*) as \"rowsInserted\" FROM \"mydb\".\"main\" as sink"; String rowsTerminated = "SELECT 0 as \"rowsTerminated\""; @Override - public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalSnapshotNoAuditingNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); List initializeLockSql = operations.initializeLockSql(); List acquireLockSql = operations.acquireLockSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map andVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String insertSql = "INSERT INTO \"mydb\".\"main\" 
(\"id\", \"name\", \"amount\", \"biz_date\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage)"; + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\" FROM \"mydb\".\"staging\" as stage)"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(AnsiTestArtifacts.expectedBaseStagingTableCreateQuery, preActionsSqlList.get(1)); @@ -58,68 +60,67 @@ public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult opera Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); Assertions.assertEquals(lockInitializedQuery, initializeLockSql.get(0)); Assertions.assertEquals(lockAcquiredQuery, acquireLockSql.get(0)); + Assertions.assertTrue(deduplicationAndVersioningSql.isEmpty()); + Assertions.assertTrue(andVersioningErrorChecksSql.isEmpty()); // Stats - verifyStats(operations); + verifyStats(operations, "staging"); } @Override - public void verifyNontemporalSnapshotNoAuditingWithDataSplit(GeneratorResult operations) - { - List preActionsSqlList = operations.preActionsSql(); - List milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\" FROM \"mydb\".\"staging\" as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage_right WHERE " + - "(stage.\"data_split\" < stage_right.\"data_split\") AND ((stage.\"id\" = stage_right.\"id\") AND (stage.\"name\" = stage_right.\"name\")))))"; - - Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(cleanUpMainTableSql, milestoningSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); - - // Stats - verifyStats(operations); - } - - @Override - public void verifyNontemporalSnapshotWithAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalSnapshotWithAuditingFilterDupsNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String insertSql = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage)"; + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",'2000-01-01 00:00:00.000000' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage)"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableWithAuditPkCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTableWithCount, preActionsSqlList.get(1)); Assertions.assertEquals(cleanUpMainTableSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); // Stats - verifyStats(operations); + 
verifyStats(operations, "staging"); } @Override - public void verifyNontemporalSnapshotWithAuditingWithDataSplit(GeneratorResult operations) + public void verifyNontemporalSnapshotWithAuditingFailOnDupMaxVersion(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String insertSql = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"batch_update_time\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",'2000-01-01 00:00:00.000000' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage)"; - String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", \"batch_update_time\") " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",'2000-01-01 00:00:00' " + - "FROM \"mydb\".\"staging\" as stage WHERE NOT (EXISTS " + - "(SELECT * FROM \"mydb\".\"staging\" as stage_right " + - "WHERE (stage.\"data_split\" < stage_right.\"data_split\") AND ((stage.\"id\" = stage_right.\"id\") AND " + - "(stage.\"name\" = stage_right.\"name\")))))"; + String maxDataErrorCheckSql = "SELECT MAX(\"legend_persistence_distinct_rows\") as \"MAX_DATA_ERRORS\" FROM " + + "(SELECT COUNT(DISTINCT(\"amount\")) as \"legend_persistence_distinct_rows\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "as stage GROUP BY \"id\", \"name\", \"biz_date\") as stage"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableWithAuditPkCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTableWithCount, preActionsSqlList.get(1)); Assertions.assertEquals(cleanUpMainTableSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Assertions.assertEquals(maxDataErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + // Stats - verifyStats(operations); + verifyStats(operations, "staging"); } @Override @@ -129,7 +130,7 @@ public void verifyNontemporalSnapshotWithUpperCaseOptimizer(GeneratorResult quer List milestoningSqlList = queries.ingestSql(); String insertSql = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\") " + - "(SELECT * FROM \"MYDB\".\"STAGING\" as stage)"; + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\" FROM \"MYDB\".\"STAGING\" as stage)"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableCreateQueryWithUpperCase, preActionsSqlList.get(0)); Assertions.assertEquals(cleanupMainTableSqlUpperCase, milestoningSqlList.get(0)); @@ -143,7 +144,7 @@ public void verifyNontemporalSnapshotWithLessColumnsInStaging(GeneratorResult op List milestoningSqlList = operations.ingestSql(); String insertSql = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\") " + - "(SELECT * FROM \"mydb\".\"staging\" as stage)"; 
+ "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\" FROM \"mydb\".\"staging\" as stage)"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(cleanUpMainTableSql, milestoningSqlList.get(0)); @@ -160,9 +161,9 @@ public void verifyNontemporalSnapshotWithCleanStagingData(GeneratorResult operat } @Override - public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostActions) + public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostCleanup) { - List sqlsForPostActions = physicalPlanForPostActions.getSqlList(); + List sqlsForPostActions = physicalPlanForPostCleanup.getSqlList(); List expectedSQL = new ArrayList<>(); expectedSQL.add(AnsiTestArtifacts.expectedDropTableQuery); assertIfListsAreSameIgnoringOrder(expectedSQL, sqlsForPostActions); @@ -174,12 +175,13 @@ public RelationalSink getRelationalSink() return AnsiSqlSink.get(); } - private void verifyStats(GeneratorResult operations) + private void verifyStats(GeneratorResult operations, String stageTableName) { // Pre stats: Assertions.assertEquals(rowsDeleted, operations.preIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); // Post Stats: + String incomingRecordCount = String.format("SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"%s\" as stage", stageTableName); Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdBasedTest.java index 8470cdbeb0f..f95df9e8b3d 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdBasedTest.java @@ -15,6 +15,7 @@ package org.finos.legend.engine.persistence.components.ingestmode.unitemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; import org.finos.legend.engine.persistence.components.relational.api.DataSplitRange; @@ -23,13 +24,15 @@ import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.*; +import static 
org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DATA_ERRORS; public class UnitemporalDeltaBatchIdBasedTest extends UnitmemporalDeltaBatchIdBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -75,7 +78,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio } @Override - public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupAllVersionsWithoutPerform(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + @@ -106,6 +109,9 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; @@ -116,17 +122,19 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET sink.\"batch_id_out\" = " + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + "WHERE " + "(sink.\"batch_id_out\" = 999999999) AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) " + "AND ((sink.\"digest\" <> stage.\"digest\") OR (stage.\"delete_indicator\" IN ('yes','1','true')))))"; @@ -134,7 +142,7 @@ "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999 FROM \"mydb\".\"staging\" as stage " + + "999999999 FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))) AND " + @@ -146,6 +154,16 @@ public void
verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + String expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"delete_indicator\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"delete_indicator\"," + + "COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\", stage.\"delete_indicator\")"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; @@ -155,13 +173,13 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndNoDedupAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET sink.\"batch_id_out\" = " + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + "WHERE " + "(sink.\"batch_id_out\" = 999999999) AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) " + "AND ((sink.\"digest\" <> stage.\"digest\") OR (stage.\"delete_indicator\" IN ('yes','1','true')))))"; @@ -169,7 +187,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List - "999999999 FROM \"mydb\".\"staging\" as stage " + + "999999999 FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + @@ -187,7 +205,17 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List - String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <=
'{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndAllowDups = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"delete_indicator\", \"data_split\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"delete_indicator\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"biz_date\" ASC) as \"data_split\" " + + "FROM \"mydb\".\"staging\" as stage)"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndAllowDups, operations.get(0).deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(AnsiTestArtifacts.dataErrorCheckSqlWithBizDateVersion, operations.get(0).deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); + + String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))))) as \"rowsInserted\""; @@ -270,7 +298,7 @@ public void verifyUnitemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -279,29 +307,36 @@ public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(Gen "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 " + "FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + "WHERE (sink.\"batch_id_out\" = 999999999) AND 
(EXISTS " + - "(SELECT * FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" " + - "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() " + - "OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" " + - "FROM \"mydb\".\"staging\" as stage WHERE stage.\"batch_id_in\" > 5) as stage " + - "WHERE stage.\"legend_persistence_row_num\" = 1) as stage " + + "(SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (stage.\"version\" > sink.\"version\")))"; - String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" (\"id\", \"name\", \"amount\", \"biz_date\", " + - "\"digest\", \"version\", \"batch_id_in\", \"batch_id_out\") " + + String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\", \"batch_id_in\", \"batch_id_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 " + - "FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + - "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" " + - "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + - "ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) " + - "as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"batch_id_in\" > 5) as stage " + - "WHERE stage.\"legend_persistence_row_num\" = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + - "WHERE (sink.\"batch_id_out\" = 999999999) AND (stage.\"version\" <= sink.\"version\") " + - "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")))))"; + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 FROM \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "as stage WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = 999999999) " + + "AND (stage.\"version\" <= sink.\"version\") AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQuery, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(1)); + Assertions.assertEquals(expectedBaseTempStagingTableWithVersionAndCount, preActionsSql.get(2)); + + String expectedInsertIntoBaseTempStagingWithMaxVersionFilterDupsWithStagingFilters = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + + "stage.\"legend_persistence_count\" as \"legend_persistence_count\" FROM " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + + "stage.\"legend_persistence_count\" as 
\"legend_persistence_count\",DENSE_RANK() OVER " + + "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_rank\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "stage.\"version\",COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage " + + "WHERE stage.\"batch_id_in\" > 5 GROUP BY stage.\"id\", stage.\"name\", stage.\"amount\", stage.\"biz_date\", " + + "stage.\"digest\", stage.\"version\") as stage) as stage WHERE stage.\"legend_persistence_rank\" = 1)"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(expectedInsertIntoBaseTempStagingWithMaxVersionFilterDupsWithStagingFilters, operations.deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(dataErrorCheckSql, operations.deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); @@ -309,7 +344,7 @@ public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(Gen } @Override - public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -347,7 +382,7 @@ public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(Generato } @Override - public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -355,7 +390,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(stage.\"version\" > sink.\"version\")))"; @@ -364,13 +399,24 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + "999999999 " + - "FROM \"mydb\".\"staging\" as stage " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (stage.\"version\" <= sink.\"version\") AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQuery, preActionsSql.get(0)); 
Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(1)); + Assertions.assertEquals(expectedBaseTempStagingTableWithVersionAndCount, preActionsSql.get(2)); + + String expectedInsertIntoBaseTempStagingWithFilterDuplicates = "INSERT INTO \"mydb\".\"staging_legend_persistence_temp_staging\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"version\", \"legend_persistence_count\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\"," + + "COUNT(*) as \"legend_persistence_count\" FROM \"mydb\".\"staging\" as stage GROUP BY stage.\"id\", " + + "stage.\"name\", stage.\"amount\", stage.\"biz_date\", stage.\"digest\", stage.\"version\")"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, operations.deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(expectedInsertIntoBaseTempStagingWithFilterDuplicates, operations.deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(AnsiTestArtifacts.maxDupsErrorCheckSql, operations.deduplicationAndVersioningErrorChecksSql().get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); @@ -378,37 +424,39 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( } @Override - public void verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaWithNoDedupMaxVersioningAndUpperCaseWithoutStagingFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_ID_OUT\" = " + - "(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + - "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1 " + - "WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND " + - "(EXISTS (SELECT * FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" " + - "FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\"," + - "ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" " + - "FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1) as stage " + - "WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")))"; + String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink " + + "SET sink.\"BATCH_ID_OUT\" = (SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + + "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1 WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND " + + "(EXISTS (SELECT * FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage WHERE " + + "((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (stage.\"VERSION\" >= sink.\"VERSION\")))"; - String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", " + - "\"VERSION\", \"BATCH_ID_IN\", \"BATCH_ID_OUT\") " + + String expectedUpsertQuery = "INSERT INTO 
\"MYDB\".\"MAIN\" " + + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"VERSION\", \"BATCH_ID_IN\", \"BATCH_ID_OUT\") " + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\"," + "(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + - "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999 FROM " + - "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" " + - "FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\"," + - "ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) " + - "as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage " + - "WHERE stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1) as stage WHERE NOT " + - "(EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink " + - "WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (stage.\"VERSION\" < sink.\"VERSION\") " + - "AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; + "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999 FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + + "WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND " + + "(stage.\"VERSION\" < sink.\"VERSION\") AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQueryWithUpperCase(), preActionsSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithVersionUpperCase, preActionsSql.get(2)); + + String expectedInsertIntoTempStagingMaxVersion = "INSERT INTO \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" " + + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"VERSION\") " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" " + + "FROM (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_RANK\" " + + "FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE stage.\"LEGEND_PERSISTENCE_RANK\" = 1)"; + + Assertions.assertEquals(expectedTempStagingCleanupQueryInUpperCase, operations.deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(expectedInsertIntoTempStagingMaxVersion, operations.deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(dataErrorCheckSqlUpperCase, operations.deduplicationAndVersioningErrorChecksSql().get(MAX_DATA_ERRORS)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); @@ -416,7 +464,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWitho } @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -457,7 +505,7 @@ public void 
verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(G } @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -529,7 +577,7 @@ protected String getExpectedMetadataTableIngestQueryWithStagingFilters(String st "(\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\", \"staging_filters\") " + "(SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE'," + + "'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE'," + String.format("PARSE_JSON('%s'))", stagingFilters); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdDateTimeBasedTest.java index 505b9893733..55890efccd2 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaBatchIdDateTimeBasedTest.java @@ -25,10 +25,14 @@ import java.util.ArrayList; import java.util.List; +import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.*; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DATA_ERRORS; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DUPLICATES; + public class UnitemporalDeltaBatchIdDateTimeBasedTest extends UnitmemporalDeltaBatchIdDateTimeBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -36,7 +40,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + @@ -46,7 +50,7 @@ 
public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " + @@ -69,13 +73,13 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio } @Override - public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"digest\" <> stage.\"digest\")))"; @@ -83,8 +87,8 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " + @@ -92,6 +96,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String incomingRecordCount = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE 
UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) as \"rowsInserted\""; @@ -112,7 +126,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -120,7 +134,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET sink.\"batch_id_out\" = " + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE " + "(sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + @@ -132,7 +146,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator "\"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + "WHERE (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))) AND " + @@ -155,7 +169,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator } @Override - public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalDeltaWithDeleteInd(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -163,7 +177,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET sink.\"batch_id_out\" = " + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE " + "(sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + @@ -175,7 +189,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper "\"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + "WHERE (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))) AND " + @@ -190,13 +204,13 @@ public void 
verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + "sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE " + "((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "((sink.\"digest\" <> stage.\"digest\") OR (stage.\"delete_indicator\" IN ('yes','1','true')))))"; @@ -204,7 +218,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(sink.\"digest\" = stage.\"digest\") AND ((sink.\"id\" = stage.\"id\") AND " + @@ -213,6 +227,20 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_ID_OUT\" = (SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1,sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00' WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" <> stage.\"DIGEST\")))"; - String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_ID_IN\", \"BATCH_ID_OUT\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (sink.\"DIGEST\" = stage.\"DIGEST\") AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; + String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_ID_OUT\" = (SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1,sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00.000000' WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" <> 
stage.\"DIGEST\")))"; + String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_ID_IN\", \"BATCH_ID_OUT\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (sink.\"DIGEST\" = stage.\"DIGEST\") AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQueryWithUpperCase(), preActionsSql.get(1)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); @@ -255,7 +283,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE " + "((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" <> stage.\"digest\")))"; @@ -264,7 +292,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera "(\"id\", \"name\", \"amount\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"digest\" = stage.\"digest\") " + @@ -330,7 +358,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations) String expectedMilestoneQuery = "UPDATE \"my_schema\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"my_schema\".\"staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + @@ -340,7 +368,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations) "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT 
COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"my_schema\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"my_schema\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " + @@ -375,7 +403,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati String expectedMilestoneQuery = "UPDATE \"mydb\".\"my_schema\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM \"mydb\".\"my_schema\".\"staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + @@ -385,7 +413,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"my_schema\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"my_schema\".\"main\" as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " + @@ -420,7 +448,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper String expectedMilestoneQuery = "UPDATE main as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1," + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) AND " + "(EXISTS (SELECT * FROM staging as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + @@ -430,7 +458,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM staging as stage " + "WHERE NOT (EXISTS (SELECT * FROM main as sink " + "WHERE (sink.\"batch_id_out\" = 999999999) " +
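
// The file diff that follows applies the same batch-time precision change seen in the
// hunks above: expected literals move from '2000-01-01 00:00:00' to the
// microsecond-precision '2000-01-01 00:00:00.000000'. A minimal sketch of rendering
// such a literal with java.time; the formatter pattern is an illustrative assumption,
// not the generator's actual rendering code:
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

final class BatchTimeLiteralSketch
{
    private static final DateTimeFormatter MICROSECONDS =
        DateTimeFormatter.ofPattern("uuuu-MM-dd HH:mm:ss.SSSSSS");

    static String render(LocalDateTime batchTime)
    {
        // e.g. render(LocalDateTime.of(2000, 1, 1, 0, 0)) yields '2000-01-01 00:00:00.000000'
        return "'" + MICROSECONDS.format(batchTime) + "'";
    }
}

diff --git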
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDateTimeBasedTest.java index 148bc47d4d1..e2891b503f3 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDateTimeBasedTest.java @@ -25,17 +25,22 @@ import java.util.ArrayList; import java.util.List; +import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.dataErrorCheckSqlWithBizDateVersion; +import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.maxDupsErrorCheckSql; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DATA_ERRORS; +import static org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics.MAX_DUPLICATES; + public class UnitemporalDeltaDateTimeBasedTest extends UnitmemporalDeltaDateTimeBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + @@ -44,7 +49,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + @@ -59,29 +64,29 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio // Stats String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00'"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 
00:00:00.000000'"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00') as \"rowsInserted\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') as \"rowsInserted\""; String rowsTerminated = "SELECT 0 as \"rowsTerminated\""; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @Override - public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND " + "(sink.\"digest\" <> stage.\"digest\")))"; String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' " + - "FROM \"mydb\".\"staging\" as stage " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + @@ -89,6 +94,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00'"; + String incomingRecordCount = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000'"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00') as 
\"rowsInserted\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') as \"rowsInserted\""; String rowsTerminated = "SELECT 0 as \"rowsTerminated\""; verifyStats(operations.get(0), enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @Override - public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalDeltaWithDeleteIndNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE " + "(sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + @@ -126,7 +142,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", " + "\"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + "WHERE (NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND (sink.\"digest\" = stage.\"digest\") " + "AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\"))))) AND " + @@ -141,21 +157,21 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper // Stats String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsInserted\""; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND 
(EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsTerminated\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsInserted\""; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsTerminated\""; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndFilterDupsAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE " + "(sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + - "(EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage " + + "(EXISTS (SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE ((stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) " + "AND ((sink.\"digest\" <> stage.\"digest\") OR (stage.\"delete_indicator\" IN ('yes','1','true')))))"; @@ -163,7 +179,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND (sink.\"digest\" = stage.\"digest\") " + @@ -173,6 +189,19 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))"; + String incomingRecordCount = "SELECT COALESCE(SUM(stage.\"legend_persistence_count\"),0) as \"incomingRecordCount\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = 
'2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsInserted\""; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsTerminated\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsInserted\""; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsTerminated\""; verifyStats(operations.get(0), enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @@ -197,9 +226,9 @@ public void verifyUnitemporalDeltaWithUpperCaseOptimizer(GeneratorResult operati List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00' WHERE (sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59') AND (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" <> stage.\"DIGEST\")))"; + String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00.000000' WHERE (sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59') AND (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" <> stage.\"DIGEST\")))"; - String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM 
\"MYDB\".\"MAIN\" as sink WHERE (sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59') AND (sink.\"DIGEST\" = stage.\"DIGEST\") AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; + String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (EXISTS (SELECT * FROM \"MYDB\".\"MAIN\" as sink WHERE (sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59') AND (sink.\"DIGEST\" = stage.\"DIGEST\") AND ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")))))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableTimeBasedCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQueryWithUpperCase(), preActionsSql.get(1)); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdBasedTest.java index b30f182ed29..b79aeb6c903 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdBasedTest.java @@ -15,6 +15,7 @@ package org.finos.legend.engine.persistence.components.ingestmode.unitemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; @@ -22,6 +23,7 @@ import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockAcquiredQuery; import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.lockInitializedQuery; @@ -35,13 +37,15 @@ public class UnitemporalSnapshotBatchIdBasedTest extends UnitmemporalSnapshotBat String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1)-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND 
(sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))))) as \"rowsTerminated\""; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); List initializeLockSql = operations.initializeLockSql(); List acquireLockSql = operations.acquireLockSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + @@ -61,6 +65,52 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(2)); Assertions.assertEquals(AnsiTestArtifacts.expectedLockInfoTableCreateQuery, preActionsSql.get(3)); + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + Assertions.assertTrue(deduplicationAndVersioningSql.isEmpty()); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); + + Assertions.assertEquals(lockInitializedQuery, initializeLockSql.get(0)); + Assertions.assertEquals(lockAcquiredQuery, acquireLockSql.get(0)); + + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + } + + @Override + public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion(GeneratorResult operations) + { + List preActionsSql = operations.preActionsSql(); + List milestoningSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List initializeLockSql = operations.initializeLockSql(); + List acquireLockSql = operations.acquireLockSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1 " + + "WHERE (sink.\"batch_id_out\" = 999999999) " + + "AND (NOT (EXISTS " + + "(SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; + + String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata 
WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999 " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999)))"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableBatchIdBasedCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedStagingTableWithDigestCreateQuery, preActionsSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(2)); + Assertions.assertEquals(AnsiTestArtifacts.expectedLockInfoTableCreateQuery, preActionsSql.get(3)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSql.get(4)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); @@ -101,7 +151,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -131,7 +181,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdDateTimeBasedTest.java index b9fed006365..65111b98915 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotBatchIdDateTimeBasedTest.java @@ -15,14 +15,15 @@ package org.finos.legend.engine.persistence.components.ingestmode.unitemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import 
org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; -import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotBatchIdBasedTestCases; import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotBatchIdDateTimeBasedTestCases; import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends UnitmemporalSnapshotBatchIdDateTimeBasedTestCases { @@ -33,14 +34,14 @@ public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends UnitmemporalSna String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1)-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1) AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_id_in\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'))))) as \"rowsTerminated\""; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; @@ -48,7 +49,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT 
COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999)))"; @@ -62,6 +63,41 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } + @Override + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion(GeneratorResult operations) + { + List preActionsSql = operations.preActionsSql(); + List milestoningSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + + "WHERE (sink.\"batch_id_out\" = 999999999) " + + "AND (NOT (EXISTS " + + "(SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; + + String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999)))"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigest, preActionsSql.get(2)); + + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.dataErrorCheckSqlWithBizDateVersion, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + } + @Override public void 
verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandling(GeneratorResult operations) { @@ -69,7 +105,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa List milestoningSql = operations.ingestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE sink.\"batch_id_out\" = 999999999"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0)); @@ -78,16 +114,32 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa } @Override - public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - - String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_ID_OUT\" = (SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1,sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00' WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (NOT (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" = stage.\"DIGEST\"))))"; - String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" (\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_ID_IN\", \"BATCH_ID_OUT\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") (SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage WHERE NOT (stage.\"DIGEST\" IN (SELECT sink.\"DIGEST\" FROM \"MYDB\".\"MAIN\" as sink WHERE sink.\"BATCH_ID_OUT\" = 999999999)))"; + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET sink.\"BATCH_ID_OUT\" = " + + "(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE " + + "UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN')-1,sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00.000000' " + + "WHERE (sink.\"BATCH_ID_OUT\" = 999999999) AND (NOT (EXISTS (SELECT * FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + + "WHERE ((sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" = stage.\"DIGEST\"))))"; + String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" " + + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_ID_IN\", 
\"BATCH_ID_OUT\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") " + + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\"," + + "(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + + "WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + + "WHERE NOT (stage.\"DIGEST\" IN (SELECT sink.\"DIGEST\" FROM \"MYDB\".\"MAIN\" as sink WHERE sink.\"BATCH_ID_OUT\" = 999999999)))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQueryWithUpperCase(), preActionsSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountUpperCase, preActionsSql.get(2)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQueryInUpperCase, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicatesUpperCase, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.dataErrorCheckSqlWithBizDateAsVersionUpperCase, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); @@ -95,14 +147,14 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\")))) " + @@ -111,7 +163,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 
'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"biz_date\" = stage.\"biz_date\"))))"; @@ -138,14 +190,14 @@ public void verifyUnitemporalSnapshotWithPartitionWithDefaultEmptyDataHandling(G } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\")))) " + @@ -154,7 +206,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_id_out\" = 999999999) AND (sink.\"biz_date\" IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))"; @@ -174,7 +226,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersWithDeleteTargetDataEmp List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (sink.\"biz_date\" IN ('2000-01-01 00:00:00','2000-01-02 00:00:00'))"; @@ -200,7 +252,7 @@ public void 
verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_id_out\" = (SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')-1,sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_id_out\" = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; @@ -208,7 +260,7 @@ public void verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"digest\", \"batch_id_in\", \"batch_id_out\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"digest\"," + - "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_id_out\" = 999999999)))"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotDateTimeBasedTest.java index acec611ab3a..4db9bb8ae4d 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotDateTimeBasedTest.java @@ -15,33 +15,36 @@ package org.finos.legend.engine.persistence.components.ingestmode.unitemporal; import org.finos.legend.engine.persistence.components.AnsiTestArtifacts; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; -import 
org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotBatchIdBasedTestCases; import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotDateTimeBasedTestCases; import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; public class UnitemporalSnapshotDateTimeBasedTest extends UnitmemporalSnapshotDateTimeBasedTestCases { String incomingRecordCount = "SELECT COUNT(*) as \"incomingRecordCount\" FROM \"mydb\".\"staging\" as stage"; - String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))"; + String rowsUpdated = "SELECT COUNT(*) as \"rowsUpdated\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))"; String rowsDeleted = "SELECT 0 as \"rowsDeleted\""; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsInserted\""; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00')))) as \"rowsTerminated\""; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_in\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsInserted\""; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM \"mydb\".\"main\" as sink2 WHERE ((sink2.\"id\" = sink.\"id\") AND (sink2.\"name\" = sink.\"name\")) AND (sink2.\"batch_time_in\" = '2000-01-01 00:00:00.000000')))) as \"rowsTerminated\""; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = 
operations.deduplicationAndVersioningSql(); + Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage " + @@ -50,12 +53,53 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '9999-12-31 23:59:59')))"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0)); Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(1)); + Assertions.assertTrue(deduplicationAndVersioningSql.isEmpty()); + Assertions.assertTrue(deduplicationAndVersioningErrorChecksSql.isEmpty()); + + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + } + + @Override + public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion(GeneratorResult operations) + { + List<String> preActionsSql = operations.preActionsSql(); + List<String> milestoningSql = operations.ingestSql(); + List<String> metadataIngestSql = operations.metadataIngestSql(); + List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + + "SET sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + + "AND (NOT (EXISTS " + + "(SELECT * FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\"))))"; + + String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + + "FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE sink.\"batch_time_out\" = '9999-12-31 23:59:59')))"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(getExpectedMetadataTableCreateQuery(), preActionsSql.get(1)); + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount,
preActionsSql.get(2)); + + Assertions.assertEquals(AnsiTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(AnsiTestArtifacts.expectedInsertIntoBaseTempStagingWithFilterDupsAndMaxVersion, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(AnsiTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Assertions.assertEquals(AnsiTestArtifacts.dataErrorCheckSqlWithBizDateVersion, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); @@ -71,7 +115,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandli List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE sink.\"batch_time_out\" = '9999-12-31 23:59:59'"; Assertions.assertEquals(AnsiTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0)); @@ -89,7 +133,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"MYDB\".\"MAIN\" as sink SET " + - "sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00' " + + "sink.\"BATCH_TIME_OUT\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59') AND " + "(NOT (EXISTS (SELECT * FROM \"MYDB\".\"STAGING\" as stage WHERE ((sink.\"ID\" = stage.\"ID\") " + "AND (sink.\"NAME\" = stage.\"NAME\")) AND (sink.\"DIGEST\" = stage.\"DIGEST\"))))"; @@ -97,7 +141,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene String expectedUpsertQuery = "INSERT INTO \"MYDB\".\"MAIN\" " + "(\"ID\", \"NAME\", \"AMOUNT\", \"BIZ_DATE\", \"DIGEST\", \"BATCH_TIME_IN\", \"BATCH_TIME_OUT\") " + "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"MYDB\".\"STAGING\" as stage " + "WHERE NOT (stage.\"DIGEST\" IN (SELECT sink.\"DIGEST\" FROM \"MYDB\".\"MAIN\" as sink " + "WHERE sink.\"BATCH_TIME_OUT\" = '9999-12-31 23:59:59')))"; @@ -110,14 +154,14 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink " + - "SET sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "SET sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') " + "AND (NOT (EXISTS " + "(SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\")))) " + @@ -126,7 +170,7 @@ public void 
verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND (sink.\"biz_date\" = stage.\"biz_date\"))))"; @@ -140,14 +184,14 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE \"mydb\".\"main\" as sink SET " + - "sink.\"batch_time_out\" = '2000-01-01 00:00:00' " + + "sink.\"batch_time_out\" = '2000-01-01 00:00:00.000000' " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + "(NOT (EXISTS (SELECT * FROM \"mydb\".\"staging\" as stage WHERE ((sink.\"id\" = stage.\"id\") AND " + "(sink.\"name\" = stage.\"name\")) AND (sink.\"digest\" = stage.\"digest\")))) AND " + @@ -156,7 +200,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR String expectedUpsertQuery = "INSERT INTO \"mydb\".\"main\" " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_time_in\", \"batch_time_out\") " + "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\"," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM \"mydb\".\"staging\" as stage " + "WHERE NOT (stage.\"digest\" IN (SELECT sink.\"digest\" FROM \"mydb\".\"main\" as sink " + "WHERE (sink.\"batch_time_out\" = '9999-12-31 23:59:59') AND " + "(sink.\"biz_date\" IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsAnsiTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsAnsiTest.java index 67a3337de07..c0177c01732 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsAnsiTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsAnsiTest.java @@ -24,13 +24,13 @@ public String getExpectedSqlForMetadata() { return "INSERT INTO bulk_load_batch_metadata " + "(\"batch_id\", \"table_name\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\", \"batch_source_info\")" + 
- " (SELECT 'batch_id_123','appeng_log_table_name','2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'',PARSE_JSON('my_lineage_value'))"; + " (SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'APPENG_LOG_TABLE_NAME'),'appeng_log_table_name','2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'',PARSE_JSON('my_lineage_value'))"; } public String getExpectedSqlForMetadataUpperCase() { return "INSERT INTO BULK_LOAD_BATCH_METADATA (\"BATCH_ID\", \"TABLE_NAME\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\", \"BATCH_SOURCE_INFO\") " + - "(SELECT 'batch_id_123','BULK_LOAD_TABLE_NAME','2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'',PARSE_JSON('my_lineage_value'))"; + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.\"BATCH_ID\"),0)+1 FROM BULK_LOAD_BATCH_METADATA as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"TABLE_NAME\") = 'BULK_LOAD_TABLE_NAME'),'BULK_LOAD_TABLE_NAME','2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'',PARSE_JSON('my_lineage_value'))"; } public RelationalSink getRelationalSink() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsTest.java index 65e5861a277..6e563621a28 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsTest.java @@ -37,7 +37,6 @@ public abstract class BulkLoadDatasetUtilsTest private final TransformOptions transformOptions = TransformOptions .builder() .executionTimestampClock(Clock.fixed(executionZonedDateTime.toInstant(), ZoneOffset.UTC)) - .bulkLoadBatchIdValue("batch_id_123") .bulkLoadBatchStatusPattern("") .build(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetDeduplicationHandlerTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetDeduplicationHandlerTest.java new file mode 100644 index 00000000000..6e73d57a9b8 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetDeduplicationHandlerTest.java @@ -0,0 +1,81 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.util; + +import org.finos.legend.engine.persistence.components.IngestModeTest; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicationHandler; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.relational.SqlPlan; +import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; +import org.finos.legend.engine.persistence.components.relational.transformer.RelationalTransformer; +import org.finos.legend.engine.persistence.components.transformer.TransformOptions; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import java.util.List; + +public class DatasetDeduplicationHandlerTest extends IngestModeTest +{ + private final TransformOptions transformOptions = TransformOptions.builder().build(); + Dataset stagingDataset = DatasetDefinition.builder() + .database("my_db") + .group("my_schema") + .name("my_table") + .alias("stage") + .schema(baseTableSchemaWithVersion) + .build(); + + String expectedSql = "SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\",COUNT(*) as \"legend_persistence_count\" " + + "FROM \"my_db\".\"my_schema\".\"my_table\" as stage " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"version\", stage.\"biz_date\""; + + @Test + public void testDatasetDeduplicationFailOnDuplicates() + { + Dataset dedupedDataset = FailOnDuplicates.builder().build().accept(new DatasetDeduplicationHandler(stagingDataset)); + Selection dedupedSelection = (Selection) dedupedDataset; + RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get(), transformOptions); + LogicalPlan logicalPlan = LogicalPlan.builder().addOps(dedupedSelection).build(); + SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); + List<String> list = physicalPlan.getSqlList(); + Assertions.assertEquals(expectedSql, list.get(0)); + } + + @Test + public void testDatasetDeduplicationFilterDuplicates() + { + Dataset dedupedDataset = FilterDuplicates.builder().build().accept(new DatasetDeduplicationHandler(stagingDataset)); + Selection dedupedSelection = (Selection) dedupedDataset; + RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get(), transformOptions); + LogicalPlan logicalPlan = LogicalPlan.builder().addOps(dedupedSelection).build(); + SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); + List<String> list = physicalPlan.getSqlList(); +
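// Note: FilterDuplicates is expected to reduce to the same COUNT(*) / GROUP BY selection as FailOnDuplicates above, + // which is why expectedSql is shared between the two tests; the strategies only diverge in the MAX_DUPLICATES + // error check the generator pairs with FailOnDuplicates (see maxDupsErrorCheckSql in the unitemporal snapshot tests). +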
Assertions.assertEquals(expectedSql, list.get(0)); + } + + @Test + public void testDatasetDeduplicationAllowDuplicates() + { + Dataset dedupedDataset = AllowDuplicates.builder().build().accept(new DatasetDeduplicationHandler(stagingDataset)); + Assertions.assertTrue(dedupedDataset instanceof DatasetDefinition); + DatasetDefinition dedupedDatasetDef = (DatasetDefinition) dedupedDataset; + Assertions.assertEquals(dedupedDatasetDef, stagingDataset); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetVersioningHandlerTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetVersioningHandlerTest.java new file mode 100644 index 00000000000..58255358e61 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/DatasetVersioningHandlerTest.java @@ -0,0 +1,119 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
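+ +// A rough sketch of the plan shapes pinned down below (a reading of the expected SQL, not an extra generator guarantee): +// MaxVersionStrategy wraps the staging dataset in DENSE_RANK() OVER (PARTITION BY <primary keys> ORDER BY <versioningField> DESC) +// and keeps only rank 1 per key, while AllVersionsStrategy keeps every row and surfaces the ascending rank as +// "legend_persistence_data_split".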
+ +package org.finos.legend.engine.persistence.components.util; + +import org.finos.legend.engine.persistence.components.IngestModeTest; +import org.finos.legend.engine.persistence.components.common.DatasetFilter; +import org.finos.legend.engine.persistence.components.common.FilterType; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.*; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DerivedDataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; +import org.finos.legend.engine.persistence.components.relational.SqlPlan; +import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; +import org.finos.legend.engine.persistence.components.relational.transformer.RelationalTransformer; +import org.finos.legend.engine.persistence.components.transformer.TransformOptions; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.List; + +public class DatasetVersioningHandlerTest extends IngestModeTest +{ + private final TransformOptions transformOptions = TransformOptions.builder().build(); + Dataset stagingDataset = DatasetDefinition.builder() + .database("my_db") + .group("my_schema") + .name("my_table") + .alias("stage") + .schema(baseTableSchemaWithVersion) + .build(); + + Dataset derivedStagingDataset = DerivedDataset.builder() + .database("my_db") + .group("my_schema") + .name("my_table") + .alias("stage") + .schema(baseTableSchemaWithVersion) + .addDatasetFilters(DatasetFilter.of("bizDate", FilterType.EQUAL_TO, "2020-01-01")) + .build(); + + List<String> primaryKeys = Arrays.asList("id", "name"); + + @Test + public void testVersioningHandlerNoVersioningStrategy() + { + Dataset versionedDataset = NoVersioningStrategy.builder().build().accept(new DatasetVersioningHandler(stagingDataset, primaryKeys)); + Assertions.assertTrue(versionedDataset instanceof DatasetDefinition); + DatasetDefinition versionedDatasetDef = (DatasetDefinition) versionedDataset; + Assertions.assertEquals(versionedDatasetDef, stagingDataset); + } + + @Test + public void testVersioningHandlerMaxVersionStrategy() + { + Dataset versionedDataset = MaxVersionStrategy.builder().versioningField("version").build().accept(new DatasetVersioningHandler(stagingDataset, primaryKeys)); + Selection versionedSelection = (Selection) versionedDataset; + RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get(), transformOptions); + LogicalPlan logicalPlan = LogicalPlan.builder().addOps(versionedSelection).build(); + SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); + List<String> list = physicalPlan.getSqlList(); + String expectedSql = "SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\",DENSE_RANK() OVER " + + "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_rank\" " + + "FROM \"my_db\".\"my_schema\".\"my_table\" as stage) as stage WHERE stage.\"legend_persistence_rank\" = 1"; + Assertions.assertEquals(expectedSql, list.get(0)); + } + + @Test +
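// Unlike MaxVersionStrategy, AllVersionsStrategy should filter nothing: every version survives and the ASC rank + // becomes "legend_persistence_data_split", the split key used to ingest versions in order (again inferred from + // expectedSql below rather than asserted separately). +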
public void testVersioningHandlerAllVersionsStrategy() + { + Dataset versionedDataset = AllVersionsStrategy.builder().versioningField("version").build().accept(new DatasetVersioningHandler(stagingDataset, primaryKeys)); + Selection versionedSelection = (Selection) versionedDataset; + RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get(), transformOptions); + LogicalPlan logicalPlan = LogicalPlan.builder().addOps(versionedSelection).build(); + SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); + List<String> list = physicalPlan.getSqlList(); + String expectedSql = "SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" ASC) as \"legend_persistence_data_split\" " + + "FROM \"my_db\".\"my_schema\".\"my_table\" as stage"; + Assertions.assertEquals(expectedSql, list.get(0)); + } + + @Test + public void testVersioningHandlerWithDeduplicationHandler() + { + DeduplicationStrategy deduplicationStrategy = FailOnDuplicates.builder().build(); + VersioningStrategy versioningStrategy = AllVersionsStrategy.builder().versioningField("version").build(); + Dataset dedupAndVersionedDataset = LogicalPlanUtils.getDedupedAndVersionedDataset(deduplicationStrategy, versioningStrategy, derivedStagingDataset, primaryKeys); + + Selection versionedSelection = (Selection) dedupAndVersionedDataset; + RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get(), transformOptions); + LogicalPlan logicalPlan = LogicalPlan.builder().addOps(versionedSelection).build(); + SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); + List<String> list = physicalPlan.getSqlList(); + String expectedSql = "SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\"," + + "stage.\"legend_persistence_count\" as \"legend_persistence_count\"," + + "DENSE_RANK() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" ASC) as \"legend_persistence_data_split\" " + + "FROM (SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\"," + + "COUNT(*) as \"legend_persistence_count\" FROM \"my_db\".\"my_schema\".\"my_table\" as stage WHERE stage.\"bizDate\" = '2020-01-01' " + + "GROUP BY stage.\"id\", stage.\"name\", stage.\"version\", stage.\"biz_date\") as stage"; + Assertions.assertEquals(expectedSql, list.get(0)); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LockInfoUtilsTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LockInfoUtilsTest.java index 23007edbd49..254b721fa3b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LockInfoUtilsTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LockInfoUtilsTest.java @@ -49,7 +49,7 @@ public void testInitializeLockInfo() SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); List<String> list = physicalPlan.getSqlList(); String expectedSql = "INSERT INTO main_table_lock
(\"insert_ts_utc\", \"table_name\") " + - "(SELECT '2000-01-01 00:00:00','main' WHERE NOT (EXISTS (SELECT * FROM main_table_lock as main_table_lock)))"; + "(SELECT '2000-01-01 00:00:00.000000','main' WHERE NOT (EXISTS (SELECT * FROM main_table_lock as main_table_lock)))"; Assertions.assertEquals(expectedSql, list.get(0)); } @@ -62,7 +62,7 @@ public void testUpdateMetaStore() LogicalPlan logicalPlan = LogicalPlan.builder().addOps(operation).build(); SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); List list = physicalPlan.getSqlList(); - String expectedSql = "UPDATE main_table_lock as main_table_lock SET main_table_lock.\"last_used_ts_utc\" = '2000-01-01 00:00:00'"; + String expectedSql = "UPDATE main_table_lock as main_table_lock SET main_table_lock.\"last_used_ts_utc\" = '2000-01-01 00:00:00.000000'"; Assertions.assertEquals(expectedSql, list.get(0)); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtilsTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtilsTest.java index afc8c688070..fd0bd3c28d4 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtilsTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/LogicalPlanUtilsTest.java @@ -17,9 +17,10 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.finos.legend.engine.persistence.components.IngestModeTest; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.DatasetDeduplicator; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionComparator; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersioningStrategy; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; @@ -38,71 +39,8 @@ import java.util.Map; import com.fasterxml.jackson.core.JsonProcessingException; -import static org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningComparator.GREATER_THAN; - public class LogicalPlanUtilsTest extends IngestModeTest { - - @Test - public void testDeduplicateByMaxVersion() - { - DatasetDefinition dataset = DatasetDefinition.builder() - .database("my_db") - .group("my_schema") - .name("my_table") - .alias("stage") - .schema(baseTableSchemaWithVersion) - .build(); - - RelationalTransformer transformer = new 
RelationalTransformer(AnsiSqlSink.get()); - - List primaryKeys = Arrays.asList("id", "name"); - VersioningStrategy versioningStrategy = MaxVersionStrategy.builder().versioningField("version").performDeduplication(true).versioningComparator(GREATER_THAN).build(); - Selection selection = (Selection) versioningStrategy.accept(new DatasetDeduplicator(dataset, primaryKeys)); - LogicalPlan logicalPlan = LogicalPlan.builder().addOps(selection).build(); - SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); - List list = physicalPlan.getSqlList(); - - String expectedSelectQuery = "(SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\" FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\"," + - "ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" " + - "FROM \"my_db\".\"my_schema\".\"my_table\" as stage) as stage " + - "WHERE stage.\"legend_persistence_row_num\" = 1) as stage"; - Assertions.assertEquals(expectedSelectQuery, list.get(0)); - } - - @Test - public void testDeduplicateByMaxVersionAndFilterDataset() - { - RelationalTransformer transformer = new RelationalTransformer(AnsiSqlSink.get()); - List primaryKeys = Arrays.asList("id", "name"); - - Dataset dataset = DerivedDataset.builder() - .database("my_db") - .group("my_schema") - .name("my_table") - .alias("stage") - .schema(baseTableSchemaWithVersion) - .addDatasetFilters(DatasetFilter.of("biz_date", FilterType.GREATER_THAN, "2020-01-01")) - .addDatasetFilters(DatasetFilter.of("biz_date", FilterType.LESS_THAN, "2020-01-03")) - .build(); - - VersioningStrategy versioningStrategy = MaxVersionStrategy.builder().versioningField("version").performDeduplication(true).versioningComparator(GREATER_THAN).build(); - Selection selection = (Selection) versioningStrategy.accept(new DatasetDeduplicator(dataset, primaryKeys)); - - LogicalPlan logicalPlan = LogicalPlan.builder().addOps(selection).build(); - SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); - List list = physicalPlan.getSqlList(); - - String expectedSelectQuery = "(SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\" FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"version\",stage.\"biz_date\",ROW_NUMBER() OVER " + - "(PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" " + - "FROM \"my_db\".\"my_schema\".\"my_table\" as stage " + - "WHERE (stage.\"biz_date\" > '2020-01-01') AND (stage.\"biz_date\" < '2020-01-03')) as stage " + - "WHERE stage.\"legend_persistence_row_num\" = 1) as stage"; - Assertions.assertEquals(expectedSelectQuery, list.get(0)); - } - @Test public void testJsonifyDatasetFilters() throws JsonProcessingException { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/MetadataUtilsTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/MetadataUtilsTest.java index 85e14ea4ee2..97defb2c26e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/MetadataUtilsTest.java +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/test/java/org/finos/legend/engine/persistence/components/util/MetadataUtilsTest.java @@ -143,7 +143,7 @@ public void testInsertMetaStore() LogicalPlan logicalPlan = LogicalPlan.builder().addOps(operation).build(); SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); List list = physicalPlan.getSqlList(); - String expectedSql = "INSERT INTO " + lowerCaseTableName() + " (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\") (SELECT 'main',(SELECT COALESCE(MAX(" + lowerCaseTableName() + ".\"table_batch_id\"),0)+1 FROM " + lowerCaseTableName() + " as " + lowerCaseTableName() + " WHERE UPPER(" + lowerCaseTableName() + ".\"table_name\") = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + String expectedSql = "INSERT INTO " + lowerCaseTableName() + " (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\") (SELECT 'main',(SELECT COALESCE(MAX(" + lowerCaseTableName() + ".\"table_batch_id\"),0)+1 FROM " + lowerCaseTableName() + " as " + lowerCaseTableName() + " WHERE UPPER(" + lowerCaseTableName() + ".\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; Assertions.assertEquals(expectedSql, list.get(0)); } @@ -158,7 +158,7 @@ public void testInsertMetaStoreWithUpperCase() SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); List list = physicalPlan.getSqlList(); - String expectedSql = "INSERT INTO " + upperCaseTableName() + " (\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\") (SELECT 'main',(SELECT COALESCE(MAX(" + lowerCaseTableName() + ".\"TABLE_BATCH_ID\"),0)+1 FROM " + upperCaseTableName() + " as " + lowerCaseTableName() + " WHERE UPPER(" + lowerCaseTableName() + ".\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + String expectedSql = "INSERT INTO " + upperCaseTableName() + " (\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\") (SELECT 'main',(SELECT COALESCE(MAX(" + lowerCaseTableName() + ".\"TABLE_BATCH_ID\"),0)+1 FROM " + upperCaseTableName() + " as " + lowerCaseTableName() + " WHERE UPPER(" + lowerCaseTableName() + ".\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; Assertions.assertEquals(expectedSql, list.get(0)); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/pom.xml index 80e32e5de49..92784d1c6a8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-persistence-component - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -56,6 +56,12 @@ legend-engine-xt-persistence-component-relational-ansi + + + org.immutables + value + + com.google.cloud @@ -66,17 +72,37 @@ commons-logging commons-logging + + com.google.errorprone + error_prone_annotations + + + com.google.code.findbugs + jsr305 + com.google.cloud google-cloud-core 
provided + + + com.google.code.findbugs + jsr305 + + com.google.auth google-auth-library-oauth2-http provided + + + com.google.code.findbugs + jsr305 + + org.slf4j diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/BigQuerySink.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/BigQuerySink.java index ba31be6adad..62c3df96ff0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/BigQuerySink.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/BigQuerySink.java @@ -14,6 +14,8 @@ package org.finos.legend.engine.persistence.components.relational.bigquery; +import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.executor.Executor; import org.finos.legend.engine.persistence.components.logicalplan.datasets.ClusterKey; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DataType; @@ -21,13 +23,19 @@ import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; import org.finos.legend.engine.persistence.components.logicalplan.datasets.PartitionKey; import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDatasetReference; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesSelection; import org.finos.legend.engine.persistence.components.logicalplan.operations.Alter; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Copy; import org.finos.legend.engine.persistence.components.logicalplan.operations.Create; import org.finos.legend.engine.persistence.components.logicalplan.operations.Delete; import org.finos.legend.engine.persistence.components.logicalplan.operations.Truncate; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchEndTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.BatchStartTimestamp; import org.finos.legend.engine.persistence.components.logicalplan.values.DatetimeValue; +import org.finos.legend.engine.persistence.components.logicalplan.values.DigestUdf; +import org.finos.legend.engine.persistence.components.logicalplan.values.StagedFilesFieldValue; import org.finos.legend.engine.persistence.components.optimizer.Optimizer; import org.finos.legend.engine.persistence.components.relational.CaseConversion; import org.finos.legend.engine.persistence.components.relational.RelationalSink; @@ -35,6 +43,8 @@ import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink; import org.finos.legend.engine.persistence.components.relational.ansi.optimizer.LowerCaseOptimizer; import 
org.finos.legend.engine.persistence.components.relational.ansi.optimizer.UpperCaseOptimizer; +import org.finos.legend.engine.persistence.components.relational.api.IngestStatus; +import org.finos.legend.engine.persistence.components.relational.api.IngestorResult; import org.finos.legend.engine.persistence.components.relational.api.RelationalConnection; import org.finos.legend.engine.persistence.components.relational.bigquery.executor.BigQueryConnection; import org.finos.legend.engine.persistence.components.relational.bigquery.executor.BigQueryExecutor; @@ -45,12 +55,18 @@ import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.BatchEndTimestampVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.BatchStartTimestampVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.ClusterKeyVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.CopyVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.DatetimeValueVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.DeleteVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.DigestUdfVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.FieldVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.PartitionKeyVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.SQLCreateVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.SchemaDefinitionVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.StagedFilesDatasetReferenceVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.StagedFilesDatasetVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.StagedFilesFieldValueVisitor; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.StagedFilesSelectionVisitor; import org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor.TruncateVisitor; import org.finos.legend.engine.persistence.components.relational.sql.TabularData; import org.finos.legend.engine.persistence.components.relational.sqldom.SqlGen; @@ -66,6 +82,9 @@ import java.util.Optional; import java.util.Set; +import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_ID_PATTERN; +import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_START_TS_PATTERN; + public class BigQuerySink extends AnsiSqlSink { private static final RelationalSink INSTANCE; @@ -97,6 +116,12 @@ public class BigQuerySink extends AnsiSqlSink logicalPlanVisitorByClass.put(DatetimeValue.class, new DatetimeValueVisitor()); logicalPlanVisitorByClass.put(BatchEndTimestamp.class, new BatchEndTimestampVisitor()); logicalPlanVisitorByClass.put(BatchStartTimestamp.class, new BatchStartTimestampVisitor()); + logicalPlanVisitorByClass.put(Copy.class, new CopyVisitor()); + logicalPlanVisitorByClass.put(StagedFilesFieldValue.class, new StagedFilesFieldValueVisitor()); + logicalPlanVisitorByClass.put(StagedFilesDataset.class, new StagedFilesDatasetVisitor()); + logicalPlanVisitorByClass.put(StagedFilesSelection.class, new 
StagedFilesSelectionVisitor()); + logicalPlanVisitorByClass.put(StagedFilesDatasetReference.class, new StagedFilesDatasetReferenceVisitor()); + logicalPlanVisitorByClass.put(DigestUdf.class, new DigestUdfVisitor()); LOGICAL_PLAN_VISITOR_BY_CLASS = Collections.unmodifiableMap(logicalPlanVisitorByClass); Map<DataType, Set<DataType>> implicitDataTypeMapping = new HashMap<>(); @@ -229,4 +254,32 @@ public Field createNewField(Field evolveTo, Field evolveFrom, Optional .identity(evolveTo.identity()).unique(evolveTo.unique()) .defaultValue(evolveTo.defaultValue()).type(modifiedFieldType).build(); } + + @Override + public IngestorResult performBulkLoad(Datasets datasets, Executor<SqlGen, TabularData, SqlPlan> executor, SqlPlan ingestSqlPlan, Map<StatisticName, SqlPlan> statisticsSqlPlan, Map<String, String> placeHolderKeyValues) + { + BigQueryExecutor bigQueryExecutor = (BigQueryExecutor) executor; + Map<StatisticName, Object> stats = bigQueryExecutor.executeLoadPhysicalPlanAndGetStats(ingestSqlPlan, placeHolderKeyValues); + + IngestorResult.Builder resultBuilder = IngestorResult.builder() + .updatedDatasets(datasets) + .putAllStatisticByName(stats) + .ingestionTimestampUTC(placeHolderKeyValues.get(BATCH_START_TS_PATTERN)) + .batchId(Optional.ofNullable(placeHolderKeyValues.containsKey(BATCH_ID_PATTERN) ? Integer.valueOf(placeHolderKeyValues.get(BATCH_ID_PATTERN)) : null)); + IngestorResult result; + + if ((long) stats.get(StatisticName.ROWS_WITH_ERRORS) == 0) + { + result = resultBuilder + .status(IngestStatus.SUCCEEDED) + .build(); + } + else + { + result = resultBuilder + .status(IngestStatus.FAILED) + .build(); + } + return result; + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryExecutor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryExecutor.java index ddddb6a06e7..046d2088a44 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryExecutor.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryExecutor.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.persistence.components.relational.bigquery.executor; +import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.executor.Executor; import org.finos.legend.engine.persistence.components.executor.RelationalExecutionHelper; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; @@ -70,6 +71,19 @@ public void executePhysicalPlan(SqlPlan physicalPlan, Map placeh } } + public Map<StatisticName, Object> executeLoadPhysicalPlanAndGetStats(SqlPlan physicalPlan, Map<String, String> placeholderKeyValues) + { + List<String> sqlList = physicalPlan.getSqlList(); + + // Load statement (BigQuery does not support running a load inside a transaction) + Map<StatisticName, Object> loadStats = bigQueryHelper.executeLoadStatement(getEnrichedSql(placeholderKeyValues, sqlList.get(0))); + + // The isolation level of BigQuery is Snapshot, + // so the Insert statement has to run in a new transaction so that it can see the changes of
Load + bigQueryHelper.executeStatementInANewTransaction(getEnrichedSql(placeholderKeyValues, sqlList.get(1))); + return loadStats; + } + @Override public List executePhysicalPlanAndGetResults(SqlPlan physicalPlan) { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryHelper.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryHelper.java index 84c5ed5f186..261226baa04 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryHelper.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryHelper.java @@ -17,6 +17,7 @@ import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.FieldList; import com.google.cloud.bigquery.TableId; +import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.executor.TypeMapping; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.conditions.And; @@ -326,6 +327,12 @@ public void executeStatement(String sql) executeStatements(sqls); } + public void executeStatementInANewTransaction(String sql) + { + List sqls = Collections.singletonList(sql); + executeStatementsInANewTransaction(sqls); + } + // Execute statements in a transaction - either use an existing one or use a new one public void executeStatements(List sqls) { @@ -345,45 +352,50 @@ public void executeStatements(List sqls) } else { - BigQueryTransactionManager txManager = null; - try + executeStatementsInANewTransaction(sqls); + } + } + + public void executeStatementsInANewTransaction(List sqls) + { + BigQueryTransactionManager txManager = null; + try + { + txManager = new BigQueryTransactionManager(bigQuery); + txManager.beginTransaction(); + for (String sql : sqls) { - txManager = new BigQueryTransactionManager(bigQuery); - txManager.beginTransaction(); - for (String sql : sqls) - { - txManager.executeInCurrentTransaction(sql); - } - txManager.commitTransaction(); + txManager.executeInCurrentTransaction(sql); } - catch (Exception e) + txManager.commitTransaction(); + } + catch (Exception e) + { + LOGGER.error("Error executing SQL statements: " + sqls, e); + if (txManager != null) { - LOGGER.error("Error executing SQL statements: " + sqls, e); - if (txManager != null) + try { - try - { - txManager.revertTransaction(); - } - catch (InterruptedException e2) - { - throw new RuntimeException(e2); - } + txManager.revertTransaction(); + } + catch (InterruptedException e2) + { + throw new RuntimeException(e2); } - throw new RuntimeException(e); } - finally + throw new RuntimeException(e); + } + finally + { + if (txManager != null) { - if (txManager != null) + try { - try - { - txManager.close(); - } - catch (InterruptedException e) - { - LOGGER.error("Error closing transaction manager.", e); - } + txManager.close(); + } + 
catch (InterruptedException e) + { + LOGGER.error("Error closing transaction manager.", e); } } } @@ -424,6 +436,34 @@ public List> executeQuery(String sql) } } + public Map executeLoadStatement(String sql) + { + BigQueryTransactionManager txManager = null; + try + { + txManager = new BigQueryTransactionManager(bigQuery); + return txManager.executeLoadStatement(sql); + } + catch (Exception e) + { + throw new RuntimeException("Error executing SQL query: " + sql, e); + } + finally + { + if (txManager != null) + { + try + { + txManager.close(); + } + catch (InterruptedException e) + { + LOGGER.error("Error closing transaction manager.", e); + } + } + } + } + @Override public void close() { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryTransactionManager.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryTransactionManager.java index 7db1a4c5ad8..9f7b4e376a8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryTransactionManager.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/executor/BigQueryTransactionManager.java @@ -22,8 +22,9 @@ import com.google.cloud.bigquery.Job; import com.google.cloud.bigquery.JobId; import com.google.cloud.bigquery.JobInfo; -import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.JobStatistics; import com.google.cloud.bigquery.QueryJobConfiguration; +import org.finos.legend.engine.persistence.components.common.StatisticName; import java.util.ArrayList; import java.util.Arrays; @@ -98,6 +99,22 @@ public boolean executeInCurrentTransaction(String sql) throws InterruptedExcepti return job.getStatus().getError() == null; } + public Map executeLoadStatement(String sql) throws InterruptedException + { + Map stats = new HashMap<>(); + + Job job = this.executeSql(sql); + JobStatistics.QueryStatistics queryStatistics = job.getStatistics(); + + long recordsWritten = queryStatistics.getQueryPlan().get(0).getRecordsWritten(); + long recordsRead = queryStatistics.getQueryPlan().get(0).getRecordsRead(); + + stats.put(StatisticName.ROWS_INSERTED, recordsWritten); + stats.put(StatisticName.ROWS_WITH_ERRORS, recordsRead - recordsWritten); + + return stats; + } + public List> convertResultSetToList(String sql) { try diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/logicalplan/datasets/BigQueryStagedFilesDatasetPropertiesAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/logicalplan/datasets/BigQueryStagedFilesDatasetPropertiesAbstract.java new file mode 100644 index 00000000000..088fed20cb4 --- 
/dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/logicalplan/datasets/BigQueryStagedFilesDatasetPropertiesAbstract.java @@ -0,0 +1,39 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +package org.finos.legend.engine.persistence.components.relational.bigquery.logicalplan.datasets; + +import org.finos.legend.engine.persistence.components.common.FileFormat; +import org.finos.legend.engine.persistence.components.common.LoadOptions; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDatasetProperties; +import org.immutables.value.Value; + +import java.util.Optional; + + +@Value.Immutable +@Value.Style( + typeAbstract = "*Abstract", + typeImmutable = "*", + jdkOnly = true, + optionalAcceptNullable = true, + strictBuilder = true +) +public interface BigQueryStagedFilesDatasetPropertiesAbstract extends StagedFilesDatasetProperties +{ + FileFormat fileFormat(); + + Optional<LoadOptions> loadOptions(); +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/CopyVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/CopyVisitor.java new file mode 100644 index 00000000000..b383f1f6b80 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/CopyVisitor.java @@ -0,0 +1,42 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
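Usage note: the @Value.Immutable interface above generates a concrete BigQueryStagedFilesDatasetProperties class with a builder. A minimal construction sketch, assuming the Immutables-generated builder API and a matching builder on LoadOptions; the bucket URI and CSV options are hypothetical:

BigQueryStagedFilesDatasetProperties properties = BigQueryStagedFilesDatasetProperties.builder()
    .fileFormat(FileFormat.CSV)
    .loadOptions(LoadOptions.builder()
        .fieldDelimiter(",")     // hypothetical CSV option
        .skipLeadingRows(1L)     // hypothetical; numeric type assumed
        .build())
    .addAllFiles(Collections.singletonList("gs://my-bucket/staging/orders.csv")) // hypothetical URI; files() is inherited from StagedFilesDatasetProperties
    .build();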
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; + +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanNode; +import org.finos.legend.engine.persistence.components.logicalplan.operations.Copy; +import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; +import org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.statements.CopyStatement; +import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; +import org.finos.legend.engine.persistence.components.transformer.VisitorContext; + +import java.util.ArrayList; +import java.util.List; + +public class CopyVisitor implements LogicalPlanVisitor<Copy> +{ + + @Override + public VisitorResult visit(PhysicalPlanNode prev, Copy current, VisitorContext context) + { + CopyStatement copyStatement = new CopyStatement(); + prev.push(copyStatement); + + List<LogicalPlanNode> logicalPlanNodes = new ArrayList<>(); + logicalPlanNodes.add(current.sourceDataset()); + logicalPlanNodes.add(current.targetDataset()); + logicalPlanNodes.addAll(current.fields()); + return new VisitorResult(copyStatement, logicalPlanNodes); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/DigestUdfVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/DigestUdfVisitor.java new file mode 100644 index 00000000000..c6b5faec90e --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/DigestUdfVisitor.java @@ -0,0 +1,40 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
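Context for CopyVisitor above: the CopyStatement it pushes is defined later in this diff and renders as a BigQuery LOAD DATA statement, with the source dataset, target dataset, and fields re-entering the visitor pipeline as children. An illustrative final rendering, with a hypothetical table, column list, and file URI:

// LOAD DATA OVERWRITE `my_dataset`.`my_table`
// (`id` INT64,`name` STRING)
// FROM FILES (uris=['gs://my-bucket/staging/orders.csv'], format='CSV')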
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; + +import org.finos.legend.engine.persistence.components.logicalplan.values.DigestUdf; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionImpl; +import org.finos.legend.engine.persistence.components.logicalplan.values.FunctionName; +import org.finos.legend.engine.persistence.components.logicalplan.values.ObjectValue; +import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.values.Udf; +import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; +import org.finos.legend.engine.persistence.components.transformer.VisitorContext; + +import java.util.Arrays; + +public class DigestUdfVisitor implements LogicalPlanVisitor<DigestUdf> +{ + + @Override + public VisitorResult visit(PhysicalPlanNode prev, DigestUdf current, VisitorContext context) + { + Udf udf = new Udf(context.quoteIdentifier(), current.udfName()); + prev.push(udf); + + FunctionImpl function = FunctionImpl.builder().functionName(FunctionName.TO_JSON).addValue(ObjectValue.of(current.dataset().orElseThrow(IllegalStateException::new).datasetReference().alias())).build(); + return new VisitorResult(udf, Arrays.asList(function)); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetReferenceVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetReferenceVisitor.java new file mode 100644 index 00000000000..0c5e7d91bc5 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetReferenceVisitor.java @@ -0,0 +1,76 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
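To make DigestUdfVisitor above concrete: it pushes a Udf wrapping the configured digest function and revisits a TO_JSON call over the staged row's alias, so the rendered value expression is roughly as follows (the UDF name and alias are illustrative, not fixed by the code):

// `MY_DIGEST_UDF`(TO_JSON(stage))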
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; + +import org.finos.legend.engine.persistence.components.common.FileFormat; +import org.finos.legend.engine.persistence.components.common.LoadOptions; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDatasetReference; +import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; +import org.finos.legend.engine.persistence.components.relational.bigquery.logicalplan.datasets.BigQueryStagedFilesDatasetProperties; +import org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.expressions.table.StagedFilesTable; +import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; +import org.finos.legend.engine.persistence.components.transformer.VisitorContext; + +import java.util.HashMap; +import java.util.Map; + + +public class StagedFilesDatasetReferenceVisitor implements LogicalPlanVisitor<StagedFilesDatasetReference> +{ + @Override + public VisitorResult visit(PhysicalPlanNode prev, StagedFilesDatasetReference current, VisitorContext context) + { + if (!(current.properties() instanceof BigQueryStagedFilesDatasetProperties)) + { + throw new IllegalStateException("Only BigQueryStagedFilesDatasetProperties are supported for BigQuery Sink"); + } + BigQueryStagedFilesDatasetProperties datasetProperties = (BigQueryStagedFilesDatasetProperties) current.properties(); + + Map<String, Object> loadOptionsMap = new HashMap<>(); + FileFormat fileFormat = datasetProperties.fileFormat(); + loadOptionsMap.put("format", fileFormat.name()); + datasetProperties.loadOptions().ifPresent(options -> retrieveLoadOptions(fileFormat, options, loadOptionsMap)); + + StagedFilesTable stagedFilesTable = new StagedFilesTable(datasetProperties.files(), loadOptionsMap); + prev.push(stagedFilesTable); + + return new VisitorResult(null); + } + + private void retrieveLoadOptions(FileFormat fileFormat, LoadOptions loadOptions, Map<String, Object> loadOptionsMap) + { + switch (fileFormat) + { + case CSV: + loadOptions.fieldDelimiter().ifPresent(property -> loadOptionsMap.put("field_delimiter", property)); + loadOptions.encoding().ifPresent(property -> loadOptionsMap.put("encoding", property)); + loadOptions.nullMarker().ifPresent(property -> loadOptionsMap.put("null_marker", property)); + loadOptions.quote().ifPresent(property -> loadOptionsMap.put("quote", property)); + loadOptions.skipLeadingRows().ifPresent(property -> loadOptionsMap.put("skip_leading_rows", property)); + loadOptions.maxBadRecords().ifPresent(property -> loadOptionsMap.put("max_bad_records", property)); + loadOptions.compression().ifPresent(property -> loadOptionsMap.put("compression", property)); + break; + case JSON: + loadOptions.maxBadRecords().ifPresent(property -> loadOptionsMap.put("max_bad_records", property)); + loadOptions.compression().ifPresent(property -> loadOptionsMap.put("compression", property)); + break; + case AVRO: + case PARQUET: + return; + default: + throw new IllegalStateException("Unrecognized file format: " + fileFormat); + } + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/sql/visitors/BulkLoadBatchIdValueVisitor.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetVisitor.java similarity index 65% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/sql/visitors/BulkLoadBatchIdValueVisitor.java rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetVisitor.java index faf24aac182..b482a7e6e3b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-ansi/src/main/java/org/finos/legend/engine/persistence/components/relational/ansi/sql/visitors/BulkLoadBatchIdValueVisitor.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesDatasetVisitor.java @@ -12,20 +12,19 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.engine.persistence.components.relational.ansi.sql.visitors; +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; -import org.finos.legend.engine.persistence.components.logicalplan.values.BulkLoadBatchIdValue; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDatasetReference; import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; -import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.values.StringValue; import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; import org.finos.legend.engine.persistence.components.transformer.VisitorContext; -public class BulkLoadBatchIdValueVisitor implements LogicalPlanVisitor +public class StagedFilesDatasetVisitor implements LogicalPlanVisitor { @Override - public VisitorResult visit(PhysicalPlanNode prev, BulkLoadBatchIdValue current, VisitorContext context) + public VisitorResult visit(PhysicalPlanNode prev, StagedFilesDataset current, VisitorContext context) { - prev.push(new StringValue(context.bulkLoadBatchIdValue().orElseThrow(IllegalStateException::new), context.quoteIdentifier())); - return new VisitorResult(); + return new StagedFilesDatasetReferenceVisitor().visit(prev, (StagedFilesDatasetReference) current.datasetReference(), context); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesFieldValueVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesFieldValueVisitor.java new file mode 100644 index 00000000000..e2841175d99 --- /dev/null +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesFieldValueVisitor.java @@ -0,0 +1,36 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; + +import org.finos.legend.engine.persistence.components.logicalplan.values.StagedFilesFieldValue; +import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; +import org.finos.legend.engine.persistence.components.relational.bigquery.sql.BigQueryDataTypeMapping; +import org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.values.StagedFilesField; +import org.finos.legend.engine.persistence.components.relational.sqldom.schema.DataType; +import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; +import org.finos.legend.engine.persistence.components.transformer.VisitorContext; + +public class StagedFilesFieldValueVisitor implements LogicalPlanVisitor<StagedFilesFieldValue> +{ + + @Override + public VisitorResult visit(PhysicalPlanNode prev, StagedFilesFieldValue current, VisitorContext context) + { + DataType dataType = new BigQueryDataTypeMapping().getDataType(current.fieldType()); + StagedFilesField field = new StagedFilesField(context.quoteIdentifier(), current.fieldName(), dataType); + prev.push(field); + return new VisitorResult(null); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesSelectionVisitor.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesSelectionVisitor.java new file mode 100644 index 00000000000..c25d175be4d --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sql/visitor/StagedFilesSelectionVisitor.java @@ -0,0 +1,33 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
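For reference, StagedFilesFieldValueVisitor above is what produces each entry of the LOAD DATA column list: the logical field type is mapped through BigQueryDataTypeMapping and rendered as the quoted column name followed by the BigQuery type. For example (the mapping shown is illustrative), a staged field "id" of logical type INTEGER would render as:

// `id` INT64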
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sql.visitor; + +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDatasetReference; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesSelection; +import org.finos.legend.engine.persistence.components.physicalplan.PhysicalPlanNode; +import org.finos.legend.engine.persistence.components.transformer.LogicalPlanVisitor; +import org.finos.legend.engine.persistence.components.transformer.VisitorContext; + +public class StagedFilesSelectionVisitor implements LogicalPlanVisitor<StagedFilesSelection> +{ + + @Override + public VisitorResult visit(PhysicalPlanNode prev, StagedFilesSelection current, VisitorContext context) + { + StagedFilesDataset stagedFilesDataset = current.source(); + return new StagedFilesDatasetReferenceVisitor().visit(prev, (StagedFilesDatasetReference) stagedFilesDataset.datasetReference(), context); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/expressions/table/StagedFilesTable.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/expressions/table/StagedFilesTable.java new file mode 100644 index 00000000000..2a8d288eeb4 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/expressions/table/StagedFilesTable.java @@ -0,0 +1,111 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
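To illustrate where the delegation above ends up: both StagedFilesSelection and StagedFilesDataset collapse to the FILES (...) clause emitted by StagedFilesTable, defined next. For a hypothetical CSV input it looks like the following (option order depends on the backing map):

// FILES (uris=['gs://my-bucket/staging/orders.csv'], format='CSV', field_delimiter=',')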
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.expressions.table; + +import org.finos.legend.engine.persistence.components.relational.sqldom.SqlDomException; +import org.finos.legend.engine.persistence.components.relational.sqldom.common.Clause; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.expresssions.table.TableLike; +import org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils; + +import java.util.List; +import java.util.Map; + +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.ASSIGNMENT_OPERATOR; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.CLOSING_PARENTHESIS; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.CLOSING_SQUARE_BRACKET; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.COMMA; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.OPEN_PARENTHESIS; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.OPEN_SQUARE_BRACKET; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.WHITE_SPACE; + +public class StagedFilesTable extends TableLike +{ + private List<String> files; + private Map<String, Object> loadOptions; + + public StagedFilesTable(List<String> files, Map<String, Object> loadOptions) + { + this.files = files; + this.loadOptions = loadOptions; + } + + @Override + public void genSql(StringBuilder builder) throws SqlDomException + { + validate(); + + builder.append(Clause.FILES.get()); + builder.append(WHITE_SPACE); + + builder.append(OPEN_PARENTHESIS); + builder.append("uris"); + builder.append(ASSIGNMENT_OPERATOR); + builder.append(OPEN_SQUARE_BRACKET); + for (int ctr = 0; ctr < files.size(); ctr++) + { + builder.append(SqlGenUtils.singleQuote(files.get(ctr))); + if (ctr < (files.size() - 1)) + { + builder.append(COMMA); + } + } + builder.append(CLOSING_SQUARE_BRACKET); + + if (loadOptions != null && loadOptions.size() > 0) + { + builder.append(COMMA); + builder.append(WHITE_SPACE); + int ctr = 0; + for (String option : loadOptions.keySet()) + { + ctr++; + builder.append(option); + builder.append(ASSIGNMENT_OPERATOR); + if (loadOptions.get(option) instanceof String) + { + builder.append(SqlGenUtils.singleQuote(loadOptions.get(option))); + } + else + { + // number + builder.append(loadOptions.get(option)); + } + + if (ctr < loadOptions.size()) + { + builder.append(COMMA + WHITE_SPACE); + } + } + } + builder.append(CLOSING_PARENTHESIS); + } + + @Override + public void push(Object node) + { + } + + void validate() throws SqlDomException + { + if (files == null || files.isEmpty()) + { + throw new SqlDomException("files are mandatory for loading from files"); + } + if (!loadOptions.containsKey("format")) + { + throw new SqlDomException("format is mandatory for loading from files"); + } + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/statements/CopyStatement.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/statements/CopyStatement.java new file mode 100644 index 00000000000..d1fe2feaf82 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/statements/CopyStatement.java @@ -0,0 +1,101 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.statements; + +import org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.expressions.table.StagedFilesTable; +import org.finos.legend.engine.persistence.components.relational.sqldom.SqlDomException; +import org.finos.legend.engine.persistence.components.relational.sqldom.SqlGen; +import org.finos.legend.engine.persistence.components.relational.sqldom.common.Clause; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.expresssions.table.Table; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.statements.DDLStatement; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.values.Value; + +import java.util.ArrayList; +import java.util.List; + +import static org.finos.legend.engine.persistence.components.relational.sqldom.common.Clause.LOAD_DATA; +import static org.finos.legend.engine.persistence.components.relational.sqldom.common.Clause.OVERWRITE; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.CLOSING_PARENTHESIS; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.COMMA; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.EMPTY; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.OPEN_PARENTHESIS; +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.WHITE_SPACE; + +public class CopyStatement implements DDLStatement +{ + private Table table; + private StagedFilesTable stagedFilesTable; + private List<Value> columns; + + public CopyStatement() + { + columns = new ArrayList<>(); + } + + /* + Generic COPY plan for BigQuery: + LOAD DATA OVERWRITE table_name + (COLUMN_LIST) + FROM FILES (LOAD_OPTIONS) + */ + @Override + public void genSql(StringBuilder builder) throws SqlDomException + { + validate(); + builder.append(LOAD_DATA.get()); + builder.append(WHITE_SPACE); + builder.append(OVERWRITE.get()); + builder.append(WHITE_SPACE); + table.genSqlWithoutAlias(builder); + builder.append(WHITE_SPACE); + + builder.append(OPEN_PARENTHESIS); + SqlGen.genSqlList(builder, columns, EMPTY,
COMMA); + builder.append(CLOSING_PARENTHESIS); + + builder.append(WHITE_SPACE + Clause.FROM.get() + WHITE_SPACE); + stagedFilesTable.genSql(builder); + } + + @Override + public void push(Object node) + { + if (node instanceof Table) + { + table = (Table) node; + } + else if (node instanceof StagedFilesTable) + { + stagedFilesTable = (StagedFilesTable) node; + } + else if (node instanceof Value) + { + columns.add((Value) node); + } + } + + void validate() throws SqlDomException + { + if (stagedFilesTable == null) + { + throw new SqlDomException("stagedFilesTable is mandatory for Copy Table Command"); + } + + if (table == null) + { + throw new SqlDomException("table is mandatory for Copy Table Command"); + } + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/values/StagedFilesField.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/values/StagedFilesField.java new file mode 100644 index 00000000000..0df2a15b8e8 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/main/java/org/finos/legend/engine/persistence/components/relational/bigquery/sqldom/schemaops/values/StagedFilesField.java @@ -0,0 +1,69 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
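A minimal sketch of how the visitors assemble the CopyStatement defined above; targetTable, stagedFiles, and idColumn are hypothetical stand-ins for the nodes that the Table, StagedFilesDatasetReference, and StagedFilesFieldValue visitors push during plan traversal:

CopyStatement copy = new CopyStatement();
copy.push(targetTable);   // a Table: rendered as LOAD DATA OVERWRITE <table>
copy.push(stagedFiles);   // a StagedFilesTable: rendered as FROM FILES (...)
copy.push(idColumn);      // Value nodes accumulate into the column list
StringBuilder sql = new StringBuilder();
copy.genSql(sql);         // yields the full LOAD DATA statement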
+ +package org.finos.legend.engine.persistence.components.relational.bigquery.sqldom.schemaops.values; + +import org.finos.legend.engine.persistence.components.relational.sqldom.SqlDomException; +import org.finos.legend.engine.persistence.components.relational.sqldom.schema.DataType; +import org.finos.legend.engine.persistence.components.relational.sqldom.schemaops.values.Value; +import org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils; + +import static org.finos.legend.engine.persistence.components.relational.sqldom.utils.SqlGenUtils.WHITE_SPACE; + +public class StagedFilesField extends Value +{ + private String columnName; + + private DataType dataType; + + public StagedFilesField(String quoteIdentifier, String columnName, DataType dataType) + { + super(quoteIdentifier); + this.columnName = columnName; + this.dataType = dataType; + } + + @Override + public void genSql(StringBuilder builder) throws SqlDomException + { + genSqlWithoutAlias(builder); + super.genSql(builder); + } + + @Override + public void genSqlWithoutAlias(StringBuilder builder) throws SqlDomException + { + validate(); + builder.append(SqlGenUtils.getQuotedField(columnName, getQuoteIdentifier())); + builder.append(WHITE_SPACE); + dataType.genSql(builder); + } + + @Override + public void push(Object node) + { + // no-op: a staged-files field has no child nodes to consume + } + + void validate() throws SqlDomException + { + if (columnName == null) + { + throw new SqlDomException("columnName is mandatory for StagedFilesField"); + } + if (dataType == null) + { + throw new SqlDomException("dataType is mandatory for StagedFilesField"); + } + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BigQueryEndToEndTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BigQueryEndToEndTest.java index f212f81aa25..b4655ac3a60 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BigQueryEndToEndTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BigQueryEndToEndTest.java @@ -156,7 +156,7 @@ protected IngestorResult ingestViaExecutorAndVerifyStagingFilters(IngestMode ing // Load csv data loadData(path, datasets.stagingDataset(), 1); RelationalConnection connection = BigQueryConnection.of(getBigQueryConnection()); - IngestorResult ingestorResult = ingestor.performFullIngestion(connection, datasets); + IngestorResult ingestorResult = ingestor.performFullIngestion(connection, datasets).get(0); verifyStagingFilters(ingestor, connection, datasets); return ingestorResult; @@ -383,7 +383,7 @@ public IngestorResult executePlansAndVerifyForCaseConversion(IngestMode ingestMo .caseConversion(CaseConversion.TO_UPPER) .build(); - IngestorResult result = ingestor.performFullIngestion(BigQueryConnection.of(getBigQueryConnection()), datasets); + IngestorResult result = ingestor.performFullIngestion(BigQueryConnection.of(getBigQueryConnection()), datasets).get(0); Map<StatisticName, Object> actualStats = result.statisticByName(); @@ -430,7 +430,7 @@ protected IngestorResult executePlansAndVerifyResults(IngestMode ingestMode, Pla
.enableSchemaEvolution(options.enableSchemaEvolution()) .schemaEvolutionCapabilitySet(userCapabilitySet) .build(); - IngestorResult result = ingestor.performFullIngestion(BigQueryConnection.of(getBigQueryConnection()), datasets); + IngestorResult result = ingestor.performFullIngestion(BigQueryConnection.of(getBigQueryConnection()), datasets).get(0); Map<StatisticName, Object> actualStats = result.statisticByName(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadExecutorTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadExecutorTest.java new file mode 100644 index 00000000000..559c2a0f5db --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadExecutorTest.java @@ -0,0 +1,195 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.e2e; + +import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.FileFormat; +import org.finos.legend.engine.persistence.components.common.LoadOptions; +import org.finos.legend.engine.persistence.components.ingestmode.BulkLoad; +import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.digest.NoDigestGenStrategy; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DataType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.relational.api.IngestStatus; +import org.finos.legend.engine.persistence.components.relational.api.IngestorResult; +import org.finos.legend.engine.persistence.components.relational.api.RelationalConnection; +import org.finos.legend.engine.persistence.components.relational.api.RelationalIngestor; +import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; +import org.finos.legend.engine.persistence.components.relational.bigquery.executor.BigQueryConnection; +import
org.finos.legend.engine.persistence.components.relational.bigquery.logicalplan.datasets.BigQueryStagedFilesDatasetProperties; +import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_WITH_ERRORS; + +@Disabled +public class BulkLoadExecutorTest extends BigQueryEndToEndTest +{ + private static final String APPEND_TIME = "append_time"; + private static final String BATCH_ID = "batch_id"; + private static final String TASK_ID_VALUE = "xyz123"; + private static final String COL_INT = "col_int"; + private static final String COL_STRING = "col_string"; + private static final String COL_DECIMAL = "col_decimal"; + private static final String COL_DATETIME = "col_datetime"; + private static final List<String> FILE_LIST = Arrays.asList("the uri to the staged_file1.csv on GCS", "the uri to the staged_file2.csv on GCS", "the uri to the staged_file3.csv on GCS"); + private static final List<String> BAD_FILE_LIST = Arrays.asList("the uri to the bad_file.csv on GCS", "the uri to the staged_file1.csv on GCS"); + private static Field col1 = Field.builder() + .name(COL_INT) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .build(); + private static Field col2 = Field.builder() + .name(COL_STRING) + .type(FieldType.of(DataType.STRING, Optional.empty(), Optional.empty())) + .build(); + private static Field col3 = Field.builder() + .name(COL_DECIMAL) + .type(FieldType.of(DataType.DECIMAL, 5, 2)) + .build(); + private static Field col4 = Field.builder() + .name(COL_DATETIME) + .type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())) + .build(); + + @Test + public void testMilestoning() throws IOException, InterruptedException + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(FILE_LIST).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .group("demo").name("append_log") + .schema(SchemaDefinition.builder().build()) + .build(); + + BulkLoadMetadataDataset bulkLoadMetadataDataset = BulkLoadMetadataDataset.builder().group("demo").name("bulk_load_batch_metadata").build(); + + Datasets datasets = Datasets.builder().mainDataset(mainDataset).stagingDataset(stagedFilesDataset).bulkLoadMetadataDataset(bulkLoadMetadataDataset).build(); + + // Clean up + delete("demo", "main"); + delete("demo", "staging"); + delete("demo", "batch_metadata"); + delete("demo", "append_log"); + delete("demo", "bulk_load_batch_metadata"); + + + RelationalIngestor ingestor = RelationalIngestor.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .build(); +
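+ // performFullIngestion returns one IngestorResult per ingested batch (see the
+ // .get(0) call sites updated in BigQueryEndToEndTest above); this bulk load
+ // produces a single batch, hence get(0) below.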
+ RelationalConnection connection = BigQueryConnection.of(getBigQueryConnection()); + IngestorResult ingestorResult = ingestor.performFullIngestion(connection, datasets).get(0); + + // Verify + List<Map<String, Object>> tableData = runQuery("select * from `demo`.`append_log` order by col_int asc"); + String expectedPath = "src/test/resources/expected/bulk_load/expected_table1.csv"; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID, APPEND_TIME}; + assertFileAndTableDataEquals(schema, expectedPath, tableData); + + long rowsInserted = (long) ingestorResult.statisticByName().get(ROWS_INSERTED); + long rowsWithErrors = (long) ingestorResult.statisticByName().get(ROWS_WITH_ERRORS); + Assertions.assertEquals(7, rowsInserted); + Assertions.assertEquals(0, rowsWithErrors); + Assertions.assertEquals(IngestStatus.SUCCEEDED, ingestorResult.status()); + } + + @Test + public void testMilestoningFailure() throws IOException, InterruptedException + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .loadOptions(LoadOptions.builder().maxBadRecords(10L).build()) + .addAllFiles(BAD_FILE_LIST).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .group("demo").name("append_log") + .schema(SchemaDefinition.builder().build()) + .build(); + + BulkLoadMetadataDataset bulkLoadMetadataDataset = BulkLoadMetadataDataset.builder().group("demo").name("bulk_load_batch_metadata").build(); + + Datasets datasets = Datasets.builder().mainDataset(mainDataset).stagingDataset(stagedFilesDataset).bulkLoadMetadataDataset(bulkLoadMetadataDataset).build(); + + // Clean up + delete("demo", "main"); + delete("demo", "staging"); + delete("demo", "batch_metadata"); + delete("demo", "append_log"); + delete("demo", "bulk_load_batch_metadata"); + + + RelationalIngestor ingestor = RelationalIngestor.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + RelationalConnection connection = BigQueryConnection.of(getBigQueryConnection()); + IngestorResult ingestorResult = ingestor.performFullIngestion(connection, datasets).get(0); + + // Verify + List<Map<String, Object>> tableData = runQuery("select * from `demo`.`append_log` order by col_int asc"); + String expectedPath = "src/test/resources/expected/bulk_load/expected_table2.csv"; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID, APPEND_TIME}; + assertFileAndTableDataEquals(schema, expectedPath, tableData); + + long rowsInserted = (long) ingestorResult.statisticByName().get(ROWS_INSERTED); + long rowsWithErrors = (long) ingestorResult.statisticByName().get(ROWS_WITH_ERRORS); + Assertions.assertEquals(4, rowsInserted); + Assertions.assertEquals(2, rowsWithErrors); + Assertions.assertEquals(IngestStatus.FAILED, ingestorResult.status()); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadGeneratorTest.java
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadGeneratorTest.java new file mode 100644 index 00000000000..410dabb69e6 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/e2e/BulkLoadGeneratorTest.java @@ -0,0 +1,138 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.e2e; + +import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.FileFormat; +import org.finos.legend.engine.persistence.components.ingestmode.BulkLoad; +import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.digest.NoDigestGenStrategy; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DataType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; +import org.finos.legend.engine.persistence.components.relational.api.RelationalGenerator; +import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; +import org.finos.legend.engine.persistence.components.relational.bigquery.logicalplan.datasets.BigQueryStagedFilesDatasetProperties; +import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +@Disabled +public class BulkLoadGeneratorTest extends BigQueryEndToEndTest +{ + private static final String APPEND_TIME = "append_time"; + private static final String BATCH_ID = "batch_id"; + private static final String TASK_ID_VALUE = "xyz123"; + private static final String COL_INT = "col_int"; + private static final String COL_STRING = "col_string"; + private static final String COL_DECIMAL = "col_decimal"; + private static final String COL_DATETIME = "col_datetime"; + private static final List<String> FILE_LIST = Arrays.asList("the uri to the staged_file1.csv on GCS", "the uri to the
staged_file2.csv on GCS", "the uri to the staged_file3.csv on GCS"); + private static Field col1 = Field.builder() + .name(COL_INT) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .build(); + private static Field col2 = Field.builder() + .name(COL_STRING) + .type(FieldType.of(DataType.STRING, Optional.empty(), Optional.empty())) + .build(); + private static Field col3 = Field.builder() + .name(COL_DECIMAL) + .type(FieldType.of(DataType.DECIMAL, 5, 2)) + .build(); + private static Field col4 = Field.builder() + .name(COL_DATETIME) + .type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())) + .build(); + + @Test + public void testMilestoning() throws IOException, InterruptedException + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(FILE_LIST).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .group("demo").name("append_log") + .schema(SchemaDefinition.builder().build()) + .build(); + + BulkLoadMetadataDataset bulkLoadMetadataDataset = BulkLoadMetadataDataset.builder().group("demo").name("bulk_load_batch_metadata").build(); + + Datasets datasets = Datasets.builder().mainDataset(mainDataset).stagingDataset(stagedFilesDataset).bulkLoadMetadataDataset(bulkLoadMetadataDataset).build(); + + // Clean up + delete("demo", "main"); + delete("demo", "staging"); + delete("demo", "batch_metadata"); + delete("demo", "append_log"); + delete("demo", "bulk_load_batch_metadata"); + + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .bulkLoadBatchStatusPattern("{STATUS}") + .build(); + + GeneratorResult operations = generator.generateOperations(datasets); + List preActionsSqlList = operations.preActionsSql(); + List milestoningSqlList = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List postActionsSql = operations.postActionsSql(); + + List newMetadataIngestSql = new ArrayList<>(); + for (String metadataSql : metadataIngestSql) + { + String newSql = metadataSql.replace("{STATUS}", "SUCCEEDED"); + newMetadataIngestSql.add(newSql); + } + metadataIngestSql = newMetadataIngestSql; + + + ingest(preActionsSqlList, milestoningSqlList, metadataIngestSql, postActionsSql); + + // Verify + List> tableData = runQuery("select * from `demo`.`append_log` order by col_int asc"); + String expectedPath = "src/test/resources/expected/bulk_load/expected_table1.csv"; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID, APPEND_TIME}; + assertFileAndTableDataEquals(schema, expectedPath, tableData); + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java index 7cbf3000627..d0995b05d59 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java @@ -31,7 +31,6 @@ public class AppendOnlyTest extends org.finos.legend.engine.persistence.componen String rowsUpdated = "SELECT 0 as `rowsUpdated`"; String rowsTerminated = "SELECT 0 as `rowsTerminated`"; String rowsDeleted = "SELECT 0 as `rowsDeleted`"; - String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`staging` as stage"; @Override public RelationalSink getRelationalSink() @@ -40,7 +39,7 @@ public RelationalSink getRelationalSink() } @Override - public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations) + public void verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(GeneratorResult operations) { List<String> preActionsSqlList = operations.preActionsSql(); List<String> milestoningSqlList = operations.ingestSql(); @@ -52,45 +51,37 @@ public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); // Stats - verifyStats(operations); + Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); + Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + Assertions.assertNull(operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); } - @Override - public void verifyAppendOnlyAllowDuplicatesWithAuditing(GeneratorResult operations) - { - List<String> preActionsSqlList = operations.preActionsSql(); - List<String> milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + - "FROM `mydb`.`staging` as stage)"; - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableCreateQueryWithAuditAndNoPKs, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); - } @Override - public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges) + public void verifyAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges) { String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + - "FROM
`mydb`.`staging` as stage " + - "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))"; + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, generatorResults.get(0).preActionsSql().get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, generatorResults.get(0).preActionsSql().get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, generatorResults.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, generatorResults.get(0).deduplicationAndVersioningSql().get(1)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), generatorResults.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), generatorResults.get(1).ingestSql().get(0)); Assertions.assertEquals(2, generatorResults.size()); // Stats - String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage " + - "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; - String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`staging` as stage " + - "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String incomingRecordCount = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)"; Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), generatorResults.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(1)), generatorResults.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -101,72 +92,23 @@ public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List<Gener - { - List<String> preActionsSqlList = operations.preActionsSql(); - List<String> milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`) " + - "(SELECT * FROM `mydb`.`staging` as stage)"; - - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); - } - - @Override - public void
verifyAppendOnlyFailOnDuplicatesWithAuditing(GeneratorResult operations) - { - List<String> preActionsSqlList = operations.preActionsSql(); - List<String> milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + - "FROM `mydb`.`staging` as stage)"; - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableWithAuditNotPKCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - verifyStats(operations); - } - - @Override - public void verifyAppendOnlyFilterDuplicatesNoAuditing(GeneratorResult operations) - { - List<String> preActionsSqlList = operations.preActionsSql(); - List<String> milestoningSqlList = operations.ingestSql(); - - String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `biz_date`, `digest`) " + - "(SELECT * FROM `mydb`.`staging` as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + - "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + - "(sink.`digest` = stage.`digest`))))"; - - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); - Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); - - // Stats - Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); - Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); - Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); - Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); - } - - @Override - public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries) + public void verifyAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords(GeneratorResult queries) { List<String> preActionsSqlList = queries.preActionsSql(); List<String> milestoningSqlList = queries.ingestSql(); + List<String> deduplicationAndVersioningSql = queries.deduplicationAndVersioningSql(); String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') FROM `mydb`.`staging` as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " + - "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " + + "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + 
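+ // Dedup/versioning runs as a pair of statements asserted here: get(0) truncates the
+ // temp staging table (DELETE ... WHERE 1 = 1) and get(1) repopulates it, collapsing
+ // duplicates into a `legend_persistence_count` via COUNT(*) ... GROUP BY (see the
+ // expected SQL constants in BigQueryTestArtifacts).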
Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); List<String> postActionsSql = queries.postActionsSql(); @@ -175,6 +117,7 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries assertIfListsAreSameIgnoringOrder(expectedSQL, postActionsSql); // Stats + String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)"; Assertions.assertEquals(incomingRecordCount, queries.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsUpdated, queries.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); @@ -184,26 +127,30 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries } @Override - public void verifyAppendOnlyFilterDuplicatesWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) + public void verifyAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + - "FROM `mydb`.`staging` as stage " + - "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + - "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + - "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + - "(sink.`digest` = stage.`digest`)))))"; + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + + "(sink.`digest` = stage.`digest`)))))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, operations.get(0).preActionsSql().get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, operations.get(0).deduplicationAndVersioningSql().get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, operations.get(0).deduplicationAndVersioningSql().get(1)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); Assertions.assertEquals(2, operations.size()); // Stats - String incomingRecordCount = "SELECT COUNT(*) as
`incomingRecordCount` FROM `mydb`.`staging` as stage " + - "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + String incomingRecordCount = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)"; Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -220,14 +167,13 @@ public void verifyAppendOnlyWithUpperCaseOptimizer(GeneratorResult operations) List<String> preActionsSqlList = operations.preActionsSql(); List<String> milestoningSqlList = operations.ingestSql(); - String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`) " + - "(SELECT * FROM `MYDB`.`STAGING` as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink " + - "WHERE ((sink.`ID` = stage.`ID`) " + - "AND (sink.`NAME` = stage.`NAME`)) " + - "AND (sink.`DIGEST` = stage.`DIGEST`))))"; + String insertSql = "INSERT INTO `MYDB`.`MAIN` " + + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_UPDATE_TIME`) " + + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " + + "WHERE NOT (EXISTS " + + "(SELECT * FROM `MYDB`.`MAIN` as sink WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))"; - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestCreateQueryWithUpperCase, preActionsSqlList.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase, preActionsSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); } @@ -237,23 +183,71 @@ public void verifyAppendOnlyWithLessColumnsInStaging(GeneratorResult operations) List<String> preActionsSqlList = operations.preActionsSql(); List<String> milestoningSqlList = operations.ingestSql(); - String insertSql = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `digest`) " + - "(SELECT * FROM `mydb`.`staging` as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + - "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + - "(sink.`digest` = stage.`digest`))))"; + String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `digest`, `batch_update_time`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " + + "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; - Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0));
Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); } - private void verifyStats(GeneratorResult operations) + @Override + public void verifyAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords(GeneratorResult operations) { + List<String> preActionsSqlList = operations.preActionsSql(); + List<String> milestoningSqlList = operations.ingestSql(); + List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + + String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " + + "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); + + // Stats + String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; + String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)"; Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); + Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + } + + @Override + public void verifyAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords(GeneratorResult operations) + { + List<String> preActionsSqlList = operations.preActionsSql(); + List<String> milestoningSqlList = operations.ingestSql(); + List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + + String insertSql = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM `mydb`.`staging_legend_persistence_temp_staging` as stage)"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + 
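+ // Max-version variant: after the cleanup in get(0), get(1) reloads the temp staging
+ // table keeping only rows with DENSE_RANK() = 1 per (`id`, `name`), ranked by
+ // `biz_date` DESC (see expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates).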
Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(insertSql, milestoningSqlList.get(0)); + + // Stats + String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; + String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)"; + Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED)); Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED)); + Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BigQueryTestArtifacts.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BigQueryTestArtifacts.java index 34139376753..51f7e0eec10 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BigQueryTestArtifacts.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BigQueryTestArtifacts.java @@ -53,6 +53,30 @@ public class BigQueryTestArtifacts "`digest` STRING," + "PRIMARY KEY (`id`, `name`) NOT ENFORCED)"; + public static String expectedBaseTempStagingTableWithCount = "CREATE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INT64 NOT NULL," + + "`name` STRING NOT NULL," + + "`amount` FLOAT64," + + "`biz_date` DATE," + + "`legend_persistence_count` INT64)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCount = "CREATE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INT64 NOT NULL," + + "`name` STRING NOT NULL," + + "`amount` FLOAT64," + + "`biz_date` DATE," + + "`digest` STRING," + + "`legend_persistence_count` INT64)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit = "CREATE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INT64 NOT NULL," + + "`name` STRING NOT NULL," + + "`amount` FLOAT64," + + "`biz_date` DATE," + + "`digest` STRING," + + "`legend_persistence_count` INT64," + + "`data_split` INT64 NOT NULL)"; + public static String expectedBaseTablePlusDigestPlusVersionCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`(" + "`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + @@ -113,6 +137,15 @@ public class BigQueryTestArtifacts "`batch_update_time` DATETIME NOT NULL," + "PRIMARY KEY (`id`, `name`, `batch_update_time`) NOT ENFORCED)"; + public static String 
expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase = "CREATE TABLE IF NOT EXISTS `MYDB`.`MAIN`(" + + "`ID` INT64 NOT NULL," + + "`NAME` STRING NOT NULL," + + "`AMOUNT` FLOAT64," + + "`BIZ_DATE` DATE," + + "`DIGEST` STRING," + + "`BATCH_UPDATE_TIME` DATETIME NOT NULL," + + "PRIMARY KEY (`ID`, `NAME`, `BATCH_UPDATE_TIME`) NOT ENFORCED)"; + public static String expectedBaseTableWithAuditNotPKCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`(" + "`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + @@ -132,6 +165,8 @@ public class BigQueryTestArtifacts public static String expectedStagingCleanupQuery = "DELETE FROM `mydb`.`staging` as stage WHERE 1 = 1"; + public static String expectedTempStagingCleanupQuery = "DELETE FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE 1 = 1"; + public static String expectedDropTableQuery = "DROP TABLE IF EXISTS `mydb`.`staging` CASCADE"; public static String cleanUpMainTableSql = "DELETE FROM `mydb`.`main` as sink WHERE 1 = 1"; @@ -162,16 +197,16 @@ public class BigQueryTestArtifacts "`BATCH_ID_IN` INT64 NOT NULL,`BATCH_ID_OUT` INT64,PRIMARY KEY (`ID`, `NAME`, `BATCH_ID_IN`) NOT ENFORCED)"; public static String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`)" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_DATETIME(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'DONE')"; public static String expectedMetadataTableIngestWithStagingFiltersQuery = "INSERT INTO batch_metadata " + "(`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`, `staging_filters`) " + "(SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " + - "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')," + + "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')," + "CURRENT_DATETIME(),'DONE',PARSE_JSON('{\"batch_id_in\":{\"GT\":5}}'))"; public static String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`)" + - " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_DATETIME(),'DONE')"; + " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'DONE')"; public static String expectedMetadataTableIngestQueryWithPlaceHolders = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) " + "(SELECT 'main',{BATCH_ID_PATTERN},PARSE_DATETIME('%Y-%m-%d %H:%M:%S','{BATCH_START_TS_PATTERN}'),PARSE_DATETIME('%Y-%m-%d 
%H:%M:%S','{BATCH_END_TS_PATTERN}'),'DONE')"; @@ -228,11 +263,12 @@ public class BigQueryTestArtifacts "`digest` STRING," + "PRIMARY KEY (`id`, `name`, `validity_from_reference`) NOT ENFORCED)"; - public static String expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`" + + public static String expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`" + "(`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + "`amount` FLOAT64," + "`digest` STRING," + + "`version` INT64," + "`batch_id_in` INT64 NOT NULL," + "`batch_id_out` INT64," + "`batch_time_in` DATETIME," + @@ -241,11 +277,12 @@ public class BigQueryTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; - public static String expectedBitemporalMainTableWithDatetimeCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`" + + public static String expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`" + "(`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + "`amount` FLOAT64," + "`digest` STRING," + + "`version` INT64," + "`batch_time_in` DATETIME NOT NULL," + "`batch_time_out` DATETIME," + "`validity_from_target` DATETIME NOT NULL," + @@ -263,6 +300,18 @@ public class BigQueryTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; + public static String expectedBitemporalFromOnlyMainTableWithVersionCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`main`" + + "(`id` INT64 NOT NULL," + + "`name` STRING NOT NULL," + + "`amount` FLOAT64," + + "`digest` STRING," + + "`version` INT64," + + "`batch_id_in` INT64 NOT NULL," + + "`batch_id_out` INT64," + + "`validity_from_target` DATETIME NOT NULL," + + "`validity_through_target` DATETIME," + + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; + public static String expectedBitemporalFromOnlyStagingTableCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`staging`(" + "`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + @@ -317,6 +366,18 @@ public class BigQueryTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; + public static String expectedBitemporalFromOnlyTempTableWithVersionCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`temp`" + + "(`id` INT64 NOT NULL," + + "`name` STRING NOT NULL," + + "`amount` FLOAT64," + + "`digest` STRING," + + "`version` INT64," + + "`batch_id_in` INT64 NOT NULL," + + "`batch_id_out` INT64," + + "`validity_from_target` DATETIME NOT NULL," + + "`validity_through_target` DATETIME," + + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; + public static String expectedBitemporalFromOnlyTempTableBatchIdAndTimeBasedCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`temp`(" + "`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + @@ -361,12 +422,13 @@ public class BigQueryTestArtifacts "`delete_indicator` STRING," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`) NOT ENFORCED)"; - public static String expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE TABLE IF NOT EXISTS `mydb`.`stagingWithoutDuplicates`" + + public static String expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE TABLE IF NOT EXISTS 
`mydb`.`stagingWithoutDuplicates`" + "(`id` INT64 NOT NULL," + "`name` STRING NOT NULL," + "`amount` FLOAT64," + "`validity_from_reference` DATETIME NOT NULL," + "`digest` STRING," + + "`version` INT64," + "`data_split` INT64 NOT NULL," + "PRIMARY KEY (`id`, `name`, `validity_from_reference`, `data_split`) NOT ENFORCED)"; @@ -379,4 +441,60 @@ public class BigQueryTestArtifacts "`delete_indicator` STRING," + "PRIMARY KEY (`id`, `name`, `validity_from_reference`) NOT ENFORCED)"; + public static String expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`legend_persistence_count` as `legend_persistence_count` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`legend_persistence_count` as `legend_persistence_count`," + + "DENSE_RANK() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` " + + "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,COUNT(*) as `legend_persistence_count` " + + "FROM `mydb`.`staging` as stage GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`) as stage) " + + "as stage WHERE stage.`legend_persistence_rank` = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + + "COUNT(*) as `legend_persistence_count` FROM `mydb`.`staging` as stage " + + "GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count`,DENSE_RANK() OVER " + + "(PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,COUNT(*) as `legend_persistence_count` FROM " + + "`mydb`.`staging` as stage GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`) as stage) as stage " + + "WHERE stage.`legend_persistence_rank` = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`, `data_split`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count`,DENSE_RANK() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` ASC) as `data_split` " + + "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,COUNT(*) as `legend_persistence_count` FROM `mydb`.`staging` 
as stage " + + "GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`) as stage)"; + + public static String maxDupsErrorCheckSql = "SELECT MAX(stage.`legend_persistence_count`) as `MAX_DUPLICATES` FROM " + + "`mydb`.`staging_legend_persistence_temp_staging` as stage"; + + public static String dataErrorCheckSql = "SELECT MAX(`legend_persistence_distinct_rows`) as `MAX_DATA_ERRORS` FROM " + + "(SELECT COUNT(DISTINCT(`digest`)) as `legend_persistence_distinct_rows` FROM " + + "`mydb`.`staging_legend_persistence_temp_staging` as stage GROUP BY `id`, `name`, `biz_date`) as stage"; + + public static String expectedTempStagingCleanupQueryInUpperCase = "DELETE FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage WHERE 1 = 1"; + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicatesUpperCase = "INSERT INTO `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` " + + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `LEGEND_PERSISTENCE_COUNT`) " + + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`LEGEND_PERSISTENCE_COUNT` as `LEGEND_PERSISTENCE_COUNT` " + + "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`," + + "stage.`LEGEND_PERSISTENCE_COUNT` as `LEGEND_PERSISTENCE_COUNT`," + + "DENSE_RANK() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`BIZ_DATE` DESC) as `LEGEND_PERSISTENCE_RANK` " + + "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,COUNT(*) as `LEGEND_PERSISTENCE_COUNT` " + + "FROM `MYDB`.`STAGING` as stage GROUP BY stage.`ID`, stage.`NAME`, stage.`AMOUNT`, stage.`BIZ_DATE`, stage.`DIGEST`) as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_RANK` = 1)"; + public static String dataErrorCheckSqlUpperCase = "SELECT MAX(`LEGEND_PERSISTENCE_DISTINCT_ROWS`) as `MAX_DATA_ERRORS` " + + "FROM (SELECT COUNT(DISTINCT(`DIGEST`)) as `LEGEND_PERSISTENCE_DISTINCT_ROWS` " + + "FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage GROUP BY `ID`, `NAME`, `BIZ_DATE`) as stage"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,DENSE_RANK() " + + "OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` " + + "FROM `mydb`.`staging` as stage) as stage WHERE stage.`legend_persistence_rank` = 1)"; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java index 8b4ea1678a9..cc61379a451 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java +++ 
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
index 8b4ea1678a9..cc61379a451 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
@@ -71,24 +71,24 @@ public void verifyBitemporalDeltaBatchIdDateTimeBasedNoDeleteIndWithDataSplits(L
{
String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " +
"(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
- "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
+ "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " +
"WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " +
"((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
"((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
"(sink.`validity_from_target` = stage.`validity_from_reference`) AND (sink.`digest` <> stage.`digest`)))";
String expectedUpsertQuery = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `validity_from_target`, " +
- "`validity_through_target`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
+ "`validity_through_target`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
"(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`validity_through_reference`," +
- "stage.`digest`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
- "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
+ "stage.`digest`,stage.`version`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
+ "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
"FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE " +
"(sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
"AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
"(sink.`validity_from_target` = stage.`validity_from_reference`)))) AND " +
"((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))";
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0));
@@ -153,7 +153,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndNoDataSplits(Generator
public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List operations, List dataSplitRanges)
{
String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
- "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
+ "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " +
"WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " +
"(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
"WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND " +
@@ -162,10 +162,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List
stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))";
String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
- "(`id`, `name`, `amount`, `validity_from_target`, `validity_through_target`, `digest`, " +
+ "(`id`, `name`, `amount`, `validity_from_target`, `validity_through_target`, `digest`, `version`, " +
"`batch_time_in`, `batch_time_out`) " +
"(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`," +
- "stage.`validity_through_reference`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')," +
+ "stage.`validity_through_reference`,stage.`digest`,stage.`version`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')," +
"PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage WHERE " +
"((NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " +
"AND (sink.`digest` = stage.`digest`) AND ((sink.`id` = stage.`id`) AND " +
@@ -173,7 +173,7 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List
= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) AND " +
"(stage.`delete_indicator` NOT IN ('yes','1','true')))";
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalMainTableWithDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery, operations.get(0).preActionsSql().get(0));
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0));
@@ -185,10 +185,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List operations, List dataSplitRanges)
{
String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
"FROM " +
- "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
+ "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
"LEFT OUTER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
"FROM " +
@@ -132,10 +134,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplits(List
= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
+ "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`delete_indicator` NOT IN ('yes','1','true')) AND ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
"LEFT OUTER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
"FROM " +
@@ -354,10 +360,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplits(List
legend_persistence_x.`validity_from_target`) AND (legend_persistence_y.`delete_indicator` = 0) " +
"WHERE legend_persistence_x.`delete_indicator` = 0 " +
- "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
+ "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
"LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " +
"ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_through_target` > legend_persistence_x.`legend_persistence_start_date`) AND (legend_persistence_y.`validity_through_target` <= legend_persistence_x.`legend_persistence_end_date`) AND (legend_persistence_y.`delete_indicator` <> 0) " +
- "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
+ "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0));
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2));
Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3));
@@ -446,6 +452,9 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplits(List
Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), operations.get(0).metadataIngestSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`temp`"), operations.get(0).postCleanupSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`stagingWithoutDuplicates`"), operations.get(0).postCleanupSql().get(1));
+
Assertions.assertEquals(2, operations.size());
String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
verifyStats(operations.get(0), enrichSqlWithDataSplits(incomingRecordCount,dataSplitRanges.get(0)), rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
@@ -566,10 +575,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDuplicates(List operations, List dataSplitRanges)
{
String expectedStageToStageWithoutDuplicates = "INSERT INTO `mydb`.`stagingWithoutDuplicates` " +
- "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `data_split`) " +
- "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`staging` as stage " +
+ "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `version`, `data_split`) " +
+ "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`staging` as stage " +
"WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`digest` = stage.`digest`) AND (sink.`batch_id_out` = 999999999))))";
String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
"FROM " +
- "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`stagingWithoutDuplicates` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
+ "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`stagingWithoutDuplicates` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
"LEFT OUTER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
"FROM " +
@@ -566,10 +575,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
"ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_x.`validity_from_reference` = legend_persistence_y.`legend_persistence_start_date`))";
String expectedMainToTemp = "INSERT INTO `mydb`.`temp` " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
"FROM " +
- "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
+ "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`version`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
"INNER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,legend_persistence_x.`legend_persistence_end_date` as `legend_persistence_end_date` " +
"FROM " +
@@ -594,13 +603,13 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
"AND (sink.`batch_id_out` = 999999999)";
String expectedTempToMain = "INSERT INTO `mydb`.`main` " +
- "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
- "(SELECT temp.`id`,temp.`name`,temp.`amount`,temp.`digest`,temp.`batch_id_in`,temp.`batch_id_out`,temp.`validity_from_target`,temp.`validity_through_target` FROM `mydb`.`temp` as temp)";
+ "(`id`, `name`, `amount`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
+ "(SELECT temp.`id`,temp.`name`,temp.`amount`,temp.`digest`,temp.`version`,temp.`batch_id_in`,temp.`batch_id_out`,temp.`validity_from_target`,temp.`validity_through_target` FROM `mydb`.`temp` as temp)";
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0));
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyTempTableCreateQuery, operations.get(0).preActionsSql().get(2));
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyTempTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(2));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3));
Assertions.assertEquals(expectedStageToStageWithoutDuplicates, operations.get(0).ingestSql().get(0));
Assertions.assertEquals(enrichSqlWithDataSplits(expectedStageToTemp, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1));
@@ -620,6 +629,9 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), operations.get(0).metadataIngestSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`temp`"), operations.get(0).postCleanupSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`stagingWithoutDuplicates`"), operations.get(0).postCleanupSql().get(1));
+
Assertions.assertEquals(2, operations.size());
String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
verifyStats(operations.get(0), enrichSqlWithDataSplits(incomingRecordCount,dataSplitRanges.get(0)), rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
@@ -763,6 +775,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"`name` STRING NOT NULL," +
"`amount` FLOAT64," +
"`digest` STRING," +
+ "`version` INT64," +
"`batch_id_in` INT64 NOT NULL," +
"`batch_id_out` INT64," +
"`validity_from_target` DATETIME NOT NULL," +
@@ -774,6 +787,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"`name` STRING NOT NULL," +
"`amount` FLOAT64," +
"`digest` STRING," +
+ "`version` INT64," +
"`batch_id_in` INT64 NOT NULL," +
"`batch_id_out` INT64," +
"`validity_from_target` DATETIME NOT NULL," +
@@ -787,20 +801,21 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"`amount` FLOAT64," +
"`validity_from_reference` DATETIME NOT NULL," +
"`digest` STRING," +
+ "`version` INT64," +
"`delete_indicator` STRING," +
"`data_split` INT64 NOT NULL," +
"PRIMARY KEY (`id`, `name`, `validity_from_reference`, `data_split`) NOT ENFORCED)";
String expectedStageToStageWithoutDuplicates = "INSERT INTO " + stageWithoutDuplicatesName + " " +
- "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `delete_indicator`, `data_split`) " +
- "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage " +
+ "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `version`, `delete_indicator`, `data_split`) " +
+ "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage " +
"WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`digest` = stage.`digest`) AND (sink.`batch_id_out` = 999999999))))";
String expectedStageToTemp = "INSERT INTO " + tempName + " " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
"FROM " +
- "(SELECT legend_persistence_stageWithoutDuplicates.`id`,legend_persistence_stageWithoutDuplicates.`name`,legend_persistence_stageWithoutDuplicates.`amount`,legend_persistence_stageWithoutDuplicates.`validity_from_reference`,legend_persistence_stageWithoutDuplicates.`digest`,legend_persistence_stageWithoutDuplicates.`delete_indicator`,legend_persistence_stageWithoutDuplicates.`data_split` FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.`delete_indicator` NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
+ "(SELECT legend_persistence_stageWithoutDuplicates.`id`,legend_persistence_stageWithoutDuplicates.`name`,legend_persistence_stageWithoutDuplicates.`amount`,legend_persistence_stageWithoutDuplicates.`validity_from_reference`,legend_persistence_stageWithoutDuplicates.`digest`,legend_persistence_stageWithoutDuplicates.`version`,legend_persistence_stageWithoutDuplicates.`delete_indicator`,legend_persistence_stageWithoutDuplicates.`data_split` FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.`delete_indicator` NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
"LEFT OUTER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
"FROM " +
@@ -818,10 +833,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_x.`validity_from_reference` = legend_persistence_y.`legend_persistence_start_date`))";
String expectedMainToTemp = "INSERT INTO " + tempName + " " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
"FROM " +
- "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
+ "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`version`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
"INNER JOIN " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,legend_persistence_x.`legend_persistence_end_date` as `legend_persistence_end_date` " +
"FROM " +
@@ -846,12 +861,12 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"AND (sink.`batch_id_out` = 999999999)";
String expectedTempToMain = "INSERT INTO `mydb`.`main` " +
- "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
- "(SELECT legend_persistence_temp.`id`,legend_persistence_temp.`name`,legend_persistence_temp.`amount`,legend_persistence_temp.`digest`,legend_persistence_temp.`batch_id_in`,legend_persistence_temp.`batch_id_out`,legend_persistence_temp.`validity_from_target`,legend_persistence_temp.`validity_through_target` FROM " + tempName + " as legend_persistence_temp)";
+ "(`id`, `name`, `amount`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
+ "(SELECT legend_persistence_temp.`id`,legend_persistence_temp.`name`,legend_persistence_temp.`amount`,legend_persistence_temp.`digest`,legend_persistence_temp.`version`,legend_persistence_temp.`batch_id_in`,legend_persistence_temp.`batch_id_out`,legend_persistence_temp.`validity_from_target`,legend_persistence_temp.`validity_through_target` FROM " + tempName + " as legend_persistence_temp)";
String expectedMainToTempForDeletion = "INSERT INTO " + tempWithDeleteIndicatorName + " " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `delete_indicator`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_x.`validity_through_target` as `legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.`delete_indicator` IS NULL THEN 0 ELSE 1 END) " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `delete_indicator`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_x.`validity_through_target` as `legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.`delete_indicator` IS NULL THEN 0 ELSE 1 END) " +
"FROM " +
"(SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) " +
"AND (EXISTS " +
@@ -871,19 +886,19 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
"AND (sink.`batch_id_out` = 999999999)";
String expectedTempToMainForDeletion = "INSERT INTO `mydb`.`main` " +
- "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`legend_persistence_start_date` as `legend_persistence_start_date`,MAX(legend_persistence_y.`validity_through_target`) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` FROM " +
- "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`validity_from_target`),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` " +
+ "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`legend_persistence_start_date` as `legend_persistence_start_date`,MAX(legend_persistence_y.`validity_through_target`) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` FROM " +
+ "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`validity_from_target`),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` " +
"FROM " + tempWithDeleteIndicatorName + " as legend_persistence_x " +
"LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " +
"ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_from_target` > legend_persistence_x.`validity_from_target`) AND (legend_persistence_y.`delete_indicator` = 0) " +
"WHERE legend_persistence_x.`delete_indicator` = 0 " +
- "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
+ "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
"LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " +
"ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_through_target` > legend_persistence_x.`legend_persistence_start_date`) AND (legend_persistence_y.`validity_through_target` <= legend_persistence_x.`legend_persistence_end_date`) AND (legend_persistence_y.`delete_indicator` <> 0) " +
- "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
+ "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
- Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0));
+ Assertions.assertEquals(BigQueryTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0));
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2));
Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3));
@@ -915,6 +930,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), operations.get(0).metadataIngestSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`main_legend_persistence_temp`"), operations.get(0).postCleanupSql().get(0));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`main_legend_persistence_tempWithDeleteIndicator`"), operations.get(0).postCleanupSql().get(1));
+ Assertions.assertEquals(getDropTempTableQuery("`mydb`.`staging_legend_persistence_stageWithoutDuplicates`"), operations.get(0).postCleanupSql().get(2));
+
Assertions.assertEquals(2, operations.size());
String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`) AND (sink2.`validity_from_target` = sink.`validity_from_target`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))";
@@ -1005,7 +1024,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene
String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
"(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`," +
- "legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
+ "legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
"FROM " +
"(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest` FROM `mydb`.`staging` as stage) as legend_persistence_x " +
"LEFT OUTER JOIN " +
@@ -1027,7 +1046,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene
String expectedMainToTemp = "INSERT INTO `mydb`.`temp` " +
"(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`," +
- "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
+ "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
"FROM " +
"(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`batch_time_in`," +
"sink.`batch_time_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink " +
@@ -1051,7 +1070,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene
String expectedUpdateMain = "UPDATE `mydb`.`main` as sink SET " +
"sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
- "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
+ "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " +
"WHERE (EXISTS " +
"(SELECT * FROM `mydb`.`temp` as temp WHERE ((sink.`id` = temp.`id`) AND (sink.`name` = temp.`name`)) " +
"AND (sink.`validity_from_target` = temp.`validity_from_target`))) AND (sink.`batch_id_out` = 999999999)";
@@ -1086,7 +1105,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR
String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
"(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_time_in`, `batch_time_out`) " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`," +
- "legend_persistence_y.`legend_persistence_end_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
+ "legend_persistence_y.`legend_persistence_end_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " +
"FROM " +
"(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest` FROM `mydb`.`staging` as stage) as legend_persistence_x " +
"LEFT OUTER JOIN " +
@@ -1109,7 +1128,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR
"(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_time_in`, `batch_time_out`) " +
"(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`," +
"legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`," +
- "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM (SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_time_in`," +
+ "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM (SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_time_in`," +
"sink.`batch_time_out`,sink.`validity_from_target`,sink.`validity_through_target` " +
"FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) as legend_persistence_x " +
"INNER JOIN " +
@@ -1132,7 +1151,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR
"AND (legend_persistence_x.`validity_from_target` = legend_persistence_y.`legend_persistence_start_date`))";
String expectedUpdateMain = "UPDATE `mydb`.`main` as sink SET " +
- "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
+ "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " +
"WHERE (EXISTS (SELECT * FROM `mydb`.`temp` as temp WHERE " +
"((sink.`id` = temp.`id`) AND (sink.`name` = temp.`name`)) AND " +
"(sink.`validity_from_target` = temp.`validity_from_target`))) AND (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59'))";
@@ -1154,8 +1173,8 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR
Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0));
String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
- String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')";
- String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) as `rowsInserted`";
+ String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')";
+ String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) as `rowsInserted`";
verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
}
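Reviewer note: across the test files in this PR, the expected batch-time literals change from '2000-01-01 00:00:00' to '2000-01-01 00:00:00.000000', so the fixed-clock execution timestamp is now rendered with microsecond precision. A minimal, self-contained Java sketch of that rendering follows; the formatter pattern is an assumption for illustration, not the engine's own code.

    // FixedClockTimestampSketch.java -- illustrative only, not engine code.
    import java.time.Clock;
    import java.time.LocalDateTime;
    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    public class FixedClockTimestampSketch
    {
        public static void main(String[] args)
        {
            // The same fixed clock the tests pin via executionTimestampClock(...)
            Clock fixedClock2000 = Clock.fixed(
                    ZonedDateTime.of(2000, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant(), ZoneOffset.UTC);

            // Rendering with six fractional digits reproduces the new literal:
            DateTimeFormatter micros = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");
            System.out.println(LocalDateTime.now(fixedClock2000).format(micros));
            // -> 2000-01-01 00:00:00.000000
        }
    }

The new BulkLoadTest below exercises the BigQuery bulk-load path (LOAD DATA OVERWRITE ... FROM FILES) and asserts the same microsecond-precision timestamps.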
Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.persistence.components.ingestmode; + +import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.FileFormat; +import org.finos.legend.engine.persistence.components.common.LoadOptions; +import org.finos.legend.engine.persistence.components.common.StatisticName; +import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.digest.NoDigestGenStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.digest.UDFBasedDigestGenStrategy; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DataType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.FieldType; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.StagedFilesDataset; +import org.finos.legend.engine.persistence.components.relational.CaseConversion; +import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; +import org.finos.legend.engine.persistence.components.relational.api.RelationalGenerator; +import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; +import org.finos.legend.engine.persistence.components.relational.bigquery.logicalplan.datasets.BigQueryStagedFilesDatasetProperties; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.time.Clock; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_DELETED; +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_INSERTED; +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_TERMINATED; +import static org.finos.legend.engine.persistence.components.common.StatisticName.ROWS_UPDATED; + +public class BulkLoadTest +{ + private static final String APPEND_TIME = "append_time"; + private static final String DIGEST = "digest"; + private static final String DIGEST_UDF = "LAKEHOUSE_MD5"; + private static final String BATCH_ID = "batch_id"; + private static final String TASK_ID_VALUE = "xyz123"; + private static final String COL_INT = "col_int"; + private static final String COL_STRING = "col_string"; + private static final String COL_DECIMAL = 
"col_decimal"; + private static final String COL_DATETIME = "col_datetime"; + private static final String COL_VARIANT = "col_variant"; + + private static Field col1 = Field.builder() + .name(COL_INT) + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .build(); + private static Field col2 = Field.builder() + .name(COL_STRING) + .type(FieldType.of(DataType.STRING, Optional.empty(), Optional.empty())) + .build(); + private static Field col3 = Field.builder() + .name(COL_DECIMAL) + .type(FieldType.of(DataType.DECIMAL, 5, 2)) + .build(); + private static Field col4 = Field.builder() + .name(COL_DATETIME) + .type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())) + .build(); + + private static Field col5 = Field.builder() + .name(COL_VARIANT) + .type(FieldType.of(DataType.VARIANT, Optional.empty(), Optional.empty())) + .build(); + + private List filesList = Arrays.asList("/path/xyz/file1.csv", "/path/xyz/file2.csv"); + + protected final ZonedDateTime fixedZonedDateTime_2000_01_01 = ZonedDateTime.of(2000, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + protected final Clock fixedClock_2000_01_01 = Clock.fixed(fixedZonedDateTime_2000_01_01.toInstant(), ZoneOffset.UTC); + + @Test + public void testBulkLoadWithDigestNotGeneratedAuditEnabledNoExtraOptions() + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(filesList).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, col5)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .batchIdPattern("{NEXT_BATCH_ID}") + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + + List preActionsSql = operations.preActionsSql(); + List ingestSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + Map statsSql = operations.postIngestStatisticsSql(); + + String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS `my_db`.`my_name`" + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON,`batch_id` INT64,`append_time` DATETIME)"; + + String expectedCopySql = "LOAD DATA OVERWRITE `my_db`.`my_name_legend_persistence_temp` " + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON) " + + "FROM FILES (uris=['/path/xyz/file1.csv','/path/xyz/file2.csv'], format='CSV')"; + + String expectedInsertSql = "INSERT INTO `my_db`.`my_name` " + + "(`col_int`, `col_string`, `col_decimal`, `col_datetime`, `col_variant`, `batch_id`, `append_time`) " + + "(SELECT 
legend_persistence_temp.`col_int`,legend_persistence_temp.`col_string`,legend_persistence_temp.`col_decimal`,legend_persistence_temp.`col_datetime`,legend_persistence_temp.`col_variant`,{NEXT_BATCH_ID},PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `my_db`.`my_name_legend_persistence_temp` as legend_persistence_temp)"; + + String expectedMetadataIngestSql = "INSERT INTO bulk_load_batch_metadata (`batch_id`, `table_name`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`, `batch_source_info`) " + + "(SELECT {NEXT_BATCH_ID},'my_name',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'{BULK_LOAD_BATCH_STATUS_PLACEHOLDER}',PARSE_JSON('{\"files\":[\"/path/xyz/file1.csv\",\"/path/xyz/file2.csv\"],\"task_id\":\"xyz123\"}'))"; + + Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); + Assertions.assertEquals(expectedCopySql, ingestSql.get(0)); + Assertions.assertEquals(expectedInsertSql, ingestSql.get(1)); + Assertions.assertEquals(expectedMetadataIngestSql, metadataIngestSql.get(0)); + + Assertions.assertEquals("SELECT 0 as `rowsDeleted`", statsSql.get(ROWS_DELETED)); + Assertions.assertEquals("SELECT 0 as `rowsTerminated`", statsSql.get(ROWS_TERMINATED)); + Assertions.assertEquals("SELECT 0 as `rowsUpdated`", statsSql.get(ROWS_UPDATED)); + Assertions.assertEquals("SELECT COUNT(*) as `rowsInserted` FROM `my_db`.`my_name` as my_alias WHERE my_alias.`append_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')", statsSql.get(ROWS_INSERTED)); + } + + @Test + public void testBulkLoadWithDigestNotGeneratedAuditEnabledAllOptionsNoTaskId() + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .loadOptions(LoadOptions.builder() + .encoding("UTF8") + .maxBadRecords(100L) + .nullMarker("NULL") + .quote("'") + .compression("GZIP") + .fieldDelimiter(",") + .skipLeadingRows(1L) + .build()) + .addAllFiles(filesList).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, col5)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + + List preActionsSql = operations.preActionsSql(); + List ingestSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + Map statsSql = operations.postIngestStatisticsSql(); + + String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS `my_db`.`my_name`" + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON,`batch_id` INT64,`append_time` DATETIME)"; + + String expectedCopySql = "LOAD DATA OVERWRITE `my_db`.`my_name_legend_persistence_temp` " + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON) " + + 
"FROM FILES " + + "(uris=['/path/xyz/file1.csv','/path/xyz/file2.csv'], max_bad_records=100, quote=''', skip_leading_rows=1, format='CSV', encoding='UTF8', compression='GZIP', field_delimiter=',', null_marker='NULL')"; + + String expectedInsertSql = "INSERT INTO `my_db`.`my_name` " + + "(`col_int`, `col_string`, `col_decimal`, `col_datetime`, `col_variant`, `batch_id`, `append_time`) " + + "(SELECT legend_persistence_temp.`col_int`,legend_persistence_temp.`col_string`,legend_persistence_temp.`col_decimal`,legend_persistence_temp.`col_datetime`,legend_persistence_temp.`col_variant`,(SELECT COALESCE(MAX(bulk_load_batch_metadata.`batch_id`),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`table_name`) = 'MY_NAME'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `my_db`.`my_name_legend_persistence_temp` as legend_persistence_temp)"; + + String expectedMetadataIngestSql = "INSERT INTO bulk_load_batch_metadata (`batch_id`, `table_name`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`, `batch_source_info`) " + + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.`batch_id`),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`table_name`) = 'MY_NAME'),'my_name',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'{BULK_LOAD_BATCH_STATUS_PLACEHOLDER}'," + + "PARSE_JSON('{\"files\":[\"/path/xyz/file1.csv\",\"/path/xyz/file2.csv\"]}'))"; + + Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); + Assertions.assertEquals(expectedCopySql, ingestSql.get(0)); + Assertions.assertEquals(expectedInsertSql, ingestSql.get(1)); + Assertions.assertEquals(expectedMetadataIngestSql, metadataIngestSql.get(0)); + + Assertions.assertEquals("SELECT 0 as `rowsDeleted`", statsSql.get(ROWS_DELETED)); + Assertions.assertEquals("SELECT 0 as `rowsTerminated`", statsSql.get(ROWS_TERMINATED)); + Assertions.assertEquals("SELECT 0 as `rowsUpdated`", statsSql.get(ROWS_UPDATED)); + Assertions.assertEquals("SELECT COUNT(*) as `rowsInserted` FROM `my_db`.`my_name` as my_alias WHERE my_alias.`append_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')", statsSql.get(ROWS_INSERTED)); + } + + @Test + public void testBulkLoadWithDigestNotGeneratedAuditDisabledNoExtraOptions() + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(filesList).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, col5)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + + List preActionsSql = operations.preActionsSql(); + List ingestSql = operations.ingestSql(); + Map statsSql = 
operations.postIngestStatisticsSql(); + + String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS `my_db`.`my_name`" + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON,`batch_id` INT64)"; + + String expectedCopySql = "LOAD DATA OVERWRITE `my_db`.`my_name_legend_persistence_temp` " + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON) " + + "FROM FILES (uris=['/path/xyz/file1.csv','/path/xyz/file2.csv'], format='CSV')"; + + String expectedInsertSql = "INSERT INTO `my_db`.`my_name` " + + "(`col_int`, `col_string`, `col_decimal`, `col_datetime`, `col_variant`, `batch_id`) " + + "(SELECT legend_persistence_temp.`col_int`,legend_persistence_temp.`col_string`,legend_persistence_temp.`col_decimal`,legend_persistence_temp.`col_datetime`,legend_persistence_temp.`col_variant`,(SELECT COALESCE(MAX(bulk_load_batch_metadata.`batch_id`),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`table_name`) = 'MY_NAME') " + + "FROM `my_db`.`my_name_legend_persistence_temp` as legend_persistence_temp)"; + + Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); + Assertions.assertEquals(expectedCopySql, ingestSql.get(0)); + Assertions.assertEquals(expectedInsertSql, ingestSql.get(1)); + + Assertions.assertEquals("SELECT 0 as `rowsDeleted`", statsSql.get(ROWS_DELETED)); + Assertions.assertEquals("SELECT 0 as `rowsTerminated`", statsSql.get(ROWS_TERMINATED)); + Assertions.assertEquals("SELECT 0 as `rowsUpdated`", statsSql.get(ROWS_UPDATED)); + Assertions.assertNull(statsSql.get(ROWS_INSERTED)); + } + + @Test + public void testBulkLoadWithDigestGeneratedAuditEnabledNoExtraOptions() + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestField(DIGEST).digestUdfName(DIGEST_UDF).build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(filesList).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, col5)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + + List preActionsSql = operations.preActionsSql(); + List ingestSql = operations.ingestSql(); + Map statsSql = operations.postIngestStatisticsSql(); + + String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS `my_db`.`my_name`" + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON,`digest` STRING,`batch_id` INT64,`append_time` DATETIME)"; + + String expectedCopySql = "LOAD DATA OVERWRITE `my_db`.`my_name_legend_persistence_temp` " + + "(`col_int` INT64,`col_string` STRING,`col_decimal` NUMERIC(5,2),`col_datetime` DATETIME,`col_variant` JSON) " + + "FROM FILES 
(uris=['/path/xyz/file1.csv','/path/xyz/file2.csv'], format='CSV')"; + + String expectedInsertSql = "INSERT INTO `my_db`.`my_name` " + + "(`col_int`, `col_string`, `col_decimal`, `col_datetime`, `col_variant`, `digest`, `batch_id`, `append_time`) " + + "(SELECT legend_persistence_temp.`col_int`,legend_persistence_temp.`col_string`,legend_persistence_temp.`col_decimal`,legend_persistence_temp.`col_datetime`,legend_persistence_temp.`col_variant`,LAKEHOUSE_MD5(TO_JSON(legend_persistence_temp)),(SELECT COALESCE(MAX(bulk_load_batch_metadata.`batch_id`),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`table_name`) = 'MY_NAME'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `my_db`.`my_name_legend_persistence_temp` as legend_persistence_temp)"; + + Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); + Assertions.assertEquals(expectedCopySql, ingestSql.get(0)); + Assertions.assertEquals(expectedInsertSql, ingestSql.get(1)); + + Assertions.assertEquals("SELECT 0 as `rowsDeleted`", statsSql.get(ROWS_DELETED)); + Assertions.assertEquals("SELECT 0 as `rowsTerminated`", statsSql.get(ROWS_TERMINATED)); + Assertions.assertEquals("SELECT 0 as `rowsUpdated`", statsSql.get(ROWS_UPDATED)); + Assertions.assertEquals("SELECT COUNT(*) as `rowsInserted` FROM `my_db`.`my_name` as my_alias WHERE my_alias.`append_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')", statsSql.get(ROWS_INSERTED)); + } + + @Test + public void testBulkLoadWithDigestGeneratedAuditEnabledNoExtraOptionsUpperCase() + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestField(DIGEST).digestUdfName(DIGEST_UDF).build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + BigQueryStagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(filesList).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, col5)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .caseConversion(CaseConversion.TO_UPPER) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + + List preActionsSql = operations.preActionsSql(); + List ingestSql = operations.ingestSql(); + Map statsSql = operations.postIngestStatisticsSql(); + + String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS `MY_DB`.`MY_NAME`" + + "(`COL_INT` INT64,`COL_STRING` STRING,`COL_DECIMAL` NUMERIC(5,2),`COL_DATETIME` DATETIME,`COL_VARIANT` JSON,`DIGEST` STRING,`BATCH_ID` INT64,`APPEND_TIME` DATETIME)"; + + String expectedCopySql = "LOAD DATA OVERWRITE `MY_DB`.`MY_NAME_LEGEND_PERSISTENCE_TEMP` " + + "(`COL_INT` INT64,`COL_STRING` STRING,`COL_DECIMAL` NUMERIC(5,2),`COL_DATETIME` DATETIME,`COL_VARIANT` JSON) " + + "FROM FILES (uris=['/path/xyz/file1.csv','/path/xyz/file2.csv'], format='CSV')"; + + String expectedInsertSql = "INSERT INTO `MY_DB`.`MY_NAME` " + + 
"(`COL_INT`, `COL_STRING`, `COL_DECIMAL`, `COL_DATETIME`, `COL_VARIANT`, `DIGEST`, `BATCH_ID`, `APPEND_TIME`) " + + "(SELECT legend_persistence_temp.`COL_INT`,legend_persistence_temp.`COL_STRING`,legend_persistence_temp.`COL_DECIMAL`,legend_persistence_temp.`COL_DATETIME`,legend_persistence_temp.`COL_VARIANT`,LAKEHOUSE_MD5(TO_JSON(legend_persistence_temp)),(SELECT COALESCE(MAX(BULK_LOAD_BATCH_METADATA.`BATCH_ID`),0)+1 FROM BULK_LOAD_BATCH_METADATA as BULK_LOAD_BATCH_METADATA WHERE UPPER(BULK_LOAD_BATCH_METADATA.`TABLE_NAME`) = 'MY_NAME'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "FROM `MY_DB`.`MY_NAME_LEGEND_PERSISTENCE_TEMP` as legend_persistence_temp)"; + + Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); + Assertions.assertEquals(expectedCopySql, ingestSql.get(0)); + Assertions.assertEquals(expectedInsertSql, ingestSql.get(1)); + + Assertions.assertEquals("SELECT 0 as `ROWSDELETED`", statsSql.get(ROWS_DELETED)); + Assertions.assertEquals("SELECT 0 as `ROWSTERMINATED`", statsSql.get(ROWS_TERMINATED)); + Assertions.assertEquals("SELECT 0 as `ROWSUPDATED`", statsSql.get(ROWS_UPDATED)); + Assertions.assertEquals("SELECT COUNT(*) as `ROWSINSERTED` FROM `MY_DB`.`MY_NAME` as my_alias WHERE my_alias.`APPEND_TIME` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')", statsSql.get(ROWS_INSERTED)); + } + + @Test + public void testBulkLoadDigestColumnNotProvided() + { + try + { + BulkLoad bulkLoad = BulkLoad.builder() + .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestUdfName(DIGEST_UDF).build()) + .batchIdField(BATCH_ID) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertTrue(e.getMessage().contains("Cannot build UDFBasedDigestGenStrategy, some of required attributes are not set [digestField]")); + } + } + + @Test + public void testBulkLoadDigestUDFNotProvided() + { + try + { + BulkLoad bulkLoad = BulkLoad.builder() + .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestField(DIGEST).build()) + .batchIdField(BATCH_ID) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertTrue(e.getMessage().contains("Cannot build UDFBasedDigestGenStrategy, some of required attributes are not set [digestUdfName]")); + } + } + + @Test + public void testBulkLoadStagedFilesDatasetNotProvided() + { + try + { + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) + .build(); + + Dataset stagingDataset = DatasetDefinition.builder() + .database("my_db").name("my_stage").alias("my_alias") + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2)).build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(BigQuerySink.get()) + .bulkLoadTaskIdValue(TASK_ID_VALUE) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, 
stagingDataset)); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertTrue(e.getMessage().contains("Only StagedFilesDataset are allowed under Bulk Load")); + } + } +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java index 1e8dbb810ae..adf83659f9f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java @@ -54,7 +54,7 @@ public class IngestModeTest String[] partitionKeys = new String[] {"biz_date"}; HashMap> partitionFilter = new HashMap>() {{ - put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00", "2000-01-02 00:00:00"))); + put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00.000000", "2000-01-02 00:00:00"))); }}; // Base Columns: Primary keys : id, name @@ -167,9 +167,9 @@ public class IngestModeTest "`BATCH_STATUS` VARCHAR(32)," + "`TABLE_BATCH_ID` INTEGER)"; - protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_TIMESTAMP(),'DONE')"; + protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_TIMESTAMP(),'DONE')"; - protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.`TABLE_NAME` = 'main'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_TIMESTAMP(),'DONE')"; + protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.`TABLE_NAME` = 'main'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_TIMESTAMP(),'DONE')"; String expectedStagingCleanupQuery = "DELETE FROM `mydb`.`staging` as stage"; diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java index 09fcb7745c0..667cb8c5ccb 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java @@ -31,9 +31,15 @@ public class NontemporalDeltaTest extends org.finos.legend.engine.persistence.co protected String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; protected String incomingRecordCountWithSplits = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE " + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + protected String incomingRecordCountWithSplitsTempStaginTable = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " + + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + + protected String incomingRecordCountWithSplitsAndDuplicates = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " + + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + protected String rowsTerminated = "SELECT 0 as `rowsTerminated`"; protected String rowsDeleted = "SELECT 0 as `rowsDeleted`"; - protected String rowsDeletedWithDeleteIndicator = "SELECT COUNT(*) as `rowsDeleted` FROM `mydb`.`main` as sink WHERE EXISTS (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`) AND (stage.`delete_indicator` IN ('yes','1','true')))"; + protected String rowsDeletedWithDeleteIndicator = "SELECT COUNT(*) as `rowsDeleted` FROM `mydb`.`main` as sink WHERE EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`) AND (stage.`delete_indicator` IN ('yes','1','true')))"; @Override @@ -43,7 +49,7 @@ public RelationalSink getRelationalSink() } @Override - public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -73,13 +79,13 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio } @Override - public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operations) + public void 
verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String mergeSql = "MERGE INTO `mydb`.`main` as sink " + - "USING `mydb`.`staging` as stage " + + "USING `mydb`.`staging_legend_persistence_temp_staging` as stage " + "ON (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`) " + "WHEN MATCHED AND sink.`digest` <> stage.`digest` " + "THEN UPDATE SET " + @@ -88,10 +94,10 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat "sink.`amount` = stage.`amount`," + "sink.`biz_date` = stage.`biz_date`," + "sink.`digest` = stage.`digest`," + - "sink.`batch_update_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_update_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHEN NOT MATCHED THEN INSERT " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "VALUES (stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))"; + "VALUES (stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(mergeSql, milestoningSqlList.get(0)); @@ -103,7 +109,31 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat } @Override - public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(List operations, List dataSplitRanges) + { + String mergeSql = "MERGE INTO `mydb`.`main` as sink " + + "USING (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + + "as stage ON (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`) " + + "WHEN MATCHED AND sink.`digest` <> stage.`digest` " + + "THEN UPDATE SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest` " + + "WHEN NOT MATCHED " + + "THEN INSERT (`id`, `name`, `amount`, `biz_date`, `digest`) " + + "VALUES (stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`)"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); + + // Stats + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStaginTable, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStaginTable, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsTerminated, 
operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + } + + @Override + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(List operations, List dataSplitRanges) { String mergeSql = "MERGE INTO `mydb`.`main` as sink " + "USING (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging` as stage " + @@ -127,31 +157,31 @@ public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List } @Override - public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(List operations, List dataSplitRanges) { String mergeSql = "MERGE INTO `mydb`.`main` as sink " + - "USING (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging` as stage " + + "USING (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + "as stage ON (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`) " + "WHEN MATCHED AND sink.`digest` <> stage.`digest` " + - "THEN UPDATE SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest`,sink.`batch_update_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "THEN UPDATE SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest`,sink.`batch_update_time` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHEN NOT MATCHED " + "THEN INSERT (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "VALUES (stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))"; + "VALUES (stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); // Stats - Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); - Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsAndDuplicates, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsAndDuplicates, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); 
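+        // NOTE (added for clarity, not part of the original change): enrichSqlWithDataSplits comes from
+        // the shared test-case base class; it presumably substitutes the {DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}
+        // and {DATA_SPLIT_UPPER_BOUND_PLACEHOLDER} tokens with the bounds of the given DataSplitRange
+        // before the expected/actual SQL strings are compared.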
         Assertions.assertEquals(rowsTerminated, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED));
         Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
     }
 
     @Override
-    public void verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(GeneratorResult operations)
+    public void verifyNontemporalDeltaNoAuditingWithDeleteIndicatorNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -250,15 +280,14 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult
     }
 
     @Override
-    public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(GeneratorResult operations)
+    public void verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
         String mergeSql = "MERGE INTO `mydb`.`main` as sink " +
             "USING " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`,ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` FROM `mydb`.`staging` as stage WHERE stage.`snapshot_id` > 18972) as stage WHERE stage.`legend_persistence_row_num` = 1) as stage " +
+            "`mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "ON (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`) " +
             "WHEN MATCHED AND stage.`version` > sink.`version` " +
             "THEN UPDATE SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest`,sink.`version` = stage.`version` " +
@@ -275,7 +304,7 @@ public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(Ge
     }
 
     @Override
-    public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(GeneratorResult operations)
+    public void verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(GeneratorResult operations)
    {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -299,7 +328,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene
     }
 
     @Override
-    public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations)
+    public void verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -322,15 +351,14 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(
     }
 
     @Override
-    public void verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations)
+    public void verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
         String mergeSql = "MERGE INTO `MYDB`.`MAIN` as sink " +
             "USING " +
-            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` FROM " +
-            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`,ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) as `LEGEND_PERSISTENCE_ROW_NUM` FROM `MYDB`.`STAGING` as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " +
+            "`MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " +
             "ON (sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`) " +
             "WHEN MATCHED AND stage.`VERSION` >= sink.`VERSION` " +
             "THEN UPDATE SET sink.`ID` = stage.`ID`,sink.`NAME` = stage.`NAME`,sink.`AMOUNT` = stage.`AMOUNT`,sink.`BIZ_DATE` = stage.`BIZ_DATE`,sink.`DIGEST` = stage.`DIGEST`,sink.`VERSION` = stage.`VERSION` " +
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
index e56b89495a9..1636e9de80d 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
@@ -28,19 +28,18 @@ public class NontemporalSnapshotTest extends NontemporalSnapshotTestCases
 {
     String rowsDeleted = "SELECT COUNT(*) as `rowsDeleted` FROM `mydb`.`main` as sink";
-    String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
     String rowsUpdated = "SELECT 0 as `rowsUpdated`";
     String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink";
     String rowsTerminated = "SELECT 0 as `rowsTerminated`";
 
     @Override
-    public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotNoAuditingNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date` FROM `mydb`.`staging` as stage)";
 
         Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(BigQueryTestArtifacts.expectedStagingTableCreateQuery, preActionsSqlList.get(1));
@@ -48,66 +47,51 @@ public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult opera
         Assertions.assertEquals(insertSql, milestoningSqlList.get(1));
 
         // Stats
-        verifyStats(operations);
+        verifyStats(operations, "staging");
     }
 
     @Override
-    public void verifyNontemporalSnapshotNoAuditingWithDataSplit(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date` FROM `mydb`.`staging` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`staging` as stage_right WHERE " +
-            "(stage.`data_split` < stage_right.`data_split`) AND ((stage.`id` = stage_right.`id`) AND (stage.`name` = stage_right.`name`)))))";
-
-        Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
-        Assertions.assertEquals(BigQueryTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(1));
-
-        // Stats
-        verifyStats(operations);
-    }
-
-    @Override
-    public void verifyNontemporalSnapshotWithAuditingNoDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotWithAuditingFilterDupsNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
         String insertSql = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
-            "FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage)";
 
         Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableWithAuditPKCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(BigQueryTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
         Assertions.assertEquals(insertSql, milestoningSqlList.get(1));
 
         // Stats
-        verifyStats(operations);
+        verifyStats(operations, "staging");
     }
 
     @Override
-    public void verifyNontemporalSnapshotWithAuditingWithDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotWithAuditingFailOnDupMaxVersion(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
 
-        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " +
-            "FROM `mydb`.`staging` as stage WHERE NOT (EXISTS " +
-            "(SELECT * FROM `mydb`.`staging` as stage_right " +
-            "WHERE (stage.`data_split` < stage_right.`data_split`) AND ((stage.`id` = stage_right.`id`) AND " +
-            "(stage.`name` = stage_right.`name`)))))";
+        String insertSql = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`," +
+            "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') FROM " +
+            "`mydb`.`staging_legend_persistence_temp_staging` as stage)";
 
         Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableWithAuditPKCreateQuery, preActionsSqlList.get(0));
+        Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTempStagingTableWithCount, preActionsSqlList.get(1));
         Assertions.assertEquals(BigQueryTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
         Assertions.assertEquals(insertSql, milestoningSqlList.get(1));
 
+        Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1));
+
         // Stats
-        verifyStats(operations);
+        verifyStats(operations, "staging");
     }
 
     @Override
@@ -117,7 +101,7 @@ public void verifyNontemporalSnapshotWithUpperCaseOptimizer(GeneratorResult quer
         List<String> milestoningSqlList = queries.ingestSql();
 
         String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`) " +
-            "(SELECT * FROM `MYDB`.`STAGING` as stage)";
+            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE` FROM `MYDB`.`STAGING` as stage)";
 
         Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableCreateQueryWithUpperCase, preActionsSqlList.get(0));
         Assertions.assertEquals(BigQueryTestArtifacts.cleanupMainTableSqlUpperCase, milestoningSqlList.get(0));
@@ -131,7 +115,7 @@ public void verifyNontemporalSnapshotWithLessColumnsInStaging(GeneratorResult op
         List<String> milestoningSqlList = operations.ingestSql();
 
         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount` FROM `mydb`.`staging` as stage)";
 
         Assertions.assertEquals(BigQueryTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(BigQueryTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
@@ -148,9 +132,9 @@ public void verifyNontemporalSnapshotWithCleanStagingData(GeneratorResult operat
     }
 
     @Override
-    public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostActions)
+    public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostCleanup)
     {
-        List<String> sqlsForPostActions = physicalPlanForPostActions.getSqlList();
+        List<String> sqlsForPostActions = physicalPlanForPostCleanup.getSqlList();
         List<String> expectedSQL = new ArrayList<>();
         expectedSQL.add(BigQueryTestArtifacts.expectedDropTableQuery);
         assertIfListsAreSameIgnoringOrder(expectedSQL, sqlsForPostActions);
@@ -162,12 +146,13 @@ public RelationalSink getRelationalSink()
         return BigQuerySink.get();
     }
 
-    private void verifyStats(GeneratorResult operations)
+    private void verifyStats(GeneratorResult operations, String stageTableName)
     {
         // Pre stats:
         Assertions.assertEquals(rowsDeleted, operations.preIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
 
         // Post Stats:
+        String incomingRecordCount = String.format("SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`%s` as stage", stageTableName);
         Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
         Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
         Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED));
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java
index 18063de520c..7290a5e44eb 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java
@@ -26,7 +26,7 @@ public class UnitemporalDeltaBatchIdBasedTest extends UnitmemporalDeltaBatchIdBasedTestCases
 {
     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations)
+    public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -67,7 +67,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
     }
 
     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyUnitemporalDeltaNoDeleteIndNoDedupAllVersionsWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " +
@@ -108,7 +108,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -118,7 +118,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " +
             "WHERE " +
             "(sink.`batch_id_out` = 999999999) AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " +
             "AND ((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))";
@@ -126,7 +126,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999 FROM `mydb`.`staging` as stage " +
+            "999999999 FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
             "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " +
@@ -147,13 +147,13 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges)
+    public void verifyUnitemporalDeltaWithDeleteIndNoDedupAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " +
             "WHERE " +
             "(sink.`batch_id_out` = 999999999) AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
"WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " + "AND ((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))"; @@ -161,7 +161,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + @@ -179,7 +179,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -257,7 +257,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(G } @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -317,35 +317,24 @@ public void verifyUnitemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 " + - "FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + - "WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS " + - "(SELECT * FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`,ROW_NUMBER() " + - "OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` " + - "FROM `mydb`.`staging` as stage WHERE stage.`batch_id_in` > 5) as stage " + - "WHERE stage.`legend_persistence_row_num` = 1) as stage " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " + + "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + + "WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (stage.`version` > sink.`version`)))"; - String expectedUpsertQuery = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, " + - "`digest`, `version`, `batch_id_in`, `batch_id_out`) " + + String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `version`, `batch_id_in`, `batch_id_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 " + - "FROM batch_metadata as batch_metadata WHERE 
UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," + - "ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) " + - "as `legend_persistence_row_num` FROM `mydb`.`staging` as stage WHERE stage.`batch_id_in` > 5) as stage " + - "WHERE stage.`legend_persistence_row_num` = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + - "WHERE (sink.`batch_id_out` = 999999999) AND (stage.`version` <= sink.`version`) " + - "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " + + "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 FROM `mydb`.`staging_legend_persistence_temp_staging` " + + "as stage WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) " + + "AND (stage.`version` <= sink.`version`) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQuery, preActionsSql.get(0)); Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1)); @@ -356,7 +345,7 @@ public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(Gen } @Override - public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -393,7 +382,7 @@ public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(Generato } @Override - public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -401,7 +390,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + "WHERE (sink.`batch_id_out` = 999999999) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + "(stage.`version` > sink.`version`)))"; @@ -410,7 +399,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + "999999999 " + - "FROM `mydb`.`staging` as stage " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (stage.`version` <= 
sink.`version`) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; @@ -424,7 +413,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( } @Override - public void verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaWithNoDedupMaxVersioningAndUpperCaseWithoutStagingFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -433,22 +422,16 @@ public void verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWitho "(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA " + "as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1 " + "WHERE (sink.`BATCH_ID_OUT` = 999999999) AND " + - "(EXISTS (SELECT * FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` " + - "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`," + - "ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) " + - "as `LEGEND_PERSISTENCE_ROW_NUM` FROM `MYDB`.`STAGING` as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " + + "(EXISTS (SELECT * FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " + "WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (stage.`VERSION` >= sink.`VERSION`)))"; - String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `VERSION`, `BATCH_ID_IN`, `BATCH_ID_OUT`) " + + String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` " + + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `VERSION`, `BATCH_ID_IN`, `BATCH_ID_OUT`) " + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`," + "(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + - "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999 FROM " + - "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` " + - "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`," + - "ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) as `LEGEND_PERSISTENCE_ROW_NUM` " + - "FROM `MYDB`.`STAGING` as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) " + - "AND (stage.`VERSION` < sink.`VERSION`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; + "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999 FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND " + + "(stage.`VERSION` < sink.`VERSION`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java index 018e7800bcc..9cc6944a77d 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java @@ -27,7 +27,7 @@ public class UnitemporalDeltaBatchIdDateTimeBasedTest extends UnitmemporalDeltaBatchIdDateTimeBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -35,7 +35,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + @@ -45,7 +45,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + @@ -68,13 +68,13 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio } @Override - public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as 
batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + "(sink.`digest` <> stage.`digest`)))"; @@ -82,8 +82,8 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + @@ -101,7 +101,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -119,7 +119,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE " + "(sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + @@ -131,7 +131,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator "`batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " + @@ -154,7 +154,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator } @Override - public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalDeltaWithDeleteInd(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -162,7 +162,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 
00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE " + "(sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + @@ -174,7 +174,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper "`batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " + @@ -189,13 +189,13 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + "sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " + "((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + "((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))"; @@ -203,7 +203,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND " + "(sink.`digest` = stage.`digest`) AND ((sink.`id` = stage.`id`) AND " + @@ -236,8 +236,8 @@ public void verifyUnitemporalDeltaWithUpperCaseOptimizer(GeneratorResult operati List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; - String expectedUpsertQuery = 
"INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; + String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; + String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); @@ -254,7 +254,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " + "((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`)))"; @@ -263,7 +263,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 
00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + @@ -329,7 +329,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations) String expectedMilestoneQuery = "UPDATE `my_schema`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `my_schema`.`staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + @@ -339,7 +339,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations) "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `my_schema`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `my_schema`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + @@ -374,7 +374,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati String expectedMilestoneQuery = "UPDATE `mydb`.`my_schema`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM `mydb`.`my_schema`.`staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + @@ -384,7 +384,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`my_schema`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM 
`mydb`.`my_schema`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + @@ -419,7 +419,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper String expectedMilestoneQuery = "UPDATE main as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) AND " + "(EXISTS (SELECT * FROM staging as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + @@ -429,7 +429,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM staging as stage " + "WHERE NOT (EXISTS (SELECT * FROM main as sink " + "WHERE (sink.`batch_id_out` = 999999999) " + diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java index a2a6cb0fb30..cb6962a0515 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java @@ -26,14 +26,14 @@ public class UnitemporalDeltaDateTimeBasedTest extends UnitmemporalDeltaDateTimeBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + "WHERE ((sink.`id` = stage.`id`) 
AND (sink.`name` = stage.`name`)) AND " + @@ -42,7 +42,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " + @@ -57,29 +57,29 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio // Stats String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; - String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')"; + String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')"; String rowsDeleted = "SELECT 0 as `rowsDeleted`"; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) as `rowsInserted`"; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) as `rowsInserted`"; String rowsTerminated = "SELECT 0 as `rowsTerminated`"; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @Override - public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " + "(sink.`digest` <> stage.`digest`)))"; String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d 
%H:%M:%S','9999-12-31 23:59:59') " + - "FROM `mydb`.`staging` as stage " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " + @@ -97,23 +97,23 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE " + "(sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + @@ -124,7 +124,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper "(`id`, `name`, `amount`, `biz_date`, `digest`, " + "`batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (sink.`digest` = stage.`digest`) " + "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " + @@ -139,21 +139,21 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper // Stats String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; - String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))"; + String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))"; String rowsDeleted = "SELECT 0 as `rowsDeleted`"; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND 
(sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))) as `rowsInserted`"; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))) as `rowsTerminated`"; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))) as `rowsInserted`"; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))) as `rowsTerminated`"; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndFilterDupsAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE " + "(sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " + "AND ((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))"; @@ -161,7 +161,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (sink.`digest` = stage.`digest`) " + @@ -180,11 +180,11 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_TIME_OUT` = 
PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') WHERE (sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; + String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') WHERE (sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; - String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; + String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableTimeBasedCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java index 219547c5ad1..2d36914369f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.persistence.components.ingestmode; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import 
org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; @@ -21,6 +22,7 @@ import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; public class UnitemporalSnapshotBatchIdBasedTest extends UnitmemporalSnapshotBatchIdBasedTestCases { @@ -31,7 +33,7 @@ public class UnitemporalSnapshotBatchIdBasedTest extends UnitmemporalSnapshotBat String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1)-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))) as `rowsTerminated`"; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -60,6 +62,41 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } + @Override + public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion(GeneratorResult operations) + { + List preActionsSql = operations.preActionsSql(); + List milestoningSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + + "WHERE (sink.`batch_id_out` = 999999999) " + + "AND (NOT (EXISTS " + + "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + + String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableBatchIdBasedCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedStagingTableWithDigestCreateQuery, 
preActionsSql.get(1)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(2)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(BigQueryTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + } + @Override public void verifyUnitemporalSnapshotWithoutPartitionWithNoOpEmptyBatchHandling(GeneratorResult operations) { @@ -90,7 +127,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -120,7 +157,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java index 31fa65fd32a..6e709c287b3 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.persistence.components.ingestmode; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; @@ -21,6 +22,7 @@ import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends 
UnitmemporalSnapshotBatchIdDateTimeBasedTestCases { @@ -31,14 +33,14 @@ public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends UnitmemporalSna String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1)-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))) as `rowsTerminated`"; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; @@ -46,7 +48,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))"; @@ -60,6 +62,42 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } + @Override + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion(GeneratorResult operations) + { + List 
preActionsSql = operations.preActionsSql(); + List milestoningSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "WHERE (sink.`batch_id_out` = 999999999) " + + "AND (NOT (EXISTS " + + "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + + String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(BigQueryTestArtifacts.dataErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + } + @Override public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandling(GeneratorResult operations) { @@ -67,7 +105,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa List milestoningSql = operations.ingestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 
00:00:00.000000') " + "WHERE sink.`batch_id_out` = 999999999"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0)); @@ -76,31 +114,47 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa } @Override - public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink " + + "SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA " + + "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) " + + "AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))"; + String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` " + + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) " + + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 " + + "FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999," + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink WHERE sink.`BATCH_ID_OUT` = 999999999)))"; - String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))"; - String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink WHERE sink.`BATCH_ID_OUT` = 999999999)))"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0)); 
Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQueryInUpperCase, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicatesUpperCase, deduplicationAndVersioningSql.get(1)); + Assertions.assertEquals(BigQueryTestArtifacts.dataErrorCheckSqlUpperCase, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); Assertions.assertEquals(getExpectedMetadataTableIngestQueryWithUpperCase(), metadataIngestSql.get(0)); } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " + @@ -109,7 +163,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND (sink.`biz_date` = stage.`biz_date`))))"; @@ -136,14 +190,14 @@ public void verifyUnitemporalSnapshotWithPartitionWithDefaultEmptyDataHandling(G } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = 
operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " + @@ -152,7 +206,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND (sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))"; @@ -172,7 +226,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersWithDeleteTargetDataEmp List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00'))"; @@ -198,7 +252,7 @@ public void verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as 
batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_id_out` = 999999999) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; @@ -206,7 +260,7 @@ public void verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java index 1986c36015b..8e748b0bb41 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.persistence.components.ingestmode; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.relational.RelationalSink; import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult; import org.finos.legend.engine.persistence.components.relational.bigquery.BigQuerySink; @@ -21,25 +22,26 @@ import org.junit.jupiter.api.Assertions; import java.util.List; +import java.util.Map; public class UnitemporalSnapshotDateTimeBasedTest extends UnitmemporalSnapshotDateTimeBasedTestCases { String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; - String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = 
PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))"; + String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))"; String rowsDeleted = "SELECT 0 as `rowsDeleted`"; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))) as `rowsInserted`"; - String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'))))) as `rowsTerminated`"; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))) as `rowsInserted`"; + String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000')) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'))))) as `rowsTerminated`"; @Override - public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage " + @@ -48,7 +50,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul String 
expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59'))))"; @@ -61,6 +63,44 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } + @Override + public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion(GeneratorResult operations) + { + List preActionsSql = operations.preActionsSql(); + List milestoningSql = operations.ingestSql(); + List metadataIngestSql = operations.metadataIngestSql(); + List deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql(); + Map deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql(); + + String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + + "SET sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " + + "AND (NOT (EXISTS " + + "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))"; + + String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59'))))"; + + Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1)); + + Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); + Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1)); + Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); + verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); + + Assertions.assertEquals(BigQueryTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(BigQueryTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(BigQueryTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Assertions.assertEquals(BigQueryTestArtifacts.dataErrorCheckSql, 
deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + } + @Override public void verifyUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandling(GeneratorResult operations) { @@ -69,7 +109,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandli List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')"; Assertions.assertEquals(BigQueryTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0)); @@ -87,7 +127,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET " + - "sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + "(NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) " + "AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))"; @@ -95,7 +135,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` " + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) " + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `MYDB`.`STAGING` as stage " + "WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink " + "WHERE sink.`BATCH_TIME_OUT` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59'))))"; @@ -108,14 +148,14 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene } @Override - public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "SET sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) " + "AND (NOT (EXISTS " + "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " + @@ -124,7 +164,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, 
`batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') " + "FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND (sink.`biz_date` = stage.`biz_date`))))"; @@ -138,14 +178,14 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o } @Override - public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations) + public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00') " + + "sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000') " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND " + "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) AND " + @@ -154,7 +194,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + - "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + + "PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59') FROM `mydb`.`staging` as stage " + "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_time_out` = PARSE_DATETIME('%Y-%m-%d %H:%M:%S','9999-12-31 23:59:59')) AND " + "(sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsBigQueryTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsBigQueryTest.java index efcf49965fc..8d7be47cd26 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsBigQueryTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsBigQueryTest.java @@ -24,14 +24,14 @@ public String 
getExpectedSqlForMetadata() { return "INSERT INTO bulk_load_batch_metadata " + "(`batch_id`, `table_name`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`, `batch_source_info`) " + - "(SELECT 'batch_id_123','appeng_log_table_name',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_DATETIME(),'',PARSE_JSON('my_lineage_value'))"; + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.`batch_id`),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`table_name`) = 'APPENG_LOG_TABLE_NAME'),'appeng_log_table_name',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'',PARSE_JSON('my_lineage_value'))"; } public String getExpectedSqlForMetadataUpperCase() { return "INSERT INTO BULK_LOAD_BATCH_METADATA " + "(`BATCH_ID`, `TABLE_NAME`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`, `BATCH_SOURCE_INFO`) " + - "(SELECT 'batch_id_123','BULK_LOAD_TABLE_NAME',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00'),CURRENT_DATETIME(),'',PARSE_JSON('my_lineage_value'))"; + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.`BATCH_ID`),0)+1 FROM BULK_LOAD_BATCH_METADATA as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.`TABLE_NAME`) = 'BULK_LOAD_TABLE_NAME'),'BULK_LOAD_TABLE_NAME',PARSE_DATETIME('%Y-%m-%d %H:%M:%S','2000-01-01 00:00:00.000000'),CURRENT_DATETIME(),'',PARSE_JSON('my_lineage_value'))"; } public RelationalSink getRelationalSink() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table1.csv new file mode 100644 index 00000000000..e7a4d4b5f4b --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table1.csv @@ -0,0 +1,7 @@ +0,Candy,999.99,2022-01-15T00:00:00,1,2000-01-01T00:00:00 +1,Andy,5.2,2022-01-11T00:00:00,1,2000-01-01T00:00:00 +1,Andy,5.2,2022-01-11T00:00:00,1,2000-01-01T00:00:00 +2,Bella,99.99,2022-01-12T00:00:00,1,2000-01-01T00:00:00 +2,Bella,99.99,2022-01-12T00:00:00,1,2000-01-01T00:00:00 +49,Sandy,123.45,2022-01-13T00:00:00,1,2000-01-01T00:00:00 +50,Mindy,0,2022-01-14T00:00:00,1,2000-01-01T00:00:00 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table2.csv new file mode 100644 index 00000000000..c1da46d0fb6 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/expected/bulk_load/expected_table2.csv @@ -0,0 +1,4 @@ +1,Andy,5.2,2022-01-11T00:00:00,1,2000-01-01T00:00:00 +2,Bella,99.99,2022-01-12T00:00:00,1,2000-01-01T00:00:00 +11,Success,123.45,2022-01-13T00:00:00,1,2000-01-01T00:00:00 +49,Sandy,123.45,2022-01-13T00:00:00,1,2000-01-01T00:00:00 \ No newline at end of file diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/bad_file.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/bad_file.csv new file mode 100644 index 00000000000..1c941007414 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/bad_file.csv @@ -0,0 +1,3 @@ +hello,Andy,5.20,2022-01-11 00:00:00.0 +2,Bella,99.99,2022-01-99 00:00:00.0 +11,Success,123.45,2022-01-13 00:00:00.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/input/staged_file5.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file1.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/input/staged_file5.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file1.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file2.csv new file mode 100644 index 00000000000..a4e5d3b6eb9 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file2.csv @@ -0,0 +1,3 @@ +1,Andy,5.20,2022-01-11 00:00:00.0 +2,Bella,99.99,2022-01-12 00:00:00.0 +50,Mindy,0.00,2022-01-14 00:00:00.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file3.csv new file mode 100644 index 00000000000..1ec00ee9883 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-bigquery/src/test/resources/input/bulk_load/staged_file3.csv @@ -0,0 +1 @@ +0,Candy,999.99,2022-01-15 00:00:00.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/pom.xml index bab1efb8874..b11b40a2909 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/pom.xml +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-persistence-component - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/ApiUtils.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/ApiUtils.java index fda329b17ee..3ea9a5d4d9b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/ApiUtils.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/ApiUtils.java @@ -14,19 +14,37 @@ package org.finos.legend.engine.persistence.components.relational.api; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.eclipse.collections.api.tuple.Pair; +import org.eclipse.collections.impl.tuple.Tuples; +import org.finos.legend.engine.persistence.components.common.DatasetFilter; import org.finos.legend.engine.persistence.components.common.Datasets; -import org.finos.legend.engine.persistence.components.ingestmode.DeriveMainDatasetSchemaFromStaging; -import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; -import org.finos.legend.engine.persistence.components.ingestmode.IngestModeCaseConverter; +import org.finos.legend.engine.persistence.components.common.FilterType; +import org.finos.legend.engine.persistence.components.common.OptimizationFilter; +import org.finos.legend.engine.persistence.components.executor.Executor; +import org.finos.legend.engine.persistence.components.ingestmode.*; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; +import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetsCaseConverter; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; +import org.finos.legend.engine.persistence.components.planner.Planner; import org.finos.legend.engine.persistence.components.relational.CaseConversion; +import org.finos.legend.engine.persistence.components.relational.SqlPlan; +import org.finos.legend.engine.persistence.components.relational.sql.TabularData; +import org.finos.legend.engine.persistence.components.relational.sqldom.SqlGen; +import org.finos.legend.engine.persistence.components.transformer.Transformer; import org.finos.legend.engine.persistence.components.util.BulkLoadMetadataDataset; import org.finos.legend.engine.persistence.components.util.LockInfoDataset; import org.finos.legend.engine.persistence.components.util.MetadataDataset; -import java.util.List; +import java.util.*; + +import static 
org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory.MAX_OF_FIELD; +import static org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory.MIN_OF_FIELD; public class ApiUtils { @@ -97,4 +115,125 @@ private static LockInfoDataset getLockInfoDataset(Datasets datasets) } return lockInfoDataset; } + + public static Optional getNextBatchId(Datasets datasets, Executor executor, + Transformer transformer, IngestMode ingestMode) + { + if (ingestMode.accept(IngestModeVisitors.IS_INGEST_MODE_TEMPORAL) || ingestMode instanceof BulkLoad) + { + LogicalPlan logicalPlanForNextBatchId = LogicalPlanFactory.getLogicalPlanForNextBatchId(datasets, ingestMode); + List tabularData = executor.executePhysicalPlanAndGetResults(transformer.generatePhysicalPlan(logicalPlanForNextBatchId)); + Optional nextBatchId = getFirstColumnValue(getFirstRowForFirstResult(tabularData)); + if (nextBatchId.isPresent()) + { + return retrieveValueAsLong(nextBatchId.get()); + } + } + return Optional.empty(); + } + + public static Optional>> getOptimizationFilterBounds(Datasets datasets, Executor executor, + Transformer transformer, IngestMode ingestMode) + { + List filters = ingestMode.accept(IngestModeVisitors.RETRIEVE_OPTIMIZATION_FILTERS); + if (!filters.isEmpty()) + { + Map> map = new HashMap<>(); + for (OptimizationFilter filter : filters) + { + LogicalPlan logicalPlanForMinAndMaxForField = LogicalPlanFactory.getLogicalPlanForMinAndMaxForField(datasets.stagingDataset(), filter.fieldName()); + List tabularData = executor.executePhysicalPlanAndGetResults(transformer.generatePhysicalPlan(logicalPlanForMinAndMaxForField)); + Map resultMap = getFirstRowForFirstResult(tabularData); + // Put into map only when not null + Object lower = resultMap.get(MIN_OF_FIELD); + Object upper = resultMap.get(MAX_OF_FIELD); + if (lower != null && upper != null) + { + map.put(filter, Tuples.pair(lower, upper)); + } + } + return Optional.of(map); + } + return Optional.empty(); + } + + public static List extractDatasetFilters(MetadataDataset metadataDataset, Executor executor, SqlPlan physicalPlan) throws JsonProcessingException + { + List datasetFilters = new ArrayList<>(); + List results = executor.executePhysicalPlanAndGetResults(physicalPlan); + Optional stagingFilters = results.stream() + .findFirst() + .map(TabularData::getData) + .flatMap(t -> t.stream().findFirst()) + .map(stringObjectMap -> (String) stringObjectMap.get(metadataDataset.stagingFiltersField())); + + // Convert map of Filters to List of Filters + if (stagingFilters.isPresent()) + { + Map> datasetFiltersMap = new ObjectMapper().readValue(stagingFilters.get(), new TypeReference>>() {}); + for (Map.Entry> filtersMapEntry : datasetFiltersMap.entrySet()) + { + for (Map.Entry filterEntry : filtersMapEntry.getValue().entrySet()) + { + DatasetFilter datasetFilter = DatasetFilter.of(filtersMapEntry.getKey(), FilterType.fromName(filterEntry.getKey()), filterEntry.getValue()); + datasetFilters.add(datasetFilter); + } + } + } + return datasetFilters; + } + + public static List getDataSplitRanges(Executor executor, Planner planner, + Transformer transformer, IngestMode ingestMode) + { + List dataSplitRanges = new ArrayList<>(); + if (ingestMode.versioningStrategy() instanceof AllVersionsStrategy) + { + Dataset stagingDataset = planner.stagingDataset(); + String dataSplitField = ingestMode.dataSplitField().get(); + LogicalPlan logicalPlanForMaxOfField = LogicalPlanFactory.getLogicalPlanForMaxOfField(stagingDataset, dataSplitField); + List tabularData 
= executor.executePhysicalPlanAndGetResults(transformer.generatePhysicalPlan(logicalPlanForMaxOfField)); + Map row = getFirstRowForFirstResult(tabularData); + Long maxDataSplit = retrieveValueAsLong(row.get(MAX_OF_FIELD)).orElseThrow(IllegalStateException::new); + for (int i = 1; i <= maxDataSplit; i++) + { + dataSplitRanges.add(DataSplitRange.of(i, i)); + } + } + return dataSplitRanges; + } + + public static Optional retrieveValueAsLong(Object obj) + { + if (obj instanceof Integer) + { + return Optional.of(Long.valueOf((Integer) obj)); + } + else if (obj instanceof Long) + { + return Optional.of((Long) obj); + } + return Optional.empty(); + } + + public static Map getFirstRowForFirstResult(List tabularData) + { + Map resultMap = tabularData.stream() + .findFirst() + .map(TabularData::getData) + .flatMap(t -> t.stream().findFirst()) + .orElse(Collections.emptyMap()); + return resultMap; + } + + public static Optional getFirstColumnValue(Map row) + { + Optional object = Optional.empty(); + if (!row.isEmpty()) + { + String key = row.keySet().stream().findFirst().orElseThrow(IllegalStateException::new); + object = Optional.ofNullable(row.get(key)); + } + return object; + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/GeneratorResultAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/GeneratorResultAbstract.java index 3cfc890a74f..b88ef115205 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/GeneratorResultAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/GeneratorResultAbstract.java @@ -14,6 +14,7 @@ package org.finos.legend.engine.persistence.components.relational.api; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.relational.SqlPlan; @@ -56,10 +57,14 @@ public abstract class GeneratorResultAbstract public abstract Optional metadataIngestSqlPlan(); + public abstract Optional deduplicationAndVersioningSqlPlan(); + public abstract SqlPlan postActionsSqlPlan(); public abstract Optional postCleanupSqlPlan(); + public abstract Map deduplicationAndVersioningErrorChecksSqlPlan(); + public abstract Map preIngestStatisticsSqlPlan(); public abstract Map postIngestStatisticsSqlPlan(); @@ -99,6 +104,11 @@ public List metadataIngestSql() return metadataIngestSqlPlan().map(SqlPlanAbstract::getSqlList).orElse(Collections.emptyList()); } + public List deduplicationAndVersioningSql() + { + return deduplicationAndVersioningSqlPlan().map(SqlPlanAbstract::getSqlList).orElse(Collections.emptyList()); + } + public List postActionsSql() { return postActionsSqlPlan().getSqlList(); @@ -117,6 +127,14 @@ public Map preIngestStatisticsSql() k -> preIngestStatisticsSqlPlan().get(k).getSql())); } + public Map 
deduplicationAndVersioningErrorChecksSql() + { + return deduplicationAndVersioningErrorChecksSqlPlan().keySet().stream() + .collect(Collectors.toMap( + k -> k, + k -> deduplicationAndVersioningErrorChecksSqlPlan().get(k).getSql())); + } + public Map postIngestStatisticsSql() { return postIngestStatisticsSqlPlan().keySet().stream() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalGeneratorAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalGeneratorAbstract.java index 3ee9b546ec0..2858ce87f80 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalGeneratorAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalGeneratorAbstract.java @@ -15,6 +15,7 @@ package org.finos.legend.engine.persistence.components.relational.api; import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics; import org.finos.legend.engine.persistence.components.common.Resources; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; @@ -45,7 +46,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import java.util.UUID; @Immutable @Style( @@ -114,11 +114,7 @@ public boolean enableConcurrentSafety() public abstract Optional infiniteBatchIdValue(); - @Default - public String bulkLoadBatchIdValue() - { - return UUID.randomUUID().toString(); - } + public abstract Optional bulkLoadTaskIdValue(); @Default public String bulkLoadBatchStatusPattern() @@ -141,6 +137,7 @@ protected PlannerOptions plannerOptions() .enableSchemaEvolution(enableSchemaEvolution()) .createStagingDataset(createStagingDataset()) .enableConcurrentSafety(enableConcurrentSafety()) + .bulkLoadTaskIdValue(bulkLoadTaskIdValue()) .build(); } @@ -152,7 +149,6 @@ protected TransformOptions transformOptions() .batchStartTimestampPattern(batchStartTimestampPattern()) .batchEndTimestampPattern(batchEndTimestampPattern()) .infiniteBatchIdValue(infiniteBatchIdValue()) - .bulkLoadBatchIdValue(bulkLoadBatchIdValue()) .bulkLoadBatchStatusPattern(bulkLoadBatchStatusPattern()) .batchIdPattern(batchIdPattern()); @@ -192,7 +188,7 @@ GeneratorResult generateOperations(Datasets datasets, Resources resources) Datasets datasetsWithCaseConversion = ApiUtils.enrichAndApplyCase(datasets, caseConversion()); Dataset enrichedMainDataset = ApiUtils.deriveMainDatasetFromStaging(datasetsWithCaseConversion, ingestModeWithCaseConversion); Datasets enrichedDatasets = datasetsWithCaseConversion.withMainDataset(enrichedMainDataset); - Planner planner = Planners.get(enrichedDatasets, ingestModeWithCaseConversion, plannerOptions()); + Planner planner = Planners.get(enrichedDatasets, ingestModeWithCaseConversion, plannerOptions(), relationalSink().capabilities()); return 
generateOperations(enrichedDatasets, resources, planner, ingestModeWithCaseConversion); } @@ -243,11 +239,26 @@ GeneratorResult generateOperations(Datasets datasets, Resources resources, Plann schemaEvolutionDataset = Optional.of(schemaEvolutionResult.evolvedDataset()); // update main dataset with evolved schema and re-initialize planner - planner = Planners.get(datasets.withMainDataset(schemaEvolutionDataset.get()), ingestMode, plannerOptions()); + planner = Planners.get(datasets.withMainDataset(schemaEvolutionDataset.get()), ingestMode, plannerOptions(), relationalSink().capabilities()); + } + + // deduplication and versioning + LogicalPlan deduplicationAndVersioningLogicalPlan = planner.buildLogicalPlanForDeduplicationAndVersioning(resources); + Optional deduplicationAndVersioningSqlPlan = Optional.empty(); + if (deduplicationAndVersioningLogicalPlan != null) + { + deduplicationAndVersioningSqlPlan = Optional.of(transformer.generatePhysicalPlan(deduplicationAndVersioningLogicalPlan)); + } + + Map deduplicationAndVersioningErrorChecksLogicalPlan = planner.buildLogicalPlanForDeduplicationAndVersioningErrorChecks(resources); + Map deduplicationAndVersioningErrorChecksSqlPlan = new HashMap<>(); + for (DedupAndVersionErrorStatistics statistic : deduplicationAndVersioningErrorChecksLogicalPlan.keySet()) + { + deduplicationAndVersioningErrorChecksSqlPlan.put(statistic, transformer.generatePhysicalPlan(deduplicationAndVersioningErrorChecksLogicalPlan.get(statistic))); } // ingest - LogicalPlan ingestLogicalPlan = planner.buildLogicalPlanForIngest(resources, relationalSink().capabilities()); + LogicalPlan ingestLogicalPlan = planner.buildLogicalPlanForIngest(resources); SqlPlan ingestSqlPlan = transformer.generatePhysicalPlan(ingestLogicalPlan); // metadata-ingest @@ -286,6 +297,8 @@ GeneratorResult generateOperations(Datasets datasets, Resources resources, Plann .postActionsSqlPlan(postActionsSqlPlan) .postCleanupSqlPlan(postCleanupSqlPlan) .metadataIngestSqlPlan(metaDataIngestSqlPlan) + .deduplicationAndVersioningSqlPlan(deduplicationAndVersioningSqlPlan) + .putAllDeduplicationAndVersioningErrorChecksSqlPlan(deduplicationAndVersioningErrorChecksSqlPlan) .putAllPreIngestStatisticsSqlPlan(preIngestStatisticsSqlPlan) .putAllPostIngestStatisticsSqlPlan(postIngestStatisticsSqlPlan) .build(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalIngestorAbstract.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalIngestorAbstract.java index d362d4d4bb4..12faa47d745 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalIngestorAbstract.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/api/RelationalIngestorAbstract.java @@ -15,33 +15,16 @@ package org.finos.legend.engine.persistence.components.relational.api; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import 
com.fasterxml.jackson.databind.ObjectMapper; import org.eclipse.collections.api.tuple.Pair; -import org.eclipse.collections.impl.tuple.Tuples; -import org.finos.legend.engine.persistence.components.common.Datasets; -import org.finos.legend.engine.persistence.components.common.OptimizationFilter; -import org.finos.legend.engine.persistence.components.common.Resources; -import org.finos.legend.engine.persistence.components.common.DatasetFilter; -import org.finos.legend.engine.persistence.components.common.FilterType; -import org.finos.legend.engine.persistence.components.common.StatisticName; +import org.finos.legend.engine.persistence.components.common.*; import org.finos.legend.engine.persistence.components.executor.DigestInfo; import org.finos.legend.engine.persistence.components.executor.Executor; import org.finos.legend.engine.persistence.components.importer.Importer; import org.finos.legend.engine.persistence.components.importer.Importers; -import org.finos.legend.engine.persistence.components.ingestmode.DeriveMainDatasetSchemaFromStaging; -import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; -import org.finos.legend.engine.persistence.components.ingestmode.IngestModeOptimizationColumnHandler; -import org.finos.legend.engine.persistence.components.ingestmode.IngestModeVisitors; -import org.finos.legend.engine.persistence.components.ingestmode.BulkLoad; +import org.finos.legend.engine.persistence.components.ingestmode.*; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Selection; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetReference; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.ExternalDatasetReference; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Field; -import org.finos.legend.engine.persistence.components.ingestmode.TempDatasetsEnricher; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.*; import org.finos.legend.engine.persistence.components.logicalplan.values.StringValue; import org.finos.legend.engine.persistence.components.planner.Planner; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; @@ -62,26 +45,16 @@ import org.immutables.value.Value.Derived; import org.immutables.value.Value.Immutable; import org.immutables.value.Value.Style; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import java.sql.Date; import java.time.Clock; import java.time.LocalDateTime; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Arrays; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.UUID; - -import static org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory.MAX_OF_FIELD; -import static org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory.MIN_OF_FIELD; +import java.util.*; + import static org.finos.legend.engine.persistence.components.logicalplan.LogicalPlanFactory.TABLE_IS_NON_EMPTY; +import static org.finos.legend.engine.persistence.components.relational.api.ApiUtils.*; import static 
org.finos.legend.engine.persistence.components.relational.api.RelationalGeneratorAbstract.BULK_LOAD_BATCH_STATUS_PATTERN; import static org.finos.legend.engine.persistence.components.transformer.Transformer.TransformOptionsAbstract.DATE_TIME_FORMATTER; @@ -98,8 +71,8 @@ public abstract class RelationalIngestorAbstract private static final String STAGING = "staging"; private static final String UNDERSCORE = "_"; private static final String SINGLE_QUOTE = "'"; - private static final String BATCH_ID_PATTERN = "{NEXT_BATCH_ID_PATTERN}"; + public static final String BATCH_ID_PATTERN = "{NEXT_BATCH_ID_PATTERN}"; public static final String BATCH_START_TS_PATTERN = "{BATCH_START_TIMESTAMP_PLACEHOLDER}"; private static final String BATCH_END_TS_PATTERN = "{BATCH_END_TIMESTAMP_PLACEHOLDER}"; @@ -161,18 +134,14 @@ public Set schemaEvolutionCapabilitySet() return Collections.emptySet(); } - @Default - public String bulkLoadBatchIdValue() - { - return UUID.randomUUID().toString(); - } - //---------- FIELDS ---------- public abstract IngestMode ingestMode(); public abstract RelationalSink relationalSink(); + public abstract Optional bulkLoadTaskIdValue(); + @Derived protected PlannerOptions plannerOptions() { @@ -182,6 +151,7 @@ protected PlannerOptions plannerOptions() .enableSchemaEvolution(enableSchemaEvolution()) .createStagingDataset(createStagingDataset()) .enableConcurrentSafety(enableConcurrentSafety()) + .bulkLoadTaskIdValue(bulkLoadTaskIdValue()) .build(); } @@ -244,14 +214,26 @@ public Datasets evolve(Datasets datasets) return this.enrichedDatasets; } + /* + - Perform deduplication and versioning on the staging data + */ + public Datasets dedupAndVersion(Datasets datasets) + { + LOGGER.info("Invoked dedupAndVersion method, will perform Deduplication and Versioning"); + init(datasets); + dedupAndVersion(); + return this.enrichedDatasets; + } + /* - Perform ingestion from staging to main dataset based on the Ingest mode, executes in current transaction */ - public IngestorResult ingest(Datasets datasets) + public List ingest(Datasets datasets) { LOGGER.info("Invoked ingest method, will perform the ingestion"); init(datasets); - IngestorResult result = ingest(Arrays.asList()).stream().findFirst().orElseThrow(IllegalStateException::new); + List dataSplitRanges = ApiUtils.getDataSplitRanges(executor, planner, transformer, ingestMode()); + List result = ingest(dataSplitRanges); LOGGER.info("Ingestion completed"); return result; } @@ -276,10 +258,10 @@ public Datasets cleanUp(Datasets datasets) 4. Ingestion from staging to main dataset in a transaction 5. 
Clean up of temporary tables */ - public IngestorResult performFullIngestion(RelationalConnection connection, Datasets datasets) + public List performFullIngestion(RelationalConnection connection, Datasets datasets) { LOGGER.info("Invoked performFullIngestion method"); - return performFullIngestion(connection, datasets, null).stream().findFirst().orElseThrow(IllegalStateException::new); + return performFullIngestion(connection, datasets, new ArrayList<>()); } /* @@ -318,7 +300,7 @@ public List getLatestStagingFilters(RelationalConnection connecti Transformer transformer = new RelationalTransformer(relationalSink(), transformOptions()); Executor executor = relationalSink().getRelationalExecutor(connection); SqlPlan physicalPlan = transformer.generatePhysicalPlan(logicalPlan); - return extractDatasetFilters(metadataDataset, executor, physicalPlan); + return ApiUtils.extractDatasetFilters(metadataDataset, executor, physicalPlan); } // ---------- UTILITY METHODS ---------- @@ -339,6 +321,34 @@ private void createAllDatasets() executor.executePhysicalPlan(generatorResult.preActionsSqlPlan()); } + private void dedupAndVersion() + { + if (generatorResult.deduplicationAndVersioningSqlPlan().isPresent()) + { + LOGGER.info("Executing Deduplication and Versioning"); + executor.executePhysicalPlan(generatorResult.deduplicationAndVersioningSqlPlan().get()); + Map errorStatistics = executeDeduplicationAndVersioningErrorChecks(executor, generatorResult.deduplicationAndVersioningErrorChecksSqlPlan()); + /* Error Checks + 1. If deduplication strategy = Fail on Duplicates, fail the job if duplicate count > 1 + 2. If versioning = Max Version / All Versions, check for data errors + */ + Optional maxDuplicatesValue = retrieveValueAsLong(errorStatistics.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Optional maxDataErrorsValue = retrieveValueAsLong(errorStatistics.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + if (maxDuplicatesValue.isPresent() && maxDuplicatesValue.get() > 1) + { + String errorMessage = "Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy"; + LOGGER.error(errorMessage); + throw new RuntimeException(errorMessage); + } + if (maxDataErrorsValue.isPresent() && maxDataErrorsValue.get() > 1) + { + String errorMessage = "Encountered Data errors (same PK, same version but different data), hence failing the batch"; + LOGGER.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } + } + private void initializeLock() { if (enableConcurrentSafety()) @@ -408,6 +418,14 @@ private List performFullIngestion(RelationalConnection connectio // Evolve Schema evolveSchema(); + // Dedup and Version + dedupAndVersion(); + // Find the data split ranges based on the result of dedup and versioning + if (dataSplitRanges.isEmpty()) + { + dataSplitRanges = ApiUtils.getDataSplitRanges(executor, planner, transformer, ingestMode()); + } + // Perform Ingestion List result; try @@ -494,10 +512,10 @@ private void init(Datasets datasets) .batchStartTimestampPattern(BATCH_START_TS_PATTERN) .batchEndTimestampPattern(BATCH_END_TS_PATTERN) .batchIdPattern(BATCH_ID_PATTERN) - .bulkLoadBatchIdValue(bulkLoadBatchIdValue()) + .bulkLoadTaskIdValue(bulkLoadTaskIdValue()) .build(); - planner = Planners.get(enrichedDatasets, enrichedIngestMode, plannerOptions()); + planner = Planners.get(enrichedDatasets, enrichedIngestMode, plannerOptions(), relationalSink().capabilities()); generatorResult = generator.generateOperations(enrichedDatasets, resourcesBuilder.build(), planner, enrichedIngestMode); } 
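The RelationalIngestorAbstract hunks above make deduplication and versioning an explicit phase of the ingestion lifecycle and turn ingest/performFullIngestion into multi-result calls (one IngestorResult per derived data-split range). A minimal caller sketch, under stated assumptions: H2Sink as the sink (mirroring the H2 tests later in this diff), a plain java.sql.Connection wrapped via JdbcConnection.of, and hypothetical myIngestMode/myDatasets supplied by the reader.

    import java.sql.Connection;
    import java.util.List;

    import org.finos.legend.engine.persistence.components.common.Datasets;
    import org.finos.legend.engine.persistence.components.ingestmode.IngestMode;
    import org.finos.legend.engine.persistence.components.relational.api.IngestorResult;
    import org.finos.legend.engine.persistence.components.relational.api.RelationalIngestor;
    import org.finos.legend.engine.persistence.components.relational.h2.H2Sink;
    import org.finos.legend.engine.persistence.components.relational.jdbc.JdbcConnection;

    public final class IngestFlowSketch
    {
        // myIngestMode, myDatasets and connection are placeholders supplied by the caller.
        public static List<IngestorResult> run(IngestMode myIngestMode, Datasets myDatasets, Connection connection)
        {
            RelationalIngestor ingestor = RelationalIngestor.builder()
                    .ingestMode(myIngestMode)
                    .relationalSink(H2Sink.get())
                    .collectStatistics(true)
                    .build();

            // performFullIngestion now runs create -> evolve -> dedupAndVersion -> ingest -> cleanup
            // in one call; dedupAndVersion throws a RuntimeException (failing the batch) on
            // duplicates under Fail on Duplicates, or on data errors under Max/All versioning.
            return ingestor.performFullIngestion(JdbcConnection.of(connection), myDatasets);
        }
    }

The explicit phase methods (create, evolve, dedupAndVersion, then ingest inside a transaction) remain available for callers needing finer control, as exercised by executePlansAndVerifyForCaseConversion at the end of this section.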
@@ -569,8 +587,10 @@ private List performBulkLoad(Datasets datasets, Transformer trans LogicalPlan checkIsDatasetEmptyLogicalPlan = LogicalPlanFactory.getLogicalPlanForIsDatasetEmpty(dataset); SqlPlan physicalPlanForCheckIsDataSetEmpty = transformer.generatePhysicalPlan(checkIsDatasetEmptyLogicalPlan); List results = executor.executePhysicalPlanAndGetResults(physicalPlanForCheckIsDataSetEmpty); - - String value = String.valueOf(results.stream() - .findFirst() - .map(TabularData::getData) - .flatMap(t -> t.stream().findFirst()) - .map(Map::values) - .flatMap(t -> t.stream().findFirst()) - .orElseThrow(IllegalStateException::new)); + Optional obj = getFirstColumnValue(getFirstRowForFirstResult(results)); + String value = String.valueOf(obj.orElseThrow(IllegalStateException::new)); return !value.equals(TABLE_IS_NON_EMPTY); } @@ -639,18 +653,29 @@ private Map executeStatisticsPhysicalPlan(Executor statisticsSqlPlan, Map placeHolderKeyValues) { - return statisticsSqlPlan.keySet() - .stream() - .collect(Collectors.toMap( - k -> k, - k -> executor.executePhysicalPlanAndGetResults(statisticsSqlPlan.get(k), placeHolderKeyValues) - .stream() - .findFirst() - .map(TabularData::getData) - .flatMap(t -> t.stream().findFirst()) - .map(Map::values) - .flatMap(t -> t.stream().findFirst()) - .orElseThrow(IllegalStateException::new))); + Map results = new HashMap<>(); + for (Map.Entry entry: statisticsSqlPlan.entrySet()) + { + List result = executor.executePhysicalPlanAndGetResults(entry.getValue(), placeHolderKeyValues); + Optional obj = getFirstColumnValue(getFirstRowForFirstResult(result)); + Object value = obj.orElse(null); + results.put(entry.getKey(), value); + } + return results; + } + + private Map executeDeduplicationAndVersioningErrorChecks(Executor executor, + Map errorChecksPlan) + { + Map results = new HashMap<>(); + for (Map.Entry entry: errorChecksPlan.entrySet()) + { + List result = executor.executePhysicalPlanAndGetResults(entry.getValue()); + Optional obj = getFirstColumnValue(getFirstRowForFirstResult(result)); + Object value = obj.orElse(null); + results.put(entry.getKey(), value); + } + return results; } private Map extractPlaceHolderKeyValues(Datasets datasets, Executor executor, @@ -658,8 +683,8 @@ private Map extractPlaceHolderKeyValues(Datasets datasets, Execu Optional dataSplitRange) { Map placeHolderKeyValues = new HashMap<>(); - Optional nextBatchId = getNextBatchId(datasets, executor, transformer, ingestMode); - Optional>> optimizationFilters = getOptimizationFilterBounds(datasets, executor, transformer, ingestMode); + Optional nextBatchId = ApiUtils.getNextBatchId(datasets, executor, transformer, ingestMode); + Optional>> optimizationFilters = ApiUtils.getOptimizationFilterBounds(datasets, executor, transformer, ingestMode); if (nextBatchId.isPresent()) { LOGGER.info(String.format("Obtained the next Batch id: %s", nextBatchId.get())); @@ -696,88 +721,4 @@ else if (lowerBound instanceof Number) return placeHolderKeyValues; } - private Optional getNextBatchId(Datasets datasets, Executor executor, - Transformer transformer, IngestMode ingestMode) - { - if (ingestMode.accept(IngestModeVisitors.IS_INGEST_MODE_TEMPORAL)) - { - LogicalPlan logicalPlanForNextBatchId = LogicalPlanFactory.getLogicalPlanForNextBatchId(datasets); - List tabularData = executor.executePhysicalPlanAndGetResults(transformer.generatePhysicalPlan(logicalPlanForNextBatchId)); - Optional nextBatchId = Optional.ofNullable(tabularData.stream() - .findFirst() - .map(TabularData::getData) - .flatMap(t -> 
t.stream().findFirst()) - .map(Map::values) - .flatMap(t -> t.stream().findFirst()) - .orElseThrow(IllegalStateException::new)); - if (nextBatchId.isPresent()) - { - if (nextBatchId.get() instanceof Integer) - { - return Optional.of(Long.valueOf((Integer) nextBatchId.get())); - } - if (nextBatchId.get() instanceof Long) - { - return Optional.of((Long) nextBatchId.get()); - } - } - } - return Optional.empty(); - } - - private Optional>> getOptimizationFilterBounds(Datasets datasets, Executor executor, - Transformer transformer, IngestMode ingestMode) - { - List filters = ingestMode.accept(IngestModeVisitors.RETRIEVE_OPTIMIZATION_FILTERS); - if (!filters.isEmpty()) - { - Map> map = new HashMap<>(); - for (OptimizationFilter filter : filters) - { - LogicalPlan logicalPlanForMinAndMaxForField = LogicalPlanFactory.getLogicalPlanForMinAndMaxForField(datasets.stagingDataset(), filter.fieldName()); - List tabularData = executor.executePhysicalPlanAndGetResults(transformer.generatePhysicalPlan(logicalPlanForMinAndMaxForField)); - Map resultMap = tabularData.stream() - .findFirst() - .map(TabularData::getData) - .flatMap(t -> t.stream().findFirst()) - .orElseThrow(IllegalStateException::new); - // Put into map only when not null - Object lower = resultMap.get(MIN_OF_FIELD); - Object upper = resultMap.get(MAX_OF_FIELD); - if (lower != null && upper != null) - { - map.put(filter, Tuples.pair(lower, upper)); - } - } - return Optional.of(map); - } - return Optional.empty(); - } - - private List extractDatasetFilters(MetadataDataset metadataDataset, Executor executor, SqlPlan physicalPlan) throws JsonProcessingException - { - List datasetFilters = new ArrayList<>(); - List results = executor.executePhysicalPlanAndGetResults(physicalPlan); - Optional stagingFilters = results.stream() - .findFirst() - .map(TabularData::getData) - .flatMap(t -> t.stream().findFirst()) - .map(stringObjectMap -> (String) stringObjectMap.get(metadataDataset.stagingFiltersField())); - - // Convert map of Filters to List of Filters - if (stagingFilters.isPresent()) - { - Map> datasetFiltersMap = new ObjectMapper().readValue(stagingFilters.get(), new TypeReference>>() {}); - for (Map.Entry> filtersMapEntry : datasetFiltersMap.entrySet()) - { - for (Map.Entry filterEntry : filtersMapEntry.getValue().entrySet()) - { - DatasetFilter datasetFilter = DatasetFilter.of(filtersMapEntry.getKey(), FilterType.fromName(filterEntry.getKey()), filterEntry.getValue()); - datasetFilters.add(datasetFilter); - } - } - } - return datasetFilters; - } - } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/Clause.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/Clause.java index 1086d1e2c06..a68b724f02f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/Clause.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/Clause.java @@ -58,7 +58,10 @@ public enum Clause NOT_ENFORCED("NOT 
ENFORCED"), DATA_TYPE("DATA TYPE"), CONVERT("CONVERT"), - ARRAY("ARRAY"); + ARRAY("ARRAY"), + LOAD_DATA("LOAD DATA"), + OVERWRITE("OVERWRITE"), + FILES("FILES"); private final String clause; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/FunctionName.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/FunctionName.java index 34482bded7c..52423165eab 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/FunctionName.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-core/src/main/java/org/finos/legend/engine/persistence/components/relational/sqldom/common/FunctionName.java @@ -24,6 +24,7 @@ public enum FunctionName MAX("MAX"), MIN("MIN"), COUNT("COUNT"), + DISTINCT("DISTINCT"), COALESCE("COALESCE"), CURRENT_TIME("CURRENT_TIME"), CURRENT_DATE("CURRENT_DATE"), @@ -36,6 +37,7 @@ public enum FunctionName UPPER("UPPER"), SUBSTRING("SUBSTRING"), ROW_NUMBER("ROW_NUMBER"), + DENSE_RANK("DENSE_RANK"), DATE("DATE"), DATE_TRUNC("DATE_TRUNC"), DATETIME_TRUNC("DATETIME_TRUNC"), @@ -45,7 +47,8 @@ public enum FunctionName PARSE_DATETIME("PARSE_DATETIME"), PARSE_JSON("PARSE_JSON"), TO_VARIANT("TO_VARIANT"), - OBJECT_CONSTRUCT("OBJECT_CONSTRUCT"); + OBJECT_CONSTRUCT("OBJECT_CONSTRUCT"), + TO_JSON("TO_JSON"); private static final Map BY_NAME = Arrays .stream(FunctionName.values()) diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/pom.xml index 2c78e852fdf..0c052881d75 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-persistence-component - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2DigestUtil.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2DigestUtil.java index 82a7f3788f7..3a6c1db5e95 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2DigestUtil.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2DigestUtil.java @@ -17,9 +17,20 @@ import 
org.apache.commons.codec.digest.DigestUtils; import org.finos.legend.engine.persistence.components.relational.jdbc.JdbcHelper; +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + public class H2DigestUtil { + private static final byte[] EMPTY_STRING = new byte[] { 88 }; + public static void registerMD5Udf(JdbcHelper sink, String UdfName) { sink.executeStatement("CREATE ALIAS " + UdfName + " FOR \"org.finos.legend.engine.persistence.components.relational.h2.H2DigestUtil.MD5\";"); @@ -27,9 +38,57 @@ public static void registerMD5Udf(JdbcHelper sink, String UdfName) public static String MD5(String[] columnNameList, String[] columnValueList) { - String columnNames = String.join("", columnNameList); - String columnValues = String.join("", columnValueList); - String columnNamesAndColumnValues = columnNames + columnValues; - return DigestUtils.md5Hex(columnNamesAndColumnValues).toUpperCase(); + return calculateMD5Digest(generateRowMap(columnNameList, columnValueList)); + } + + private static Map generateRowMap(String[] columnNameList, String[] columnValueList) + { + Map map = new HashMap<>(); + for (int i = 0; i < columnNameList.length; i++) + { + map.put(columnNameList[i], columnValueList[i]); + } + return map; + } + + private static String calculateMD5Digest(Map row) + { + List fieldNames = row.keySet().stream().sorted().collect(Collectors.toList()); + try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); + DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream)) + { + fieldNames.stream().forEachOrdered(field -> + { + Optional value = Optional.ofNullable(row.get(field)); + value.ifPresent(v -> writeValueWithFieldName(field, v, dataOutputStream)); + }); + dataOutputStream.flush(); + return DigestUtils.md5Hex(byteArrayOutputStream.toByteArray()); + } + catch (IOException e) + { + throw new RuntimeException("Unable to create digest", e); + } + } + + private static void writeValueWithFieldName(String fieldName, Object value, DataOutputStream dataOutputStream) + { + try + { + dataOutputStream.writeInt(fieldName.hashCode()); + String stringValue = value.toString(); + if (stringValue == null || stringValue.length() == 0) + { + dataOutputStream.write(EMPTY_STRING); + } + else + { + dataOutputStream.writeBytes(stringValue); + } + } + catch (IOException e) + { + throw new RuntimeException(String.format("Unable to create digest for field [%s]", fieldName), e); + } } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java index 9f93c193c6f..9b8ac1db944 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java @@ -77,6 +77,7 @@ 
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java
index 9f93c193c6f..9b8ac1db944 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/main/java/org/finos/legend/engine/persistence/components/relational/h2/H2Sink.java
@@ -77,6 +77,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_ID_PATTERN;
 import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_START_TS_PATTERN;
 
 public class H2Sink extends AnsiSqlSink
@@ -97,6 +98,7 @@ public class H2Sink extends AnsiSqlSink
         capabilities.add(Capability.EXPLICIT_DATA_TYPE_CONVERSION);
         capabilities.add(Capability.DATA_TYPE_LENGTH_CHANGE);
         capabilities.add(Capability.DATA_TYPE_SCALE_CHANGE);
+        capabilities.add(Capability.TRANSFORM_WHILE_COPY);
         CAPABILITIES = Collections.unmodifiableSet(capabilities);
 
         Map<Class<?>, LogicalPlanVisitor<?>> logicalPlanVisitorByClass = new HashMap<>();
@@ -200,6 +202,7 @@ public Optional<Optimizer> optimizerForCaseConversion(CaseConversion caseConvers
         }
     }
 
+    @Override
     public IngestorResult performBulkLoad(Datasets datasets, Executor<SqlGen, TabularData, SqlPlan> executor, SqlPlan ingestSqlPlan, Map<StatisticName, SqlPlan> statisticsSqlPlan, Map<String, String> placeHolderKeyValues)
     {
         executor.executePhysicalPlan(ingestSqlPlan, placeHolderKeyValues);
@@ -224,6 +227,7 @@ public IngestorResult performBulkLoad(Datasets datasets, Executor
+        List<Field> fieldsWithoutPk = stagingTable.schema().fields().stream().map(field -> field.withPrimaryKey(false)).collect(Collectors.toList());
+        stagingTable = stagingTable.withSchema(stagingTable.schema().withFields(fieldsWithoutPk));
+        RelationalTransformer transformer = new RelationalTransformer(H2Sink.get());
+        LogicalPlan tableCreationPlan = LogicalPlanFactory.getDatasetCreationPlan(stagingTable, true);
+        SqlPlan tableCreationPhysicalPlan = transformer.generatePhysicalPlan(tableCreationPlan);
+        executor.executePhysicalPlan(tableCreationPhysicalPlan);
+    }
+
     protected void createStagingTable(DatasetDefinition stagingTable) throws Exception
     {
         RelationalTransformer transformer = new RelationalTransformer(H2Sink.get());
@@ -133,9 +145,9 @@ protected IngestorResult executePlansAndVerifyResults(IngestMode ingestMode, Pla
         return executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, Clock.systemUTC());
     }
 
-    protected IngestorResult executePlansAndVerifyResults(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, Map<String, Object> expectedStats, Set<Capability> userCapabilitySet) throws Exception
+    protected IngestorResult executePlansAndVerifyResults(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, Map<String, Object> expectedStats, Set<Capability> userCapabilitySet, Clock executionTimestampClock) throws Exception
     {
-        return executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, Clock.systemUTC(), userCapabilitySet, false);
+        return executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, executionTimestampClock, userCapabilitySet, false);
     }
 
     private void verifyLatestStagingFilters(RelationalIngestor ingestor, Datasets datasets) throws Exception
@@ -179,7 +191,7 @@ protected IngestorResult executePlansAndVerifyResults(RelationalIngestor ingesto
                                                           String expectedDataPath, Map<String, Object> expectedStats, boolean verifyStagingFilters) throws Exception
     {
         // Execute physical plans
-        IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets);
+        IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets).get(0);
 
         Map<StatisticName, Object> actualStats = result.statisticByName();
@@ -214,12 +226,12 @@ protected IngestorResult executePlansAndVerifyResults(IngestMode ingestMode, Pla
         return executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, executionTimestampClock, Collections.emptySet(), false);
     }
 
-    protected List<IngestorResult> executePlansAndVerifyResultsWithDataSplits(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, List<Map<String, Object>> expectedStats, List<DataSplitRange> dataSplitRanges) throws Exception
+    protected List<IngestorResult> executePlansAndVerifyResultsWithSpecifiedDataSplits(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, List<Map<String, Object>> expectedStats, List<DataSplitRange> dataSplitRanges) throws Exception
     {
-        return executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, dataSplitRanges, Clock.systemUTC());
+        return executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPath, expectedStats, dataSplitRanges, Clock.systemUTC());
     }
 
-    protected List<IngestorResult> executePlansAndVerifyResultsWithDataSplits(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, List<Map<String, Object>> expectedStats, List<DataSplitRange> dataSplitRanges, Clock executionTimestampClock) throws Exception
+    protected List<IngestorResult> executePlansAndVerifyResultsWithSpecifiedDataSplits(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, List<Map<String, Object>> expectedStats, List<DataSplitRange> dataSplitRanges, Clock executionTimestampClock) throws Exception
     {
         RelationalIngestor ingestor = RelationalIngestor.builder()
             .ingestMode(ingestMode)
@@ -247,6 +259,34 @@ protected List<IngestorResult> executePlansAndVerifyResultsWithDataSplits(Ingest
         return results;
     }
 
+    protected List<IngestorResult> executePlansAndVerifyResultsWithDerivedDataSplits(IngestMode ingestMode, PlannerOptions options, Datasets datasets, String[] schema, String expectedDataPath, List<Map<String, Object>> expectedStats, Clock executionTimestampClock) throws Exception
+    {
+        RelationalIngestor ingestor = RelationalIngestor.builder()
+            .ingestMode(ingestMode)
+            .relationalSink(H2Sink.get())
+            .executionTimestampClock(executionTimestampClock)
+            .cleanupStagingData(options.cleanupStagingData())
+            .collectStatistics(options.collectStatistics())
+            .enableSchemaEvolution(options.enableSchemaEvolution())
+            .build();
+
+        List<IngestorResult> results = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets);
+
+        List<Map<String, Object>> tableData = h2Sink.executeQuery("select * from \"TEST\".\"main\"");
+        TestUtils.assertFileAndTableDataEquals(schema, expectedDataPath, tableData);
+
+        for (int i = 0; i < results.size(); i++)
+        {
+            Map<StatisticName, Object> actualStats = results.get(i).statisticByName();
+            Assertions.assertEquals(expectedStats.get(i).size(), actualStats.size());
+            for (String statistic : expectedStats.get(i).keySet())
+            {
+                Assertions.assertEquals(expectedStats.get(i).get(statistic).toString(), actualStats.get(StatisticName.valueOf(statistic)).toString());
+            }
+        }
+        return results;
+    }
+
     protected Map<String, Object> createExpectedStatsMap(int incomingRecordCount, int rowsDeleted, int rowsInserted, int rowsUpdated, int rowsTerminated)
     {
         Map<String, Object> expectedStats = new HashMap<>();
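A quick orientation on the two verification helpers above, since their names are close: WithSpecifiedDataSplits forwards caller-supplied DataSplitRange values to ingestion, while the new WithDerivedDataSplits lets the ingestor derive the splits from the configured versioning strategy and returns one IngestorResult per derived split. A hypothetical caller, sketched under the assumption that it lives in a test extending BaseTest with ingestMode, options, datasets, schema and expectedDataPath already set up (nothing in this snippet is part of the change itself):

// Sketch only: no DataSplitRange list is passed; the ingestor derives the splits
List<Map<String, Object>> expectedStats = new ArrayList<>();
expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0)); // stats expected for the first derived split
expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0)); // stats expected for the second derived split
List<IngestorResult> results = executePlansAndVerifyResultsWithDerivedDataSplits(
    ingestMode, options, datasets, schema, expectedDataPath, expectedStats, Clock.systemUTC());
Assertions.assertEquals(2, results.size()); // one result per derived split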
@@ -284,9 +324,10 @@ public IngestorResult executePlansAndVerifyForCaseConversion(RelationalIngestor
         datasets = ingestor.create(datasets);
         datasets = ingestor.evolve(datasets);
+        datasets = ingestor.dedupAndVersion(datasets);
 
         executor.begin();
-        IngestorResult result = ingestor.ingest(datasets);
+        IngestorResult result = ingestor.ingest(datasets).get(0);
         // Do more stuff if needed
         executor.commit();
@@ -328,6 +369,26 @@ protected void loadBasicStagingDataInUpperCase(String path) throws Exception
         h2Sink.executeStatement(loadSql);
     }
 
+    protected void loadStagingDataWithNoPk(String path) throws Exception
+    {
+        validateFileExists(path);
+        String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" +
+            "INSERT INTO \"TEST\".\"staging\"(name, income, expiry_date) " +
+            "SELECT \"name\", CONVERT( \"income\", BIGINT), CONVERT( \"expiry_date\", DATE)" +
+            " FROM CSVREAD( '" + path + "', 'name, income, expiry_date', NULL )";
+        h2Sink.executeStatement(loadSql);
+    }
+
+    protected void loadStagingDataWithNoPkInUpperCase(String path) throws Exception
+    {
+        validateFileExists(path);
+        String loadSql = "TRUNCATE TABLE \"TEST\".\"STAGING\";" +
+            "INSERT INTO \"TEST\".\"STAGING\"(NAME, INCOME, EXPIRY_DATE) " +
+            "SELECT \"NAME\", CONVERT( \"INCOME\", BIGINT), CONVERT( \"EXPIRY_DATE\", DATE)" +
+            " FROM CSVREAD( '" + path + "', 'NAME, INCOME, EXPIRY_DATE', NULL )";
+        h2Sink.executeStatement(loadSql);
+    }
+
     protected void loadStagingDataForWithPartition(String path) throws Exception
     {
         validateFileExists(path);
@@ -338,6 +399,26 @@ protected void loadStagingDataForWithPartition(String path) throws Exception
         h2Sink.executeStatement(loadSql);
     }
 
+    protected void loadStagingDataForWithPartitionWithVersion(String path) throws Exception
+    {
+        validateFileExists(path);
+        String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" +
+            "INSERT INTO \"TEST\".\"staging\"(date, entity, price, volume, digest, version) " +
+            "SELECT CONVERT( \"date\",DATE ), \"entity\", CONVERT( \"price\", DECIMAL(20,2)), CONVERT( \"volume\", BIGINT), \"digest\", CONVERT( \"version\",INT)" +
+            " FROM CSVREAD( '" + path + "', 'date, entity, price, volume, digest, version', NULL )";
+        h2Sink.executeStatement(loadSql);
+    }
+
+    protected void loadStagingDataForWithPartitionWithVersionInUpperCase(String path) throws Exception
+    {
+        validateFileExists(path);
+        String loadSql = "TRUNCATE TABLE \"TEST\".\"STAGING\";" +
+            "INSERT INTO \"TEST\".\"STAGING\"(DATE, ENTITY, PRICE, VOLUME, DIGEST, VERSION) " +
+            "SELECT CONVERT( \"DATE\",DATE ), \"ENTITY\", CONVERT( \"PRICE\", DECIMAL(20,2)), CONVERT( \"VOLUME\", BIGINT), \"DIGEST\", CONVERT( \"VERSION\",INT)" +
+            " FROM CSVREAD( '" + path + "', 'DATE, ENTITY, PRICE, VOLUME, DIGEST, VERSION', NULL )";
+        h2Sink.executeStatement(loadSql);
+    }
+
     protected void loadStagingDataWithDeleteInd(String path) throws Exception
     {
         validateFileExists(path);
@@ -358,6 +439,16 @@ protected void loadStagingDataWithVersion(String path) throws Exception
         h2Sink.executeStatement(loadSql);
     }
 
+    protected void loadStagingDataWithVersionInUpperCase(String path) throws Exception
+    {
+        validateFileExists(path);
+        String loadSql = "TRUNCATE TABLE \"TEST\".\"STAGING\";" +
+            "INSERT INTO \"TEST\".\"STAGING\"(ID, NAME, INCOME, START_TIME ,EXPIRY_DATE, DIGEST, VERSION) " +
+            "SELECT CONVERT( \"ID\",INT ), \"NAME\", CONVERT( \"INCOME\", BIGINT), CONVERT( \"START_TIME\", DATETIME), CONVERT( \"EXPIRY_DATE\", DATE), DIGEST, CONVERT( \"VERSION\",INT)" +
+            " FROM CSVREAD( '" + path + "', 'ID, NAME, INCOME, START_TIME, EXPIRY_DATE, DIGEST, VERSION', NULL )";
+        h2Sink.executeStatement(loadSql);
+    }
+
     protected void loadStagingDataWithFilter(String path) throws Exception
     {
         validateFileExists(path);
@@ -448,23 +539,23 @@ protected void loadStagingDataForBitemporalFromOnlyWithDeleteInd(String path) th
         h2Sink.executeStatement(loadSql);
     }
 
-    protected void loadStagingDataForBitemporalFromOnlyWithDataSplit(String path) throws Exception
+    protected void loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(String path) throws Exception
     {
         validateFileExists(path);
         String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" +
-            "INSERT INTO \"TEST\".\"staging\"(index, datetime, balance, digest, data_split) " +
-            "SELECT CONVERT( \"index\", INT), CONVERT( \"datetime\", DATETIME), CONVERT( \"balance\", BIGINT), \"digest\", CONVERT( \"data_split\", BIGINT)" +
-            " FROM CSVREAD( '" + path + "', 'index, datetime, balance, digest, data_split', NULL )";
+            "INSERT INTO \"TEST\".\"staging\"(index, datetime, balance, digest, version, data_split) " +
+            "SELECT CONVERT( \"index\", INT), CONVERT( \"datetime\", DATETIME), CONVERT( \"balance\", BIGINT), \"digest\", CONVERT( \"version\", BIGINT), CONVERT( \"data_split\", BIGINT)" +
+            " FROM CSVREAD( '" + path + "', 'index, datetime, balance, digest, version, data_split', NULL )";
         h2Sink.executeStatement(loadSql);
     }
 
-    protected void loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(String path) throws Exception
+    protected void loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(String path) throws Exception
     {
         validateFileExists(path);
         String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" +
-            "INSERT INTO \"TEST\".\"staging\"(index, datetime, balance, digest, delete_indicator, data_split) " +
-            "SELECT CONVERT( \"index\", INT), CONVERT( \"datetime\", DATETIME), CONVERT( \"balance\", BIGINT), \"digest\", \"delete_indicator\", CONVERT( \"data_split\", BIGINT)" +
-            " FROM CSVREAD( '" + path + "', 'index, datetime, balance, digest, delete_indicator, data_split', NULL )";
+            "INSERT INTO \"TEST\".\"staging\"(index, datetime, balance, digest, version, delete_indicator, data_split) " +
+            "SELECT CONVERT( \"index\", INT), CONVERT( \"datetime\", DATETIME), CONVERT( \"balance\", BIGINT), \"digest\", CONVERT( \"version\", BIGINT), \"delete_indicator\", CONVERT( \"data_split\", BIGINT)" +
+            " FROM CSVREAD( '" + path + "', 'index, datetime, balance, digest, version, delete_indicator, data_split', NULL )";
         h2Sink.executeStatement(loadSql);
     }
 
@@ -498,7 +589,7 @@ protected void loadStagingDataForWithoutName(String path) throws Exception
         h2Sink.executeStatement(loadSql);
     }
 
-    protected void validateFileExists(String path) throws Exception
+    protected static void validateFileExists(String path) throws Exception
     {
         File f = new File(path);
         if (!f.exists())
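The expected value asserted in the new H2DigestUtilTest below can be reproduced outside H2 with nothing but commons-codec, which also documents the byte layout of the digest. A standalone sketch (the class name DigestCheck is illustrative, not part of this PR):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.commons.codec.digest.DigestUtils;

public class DigestCheck
{
    public static void main(String[] args) throws IOException
    {
        // Same inputs as the test; COLUMN_6 is null, so it contributes no bytes at all
        String[] columns = {"COLUMN_1", "COLUMN_2", "COLUMN_3", "COLUMN_4", "COLUMN_5", "COLUMN_6"};
        String[] values = {"test data", "true", "33", "1111", "1.5", null};
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buffer);
        // H2DigestUtil visits field names in sorted order; these are already sorted
        for (int i = 0; i < columns.length; i++)
        {
            if (values[i] != null)
            {
                out.writeInt(columns[i].hashCode()); // 4-byte field-name marker
                out.writeBytes(values[i]);           // low byte of each char of the value
            }
        }
        out.flush();
        // Should print fd40b241c6d2eb55348e3bc51e81925b, the digest asserted in the test
        System.out.println(DigestUtils.md5Hex(buffer.toByteArray()));
    }
}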
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/H2DigestUtilTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/H2DigestUtilTest.java
new file mode 100644
index 00000000000..0a0b29ea9ea
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/H2DigestUtilTest.java
@@ -0,0 +1,32 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.engine.persistence.components;
+
+import org.finos.legend.engine.persistence.components.relational.h2.H2DigestUtil;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+public class H2DigestUtilTest
+{
+    private String expectedDigest = "fd40b241c6d2eb55348e3bc51e81925b";
+    private String[] columns = new String[]{"COLUMN_1", "COLUMN_2", "COLUMN_3", "COLUMN_4", "COLUMN_5", "COLUMN_6"};
+    private String[] values = new String[]{"test data", "true", "33", "1111", "1.5", null};
+
+    @Test
+    void testMD5()
+    {
+        Assertions.assertEquals(expectedDigest, H2DigestUtil.MD5(columns, values));
+    }
+}
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/TestUtils.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/TestUtils.java
index d5cae4280ad..1ed5b1b8fd7 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/TestUtils.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/TestUtils.java
@@ -253,6 +253,19 @@ public static SchemaDefinition getStagingSchemaWithVersion()
             .build();
     }
 
+    public static SchemaDefinition getStagingSchemaWithNonPkVersion()
+    {
+        return SchemaDefinition.builder()
+            .addFields(id)
+            .addFields(name)
+            .addFields(income)
+            .addFields(startTime)
+            .addFields(expiryDate)
+            .addFields(digest)
+            .addFields(version)
+            .build();
+    }
+
     public static SchemaDefinition getStagingSchemaWithFilterForDB()
     {
         return SchemaDefinition.builder()
@@ -333,6 +346,15 @@ public static DatasetDefinition getBasicStagingTable()
             .build();
     }
 
+    public static DatasetDefinition getStagingTableWithNoPks()
+    {
+        return DatasetDefinition.builder()
+            .group(testSchemaName)
+            .name(stagingTableName)
+            .schema(getSchemaWithNoPKs())
+            .build();
+    }
+
     public static DatasetDefinition getBasicStagingTableWithExpiryDatePk()
     {
         return DatasetDefinition.builder()
@@ -351,6 +373,15 @@ public static DatasetDefinition getStagingTableWithVersion()
             .build();
     }
 
+    public static DatasetDefinition getStagingTableWithNonPkVersion()
+    {
+        return DatasetDefinition.builder()
+            .group(testSchemaName)
+            .name(stagingTableName)
+            .schema(getStagingSchemaWithNonPkVersion())
+            .build();
+    }
+
     public static DatasetDefinition getStagingTableWithFilterForDB()
     {
         return DatasetDefinition.builder()
@@ -671,6 +702,23 @@ public static DatasetDefinition getEntityPriceStagingTable()
             .build();
     }
 
+    public static DatasetDefinition getEntityPriceWithVersionStagingTable()
+    {
+        return DatasetDefinition.builder()
+            .group(testSchemaName)
+            .name(stagingTableName)
+            .schema(SchemaDefinition.builder()
+                .addFields(date)
+                .addFields(entity)
+                .addFields(price)
+                .addFields(volume)
+                .addFields(digest)
+                .addFields(version)
+                .build()
+            )
+            .build();
+    }
+
     public static DatasetDefinition getBitemporalMainTable()
     {
         return DatasetDefinition.builder()
@@ -790,6 +838,25 @@ public static DatasetDefinition getBitemporalFromOnlyMainTableIdBased()
             .build();
     }
 
+    public static DatasetDefinition getBitemporalFromOnlyMainTableWithVersionIdBased()
+    {
+        return DatasetDefinition.builder()
+            .group(testSchemaName)
+            .name(mainTableName)
+            .schema(SchemaDefinition.builder()
+                .addFields(index)
+                .addFields(balance)
+                .addFields(digest)
+                .addFields(version)
+                .addFields(startDateTime)
+                .addFields(endDateTime)
+                .addFields(batchIdIn)
+                .addFields(batchIdOut)
+                .build()
+            )
+            .build();
+    }
+
     public static DatasetDefinition getBitemporalFromOnlyTempTableIdBased()
     {
         return DatasetDefinition.builder()
@@ -808,6 +875,25 @@ public static DatasetDefinition getBitemporalFromOnlyTempTableIdBased()
             .build();
     }
 
+    public static DatasetDefinition getBitemporalFromOnlyTempTableWithVersionIdBased()
+    {
+        return DatasetDefinition.builder()
+            .group(testSchemaName)
+            .name(tempTableName)
+            .schema(SchemaDefinition.builder()
+                .addFields(index)
+                .addFields(balance)
+                .addFields(digest)
+                .addFields(version)
+                .addFields(startDateTime)
+                .addFields(endDateTime)
+                .addFields(batchIdIn)
+                .addFields(batchIdOut)
+                .build()
+            )
+            .build();
+    }
+
     public static DatasetDefinition getBitemporalFromOnlyTempTableWithDeleteIndicatorIdBased()
     {
         return DatasetDefinition.builder()
@@ -857,7 +943,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithoutDuplicat
             .build();
     }
 
-    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDataSplitIdBased()
+    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithVersionWithDataSplitIdBased()
     {
         return DatasetDefinition.builder()
             .group(testSchemaName)
@@ -867,6 +953,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDataSplitId
                 .addFields(dateTime)
                 .addFields(balance)
                 .addFields(digest)
+                .addFields(version)
                 .addFields(dataSplit)
                 .build()
             )
@@ -889,7 +976,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDeleteIndic
             .build();
     }
 
-    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithDataSplitIdBased()
+    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithVersionWithDataSplitIdBased()
    {
         return DatasetDefinition.builder()
             .group(testSchemaName)
@@ -899,6 +986,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDeleteIndic
                 .addFields(dateTime)
                 .addFields(balance)
                 .addFields(digest)
+                .addFields(version)
                 .addFields(deleteIndicator)
                 .addFields(dataSplit)
                 .build()
@@ -906,7 +994,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithDeleteIndic
             .build();
     }
 
-    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithDataSplitIdBased()
+    public static DatasetDefinition getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithVersionWithDataSplitIdBased()
     {
         return DatasetDefinition.builder()
             .group(testSchemaName)
@@ -916,6 +1004,7 @@ public static DatasetDefinition getBitemporalFromOnlyStagingTableWithoutDuplicat
                 .addFields(dateTime)
                 .addFields(balance)
                 .addFields(digest)
+                .addFields(version)
                 .addFields(deleteIndicator)
                 .addFields(dataSplit)
                 .build()
@@ -934,6 +1023,7 @@ public static DatasetDefinition getSchemaEvolutionAddColumnMainTable()
             .addFields(startTime)
             .addFields(expiryDate)
             .addFields(digest)
+            .addFields(batchUpdateTimestamp)
             .build())
         .build();
     }
@@ -998,6 +1088,7 @@ public static DatasetDefinition getSchemaEvolutionDataTypeConversionMainTable()
             .addFields(startTime)
             .addFields(expiryDate)
             .addFields(digest)
+            .addFields(batchUpdateTimestamp)
             .build())
         .build();
     }
@@ -1046,6 +1137,7 @@ public static DatasetDefinition getSchemaEvolutionDataTypeConversionAndColumnNul
             .addFields(startTime)
             .addFields(expiryDate)
             .addFields(digest)
+            .addFields(batchUpdateTimestamp)
             .build())
         .build();
     }
@@ -1092,6 +1184,7 @@ public static DatasetDefinition getSchemaEvolutionPKTypeDifferentMainTable()
             .addFields(income)
             .addFields(expiryDate)
             .addFields(digest)
+            .addFields(batchUpdateTimestamp)
             .build())
         .build();
     }
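Every remaining hunk in the test file below applies the same mechanical migration, so it is worth stating once: the removed dataSplitField(...) wiring is replaced by an explicit AllVersionsStrategy, with performStageVersioning(false) so the staging data's pre-populated data_split values are used rather than re-derived, and FilterDuplicates deduplication gives way to filterExistingRecords(true). In shorthand (both builder forms are taken verbatim from the hunks; the milestoning configuration is unchanged and elided here):

// Before
BitemporalDelta ingestMode = BitemporalDelta.builder()
    .digestField(digestName)
    .dataSplitField(dataSplitName)
    // ... transactionMilestoning / validityMilestoning as before ...
    .build();

// After
BitemporalDelta ingestMode = BitemporalDelta.builder()
    .digestField(digestName)
    .versioningStrategy(AllVersionsStrategy.builder()
        .versioningField(versionName)
        .dataSplitFieldName(dataSplitName)
        .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
        .performStageVersioning(false)
        .build())
    // ... transactionMilestoning / validityMilestoning as before ...
    .build();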
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaWithBatchIdTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaWithBatchIdTest.java
index 2c842b5ace9..f45582ddb34 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaWithBatchIdTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalDeltaWithBatchIdTest.java
@@ -18,12 +18,13 @@
 import org.finos.legend.engine.persistence.components.TestUtils;
 import org.finos.legend.engine.persistence.components.common.Datasets;
 import org.finos.legend.engine.persistence.components.ingestmode.BitemporalDelta;
-import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId;
 import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.ValidDateTime;
 import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromAndThruDateTime;
 import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromDateTime;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
 import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset;
 import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition;
 import org.finos.legend.engine.persistence.components.planner.PlannerOptions;
@@ -33,7 +34,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -56,6 +56,7 @@
 import static org.finos.legend.engine.persistence.components.TestUtils.indexName;
 import static org.finos.legend.engine.persistence.components.TestUtils.startDateTimeName;
 import static org.finos.legend.engine.persistence.components.TestUtils.valueName;
+import static org.finos.legend.engine.persistence.components.TestUtils.versionName;
 
 class BitemporalDeltaWithBatchIdTest extends BaseTest
 {
@@ -117,7 +118,7 @@ void testMilestoningSourceSpecifiesFromAndThrough() throws Exception
         executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats);
 
         // ------------ Perform Pass3 empty batch (No Impact) -------------------------
-        String dataPass3 = basePathForInput + "source_specifies_from_and_through/without_delete_ind/staging_data_pass3.csv";
+        String dataPass3 = "src/test/resources/data/empty_file.csv";
         String expectedDataPass3 = basePathForExpected + "source_specifies_from_and_through/without_delete_ind/expected_pass3.csv";
         // 1. Load staging table
         loadStagingDataForBitemp(dataPass3);
@@ -183,7 +184,7 @@ void testMilestoningSourceSpecifiesFromAndThroughWithDeleteIndicator() throws Ex
         executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats);
 
         // ------------ Perform Pass3 empty batch (No Impact) -------------------------
-        String dataPass3 = basePathForInput + "source_specifies_from_and_through/with_delete_ind/staging_data_pass3.csv";
+        String dataPass3 = "src/test/resources/data/empty_file.csv";
         String expectedDataPass3 = basePathForExpected + "source_specifies_from_and_through/with_delete_ind/expected_pass3.csv";
         // 1. Load staging table
         loadStagingDataForBitempWithDeleteInd(dataPass3);
@@ -238,7 +239,7 @@ void testMilestoningSourceSpecifiesFromAndThroughWithLessColumnsInStaging() thro
         executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats);
 
         // ------------ Perform Pass3 empty batch (No Impact) -------------------------
-        String dataPass3 = basePathForInput + "source_specifies_from_and_through/less_columns_in_staging/staging_data_pass3.csv";
+        String dataPass3 = "src/test/resources/data/empty_file.csv";
         String expectedDataPass3 = basePathForExpected + "source_specifies_from_and_through/less_columns_in_staging/expected_pass3.csv";
         stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMainForBitemp(dataPass3);
         // Execute plans and verify results
@@ -500,10 +501,10 @@ void testMilestoningSourceSpecifiesFromSet2() throws Exception
     void testMilestoningSourceSpecifiesFromSet3WithDataSplit() throws Exception
     {
         DatasetDefinition mainTable = TestUtils.getDefaultMainTable();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDataSplitIdBased();
-        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithVersionWithDataSplitIdBased();
+        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableWithVersionIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
@@ -512,7 +513,12 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplit() throws Exception
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -533,19 +539,19 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplit() throws Exception
         String dataPass1 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv";
         String expectedDataPass4 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass2);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
@@ -555,21 +561,19 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplit() throws Exception
         expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0));
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass3 (identical records) ------------------------
         String dataPass3 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv";
-        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass6.csv";
+        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass3);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass3);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
-        dataSplitRanges.add(DataSplitRange.of(2, 2));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
     }
 
     /*
@@ -579,10 +583,10 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplit() throws Exception
     void testMilestoningSourceSpecifiesFromSet3WithDataSplitMultiPasses() throws Exception
     {
         DatasetDefinition mainTable = TestUtils.getDefaultMainTable();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDataSplitIdBased();
-        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithVersionWithDataSplitIdBased();
+        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableWithVersionIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
@@ -591,7 +595,12 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplitMultiPasses() throws Exc
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -612,25 +621,25 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplitMultiPasses() throws Exc
         String dataPass1 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv";
         String expectedDataPass2 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass2.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass2);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass3 ------------------------
         String expectedDataPass3 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass3.csv";
@@ -639,7 +648,7 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplitMultiPasses() throws Exc
         dataSplitRanges.add(DataSplitRange.of(2, 3));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass4 ------------------------
         String expectedDataPass4 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv";
@@ -648,28 +657,19 @@ void testMilestoningSourceSpecifiesFromSet3WithDataSplitMultiPasses() throws Exc
         dataSplitRanges.add(DataSplitRange.of(50, 100));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass5 (identical records) ------------------------
         String dataPass3 = basePathForInput + "source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv";
         String expectedDataPass5 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass3);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass3);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass5, expectedStats, dataSplitRanges);
-
-        // ------------ Perform Pass6 (identical records) ------------------------
-        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass6.csv";
-        // 2. Execute Plan and Verify Results
-        dataSplitRanges = new ArrayList<>();
-        dataSplitRanges.add(DataSplitRange.of(2, 2));
-        expectedStats = new ArrayList<>();
-        expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass5, expectedStats, dataSplitRanges);
     }
 
     /*
@@ -705,7 +705,7 @@ void testMilestoningSourceSpecifiesFromSet4FilterDuplicates() throws Exception
                 .sourceDateTimeFromField(dateTimeName)
                 .build())
             .build())
-            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .filterExistingRecords(true)
             .build();
 
         PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build();
@@ -772,11 +772,11 @@ void testMilestoningSourceSpecifiesFromSet4FilterDuplicates() throws Exception
     @Test
     void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicates() throws Exception
     {
-        DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableIdBased();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDataSplitIdBased();
-        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableIdBased();
+        DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableWithVersionIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithVersionWithDataSplitIdBased();
+        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableWithVersionIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
@@ -785,7 +785,12 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicates() throw
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -797,7 +802,7 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicates() throw
                 .sourceDateTimeFromField(dateTimeName)
                 .build())
             .build())
-            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .filterExistingRecords(true)
             .build();
 
         PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build();
@@ -807,19 +812,19 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicates() throw
         String dataPass1 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv";
         String expectedDataPass4 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass2);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
@@ -829,21 +834,19 @@
         expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0));
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass3 (identical records) ------------------------
         String dataPass3 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv";
-        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass6.csv";
+        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass3);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass3);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
-        dataSplitRanges.add(DataSplitRange.of(2, 2));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 0, 0));
-        expectedStats.add(createExpectedStatsMap(1, 0, 0, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
     }
 
     /*
@@ -853,10 +856,10 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicates() throw
     void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPasses() throws Exception
     {
         DatasetDefinition mainTable = TestUtils.getDefaultMainTable();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDataSplitIdBased();
-        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithVersionWithDataSplitIdBased();
+        DatasetDefinition tempTable = TestUtils.getBitemporalFromOnlyTempTableWithVersionIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
@@ -865,7 +868,12 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPas
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -877,7 +885,7 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPas
                 .sourceDateTimeFromField(dateTimeName)
                 .build())
             .build())
-            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .filterExistingRecords(true)
            .build();
 
         PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build();
@@ -887,25 +895,25 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPas
         String dataPass1 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv";
         String expectedDataPass2 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass2);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass3 ------------------------
         String expectedDataPass3 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv";
@@ -914,7 +922,7 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPas
         dataSplitRanges.add(DataSplitRange.of(2, 3));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass4 ------------------------
         String expectedDataPass4 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv";
@@ -923,28 +931,19 @@ void testMilestoningSourceSpecifiesFromSet5WithDataSplitFilterDuplicatesMultiPas
         dataSplitRanges.add(DataSplitRange.of(50, 100));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass5 (identical records) ------------------------
         String dataPass3 = basePathForInput + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv";
         String expectedDataPass5 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDataSplit(dataPass3);
+        loadStagingDataForBitemporalFromOnlyWithVersionWithDataSplit(dataPass3);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(1, 1));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass5, expectedStats, dataSplitRanges);
-
-        // ------------ Perform Pass6 (identical records) ------------------------
-        String expectedDataPass6 = basePathForExpected + "source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass6.csv";
-        // 2. Execute Plan and Verify Results
-        dataSplitRanges = new ArrayList<>();
-        dataSplitRanges.add(DataSplitRange.of(2, 2));
-        expectedStats = new ArrayList<>();
-        expectedStats.add(createExpectedStatsMap(1, 0, 0, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass6, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass5, expectedStats, dataSplitRanges);
     }
 
     /*
@@ -1109,17 +1108,22 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet2() throws Exceptio
     @Test
     void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplit() throws Exception
     {
-        DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableIdBased();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithDataSplitIdBased();
+        DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableWithVersionIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithVersionWithDataSplitIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
 
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -1144,19 +1148,19 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplit() th
         String dataPass1 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(5, 5));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv";
         String expectedDataPass3 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass2);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(0, 1));
@@ -1164,19 +1168,19 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplit() th
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0));
         expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 1));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass3 (identical records) ------------------------
         String dataPass3 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv";
         String expectedDataPass4 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass3);
+        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass3);
         // 2. Execute Plan and Verify Results
         dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(70, 70));
         expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 0, 2, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges);
     }
 
     /*
@@ -1186,16 +1190,21 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplit() th
     void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplitWithMultiplePasses() throws Exception
     {
         DatasetDefinition mainTable = TestUtils.getDefaultMainTable();
-        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithDataSplitIdBased();
+        DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithVersionWithDataSplitIdBased();
 
-        String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
+        String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName};
 
         // Create staging table
         createStagingTable(stagingTable);
 
         BitemporalDelta ingestMode = BitemporalDelta.builder()
             .digestField(digestName)
-            .dataSplitField(dataSplitName)
+            .versioningStrategy(AllVersionsStrategy.builder()
+                .versioningField(versionName)
+                .dataSplitFieldName(dataSplitName)
+                .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
+                .performStageVersioning(false)
+                .build())
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInName)
                 .batchIdOutName(batchIdOutName)
@@ -1220,25 +1229,25 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplitWithM
         String dataPass1 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv";
         String expectedDataPass1 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass1);
+        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass1);
         // 2. Execute Plan and Verify Results
         List<DataSplitRange> dataSplitRanges = new ArrayList<>();
         dataSplitRanges.add(DataSplitRange.of(5, 5));
         List<Map<String, Object>> expectedStats = new ArrayList<>();
         expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0));
-        executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
+        executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges);
 
         // ------------ Perform Pass2 ------------------------
         String dataPass2 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv";
         String expectedDataPass2 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass2.csv";
         // 1. Load Staging table
-        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass2);
+        loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass2);
Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(0, 1)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges); // ------------ Perform Pass3 ------------------------ String expectedDataPass3 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv"; @@ -1247,19 +1256,19 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet3WithDataSplitWithM dataSplitRanges.add(DataSplitRange.of(2, 2)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 1)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); // ------------ Perform Pass4 (identical records) ------------------------ String dataPass3 = basePathForInput + "source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv"; String expectedDataPass4 = basePathForExpected + "source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass3); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass3); // 2. Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(70, 71)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(2, 0, 0, 2, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); } /* @@ -1298,7 +1307,7 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet4FilterDuplicates() .deleteField(deleteIndicatorName) .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); @@ -1366,10 +1375,10 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet4FilterDuplicates() void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilterDuplicates() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithDataSplitIdBased(); - DatasetDefinition stagingTableWithoutDuplicates = TestUtils.getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithDataSplitIdBased(); + DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithVersionWithDataSplitIdBased(); + DatasetDefinition stagingTableWithoutDuplicates = TestUtils.getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithVersionWithDataSplitIdBased(); - String[] schema = new String[] {indexName, balanceName, digestName, 
startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName}; + String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName}; // Create staging table createStagingTable(stagingTable); @@ -1378,7 +1387,12 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestName) - .dataSplitField(dataSplitName) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .dataSplitFieldName(dataSplitName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInName) .batchIdOutName(batchIdOutName) @@ -1394,7 +1408,7 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte .deleteField(deleteIndicatorName) .addAllDeleteValues(Arrays.asList(deleteIndicatorValuesEdgeCase)) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -1404,19 +1418,19 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte String dataPass1 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv"; String expectedDataPass1 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass1); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass1); // 2. Execute Plan and Verify Results List<DataSplitRange> dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(5, 5)); List<Map<String, Object>> expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges); // ------------ Perform Pass2 ------------------------ String dataPass2 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv"; String expectedDataPass3 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass2); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass2); // 2.
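/*
 * A second recurring substitution in these hunks: deduplicationStrategy(FilterDuplicates...)
 * is replaced by the new filterExistingRecords flag on BitemporalDelta. A minimal
 * before/after fragment; "builder" is a hypothetical local, assuming the immutables-style
 * Builder type these tests rely on:
 */
BitemporalDelta.Builder builder = BitemporalDelta.builder().digestField(digestName);
// before: builder.deduplicationStrategy(FilterDuplicates.builder().build());
builder.filterExistingRecords(true); // after: same intent, i.e. skip staging rows already milestoned in main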
Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(0, 1)); @@ -1424,19 +1438,19 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0)); expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 1)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); // ------------ Perform Pass3 (identical records) ------------------------ String dataPass3 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv"; String expectedDataPass4 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass3); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass3); // 2. Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(5, 100)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(2, 0, 0, 0, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); } /* @@ -1445,11 +1459,11 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte @Test void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilterDuplicatesWithMultiplePasses() throws Exception { - DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableIdBased(); - DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithDataSplitIdBased(); - DatasetDefinition stagingTableWithoutDuplicates = TestUtils.getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithDataSplitIdBased(); + DatasetDefinition mainTable = TestUtils.getBitemporalFromOnlyMainTableWithVersionIdBased(); + DatasetDefinition stagingTable = TestUtils.getBitemporalFromOnlyStagingTableWithDeleteIndicatorWithVersionWithDataSplitIdBased(); + DatasetDefinition stagingTableWithoutDuplicates = TestUtils.getBitemporalFromOnlyStagingTableWithoutDuplicatesWithDeleteIndicatorWithVersionWithDataSplitIdBased(); - String[] schema = new String[] {indexName, balanceName, digestName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName}; + String[] schema = new String[] {indexName, balanceName, digestName, versionName, startDateTimeName, endDateTimeName, batchIdInName, batchIdOutName}; // Create staging table createStagingTable(stagingTable); @@ -1458,7 +1472,12 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestName) - .dataSplitField(dataSplitName) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .dataSplitFieldName(dataSplitName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() 
.batchIdInName(batchIdInName) .batchIdOutName(batchIdOutName) @@ -1474,7 +1493,7 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte .deleteField(deleteIndicatorName) .addAllDeleteValues(Arrays.asList(deleteIndicatorValuesEdgeCase)) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -1484,25 +1503,25 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte String dataPass1 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv"; String expectedDataPass1 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass1); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass1); // 2. Execute Plan and Verify Results List<DataSplitRange> dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(5, 5)); List<Map<String, Object>> expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(2, 0, 2, 0, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, dataSplitRanges); // ------------ Perform Pass2 ------------------------ String dataPass2 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv"; String expectedDataPass2 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass2); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass2); // 2.
Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(0, 1)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(1, 0, 1, 1, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, dataSplitRanges); // ------------ Perform Pass3 ------------------------ String expectedDataPass3 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv"; @@ -1511,18 +1530,18 @@ void testMilestoningSourceSpecifiesFromWithDeleteIndicatorSet5WithDataSplitFilte dataSplitRanges.add(DataSplitRange.of(2, 2)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(1, 0, 0, 1, 1)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, dataSplitRanges); // ------------ Perform Pass4 (identical records) ------------------------ String dataPass3 = basePathForInput + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv"; String expectedDataPass4 = basePathForExpected + "source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv"; // 1. Load Staging table - loadStagingDataForBitemporalFromOnlyWithDeleteIndWithDataSplit(dataPass3); + loadStagingDataForBitemporalFromOnlyWithDeleteIndWithVersionWithDataSplit(dataPass3); // 2. 
Execute Plan and Verify Results dataSplitRanges = new ArrayList<>(); dataSplitRanges.add(DataSplitRange.of(0, 100)); expectedStats = new ArrayList<>(); expectedStats.add(createExpectedStatsMap(2, 0, 0, 0, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats, dataSplitRanges); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalSnapshotWithBatchIdTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalSnapshotWithBatchIdTest.java index 59798aef55a..242816c9ce2 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalSnapshotWithBatchIdTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bitemporal/BitemporalSnapshotWithBatchIdTest.java @@ -113,7 +113,7 @@ void testBitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats); // ------------ Perform bitemporal snapshot milestoning Pass4 (Empty Batch) ------------------------ - String dataPass4 = basePathForInput + "without_partition/staging_data_pass4.csv"; + String dataPass4 = "src/test/resources/data/empty_file.csv"; String expectedDataPass4 = basePathForExpected + "without_partition/expected_pass4.csv"; // 1. Load Staging table loadStagingDataForBitemp(dataPass4); @@ -181,7 +181,7 @@ void testBitemporalSnapshotMilestoningLogicHasFromTimeOnly() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats); // ------------ Perform bitemporal snapshot milestoning Pass4 (Empty Batch) ------------------------ - String dataPass4 = basePathForInput + "has_from_time_only/staging_data_pass4.csv"; + String dataPass4 = "src/test/resources/data/empty_file.csv"; String expectedDataPass4 = basePathForExpected + "has_from_time_only/expected_pass4.csv"; // 1. Load Staging table loadStagingDataForBitemp(dataPass4); @@ -250,7 +250,7 @@ void testBitemporalSnapshotMilestoningLogicWithPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats); // ------------ Perform bitemporal snapshot milestoning Pass4 (Empty Batch) ------------------------ - String dataPass4 = basePathForInput + "with_partition/staging_data_pass4.csv"; + String dataPass4 = "src/test/resources/data/empty_file.csv"; String expectedDataPass4 = basePathForExpected + "with_partition/expected_pass4.csv"; // 1. 
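/*
 * All three snapshot scenarios above now point Pass4 (the empty batch) at one shared
 * empty_file.csv instead of per-scenario staging_data_pass4.csv files. The pass itself
 * is unchanged; sketched from the surrounding tests, it is:
 */
String dataPass4 = "src/test/resources/data/empty_file.csv"; // stages zero rows
loadStagingDataForBitemp(dataPass4);
executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass4, expectedStats);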
Load Staging table loadStagingDataForBitemp(dataPass4); @@ -315,7 +315,7 @@ void testBitemporalSnapshotMilestoningLogicWithLessColumnsInStaging() throws Exc Scenario: Test milestoning Logic when staging table is pre populated and staging table is cleaned up in the end */ @Test - void testBitemporalSnapshotMilestoningLogicWithPartitionWithcleanStagingData() throws Exception + void testBitemporalSnapshotMilestoningLogicWithPartitionWithCleanStagingData() throws Exception { DatasetDefinition mainTable = TestUtils.getBitemporalMainTable(); DatasetDefinition stagingTable = TestUtils.getBitemporalStagingTable(); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bulkload/BulkLoadTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bulkload/BulkLoadTest.java index 20c0c1e7d1d..4f09b145a66 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bulkload/BulkLoadTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/bulkload/BulkLoadTest.java @@ -62,27 +62,28 @@ public class BulkLoadTest extends BaseTest private static final String APPEND_TIME = "append_time"; private static final String DIGEST = "digest"; private static final String DIGEST_UDF = "LAKEHOUSE_MD5"; - private static final String col_int = "col_int"; - private static final String col_string = "col_string"; - private static final String col_decimal = "col_decimal"; - private static final String col_datetime = "col_datetime"; private static final String BATCH_ID = "batch_id"; + private static final String TASK_ID_VALUE_1 = "xyz123"; + private static final String TASK_ID_VALUE_2 = "abc987"; + private static final String COL_INT = "col_int"; + private static final String COL_STRING = "col_string"; + private static final String COL_DECIMAL = "col_decimal"; + private static final String COL_DATETIME = "col_datetime"; private static Field col1 = Field.builder() - .name(col_int) + .name(COL_INT) .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) - .primaryKey(true) .build(); private static Field col2 = Field.builder() - .name(col_string) + .name(COL_STRING) .type(FieldType.of(DataType.STRING, Optional.empty(), Optional.empty())) .build(); private static Field col3 = Field.builder() - .name(col_decimal) + .name(COL_DECIMAL) .type(FieldType.of(DataType.DECIMAL, 5, 2)) .build(); private static Field col4 = Field.builder() - .name(col_datetime) + .name(COL_DATETIME) .type(FieldType.of(DataType.DATETIME, Optional.empty(), Optional.empty())) .build(); @@ -90,12 +91,12 @@ public class BulkLoadTest extends BaseTest protected final Clock fixedClock_2000_01_01 = Clock.fixed(fixedZonedDateTime_2000_01_01.toInstant(), ZoneOffset.UTC); @Test - public void testBulkLoadWithDigestNotGeneratedAuditEnabled() throws Exception + public void testBulkLoadWithDigestNotGeneratedAuditEnabledNoTaskId() throws Exception { String filePath = "src/test/resources/data/bulk-load/input/staged_file1.csv"; BulkLoad bulkLoad = BulkLoad.builder() - 
.batchIdField("batch_id") + .batchIdField(BATCH_ID) .digestGenStrategy(NoDigestGenStrategy.builder().build()) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) .build(); @@ -104,7 +105,7 @@ public void testBulkLoadWithDigestNotGeneratedAuditEnabled() throws Exception .stagedFilesDatasetProperties( H2StagedFilesDatasetProperties.builder() .fileFormat(FileFormat.CSV) - .addAllFiles(Collections.singletonList(filePath)).build()) + .addAllFiles(Collections.singletonList(filePath)).build()) .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) .build(); @@ -121,7 +122,7 @@ public void testBulkLoadWithDigestNotGeneratedAuditEnabled() throws Exception .relationalSink(H2Sink.get()) .collectStatistics(true) .executionTimestampClock(fixedClock_2000_01_01) - .bulkLoadBatchIdValue("xyz123") + .batchIdPattern("{NEXT_BATCH_ID_PATTERN}") .build(); GeneratorResult operations = generator.generateOperations(datasets); @@ -131,22 +132,22 @@ public void testBulkLoadWithDigestNotGeneratedAuditEnabled() throws Exception Map statsSql = operations.postIngestStatisticsSql(); String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"TEST_DB\".\"TEST\".\"main\"" + - "(\"col_int\" INTEGER NOT NULL PRIMARY KEY,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"batch_id\" VARCHAR,\"append_time\" TIMESTAMP)"; + "(\"col_int\" INTEGER,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"batch_id\" INTEGER,\"append_time\" TIMESTAMP)"; String expectedIngestSql = "INSERT INTO \"TEST_DB\".\"TEST\".\"main\" " + "(\"col_int\", \"col_string\", \"col_decimal\", \"col_datetime\", \"batch_id\", \"append_time\") " + "SELECT CONVERT(\"col_int\",INTEGER),CONVERT(\"col_string\",VARCHAR),CONVERT(\"col_decimal\",DECIMAL(5,2)),CONVERT(\"col_datetime\",TIMESTAMP)," + - "'xyz123','2000-01-01 00:00:00' FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file1.csv'," + + "{NEXT_BATCH_ID_PATTERN},'2000-01-01 00:00:00.000000' FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file1.csv'," + "'col_int,col_string,col_decimal,col_datetime',NULL)"; Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); Assertions.assertEquals(expectedIngestSql, ingestSql.get(0)); - Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"TEST_DB\".\"TEST\".\"main\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED)); + Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"TEST_DB\".\"TEST\".\"main\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00.000000'", statsSql.get(ROWS_INSERTED)); // Verify execution using ingestor PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - String[] schema = new String[]{col_int, col_string, col_decimal, col_datetime, BATCH_ID, APPEND_TIME}; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID, APPEND_TIME}; Map expectedStats = new HashMap<>(); expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); @@ -155,11 +156,11 @@ public void testBulkLoadWithDigestNotGeneratedAuditEnabled() throws Exception String expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table1.csv"; - RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE); + RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE, 
Optional.empty()); executePlansAndVerifyResults(ingestor, datasets, schema, expectedDataPath, expectedStats, false); Map appendMetadata = h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(0); - verifyBulkLoadMetadata(appendMetadata, filePath); + verifyBulkLoadMetadata(appendMetadata, filePath, 1, Optional.empty()); } @Test @@ -170,7 +171,7 @@ public void testBulkLoadWithDigestNotGeneratedAuditDisabled() throws Exception BulkLoad bulkLoad = BulkLoad.builder() .digestGenStrategy(NoDigestGenStrategy.builder().build()) .auditing(NoAuditing.builder().build()) - .batchIdField("batch_id") + .batchIdField(BATCH_ID) .build(); Dataset stagedFilesDataset = StagedFilesDataset.builder() @@ -194,7 +195,7 @@ public void testBulkLoadWithDigestNotGeneratedAuditDisabled() throws Exception .relationalSink(H2Sink.get()) .collectStatistics(true) .executionTimestampClock(fixedClock_2000_01_01) - .bulkLoadBatchIdValue("xyz123") + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) .build(); GeneratorResult operations = generator.generateOperations(datasets); @@ -204,12 +205,12 @@ public void testBulkLoadWithDigestNotGeneratedAuditDisabled() throws Exception Map statsSql = operations.postIngestStatisticsSql(); String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"TEST_DB\".\"TEST\".\"main\"" + - "(\"col_int\" INTEGER NOT NULL PRIMARY KEY,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"batch_id\" VARCHAR)"; + "(\"col_int\" INTEGER,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"batch_id\" INTEGER)"; String expectedIngestSql = "INSERT INTO \"TEST_DB\".\"TEST\".\"main\" " + "(\"col_int\", \"col_string\", \"col_decimal\", \"col_datetime\", \"batch_id\") " + "SELECT CONVERT(\"col_int\",INTEGER),CONVERT(\"col_string\",VARCHAR),CONVERT(\"col_decimal\",DECIMAL(5,2)),CONVERT(\"col_datetime\",TIMESTAMP)," + - "'xyz123' FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file2.csv','col_int,col_string,col_decimal,col_datetime',NULL)"; + "(SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'MAIN') FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file2.csv','col_int,col_string,col_decimal,col_datetime',NULL)"; Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); Assertions.assertEquals(expectedIngestSql, ingestSql.get(0)); @@ -218,7 +219,7 @@ public void testBulkLoadWithDigestNotGeneratedAuditDisabled() throws Exception // Verify execution using ingestor PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - String[] schema = new String[]{col_int, col_string, col_decimal, col_datetime, BATCH_ID}; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID}; Map expectedStats = new HashMap<>(); expectedStats.put(StatisticName.FILES_LOADED.name(), 1); @@ -226,10 +227,10 @@ public void testBulkLoadWithDigestNotGeneratedAuditDisabled() throws Exception String expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table2.csv"; - RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE); + RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE, Optional.of(TASK_ID_VALUE_1)); executePlansAndVerifyResults(ingestor, datasets, schema, expectedDataPath, expectedStats, false); Map appendMetadata = 
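/*
 * Bulk-load batch ids are no longer caller-supplied strings such as 'xyz123': the
 * generator either leaves the {NEXT_BATCH_ID_PATTERN} placeholder for the caller to
 * substitute, or (as the assertions above pin down) derives the next integer id from
 * the bulk_load_batch_metadata table. Reformatted for readability only, the derived
 * subquery for table MAIN is:
 */
String nextBatchIdSubquery =
    "SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 "
        + "FROM bulk_load_batch_metadata as bulk_load_batch_metadata "
        + "WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'MAIN'"; // COALESCE seeds the id at 1 for a fresh table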
h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(0); - verifyBulkLoadMetadata(appendMetadata, filePath); + verifyBulkLoadMetadata(appendMetadata, filePath, 1, Optional.of(TASK_ID_VALUE_1)); } @Test @@ -243,7 +244,7 @@ public void testBulkLoadWithDigestGeneratedAuditEnabled() throws Exception BulkLoad bulkLoad = BulkLoad.builder() .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestUdfName(DIGEST_UDF).digestField(DIGEST).build()) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) - .batchIdField("batch_id") + .batchIdField(BATCH_ID) .build(); Dataset stagedFilesDataset = StagedFilesDataset.builder() @@ -266,7 +267,7 @@ public void testBulkLoadWithDigestGeneratedAuditEnabled() throws Exception .ingestMode(bulkLoad) .relationalSink(H2Sink.get()) .collectStatistics(true) - .bulkLoadBatchIdValue("xyz123") + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) .executionTimestampClock(fixedClock_2000_01_01) .build(); @@ -277,22 +278,22 @@ public void testBulkLoadWithDigestGeneratedAuditEnabled() throws Exception Map statsSql = operations.postIngestStatisticsSql(); String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"TEST_DB\".\"TEST\".\"main\"" + - "(\"col_int\" INTEGER NOT NULL PRIMARY KEY,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"digest\" VARCHAR,\"batch_id\" VARCHAR,\"append_time\" TIMESTAMP)"; + "(\"col_int\" INTEGER,\"col_string\" VARCHAR,\"col_decimal\" DECIMAL(5,2),\"col_datetime\" TIMESTAMP,\"digest\" VARCHAR,\"batch_id\" INTEGER,\"append_time\" TIMESTAMP)"; String expectedIngestSql = "INSERT INTO \"TEST_DB\".\"TEST\".\"main\" " + "(\"col_int\", \"col_string\", \"col_decimal\", \"col_datetime\", \"digest\", \"batch_id\", \"append_time\") " + "SELECT CONVERT(\"col_int\",INTEGER),CONVERT(\"col_string\",VARCHAR),CONVERT(\"col_decimal\",DECIMAL(5,2)),CONVERT(\"col_datetime\",TIMESTAMP)," + "LAKEHOUSE_MD5(ARRAY['col_int','col_string','col_decimal','col_datetime'],ARRAY[\"col_int\",\"col_string\",\"col_decimal\",\"col_datetime\"])," + - "'xyz123','2000-01-01 00:00:00' FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file3.csv','col_int,col_string,col_decimal,col_datetime',NULL)"; + "(SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000' FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file3.csv','col_int,col_string,col_decimal,col_datetime',NULL)"; Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); Assertions.assertEquals(expectedIngestSql, ingestSql.get(0)); - Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"TEST_DB\".\"TEST\".\"main\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED)); + Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"TEST_DB\".\"TEST\".\"main\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00.000000'", statsSql.get(ROWS_INSERTED)); // Verify execution using ingestor PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - String[] schema = new String[]{col_int, col_string, col_decimal, col_datetime, DIGEST, BATCH_ID, APPEND_TIME}; + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, DIGEST, BATCH_ID, APPEND_TIME}; Map expectedStats = new HashMap<>(); expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); @@ -301,10 +302,10 @@ public 
void testBulkLoadWithDigestGeneratedAuditEnabled() throws Exception String expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table3.csv"; - RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE); + RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE, Optional.of(TASK_ID_VALUE_1)); executePlansAndVerifyResults(ingestor, datasets, schema, expectedDataPath, expectedStats, false); Map appendMetadata = h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(0); - verifyBulkLoadMetadata(appendMetadata, filePath); + verifyBulkLoadMetadata(appendMetadata, filePath, 1, Optional.of(TASK_ID_VALUE_1)); } @Test @@ -316,7 +317,7 @@ public void testBulkLoadWithDigestGeneratedAuditEnabledUpperCase() throws Except String filePath = "src/test/resources/data/bulk-load/input/staged_file4.csv"; BulkLoad bulkLoad = BulkLoad.builder() - .batchIdField("batch_id") + .batchIdField(BATCH_ID) .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestUdfName(DIGEST_UDF).digestField(DIGEST).build()) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) .build(); @@ -341,7 +342,7 @@ public void testBulkLoadWithDigestGeneratedAuditEnabledUpperCase() throws Except .ingestMode(bulkLoad) .relationalSink(H2Sink.get()) .collectStatistics(true) - .bulkLoadBatchIdValue("xyz123") + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) .executionTimestampClock(fixedClock_2000_01_01) .caseConversion(CaseConversion.TO_UPPER) .build(); @@ -353,23 +354,23 @@ public void testBulkLoadWithDigestGeneratedAuditEnabledUpperCase() throws Except Map statsSql = operations.postIngestStatisticsSql(); String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"TEST_DB\".\"TEST\".\"MAIN\"" + - "(\"COL_INT\" INTEGER NOT NULL PRIMARY KEY,\"COL_STRING\" VARCHAR,\"COL_DECIMAL\" DECIMAL(5,2),\"COL_DATETIME\" TIMESTAMP,\"DIGEST\" VARCHAR,\"BATCH_ID\" VARCHAR,\"APPEND_TIME\" TIMESTAMP)"; + "(\"COL_INT\" INTEGER,\"COL_STRING\" VARCHAR,\"COL_DECIMAL\" DECIMAL(5,2),\"COL_DATETIME\" TIMESTAMP,\"DIGEST\" VARCHAR,\"BATCH_ID\" INTEGER,\"APPEND_TIME\" TIMESTAMP)"; String expectedIngestSql = "INSERT INTO \"TEST_DB\".\"TEST\".\"MAIN\" " + "(\"COL_INT\", \"COL_STRING\", \"COL_DECIMAL\", \"COL_DATETIME\", \"DIGEST\", \"BATCH_ID\", \"APPEND_TIME\") " + "SELECT CONVERT(\"COL_INT\",INTEGER),CONVERT(\"COL_STRING\",VARCHAR),CONVERT(\"COL_DECIMAL\",DECIMAL(5,2)),CONVERT(\"COL_DATETIME\",TIMESTAMP)," + "LAKEHOUSE_MD5(ARRAY['COL_INT','COL_STRING','COL_DECIMAL','COL_DATETIME'],ARRAY[\"COL_INT\",\"COL_STRING\",\"COL_DECIMAL\",\"COL_DATETIME\"])," + - "'xyz123','2000-01-01 00:00:00' " + + "(SELECT COALESCE(MAX(BULK_LOAD_BATCH_METADATA.\"BATCH_ID\"),0)+1 FROM BULK_LOAD_BATCH_METADATA as BULK_LOAD_BATCH_METADATA WHERE UPPER(BULK_LOAD_BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00.000000' " + "FROM CSVREAD('src/test/resources/data/bulk-load/input/staged_file4.csv','COL_INT,COL_STRING,COL_DECIMAL,COL_DATETIME',NULL)"; Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0)); Assertions.assertEquals(expectedIngestSql, ingestSql.get(0)); - Assertions.assertEquals("SELECT COUNT(*) as \"ROWSINSERTED\" FROM \"TEST_DB\".\"TEST\".\"MAIN\" as my_alias WHERE my_alias.\"APPEND_TIME\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED)); + Assertions.assertEquals("SELECT COUNT(*) as \"ROWSINSERTED\" FROM \"TEST_DB\".\"TEST\".\"MAIN\" as my_alias WHERE my_alias.\"APPEND_TIME\" = '2000-01-01 
00:00:00.000000'", statsSql.get(ROWS_INSERTED)); // Verify execution using ingestor PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - String[] schema = new String[]{col_int.toUpperCase(), col_string.toUpperCase(), col_decimal.toUpperCase(), col_datetime.toUpperCase(), DIGEST.toUpperCase(), BATCH_ID.toUpperCase(), APPEND_TIME.toUpperCase()}; + String[] schema = new String[]{COL_INT.toUpperCase(), COL_STRING.toUpperCase(), COL_DECIMAL.toUpperCase(), COL_DATETIME.toUpperCase(), DIGEST.toUpperCase(), BATCH_ID.toUpperCase(), APPEND_TIME.toUpperCase()}; Map expectedStats = new HashMap<>(); expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); @@ -378,10 +379,64 @@ public void testBulkLoadWithDigestGeneratedAuditEnabledUpperCase() throws Except String expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table4.csv"; - RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.TO_UPPER); + RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.TO_UPPER, Optional.of(TASK_ID_VALUE_1)); executePlansAndVerifyForCaseConversion(ingestor, datasets, schema, expectedDataPath, expectedStats); Map appendMetadata = h2Sink.executeQuery("select * from BULK_LOAD_BATCH_METADATA").get(0); - verifyBulkLoadMetadataForUpperCase(appendMetadata, filePath); + verifyBulkLoadMetadataForUpperCase(appendMetadata, filePath, 1, Optional.of(TASK_ID_VALUE_1)); + } + + @Test + public void testBulkLoadWithDigestNotGeneratedAuditDisabledTwoBatches() throws Exception + { + String filePath = "src/test/resources/data/bulk-load/input/staged_file2.csv"; + + BulkLoad bulkLoad = BulkLoad.builder() + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .batchIdField(BATCH_ID) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + H2StagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(Collections.singletonList(filePath)).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database(testDatabaseName).group(testSchemaName).name(mainTableName).alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + Datasets datasets = Datasets.of(mainDataset, stagedFilesDataset); + + + // Verify execution using ingestor (first batch) + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); + String[] schema = new String[]{COL_INT, COL_STRING, COL_DECIMAL, COL_DATETIME, BATCH_ID}; + + Map expectedStats = new HashMap<>(); + expectedStats.put(StatisticName.FILES_LOADED.name(), 1); + expectedStats.put(StatisticName.ROWS_WITH_ERRORS.name(), 0); + + String expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table2.csv"; + + RelationalIngestor ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE, Optional.of(TASK_ID_VALUE_1)); + executePlansAndVerifyResults(ingestor, datasets, schema, expectedDataPath, expectedStats, false); + Map appendMetadata = h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(0); + verifyBulkLoadMetadata(appendMetadata, filePath, 1, Optional.of(TASK_ID_VALUE_1)); + + + // Verify execution using ingestor (second batch) + expectedDataPath = "src/test/resources/data/bulk-load/expected/expected_table5.csv"; + + 
ingestor = getRelationalIngestor(bulkLoad, options, fixedClock_2000_01_01, CaseConversion.NONE, Optional.of(TASK_ID_VALUE_2)); + executePlansAndVerifyResults(ingestor, datasets, schema, expectedDataPath, expectedStats, false); + appendMetadata = h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(0); + verifyBulkLoadMetadata(appendMetadata, filePath, 1, Optional.of(TASK_ID_VALUE_1)); + appendMetadata = h2Sink.executeQuery("select * from bulk_load_batch_metadata").get(1); + verifyBulkLoadMetadata(appendMetadata, filePath, 2, Optional.of(TASK_ID_VALUE_2)); } @Test @@ -391,6 +446,7 @@ public void testBulkLoadDigestColumnNotProvided() { BulkLoad bulkLoad = BulkLoad.builder() .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestUdfName(DIGEST_UDF).build()) + .batchIdField(BATCH_ID) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) .build(); Assertions.fail("Exception was not thrown"); @@ -408,7 +464,7 @@ public void testBulkLoadDigestUDFNotProvided() { BulkLoad bulkLoad = BulkLoad.builder() .digestGenStrategy(UDFBasedDigestGenStrategy.builder().digestField(DIGEST).build()) - .batchIdField("batch_id") + .batchIdField(BATCH_ID) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) .build(); Assertions.fail("Exception was not thrown"); @@ -425,7 +481,7 @@ public void testBulkLoadStagedFilesDatasetNotProvided() try { BulkLoad bulkLoad = BulkLoad.builder() - .batchIdField("batch_id") + .batchIdField(BATCH_ID) .digestGenStrategy(NoDigestGenStrategy.builder().build()) .auditing(DateTimeAuditing.builder().dateTimeField(APPEND_TIME).build()) .build(); @@ -443,7 +499,7 @@ public void testBulkLoadStagedFilesDatasetNotProvided() RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(bulkLoad) .relationalSink(H2Sink.get()) - .bulkLoadBatchIdValue("xyz123") + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) .collectStatistics(true) .executionTimestampClock(fixedClock_2000_01_01) .build(); @@ -457,6 +513,100 @@ public void testBulkLoadStagedFilesDatasetNotProvided() } } + @Test + public void testBulkLoadStageHasPrimaryKey() + { + try + { + Field pkCol = Field.builder() + .name("some_pk") + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .primaryKey(true) + .build(); + + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + H2StagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(Collections.singletonList("src/test/resources/data/bulk-load/input/staged_file1.csv")).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, pkCol)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(H2Sink.get()) + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertTrue(e.getMessage().contains("Primary key list must be empty")); + } + 
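/*
 * The metadata helpers further down (verifyBulkLoadMetadata and its upper-case twin)
 * stop comparing batch_source_info for exact equality and instead probe for fragments,
 * because the JSON now optionally carries a task id. For the first batch of the
 * two-batch test above, the column plausibly holds something like the following; the
 * exact key order is an assumption, and only the "files" and "task_id" fragments are
 * actually asserted:
 */
String exampleBatchSourceInfo =
    "{\"files\":[\"src/test/resources/data/bulk-load/input/staged_file2.csv\"],\"task_id\":\"xyz123\"}";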
} + + @Test + public void testBulkLoadMainHasPrimaryKey() + { + try + { + Field pkCol = Field.builder() + .name("some_pk") + .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())) + .primaryKey(true) + .build(); + + BulkLoad bulkLoad = BulkLoad.builder() + .batchIdField(BATCH_ID) + .digestGenStrategy(NoDigestGenStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .build(); + + Dataset stagedFilesDataset = StagedFilesDataset.builder() + .stagedFilesDatasetProperties( + H2StagedFilesDatasetProperties.builder() + .fileFormat(FileFormat.CSV) + .addAllFiles(Collections.singletonList("src/test/resources/data/bulk-load/input/staged_file1.csv")).build()) + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4)).build()) + .build(); + + Dataset mainDataset = DatasetDefinition.builder() + .database("my_db").name("my_name").alias("my_alias") + .schema(SchemaDefinition.builder().addAllFields(Arrays.asList(col1, col2, col3, col4, pkCol)).build()) + .build(); + + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(bulkLoad) + .relationalSink(H2Sink.get()) + .bulkLoadTaskIdValue(TASK_ID_VALUE_1) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset)); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertTrue(e.getMessage().contains("Primary key list must be empty")); + } + } + @Test public void testBulkLoadMoreThanOneFile() { @@ -497,7 +647,7 @@ public void testBulkLoadNotCsvFile() } } - RelationalIngestor getRelationalIngestor(IngestMode ingestMode, PlannerOptions options, Clock executionTimestampClock, CaseConversion caseConversion) + RelationalIngestor getRelationalIngestor(IngestMode ingestMode, PlannerOptions options, Clock executionTimestampClock, CaseConversion caseConversion, Optional taskId) { return RelationalIngestor.builder() .ingestMode(ingestMode) @@ -505,30 +655,46 @@ RelationalIngestor getRelationalIngestor(IngestMode ingestMode, PlannerOptions o .executionTimestampClock(executionTimestampClock) .cleanupStagingData(options.cleanupStagingData()) .collectStatistics(options.collectStatistics()) - .bulkLoadBatchIdValue("xyz123") + .bulkLoadTaskIdValue(taskId) .enableConcurrentSafety(true) .caseConversion(caseConversion) .build(); } - private void verifyBulkLoadMetadata(Map appendMetadata, String fileName) + private void verifyBulkLoadMetadata(Map appendMetadata, String fileName, int batchId, Optional taskId) { - Assertions.assertEquals("xyz123", appendMetadata.get("batch_id")); + Assertions.assertEquals(batchId, appendMetadata.get("batch_id")); Assertions.assertEquals("SUCCEEDED", appendMetadata.get("batch_status")); Assertions.assertEquals("main", appendMetadata.get("table_name")); - Assertions.assertEquals(String.format("{\"files\":[\"%s\"]}", fileName), appendMetadata.get("batch_source_info")); Assertions.assertEquals("2000-01-01 00:00:00.0", appendMetadata.get("batch_start_ts_utc").toString()); Assertions.assertEquals("2000-01-01 00:00:00.0", appendMetadata.get("batch_end_ts_utc").toString()); + Assertions.assertTrue(appendMetadata.get("batch_source_info").toString().contains(String.format("\"files\":[\"%s\"]", fileName))); + if (taskId.isPresent()) + { + Assertions.assertTrue(appendMetadata.get("batch_source_info").toString().contains(String.format("\"task_id\":\"%s\"", taskId.get()))); + } + else + { + 
Assertions.assertFalse(appendMetadata.get("batch_source_info").toString().contains("\"task_id\"")); + } } - private void verifyBulkLoadMetadataForUpperCase(Map<String, Object> appendMetadata, String fileName) + private void verifyBulkLoadMetadataForUpperCase(Map<String, Object> appendMetadata, String fileName, int batchId, Optional<String> taskId) { - Assertions.assertEquals("xyz123", appendMetadata.get("BATCH_ID")); + Assertions.assertEquals(batchId, appendMetadata.get("BATCH_ID")); Assertions.assertEquals("SUCCEEDED", appendMetadata.get("BATCH_STATUS")); Assertions.assertEquals("MAIN", appendMetadata.get("TABLE_NAME")); - Assertions.assertEquals(String.format("{\"files\":[\"%s\"]}", fileName), appendMetadata.get("BATCH_SOURCE_INFO")); Assertions.assertEquals("2000-01-01 00:00:00.0", appendMetadata.get("BATCH_START_TS_UTC").toString()); Assertions.assertEquals("2000-01-01 00:00:00.0", appendMetadata.get("BATCH_END_TS_UTC").toString()); + Assertions.assertTrue(appendMetadata.get("BATCH_SOURCE_INFO").toString().contains(String.format("\"files\":[\"%s\"]", fileName))); + if (taskId.isPresent()) + { + Assertions.assertTrue(appendMetadata.get("BATCH_SOURCE_INFO").toString().contains(String.format("\"task_id\":\"%s\"", taskId.get()))); + } + else + { + Assertions.assertFalse(appendMetadata.get("BATCH_SOURCE_INFO").toString().contains("\"task_id\"")); + } } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/MixedIngestModeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/MixedIngestModeTest.java index ef070aaa5b0..95135656318 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/MixedIngestModeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/MixedIngestModeTest.java @@ -87,7 +87,7 @@ public void testMultiIngestionTypes() throws Exception .enableConcurrentSafety(true) .build(); - IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets); + IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets).get(0); MultiTableIngestionTest.verifyResults(1, schema, expectedPath, "main", result, expectedStats); // Pass 2 : unitemporalDelta @@ -106,7 +106,7 @@ public void testMultiIngestionTypes() throws Exception .enableConcurrentSafety(true) .build(); - result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets); + result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets).get(0); MultiTableIngestionTest.verifyResults(2, schema, expectedPath, "main", result, expectedStats); // Pass 3 : unitemporalSnapshot @@ -125,7 +125,7 @@ public void testMultiIngestionTypes() throws Exception .enableConcurrentSafety(true) .build(); - result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets); + result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets).get(0); MultiTableIngestionTest.verifyResults(3, schema,
expectedPath, "main", result, expectedStats); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/UnitemporalDeltaRunner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/UnitemporalDeltaRunner.java index 28c7995ade2..ca85039087d 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/UnitemporalDeltaRunner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/mixed/UnitemporalDeltaRunner.java @@ -90,7 +90,7 @@ public void run() .executionTimestampClock(clock) .build(); - IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets); + IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(h2Sink.connection()), datasets).get(0); if (maxBatchIdCounter.get() < result.batchId().get()) { maxBatchIdCounter.set(result.batchId().get()); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java index 30e8b98d60f..11d59c8ad52 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyTest.java @@ -22,11 +22,15 @@ import org.finos.legend.engine.persistence.components.ingestmode.AppendOnly; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; -import 
org.finos.legend.engine.persistence.components.relational.api.DataSplitRange; import org.finos.legend.engine.persistence.components.relational.api.RelationalIngestor; import org.finos.legend.engine.persistence.components.relational.h2.H2Sink; import org.finos.legend.engine.persistence.components.relational.jdbc.JdbcConnection; @@ -40,274 +44,367 @@ import java.util.Map; import static org.finos.legend.engine.persistence.components.TestUtils.batchUpdateTimeName; +import static org.finos.legend.engine.persistence.components.TestUtils.dataSplitName; import static org.finos.legend.engine.persistence.components.TestUtils.digestName; import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; import static org.finos.legend.engine.persistence.components.TestUtils.idName; import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; import static org.finos.legend.engine.persistence.components.TestUtils.nameName; import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.dataSplitName; +import static org.finos.legend.engine.persistence.components.TestUtils.versionName; class AppendOnlyTest extends BaseTest { private final String basePath = "src/test/resources/data/incremental-append-milestoning/"; /* Scenarios: - 1. FilterDuplicates and No Auditing - 2. Staging data is imported along with Digest field population - 3. Staging has lesser columns than main dataset - 4. Staging data cleanup - 5. FilterDuplicates and Auditing enabled - 6. Add column schema evolution - 7. implicit data type change schema evolution - 8. Filter Duplicates and Data Splits enabled + 1) With Auditing, NoVersion, Filter Duplicates, filterExistingRecords=true - tested (perform deduplication, auditing, filter existing) + 2) No Auditing, NoVersion, Allow Duplicates, filterExistingRecords=false - tested (the most basic case) + 3) With Auditing, MaxVersion, Filter Duplicates, filterExistingRecords=true - tested (perform deduplication and versioning, auditing, filter existing) + 4) With Auditing, MaxVersion, Filter Duplicates, filterExistingRecords=false - tested (perform deduplication and versioning, auditing) + 5) With Auditing, AllVersion, Filter Duplicates, filterExistingRecords=true - tested (perform deduplication and versioning, data split, auditing, filter existing) + 6) With Auditing, AllVersion, Filter Duplicates, filterExistingRecords=false - tested (perform deduplication and versioning, data split, auditing) + + Other enrichment tests: + 1) Staging data is imported along with Digest field population + 2) Staging has fewer columns than the main dataset + 3) Do not create table */ - /* - Scenario: Test Append Only Logic with FilterDuplicates and No Auditing + Scenario: Test Append Only vanilla case + staging table is cleaned up in the end with upper case (2) */ @Test - void testAppendOnlyWithFilterDuplicatesAndNoAuditing() throws Exception + void testAppendOnlyVanillaUpperCase() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNoPks(); // Create staging table - createStagingTable(stagingTable); + h2Sink.executeStatement("CREATE TABLE IF NOT EXISTS \"TEST\".\"STAGING\"(\"NAME\" VARCHAR(64) NOT NULL,\"INCOME\" BIGINT,\"EXPIRY_DATE\" DATE)"); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) - .deduplicationStrategy(FilterDuplicates.builder().build()) +
.deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) .auditing(NoAuditing.builder().build()) + .filterExistingRecords(false) .build(); - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(true).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{nameName.toUpperCase(), incomeName.toUpperCase(), expiryDateName.toUpperCase()}; - // ------------ Perform incremental (append) milestoning Pass1 ------------------------ + // ------------ Perform incremental (append) milestoning With Clean Staging Table ------------------------ String dataPass1 = basePath + "input/vanilla_case/data_pass1.csv"; String expectedDataPass1 = basePath + "expected/vanilla_case/expected_pass1.csv"; // 1. Load staging table - loadBasicStagingData(dataPass1); + loadStagingDataWithNoPkInUpperCase(dataPass1); // 2. Execute plans and verify results Map expectedStats = new HashMap<>(); expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - // 3. Assert that the staging table is NOT truncated - List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 3); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + // 3. Assert that the staging table is truncated + List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"STAGING\""); + Assertions.assertEquals(stagingTableList.size(), 0); - // ------------ Perform incremental (append) milestoning Pass2 ------------------------ + // ------------ Perform incremental (append) milestoning With Clean Staging Table ------------------------ String dataPass2 = basePath + "input/vanilla_case/data_pass2.csv"; String expectedDataPass2 = basePath + "expected/vanilla_case/expected_pass2.csv"; // 1. Load staging table - loadBasicStagingData(dataPass2); + loadStagingDataWithNoPkInUpperCase(dataPass2); // 2. Execute plans and verify results - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); + expectedStats = new HashMap<>(); + expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); + expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); + expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); + expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); + // 3. 
/* - Scenario: Test Append Only Logic with FilterDuplicates and No Auditing with Upper Case Optimizer + Scenario: Test Append Only with auditing, no versioning, filter duplicates and filter existing records (1) */ @Test - void testAppendOnlyWithFilterDuplicatesAndNoAuditingWithUpperCaseOptimizer() throws Exception + void testAppendOnlyWithAuditingNoVersioningFilterDuplicatesFilterExistingRecords() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); // Create staging table - h2Sink.executeStatement("CREATE TABLE IF NOT EXISTS \"TEST\".\"STAGING\"(\"ID\" INTEGER NOT NULL,\"NAME\" VARCHAR(64) NOT NULL,\"INCOME\" BIGINT,\"START_TIME\" TIMESTAMP NOT NULL,\"EXPIRY_DATE\" DATE,\"DIGEST\" VARCHAR,PRIMARY KEY (\"ID\", \"START_TIME\"))"); + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(true) + .build(); - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName.toUpperCase(), nameName.toUpperCase(), incomeName.toUpperCase(), startTimeName.toUpperCase(), expiryDateName.toUpperCase(), digestName.toUpperCase()}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String dataPass1 = basePath + "input/vanilla_case/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/vanilla_case/expected_pass1.csv"; + String dataPass1 = basePath + "input/auditing_no_version_filter_dup_filter_existing/data_pass1.csv"; + String expectedDataPass1 = basePath + "expected/auditing_no_version_filter_dup_filter_existing/expected_pass1.csv"; // 1. Load staging table - loadBasicStagingDataInUpperCase(dataPass1); + loadBasicStagingData(dataPass1); + // 2. Execute plans and verify results + Map expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); + // ------------ Perform incremental (append) milestoning Pass2 ------------------------ + String dataPass2 = basePath + "input/auditing_no_version_filter_dup_filter_existing/data_pass2.csv"; + String expectedDataPass2 = basePath + "expected/auditing_no_version_filter_dup_filter_existing/expected_pass2.csv"; + // 1. Load staging table + loadBasicStagingData(dataPass2); // 2.
Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats = createExpectedStatsMap(4, 0, 2, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); + } - List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"STAGING\""); - Assertions.assertEquals(stagingTableList.size(), 3); + /* + Scenario: Test Append Only with auditing, max version, filter duplicates and filter existing records with upper case (3) + */ + @Test + void testAppendOnlyWithAuditingMaxVersionFilterDuplicatesFilterExistingRecordsUpperCase() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNonPkVersion(); - executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + // Create staging table + h2Sink.executeStatement("CREATE TABLE IF NOT EXISTS \"TEST\".\"STAGING\"(\"ID\" INTEGER NOT NULL,\"NAME\" VARCHAR(64) NOT NULL,\"INCOME\" BIGINT,\"START_TIME\" TIMESTAMP NOT NULL,\"EXPIRY_DATE\" DATE,\"DIGEST\" VARCHAR,\"VERSION\" INT)"); - // 3. Assert that the staging table is NOT truncated - stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"STAGING\""); - Assertions.assertEquals(stagingTableList.size(), 3); + // Generate the milestoning object + AppendOnly ingestMode = AppendOnly.builder() + .digestField(digestName) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(true) + .build(); + + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName.toUpperCase(), nameName.toUpperCase(), incomeName.toUpperCase(), startTimeName.toUpperCase(), expiryDateName.toUpperCase(), digestName.toUpperCase(), versionName.toUpperCase(), batchUpdateTimeName.toUpperCase()}; + + // ------------ Perform incremental (append) milestoning Pass1 ------------------------ + String dataPass1 = basePath + "input/auditing_max_version_filter_dup_filter_existing/data_pass1.csv"; + String expectedDataPass1 = basePath + "expected/auditing_max_version_filter_dup_filter_existing/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataWithVersionInUpperCase(dataPass1); + // 2. 
Execute plans and verify results + Map expectedStats = createExpectedStatsMap(4, 0, 3, 0, 0); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); // ------------ Perform incremental (append) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/vanilla_case/data_pass2.csv"; - String expectedDataPass2 = basePath + "expected/vanilla_case/expected_pass2.csv"; + String dataPass2 = basePath + "input/auditing_max_version_filter_dup_filter_existing/data_pass2.csv"; + String expectedDataPass2 = basePath + "expected/auditing_max_version_filter_dup_filter_existing/expected_pass2.csv"; // 1. Load staging table - loadBasicStagingDataInUpperCase(dataPass2); + loadStagingDataWithVersionInUpperCase(dataPass2); // 2. Execute plans and verify results - executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); + expectedStats = createExpectedStatsMap(4, 0, 2, 0, 0); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); } /* - Scenario: test AppendOnly when staging data is imported along with Digest field population + Scenario: Test Append Only with auditing, max version, filter duplicates and no filter existing records (4) */ @Test - void testAppendOnlyWithStagingDataImportedWithPopulateDigest() throws Exception + void testAppendOnlyWithAuditingMaxVersionFilterDuplicatesNoFilterExistingRecords() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - String dataPass1 = "src/test/resources/data/import-data/data_pass1.json"; - Dataset stagingTable = TestUtils.getJsonDatasetWithoutDigestReferenceTable(dataPass1); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNonPkVersion(); + + // Create staging table + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(false) .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchUpdateTimeName}; // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String expectedDataPass1 = "src/test/resources/data/import-data/data_expected_with_digest_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 5); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + String dataPass1 = basePath + 
"input/auditing_max_version_filter_dup_no_filter_existing/data_pass1.csv"; + String expectedDataPass1 = basePath + "expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass1); + // 2. Execute plans and verify results + Map expectedStats = createExpectedStatsMap(4, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); // ------------ Perform incremental (append) milestoning Pass2 ------------------------ - String dataPass2 = "src/test/resources/data/import-data/data_pass2.json"; - stagingTable = TestUtils.getJsonDatasetWithoutDigestReferenceTable(dataPass2); - String expectedDataPass2 = "src/test/resources/data/import-data/data_expected_with_digest_pass2.csv"; - // Execute plans and verify results - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 2); - executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats); + String dataPass2 = basePath + "input/auditing_max_version_filter_dup_no_filter_existing/data_pass2.csv"; + String expectedDataPass2 = basePath + "expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass2); + // 2. Execute plans and verify results + expectedStats = createExpectedStatsMap(4, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); } /* - Scenario: Test AppendOnly when staging has lesser columns than main + Scenario: Test Append Only with auditing, all version, filter duplicates and filter existing records (5) */ @Test - void testAppendOnlyWithLessColumnsInStaging() throws Exception + void testAppendOnlyWithAuditingAllVersionFilterDuplicatesFilterExistingRecords() throws Exception { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); - String dataPass1 = basePath + "input/less_columns_in_staging/data_pass1.csv"; - Dataset stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass1); + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNonPkVersion(); + IncrementalClock incrementalClock = new IncrementalClock(fixedExecutionZonedDateTime1.toInstant(), ZoneOffset.UTC, 1000); + + // Create staging table + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .dataSplitFieldName(dataSplitName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(true) .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchUpdateTimeName}; // ------------ Perform incremental (append) milestoning Pass1 
------------------------ - String expectedDataPass1 = basePath + "expected/less_columns_in_staging/expected_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + String dataPass1 = basePath + "input/auditing_all_version_filter_dup_filter_existing/data_pass1.csv"; + String expectedDataPass1 = basePath + "expected/auditing_all_version_filter_dup_filter_existing/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass1); + // 2. Execute plans and verify results + List> expectedStatsList = new ArrayList<>(); + Map expectedStats1 = createExpectedStatsMap(3, 0, 3, 0, 0); + Map expectedStats2 = createExpectedStatsMap(1, 0, 1, 0, 0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, incrementalClock); // ------------ Perform incremental (append) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/less_columns_in_staging/data_pass2.csv"; - stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass2); - String expectedDataPass2 = basePath + "expected/less_columns_in_staging/expected_pass2.csv"; - // Execute plans and verify results - executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats); + String dataPass2 = basePath + "input/auditing_all_version_filter_dup_filter_existing/data_pass2.csv"; + String expectedDataPass2 = basePath + "expected/auditing_all_version_filter_dup_filter_existing/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass2); + // 2. 
Execute plans and verify results + expectedStatsList = new ArrayList<>(); + expectedStats1 = createExpectedStatsMap(4, 0, 2, 0, 0); + expectedStatsList.add(expectedStats1); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, incrementalClock); }
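To make the "derived data splits" in the all-version scenarios (5) above and (6) below concrete: with AllVersionsStrategy and performStageVersioning(true), several versions of the same primary key arriving in one staging batch are fanned out into sequential mini-batches keyed by the derived data-split column, which is why these passes assert a list of stats maps (one per split) and use an IncrementalClock so each split gets a distinct audit time. A hypothetical illustration, with row values invented for clarity:

// Staging batch                -> derived data split
// (id=1, name=A,  version=1)   ->  1
// (id=1, name=A', version=2)   ->  2   (second version of the same key)
// (id=2, name=B,  version=1)   ->  1
// Split 1 is ingested first, split 2 second; each ingestion is audited with
// its own batchUpdateTime, so the main table retains every version in order.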
/* - Scenario: Test AppendOnly when staging table is cleaned up in the end + Scenario: Test Append Only with auditing, all version, filter duplicates and no filter existing records (6) */ @Test - void testAppendOnlyWithCleanStagingData() throws Exception + void testAppendOnlyWithAuditingAllVersionFilterDuplicatesNoFilterExistingRecords() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNonPkVersion(); + IncrementalClock incrementalClock = new IncrementalClock(fixedExecutionZonedDateTime1.toInstant(), ZoneOffset.UTC, 1000); // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .dataSplitFieldName(dataSplitName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(false) .build(); - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(true).collectStatistics(true).build(); + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchUpdateTimeName}; - // ------------ Perform incremental (append) milestoning With Clean Staging Table ------------------------ - String dataPass1 = basePath + "input/vanilla_case/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/vanilla_case/expected_pass1.csv"; + // ------------ Perform incremental (append) milestoning Pass1 ------------------------ + String dataPass1 = basePath + "input/auditing_all_version_filter_dup_no_filter_existing/data_pass1.csv"; + String expectedDataPass1 = basePath + "expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass1.csv"; // 1. Load staging table - loadBasicStagingData(dataPass1); + loadStagingDataWithVersion(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); + List> expectedStatsList = new ArrayList<>(); + Map expectedStats1 = createExpectedStatsMap(3, 0, 3, 0, 0); + Map expectedStats2 = createExpectedStatsMap(1, 0, 1, 0, 0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, incrementalClock); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - // 3. Assert that the staging table is truncated - List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 0); + // ------------ Perform incremental (append) milestoning Pass2 ------------------------ + String dataPass2 = basePath + "input/auditing_all_version_filter_dup_no_filter_existing/data_pass2.csv"; + String expectedDataPass2 = basePath + "expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass2); + // 2. Execute plans and verify results + expectedStatsList = new ArrayList<>(); + expectedStats1 = createExpectedStatsMap(4, 0, 3, 0, 0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, incrementalClock); } /* - Scenario: Test AppendOnly with FilterDuplicates and Auditing enabled + Scenario: Test Append Only with auditing, no version, allow duplicates and filter existing records when staging data is imported along with digest field population */ @Test - void testAppendOnlyWithFilterDuplicatesAndAuditingEnabled() throws Exception + void testAppendOnlyWithStagingDataImportedWithPopulateDigest() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - String dataPass1 = basePath + "input/with_update_timestamp_field/data_pass1.csv"; - Dataset stagingTable = TestUtils.getBasicCsvDatasetReferenceTable(dataPass1); + String dataPass1 = basePath + "input/import_with_populate_digest/data_pass1.json"; + Dataset stagingTable = TestUtils.getJsonDatasetWithoutDigestReferenceTable(dataPass1); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(true) .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); @@ -316,67 +413,79 @@ void testAppendOnlyWithFilterDuplicatesAndAuditingEnabled() throws Exception String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String expectedDataPass1 = basePath + "expected/with_update_timestamp_field/expected_pass1.csv"; + String expectedDataPass1 = basePath +
"expected/import_with_populate_digest/expected_pass1.csv"; // Execute plans and verify results - Map expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + Map expectedStats = createExpectedStatsMap(5, 0, 5, 0, 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform incremental (append) milestoning Pass2 ------------------------ + String dataPass2 = basePath + "input/import_with_populate_digest/data_pass2.json"; + stagingTable = TestUtils.getJsonDatasetWithoutDigestReferenceTable(dataPass2); + String expectedDataPass2 = basePath + "expected/import_with_populate_digest/expected_pass2.csv"; + // Execute plans and verify results + expectedStats = createExpectedStatsMap(2, 0, 1, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); } /* - Scenario: Test AppendOnly with Filter Duplicates and Data Splits enabled + Scenario: Test Append Only with auditing, no version, allow duplicates and no filter existing records when staging has lesser columns than main */ @Test - void testAppendOnlyWithFilterDuplicatesAuditEnabledWithDataSplits() throws Exception + void testAppendOnlyWithLessColumnsInStaging() throws Exception { - DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); - String dataPass1 = basePath + "input/with_data_splits/data_pass1.csv"; - Dataset stagingTable = TestUtils.getBasicCsvDatasetReferenceTableWithDataSplits(dataPass1); - IncrementalClock incrementalClock = new IncrementalClock(fixedExecutionZonedDateTime1.toInstant(), ZoneOffset.UTC, 1000); + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + String dataPass1 = basePath + "input/less_columns_in_staging/data_pass1.csv"; + Dataset stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass1); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) - .dataSplitField(dataSplitName) - .build(); + .digestField(digestName) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .filterExistingRecords(false) + .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{batchUpdateTimeName, idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String expectedDataPass1 = basePath + "expected/with_data_splits/expected_pass1.csv"; + String expectedDataPass1 = basePath + "expected/less_columns_in_staging/expected_pass1.csv"; // Execute plans and verify results - List dataSplitRanges = new ArrayList<>(); - dataSplitRanges.add(DataSplitRange.of(1, 1)); - dataSplitRanges.add(DataSplitRange.of(2, 2)); - List> expectedStatsList = new ArrayList<>(); - Map expectedStats1 = createExpectedStatsMap(3, 0, 3, 0, 0); - Map expectedStats2 = createExpectedStatsMap(2, 0, 2, 0, 0); - - 
expectedStatsList.add(expectedStats1); - expectedStatsList.add(expectedStats2); + Map expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges, incrementalClock); + // ------------ Perform incremental (append) milestoning Pass2 ------------------------ + String dataPass2 = basePath + "input/less_columns_in_staging/data_pass2.csv"; + stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass2); + String expectedDataPass2 = basePath + "expected/less_columns_in_staging/expected_pass2.csv"; + // Execute plans and verify results + expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); } + /* + Scenario: Test Append Only vanilla case where tables are not created + */ @Test void testAppendOnlyDoNotCreateTables() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNoPks(); // Create staging table createStagingTable(stagingTable); // Generate the milestoning object AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .filterExistingRecords(false) + .build(); Datasets datasets = Datasets.of(mainTable, stagingTable); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyWithDuplicatesTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyWithDuplicatesTest.java deleted file mode 100644 index a9d35446be8..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/AppendOnlyWithDuplicatesTest.java +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2022 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
- -package org.finos.legend.engine.persistence.components.ingestmode.nontemporal; - -import org.finos.legend.engine.persistence.components.BaseTest; -import org.finos.legend.engine.persistence.components.TestUtils; -import org.finos.legend.engine.persistence.components.common.Datasets; -import org.finos.legend.engine.persistence.components.common.StatisticName; -import org.finos.legend.engine.persistence.components.ingestmode.AppendOnly; -import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.CsvExternalDatasetReference; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; -import org.finos.legend.engine.persistence.components.planner.PlannerOptions; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.getSchemaWithNoPKs; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; - -class AppendOnlyWithDuplicatesTest extends BaseTest -{ - private final String basePath = "src/test/resources/data/incremental-append-milestoning/"; - /* - Scenarios: - 1. Allow Duplicates where PKs are provided - 2. Allow Duplicates where no PKs are provided - 3. FAIL_ON_DUPLICATES validation with primary keys empty - 4. 
FAIL_ON_DUPLICATES causing the test to fail - */ - - /* - Scenario: Test Append Only with ALLOW_DUPLICATES validation when primary keys are not empty - */ - @Test - void testAppendOnlyWithAllowDuplicatesWherePKsNotEmpty() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); - - // Create staging table - createStagingTable(stagingTable); - - // Generate the milestoning object - AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(AllowDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; - - String expectedDataPass1 = basePath + "expected/allow_duplicates/expected_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); - - try - { - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - - Assertions.fail("Exception was not thrown"); - } - catch (Exception e) - { - Assertions.assertEquals("Primary key list must be empty", e.getMessage()); - } - } - - /* - Scenario: Test Append Only with ALLOW_DUPLICATES and no PKs - */ - @Test - void testAppendOnlyWithAllowDuplicates() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - String dataPass1 = basePath + "input/allow_duplicates/data_pass1.csv"; - Dataset stagingTable = CsvExternalDatasetReference.builder().schema(getSchemaWithNoPKs()).csvDataPath(dataPass1).build(); - - // Generate the milestoning object - AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(AllowDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{nameName, incomeName, expiryDateName}; - - // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String expectedDataPass1 = basePath + "expected/allow_duplicates/expected_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - - // ------------ Perform incremental (append) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/allow_duplicates/data_pass2.csv"; - stagingTable = CsvExternalDatasetReference.builder().schema(getSchemaWithNoPKs()).csvDataPath(dataPass1).build(); - String expectedDataPass2 = basePath + "expected/allow_duplicates/expected_pass2.csv"; - // Execute plans and verify results - executePlansAndVerifyResults(ingestMode, options, 
datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats); - } - - /* - Scenario: FAIL_ON_DUPLICATES validation with primary keys empty - */ - @Test - void testAppendOnlyWithFailOnDuplicatesValidation() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - String dataPass1 = basePath + "input/allow_duplicates/data_pass1.csv"; - Dataset stagingTable = CsvExternalDatasetReference.builder().schema(getSchemaWithNoPKs()).csvDataPath(dataPass1).build(); - - // Generate the milestoning object - AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(FailOnDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{nameName, incomeName, expiryDateName}; - - String expectedDataPass1 = basePath + "expected/allow_duplicates/expected_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); - try - { - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - - Assertions.fail("Exception was not thrown"); - } - catch (Exception e) - { - Assertions.assertEquals("Primary key list must not be empty", e.getMessage()); - } - } - - /* - Scenario: Test Append Only with FAIL_ON_DUPLICATES strategy will cause the test to fail - */ - @Test - void testAppendOnlyWithFailOnDuplicates() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); - - // Create staging table - createStagingTable(stagingTable); - - // Generate the milestoning object - AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestName) - .deduplicationStrategy(FailOnDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; - - // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String dataPass1 = basePath + "input/vanilla_case/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/vanilla_case/expected_pass1.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass1); - // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - expectedStats.put(StatisticName.ROWS_INSERTED.name(), 3); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - // 3. 
Assert that the staging table is NOT truncated - List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 3); - - // ------------ Perform incremental (append) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/vanilla_case/data_pass2.csv"; - String expectedDataPass2 = basePath + "expected/vanilla_case/expected_pass2.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass2); - // 2. Execute plans and verify results - try - { - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); - Assertions.fail("Exception was not thrown"); - } - catch (Exception e) - { - Assertions.assertTrue(e.getMessage().contains("Unique index or primary key violation")); - } - } -} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaMergeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaMergeTest.java deleted file mode 100644 index f6127663533..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaMergeTest.java +++ /dev/null @@ -1,202 +0,0 @@ -// Copyright 2022 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package org.finos.legend.engine.persistence.components.ingestmode.nontemporal; - -import org.finos.legend.engine.persistence.components.BaseTest; -import org.finos.legend.engine.persistence.components.TestUtils; -import org.finos.legend.engine.persistence.components.common.Datasets; -import org.finos.legend.engine.persistence.components.common.StatisticName; -import org.finos.legend.engine.persistence.components.ingestmode.NontemporalDelta; -import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; -import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; -import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; -import org.finos.legend.engine.persistence.components.planner.PlannerOptions; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.finos.legend.engine.persistence.components.TestUtils.batchUpdateTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; - -class NontemporalDeltaMergeTest extends BaseTest -{ - private final String basePath = "src/test/resources/data/incremental-delta-milestoning/"; - - /* - Scenario: Test milestoning Logic when staging table pre populated - */ - @Test - void testMilestoningStagingTablePrePopulated() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); - - // Create staging table - createStagingTable(stagingTable); - - // Generate the milestoning object - NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; - - // ------------ Perform incremental (delta) milestoning Pass1 ------------------------ - String dataPass1 = basePath + "input/vanilla_case/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/vanilla_case/expected_pass1.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass1); - // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - // 3. 
Assert that the staging table is NOT truncated - List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 3); - - // ------------ Perform incremental (delta) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/vanilla_case/data_pass2.csv"; - String expectedDataPass2 = basePath + "expected/vanilla_case/expected_pass2.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass2); - // 2. Execute plans and verify results - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); - } - - /* - Scenario: Test milestoning Logic when staging data comes from CSV and has less columns than main dataset - */ - @Test - void testIncrementalDeltaMilestoningLogicWithLessColumnsInStaging() throws Exception - { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); - String dataPass1 = basePath + "input/less_columns_in_staging/data_pass1.csv"; - Dataset stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass1); - - // Generate the milestoning object - NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; - - // ------------ Perform incremental (delta) milestoning Pass1 ------------------------ - String expectedDataPass1 = basePath + "expected/less_columns_in_staging/expected_pass1.csv"; - // Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); - - // ------------ Perform incremental (delta) milestoning Pass2 ------------------------ - String dataPass2 = basePath + "input/less_columns_in_staging/data_pass2.csv"; - String expectedDataPass2 = basePath + "expected/less_columns_in_staging/expected_pass2.csv"; - stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass2); - // Execute plans and verify results - executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats); - } - - /* - Scenario: Test milestoning Logic when staging table is pre populated - and isUpdateBatchTimeEnabled is enabled - */ - @Test - void testGeneratePhysicalPlanWithUpdateTimestampColumn() throws Exception - { - DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); - - // Create staging table - createStagingTable(stagingTable); - - // Generate the milestoning object - NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; - - // ------------ Perform 
incremental (delta) milestoning Pass1 ------------------------ - String dataPass1 = basePath + "input/with_update_timestamp_field/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/with_update_timestamp_field/expected_pass1.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass1); - // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); - } - - @Test - void testGeneratePhysicalPlanWithDeleteIndicator() throws Exception - { - DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); - DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); - - // Create staging table - createStagingTable(stagingTable); - - // Generate the milestoning object - NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) - .build(); - - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); - Datasets datasets = Datasets.of(mainTable, stagingTable); - - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; - - // ------------ Perform incremental (delta) milestoning Pass1 ------------------------ - String dataPass1 = basePath + "input/with_update_timestamp_field/data_pass1.csv"; - String expectedDataPass1 = basePath + "expected/with_update_timestamp_field/expected_pass1.csv"; - // 1. Load staging table - loadBasicStagingData(dataPass1); - // 2. 
Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); - } -} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java index ac929288621..b0dfc3d65a8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalDeltaTest.java @@ -17,19 +17,27 @@ import java.util.Arrays; import org.finos.legend.engine.persistence.components.BaseTest; import org.finos.legend.engine.persistence.components.TestUtils; +import org.finos.legend.engine.persistence.components.common.DatasetFilter; import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.FilterType; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.NontemporalDelta; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningComparator; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DerivedDataset; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; import org.finos.legend.engine.persistence.components.relational.api.DataSplitRange; +import 
org.finos.legend.engine.persistence.components.versioning.TestDedupAndVersioning; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -37,18 +45,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.logging.Filter; -import static org.finos.legend.engine.persistence.components.TestUtils.batchUpdateTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.deleteIndicatorName; -import static org.finos.legend.engine.persistence.components.TestUtils.deleteIndicatorValues; -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.dataSplitName; -import static org.finos.legend.engine.persistence.components.TestUtils.versionName; +import static org.finos.legend.engine.persistence.components.TestUtils.*; class NontemporalDeltaTest extends BaseTest { @@ -77,9 +76,9 @@ void testNonTemporalDeltaWithNoAuditing() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -127,9 +126,9 @@ void testNonTemporalDeltaWithDeleteIndicator() throws Exception .digestField(digestName) .auditing(NoAuditing.builder().build()) .mergeStrategy(DeleteIndicatorMergeStrategy.builder() - .deleteField(deleteIndicatorName) - .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) - .build()) + .deleteField(deleteIndicatorName) + .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) + .build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -180,9 +179,9 @@ void testNonTemporalDeltaWithLessColumnsInStaging() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -211,19 +210,20 @@ void testNonTemporalDeltaWithLessColumnsInStaging() throws Exception Scenario: Test NonTemporal Delta when staging table is cleaned up in the end */ @Test - void testNonTemporalDeltaWithCleanStagingData() throws Exception + void testNonTemporalDeltaWithCleanStagingDataWithFailOnDups() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - 
.digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(true).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -245,25 +245,39 @@ void testNonTemporalDeltaWithCleanStagingData() throws Exception // 3. Assert that the staging table is truncated List> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); Assertions.assertEquals(stagingTableList.size(), 0); + + // ------------ Perform incremental (delta) milestoning Fail on Dups ------------------------ + String dataPass2 = basePath + "input/with_duplicates/data_pass1.csv"; + loadBasicStagingData(dataPass2); + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + Assertions.fail("Should not Succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } /* Scenario: Test NonTemporal Delta when Auditing is enabled */ @Test - void testNonTemporalDeltaWithAuditing() throws Exception + void testNonTemporalDeltaWithAuditingFilterDuplicates() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); DatasetDefinition stagingTable = TestUtils.getBasicStagingTable(); // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) - .build(); + .digestField(digestName) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -277,7 +291,7 @@ void testNonTemporalDeltaWithAuditing() throws Exception loadBasicStagingData(dataPass1); // 2. 
// 2. Execute plans and verify results Map<String, Object> expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); + expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 5); expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); @@ -287,7 +301,7 @@ /* Scenario: Test NonTemporal Delta when Data splits are enabled */ @Test - void testNonTemporalDeltaNoAuditingWithDataSplits() throws Exception + void testNonTemporalDeltaNoAuditingWithAllVersionDoNotPerform() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); String dataPass1 = basePath + "input/with_data_splits/data_pass1.csv"; @@ -296,7 +310,11 @@ void testNonTemporalDeltaNoAuditingWithDataSplits() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestName) - .dataSplitField(dataSplitName) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(expiryDateName) + .dataSplitFieldName(dataSplitName) + .performStageVersioning(false) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .auditing(NoAuditing.builder().build()) .build(); @@ -323,7 +341,7 @@ void testNonTemporalDeltaNoAuditingWithDataSplits() throws Exception expectedStats2.put(StatisticName.ROWS_TERMINATED.name(), 0); expectedStatsList.add(expectedStats1); expectedStatsList.add(expectedStats2); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges); } @Test @@ -337,14 +355,14 @@ void testNonTemporalDeltaWithMaxVersioningGreaterThan() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) + .build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -390,14 +408,14 @@ void testNonTemporalDeltaWithMaxVersioningGreaterThanEqualTo() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(false) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) +
.mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(false) + .build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -443,14 +461,14 @@ void testNonTemporalDeltaWithMaxVersioningGreaterThanWithDedup() throws Exceptio // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) + .build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -496,14 +514,14 @@ void testNonTemporalDeltaWithMaxVersioningGreaterThanEqualToWithDedup() throws E // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(true) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(true) + .build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -550,9 +568,9 @@ void testNonTemporalDeltaWithFilterStagingTable() throws Exception // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -601,14 +619,14 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThan() th // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) + .build()) + .build(); PlannerOptions options = 
PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -657,14 +675,14 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanEqual // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(false) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(false) + .build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -709,18 +727,19 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanWithD // Create staging table DatasetDefinition stagingTableForDB = TestUtils.getStagingTableWithFilterWithVersionForDB(); - createStagingTable(stagingTableForDB); + createStagingTableWithoutPks(stagingTableForDB); // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) + .build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -755,28 +774,46 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanWithD expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); + + + // ------------ Perform incremental (delta) milestoning Pass3 Fail on Dups ------------------------ + // 0. Create new filter + datasets = Datasets.of(mainTable, TestUtils.getStagingTableWithFilterWithVersionSecondPass()); + String dataPass3 = basePath + "input/with_staging_filter/with_max_versioning/greater_than/with_dedup/data_pass3.csv"; + // 1. 
Load staging table + loadStagingDataWithFilterWithVersion(dataPass3); + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } @Test - void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanEqualToWithDedup() + void testNonTemporalDeltaWithFilterStagingTableWithFilterDupsMaxVersioningGreaterThanEqualTo() { DatasetDefinition mainTable = TestUtils.getBasicMainTableWithVersion(); DerivedDataset stagingTable = TestUtils.getDerivedStagingTableWithFilterWithVersion(); // Create staging table DatasetDefinition stagingTableForDB = TestUtils.getStagingTableWithFilterWithVersionForDB(); - createStagingTable(stagingTableForDB); + createStagingTableWithoutPks(stagingTableForDB); // Generate the milestoning object NontemporalDelta ingestMode = NontemporalDelta.builder() - .digestField(digestName) - .auditing(NoAuditing.builder().build()) - .versioningStrategy(MaxVersionStrategy.builder() - .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(true) - .build()) - .build(); + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(true) + .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); @@ -790,13 +827,13 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanEqual loadStagingDataWithFilterWithVersion(dataPass1); // 2. Execute plans and verify results Map<String, Object> expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); + expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 4); expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); // 3. Assert that the staging table is NOT truncated List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 6); + Assertions.assertEquals(stagingTableList.size(), 7); // ------------ Perform incremental (delta) milestoning Pass2 ------------------------ // 0. Create new filter @@ -807,9 +844,167 @@ void testNonTemporalDeltaWithFilterStagingTableWithMaxVersioningGreaterThanEqual loadStagingDataWithFilterWithVersion(dataPass2); // 2.
Execute plans and verify results expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 10); + expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 12); expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); } + + @Test + void testNonTemporalDeltaWithAllVersionGreaterThanAndStagingFilters() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingDataset = DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersionAndBatch) + .build(); + + createStagingTableWithoutPks(stagingDataset); + DerivedDataset stagingTable = DerivedDataset.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersion) + .addDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 1)) + .build(); + String path = "src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/greater_than/data1.csv"; + TestDedupAndVersioning.loadDataIntoStagingTableWithVersionAndBatch(path); + + // Generate the milestoning object + NontemporalDelta ingestMode = NontemporalDelta.builder() + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(true) + .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; + + // ------------ Perform incremental (delta) milestoning Pass1 ------------------------ + String expectedDataPass1 = basePath + "expected/with_staging_filter/with_all_version/greater_than/expected_pass1.csv"; + // 2. 
Execute plans and verify results + List<Map<String, Object>> expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats1 = new HashMap<>(); + expectedStats1.put(StatisticName.INCOMING_RECORD_COUNT.name(), 4); + expectedStats1.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats1.put(StatisticName.ROWS_DELETED.name(), 0); + Map<String, Object> expectedStats2 = new HashMap<>(); + expectedStats2.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); + expectedStats2.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats2.put(StatisticName.ROWS_DELETED.name(), 0); + Map<String, Object> expectedStats3 = new HashMap<>(); + expectedStats3.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); + expectedStats3.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats3.put(StatisticName.ROWS_DELETED.name(), 0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + expectedStatsList.add(expectedStats3); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform incremental (delta) milestoning Pass2 Fail on Duplicates ------------------------ + ingestMode = ingestMode.withDeduplicationStrategy(FailOnDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + try + { + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } + + // ------------ Perform incremental (delta) milestoning Pass2 Filter Duplicates ------------------------ + String expectedDataPass2 = basePath + "expected/with_staging_filter/with_all_version/greater_than/expected_pass2.csv"; + expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats4 = new HashMap<>(); + expectedStats4.put(StatisticName.INCOMING_RECORD_COUNT.name(), 4); + expectedStats4.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats4.put(StatisticName.ROWS_DELETED.name(), 0); + expectedStatsList.add(expectedStats4); + + ingestMode = ingestMode.withDeduplicationStrategy(FilterDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + } + + @Test + void testNonTemporalDeltaWithAllVersionDigestBasedAndStagingFilters() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingDataset = DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersionAndBatch) + .build(); + + createStagingTableWithoutPks(stagingDataset); + DerivedDataset stagingTable = DerivedDataset.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersion) + .addDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 1)) + .build(); + String path = "src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/digest_based/data1.csv"; +
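// note: the staging data carries a version and a batch column (baseSchemaWithVersionAndBatch), so the DatasetFilter above limits each pass to a single batch of versions +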
TestDedupAndVersioning.loadDataIntoStagingTableWithVersionAndBatch(path); + + // Generate the milestoning object + NontemporalDelta ingestMode = NontemporalDelta.builder() + .digestField(digestName) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; + + // ------------ Perform incremental (delta) milestoning Pass1 ------------------------ + String expectedDataPass1 = basePath + "expected/with_staging_filter/with_all_version/digest_based/expected_pass1.csv"; + // 2. Execute plans and verify results + List<Map<String, Object>> expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats1 = new HashMap<>(); + expectedStats1.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); + expectedStats1.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats1.put(StatisticName.ROWS_DELETED.name(), 0); + Map<String, Object> expectedStats2 = new HashMap<>(); + expectedStats2.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); + expectedStats2.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats2.put(StatisticName.ROWS_DELETED.name(), 0); + Map<String, Object> expectedStats3 = new HashMap<>(); + expectedStats3.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); + expectedStats3.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats3.put(StatisticName.ROWS_DELETED.name(), 0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + expectedStatsList.add(expectedStats3); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform incremental (delta) milestoning Pass2 Filter Duplicates ------------------------ + String expectedDataPass2 = basePath + "expected/with_staging_filter/with_all_version/digest_based/expected_pass2.csv"; + expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats4 = new HashMap<>(); + expectedStats4.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); + expectedStats4.put(StatisticName.ROWS_TERMINATED.name(), 0); + expectedStats4.put(StatisticName.ROWS_DELETED.name(), 0); + expectedStatsList.add(expectedStats4); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + } + } \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java index 903e83bc355..9e1ce6ca59b 100644 ---
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/nontemporal/NontemporalSnapshotTest.java @@ -20,25 +20,21 @@ import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; -import org.finos.legend.engine.persistence.components.relational.api.DataSplitRange; +import org.finos.legend.engine.persistence.components.versioning.TestDedupAndVersioning; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.ArrayList; import java.util.List; import java.util.Map; -import static org.finos.legend.engine.persistence.components.TestUtils.batchUpdateTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.dataSplitName; -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; +import static org.finos.legend.engine.persistence.components.TestUtils.*; class NontemporalSnapshotTest extends BaseTest { @@ -52,7 +48,8 @@ class NontemporalSnapshotTest extends BaseTest 4. No Auditing & import external CSV dataset 5. Staging has lesser columns than main dataset 6. Staging data cleanup - 7. Data Splits enabled + 7. With Auditing, Max Version, Filter Duplicates + 8. 
With Auditing, No Version, Fail on Duplicates */ /* @@ -255,38 +252,106 @@ void testNontemporalSnapshotWithCleanStagingData() throws Exception } /* - Scenario: Test Nontemporal Snapshot when data splits are enabled + Scenario: Test Nontemporal Snapshot when MaxVersion and FilterDuplicates are enabled */ @Test - void testNontemporalSnapshotWithDataSplits() throws Exception + void testNontemporalSnapshotWithMaxVersionAndFilterDuplicates() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); - String dataPass1 = basePath + "input/with_data_splits/data_pass1.csv"; - Dataset stagingTable = TestUtils.getBasicCsvDatasetReferenceTableWithDataSplits(dataPass1); + DatasetDefinition stagingTable = TestDedupAndVersioning.getStagingTableWithVersion(); + + // Create staging table + TestDedupAndVersioning.createStagingTableWithVersion(); // Generate the milestoning object NontemporalSnapshot ingestMode = NontemporalSnapshot.builder() .auditing(NoAuditing.builder().build()) - .dataSplitField(dataSplitName) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); - PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; - // ------------ Perform incremental (append) milestoning Pass1 ------------------------ - String expectedDataPass1 = basePath + "expected/with_data_splits/expected_pass1.csv"; - // Execute plans and verify results - List<DataSplitRange> dataSplitRanges = new ArrayList<>(); - dataSplitRanges.add(DataSplitRange.of(1, 1)); - dataSplitRanges.add(DataSplitRange.of(2, 2)); - dataSplitRanges.add(DataSplitRange.of(3, 3)); - - List<Map<String, Object>> expectedStatsList = new ArrayList<>(); - Map<String, Object> expectedStats = createExpectedStatsMap(5, 0, 3, 0, 0); - expectedStatsList.add(expectedStats); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges); + // ------------ Perform snapshot milestoning Pass1 ------------------------ + String dataPass1 = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPass1 = basePath + "expected/max_version_filter_duplicates/expected_pass1.csv"; + // 1. Load staging table + TestDedupAndVersioning.loadDataIntoStagingTableWithVersion(dataPass1); + // 2. Execute plans and verify results + + Map<String, Object> expectedStats = createExpectedStatsMap(6, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + + // ------------ Perform snapshot milestoning Pass2 ------------------------ + // Throw Data Error + String dataPass2 = "src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv"; + // 1. Load staging table + TestDedupAndVersioning.loadDataIntoStagingTableWithVersion(dataPass2);
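+ // data3_with_dups_and_data_error.csv carries rows with the same PK and version but different payloads, which max-version resolution must reject as a data error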
+ // 2. Execute plans and verify results + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch", e.getMessage()); + } + } + + /* + Scenario: Test Nontemporal Snapshot when No Version and FailOnDuplicates + */ + @Test + void testNontemporalSnapshotWithFailOnDupsNoVersioning() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestDedupAndVersioning.getStagingTableWithoutVersion(); + + // Create staging table + TestDedupAndVersioning.createStagingTableWithoutVersion(); + + // Generate the milestoning object + NontemporalSnapshot ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName, nameName, incomeName, expiryDateName, digestName}; + + // ------------ Perform snapshot milestoning Pass1 ------------------------ + String dataPass1 = "src/test/resources/data/dedup-and-versioning/input/data5_without_dups.csv"; + String expectedDataPass1 = "src/test/resources/data/dedup-and-versioning/input/data5_without_dups.csv"; + // 1. Load staging table + TestDedupAndVersioning.loadDataIntoStagingTableWithoutVersion(dataPass1); + // 2. Execute plans and verify results + + Map<String, Object> expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + + // ------------ Perform snapshot milestoning Pass2 ------------------------ + // Throw Duplicates Error + String dataPass2 = "src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv"; + // 1. Load staging table + TestDedupAndVersioning.loadDataIntoStagingTableWithoutVersion(dataPass2);
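+ // data1_with_dups.csv reloads rows that share primary keys, so FailOnDuplicates is expected to abort this batch with the duplicates error asserted below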
+ // 2. Execute plans and verify results + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } + } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/MultiTableIngestionTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/MultiTableIngestionTest.java index 8b3a608bfd3..59ff0c03089 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/MultiTableIngestionTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/MultiTableIngestionTest.java @@ -145,8 +145,8 @@ public void testMultiTableIngestionSuccessCase() throws Exception verifyResults(2, datsetSchema2, expectedDataset2Path, "main2", result.get(1), expectedStats); // Pass 3: - dataset1Path = basePathForInput + "multi_table_ingestion/staging_dataset_pass3.csv"; - dataset2Path = basePathForInput + "multi_table_ingestion/staging_dataset_pass3.csv"; + dataset1Path = "src/test/resources/data/empty_file.csv"; + dataset2Path = "src/test/resources/data/empty_file.csv"; expectedDataset1Path = basePathForExpected + "multi_table_ingestion/expected_dataset1_pass3.csv"; expectedDataset2Path = basePathForExpected + "multi_table_ingestion/expected_dataset2_pass3.csv"; expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); @@ -171,7 +171,7 @@ private List<IngestorResult> ingestMultiTables(Executor executor, RelationalInge executor.begin(); for (Datasets datasets: allDatasets) { - IngestorResult result = ingestor.ingest(datasets); + IngestorResult result = ingestor.ingest(datasets).get(0); multiTableIngestionResult.add(result); } @@ -256,7 +256,7 @@ private List<IngestorResult> ingestMultiTablesWithBadQuery(Executor executor, Re executor.begin(); for (Datasets datasets: allDatasets) { - IngestorResult result = ingestor.ingest(datasets); + IngestorResult result = ingestor.ingest(datasets).get(0); multiTableIngestionResult.add(result); } @@ -301,7 +301,7 @@ private void loadStagingDataset2(String path) throws Exception public static void verifyResults(int batchId, String[] schema, String expectedDataPath, String tableName, IngestorResult result, Map<String, Object> expectedStats) throws IOException { Assertions.assertEquals(batchId, result.batchId().get()); - Assertions.assertEquals("2000-01-01 00:00:00", result.ingestionTimestampUTC()); + Assertions.assertEquals("2000-01-01 00:00:00.000000", result.ingestionTimestampUTC()); List<Map<String, Object>> tableData = h2Sink.executeQuery(String.format("select * from \"TEST\".\"%s\"", tableName)); TestUtils.assertFileAndTableDataEquals(schema, expectedDataPath, tableData); Map<String, Object> actualStats = result.statisticByName(); diff --git
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDbAndSchemaMissingTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDbAndSchemaMissingTest.java index 9e1d2079eb9..bb8cb4dbf4f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDbAndSchemaMissingTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaDbAndSchemaMissingTest.java @@ -114,7 +114,7 @@ void testMilestoning(DatasetDefinition mainTable, DatasetDefinition stagingTable executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "without_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_delete_ind/expected_pass3.csv"; // 1. Load staging table loadStagingData(dataPass3, stagingTableFullyQualifiedName); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaTest.java index cf8c5f9ebec..c5a6709584f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaTest.java @@ -19,10 +19,14 @@ import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.common.OptimizationFilter; import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningComparator; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; import 
org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionComparator; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DerivedDataset; @@ -105,7 +109,7 @@ void testMilestoning() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "without_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_delete_ind/expected_pass3.csv"; // 1. Load staging table loadBasicStagingData(dataPass3); @@ -165,7 +169,7 @@ void testMilestoningWithDeleteIndicator() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_delete_ind/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithDeleteInd(dataPass3); @@ -222,7 +226,7 @@ void testMilestoningWithOptimizationFilters() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_optimization_filter/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_optimization_filter/expected_pass3.csv"; // 1. 
Load staging table loadBasicStagingData(dataPass3); @@ -232,7 +236,7 @@ void testMilestoningWithOptimizationFilters() throws Exception } @Test - void testMilestoningWithMaxVersioningGreaterThan() throws Exception + void testMilestoningWithMaxVersionGreaterThanDoNotPerform() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DatasetDefinition stagingTable = TestUtils.getStagingTableWithVersion(); @@ -252,8 +256,8 @@ void testMilestoningWithMaxVersioningGreaterThan() throws Exception .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) .build()) .build(); @@ -282,7 +286,7 @@ void testMilestoningWithMaxVersioningGreaterThan() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_max_versioning/greater_than/without_dedup/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithVersion(dataPass3); @@ -292,7 +296,7 @@ void testMilestoningWithMaxVersioningGreaterThan() throws Exception } @Test - void testMilestoningWithMaxVersioningGreaterThanEqualTo() throws Exception + void testMilestoningWithMaxVersionGreaterThanEqualToDoNotPerform() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DatasetDefinition stagingTable = TestUtils.getStagingTableWithVersion(); @@ -312,8 +316,8 @@ void testMilestoningWithMaxVersioningGreaterThanEqualTo() throws Exception .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(false) .build()) .build(); @@ -342,7 +346,7 @@ void testMilestoningWithMaxVersioningGreaterThanEqualTo() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_max_versioning/greater_than_equal_to/without_dedup/expected_pass3.csv"; // 1. 
Load staging table loadStagingDataWithVersion(dataPass3); @@ -352,7 +356,7 @@ } @Test - void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception + void testMilestoningWithFilterDuplicatesMaxVersioningGreaterThan() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DatasetDefinition stagingTable = TestUtils.getStagingTableWithVersion(); @@ -360,7 +364,7 @@ void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchIdInName, batchIdOutName, batchTimeInName, batchTimeOutName}; // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -372,9 +376,10 @@ void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -386,11 +391,11 @@ void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception // 1. Load staging table loadStagingDataWithVersion(dataPass1); // 2. Execute plans and verify results - Map<String, Object> expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + Map<String, Object> expectedStats = createExpectedStatsMap(6, 0, 3, 0, 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); // 3. Assert that the staging table is NOT truncated List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 3); + Assertions.assertEquals(stagingTableList.size(), 6); // ------------ Perform Pass2 ------------------------ String dataPass2 = basePathForInput + "with_max_versioning/greater_than/with_dedup/staging_data_pass2.csv"; @@ -402,7 +407,7 @@ void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_max_versioning/greater_than/with_dedup/expected_pass3.csv"; // 1.
Load staging table loadStagingDataWithVersion(dataPass3); @@ -412,7 +417,7 @@ void testMilestoningWithMaxVersioningGreaterThanWithDedup() throws Exception } @Test - void testMilestoningWithMaxVersioningGreaterThanEqualToWithDedup() throws Exception + void testMilestoningWithFailOnDuplicatesMaxVersioningGreaterThanEqualTo() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DatasetDefinition stagingTable = TestUtils.getStagingTableWithVersion(); @@ -420,7 +425,7 @@ void testMilestoningWithMaxVersioningGreaterThanEqualToWithDedup() throws Except String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchIdInName, batchIdOutName, batchTimeInName, batchTimeOutName}; // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -432,9 +437,10 @@ void testMilestoningWithMaxVersioningGreaterThanEqualToWithDedup() throws Except .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(true) .build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -462,13 +468,28 @@ void testMilestoningWithMaxVersioningGreaterThanEqualToWithDedup() throws Except executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_max_versioning/greater_than_equal_to/with_dedup/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithVersion(dataPass3); // 2. Execute plans and verify results expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats); + + // ------------ Perform Pass4 (Fail on Dups) ------------------------- + String dataPass4 = basePathForInput + "with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass4.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass4); + // 2. 
Execute plans and verify results + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } @Test @@ -505,7 +526,7 @@ void testMilestoningWithFilterStagingTable() throws Exception Map<String, Object> expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); IngestorResult result = executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); Assertions.assertEquals(Optional.of(1), result.batchId()); - Assertions.assertEquals("2000-01-01 00:00:00", result.ingestionTimestampUTC()); + Assertions.assertEquals("2000-01-01 00:00:00.000000", result.ingestionTimestampUTC()); // 3. Assert that the staging table is NOT truncated List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); @@ -522,10 +543,10 @@ void testMilestoningWithFilterStagingTable() throws Exception expectedStats = createExpectedStatsMap(3, 0, 1, 1, 0); result = executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); Assertions.assertEquals(Optional.of(2), result.batchId()); - Assertions.assertEquals("2000-01-01 00:00:00", result.ingestionTimestampUTC()); + Assertions.assertEquals("2000-01-01 00:00:00.000000", result.ingestionTimestampUTC()); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_no_versioning/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_no_versioning/expected_pass3.csv";
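// Pass 3 loads the shared empty_file.csv, so the batch is a deliberate no-op and every statistic below is expected to be zero.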
// 1. Load staging table loadStagingDataWithFilter(dataPass3); @@ -533,11 +554,11 @@ void testMilestoningWithFilterStagingTable() throws Exception expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); result = executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); Assertions.assertEquals(Optional.of(3), result.batchId()); - Assertions.assertEquals("2000-01-01 00:00:00", result.ingestionTimestampUTC()); + Assertions.assertEquals("2000-01-01 00:00:00.000000", result.ingestionTimestampUTC()); } @Test - void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws Exception + void testMilestoningWithFilterDupsMaxVersionGreaterThanWithStagingFilters() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DerivedDataset stagingTable = TestUtils.getDerivedStagingTableWithFilterWithVersion(); @@ -546,7 +567,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws // Create staging table DatasetDefinition stagingTableForDB = TestUtils.getStagingTableWithFilterWithVersionForDB(); - createStagingTable(stagingTableForDB); + createStagingTableWithoutPks(stagingTableForDB); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -558,9 +579,10 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) + .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -572,11 +594,11 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws // 1. Load staging table loadStagingDataWithFilterWithVersion(dataPass1); // 2. Execute plans and verify results - Map<String, Object> expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + Map<String, Object> expectedStats = createExpectedStatsMap(6, 0, 3, 0, 0); executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); // 3. Assert that the staging table is NOT truncated List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); - Assertions.assertEquals(stagingTableList.size(), 6); + Assertions.assertEquals(stagingTableList.size(), 9); // ------------ Perform Pass2 ------------------------ // 0. Create new filter @@ -590,7 +612,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than/without_dedup/expected_pass3.csv"; // 1.
Load staging table loadStagingDataWithFilterWithVersion(dataPass3); @@ -600,7 +622,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws } @Test - void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualTo() throws Exception + void testMilestoningWithFailOnDupsMaxVersionGreaterThanEqualToWithStagingFilters() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DerivedDataset stagingTable = TestUtils.getDerivedStagingTableWithFilterWithVersion(); @@ -609,7 +631,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualTo() // Create staging table DatasetDefinition stagingTableForDB = TestUtils.getStagingTableWithFilterWithVersionForDB(); - createStagingTable(stagingTableForDB); + createStagingTableWithoutPks(stagingTableForDB); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -621,9 +643,10 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualTo() .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(false) .build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -653,17 +676,32 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualTo() executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithFilterWithVersion(dataPass3); // 2. Execute plans and verify results expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform Pass4 Fail on Dups ------------------------- + String dataPass4 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass4.csv"; + // 1. Load staging table + loadStagingDataWithFilterWithVersion(dataPass4); + // 2. 
Execute plans and verify results + try + { + executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } @Test - void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedup() throws Exception + void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThan() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DerivedDataset stagingTable = TestUtils.getDerivedStagingTableWithFilterWithVersion(); @@ -684,8 +722,8 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedup( .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) .build()) .build(); @@ -716,7 +754,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedup( executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than/with_dedup/expected_pass3.csv"; // 1. 
Load staging table loadStagingDataWithFilterWithVersion(dataPass3); @@ -726,7 +764,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedup( } @Test - void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualToWithDedup() throws Exception + void testMilestoningWithFilterDupsMaxVersioningDigestBasedWithStagingFilters() throws Exception { DatasetDefinition mainTable = TestUtils.getUnitemporalMainTableWithVersion(); DerivedDataset stagingTable = TestUtils.getDerivedStagingTableWithFilterWithVersion(); @@ -747,17 +785,18 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualToWit .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(true) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform Pass1 ------------------------ - String dataPass1 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass1.csv"; + String dataPass1 = basePathForInput + "with_staging_filter/with_max_versioning/digest_based/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_staging_filter/with_max_versioning/digest_based/expected_pass1.csv"; // 1. Load staging table loadStagingDataWithFilterWithVersion(dataPass1); // 2. Execute plans and verify results @@ -770,17 +809,17 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanEqualToWit // ------------ Perform Pass2 ------------------------ // 0. Create new filter datasets = Datasets.of(mainTable, TestUtils.getStagingTableWithFilterWithVersionSecondPass()); - String dataPass2 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass2.csv"; - String expectedDataPass2 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass2.csv"; + String dataPass2 = basePathForInput + "with_staging_filter/with_max_versioning/digest_based/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "with_staging_filter/with_max_versioning/digest_based/expected_pass2.csv"; // 1. Load staging table loadStagingDataWithFilterWithVersion(dataPass2); // 2. 
Execute plans and verify results - expectedStats = createExpectedStatsMap(9, 0, 1, 3, 0); + expectedStats = createExpectedStatsMap(9, 0, 1, 2, 0); executePlansAndVerifyResultsWithStagingFilters(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv"; - String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/digest_based/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithFilterWithVersion(dataPass3); // 2. Execute plans and verify results @@ -809,8 +848,8 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedupW .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(versionName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) .build()) .build(); @@ -838,7 +877,7 @@ void testMilestoningWithFilterStagingTableWithMaxVersioningGreaterThanWithDedupW executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_staging_filter/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_staging_filter/with_max_versioning/greater_than/with_dedup/expected_pass3.csv"; // 1. 
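
The renames in these hunks map mechanically: versioningComparator(VersioningComparator.X) becomes mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.X)), and performDeduplication becomes performStageVersioning. Digest-based resolution is now a resolver of its own rather than a comparator value; with it, rows sharing a primary key and version are expected to carry the same digest, which is what the "Data errors" assertions later in this diff check. A sketch of both flavours, harness constants assumed:

    // Column-based: the version column itself decides which row wins.
    MaxVersionStrategy columnBased = MaxVersionStrategy.builder()
        .versioningField(versionName)
        .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN))
        .performStageVersioning(true)
        .build();

    // Digest-based: same PK + same version must mean same digest.
    MaxVersionStrategy digestBased = MaxVersionStrategy.builder()
        .versioningField(versionName)
        .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
        .performStageVersioning(true)
        .build();
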
Load staging table loadStagingDataWithFilterWithVersionInUpperCase(dataPass3); @@ -868,8 +907,8 @@ void testMilestoningWithMaxVersioningFail() throws Exception .build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(nameName) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) .build()) .build(); @@ -934,7 +973,7 @@ void testMilestoningWithLessColumnsInStaging() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "less_columns_in_staging/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "less_columns_in_staging/expected_pass3.csv"; stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass3); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchIdTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchIdTest.java index 1e3abfd633b..4cfbd6efbc8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchIdTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchIdTest.java @@ -16,33 +16,34 @@ import org.finos.legend.engine.persistence.components.BaseTest; import org.finos.legend.engine.persistence.components.TestUtils; +import org.finos.legend.engine.persistence.components.common.DatasetFilter; import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.common.FilterType; +import org.finos.legend.engine.persistence.components.common.StatisticName; +import org.finos.legend.engine.persistence.components.ingestmode.NontemporalDelta; import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta; +import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import 
org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionComparator; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.DerivedDataset; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; import org.finos.legend.engine.persistence.components.relational.api.DataSplitRange; +import org.finos.legend.engine.persistence.components.versioning.TestDedupAndVersioning; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.ArrayList; - -import static org.finos.legend.engine.persistence.components.TestUtils.batchIdInName; -import static org.finos.legend.engine.persistence.components.TestUtils.batchIdOutName; -import static org.finos.legend.engine.persistence.components.TestUtils.deleteIndicatorName; -import static org.finos.legend.engine.persistence.components.TestUtils.deleteIndicatorValues; -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.dataSplitName; +import java.util.*; + +import static org.finos.legend.engine.persistence.components.TestUtils.*; +import static org.finos.legend.engine.persistence.components.TestUtils.versionName; class UnitemporalDeltaWithBatchIdTest extends BaseTest { @@ -61,7 +62,7 @@ void testMilestoning() throws Exception String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchIdInName, batchIdOutName}; // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -69,6 +70,7 @@ void testMilestoning() throws Exception .batchIdInName(batchIdInName) .batchIdOutName(batchIdOutName) .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -92,11 +94,11 @@ void testMilestoning() throws Exception // 1. Load staging table loadBasicStagingData(dataPass2); // 2. Execute plans and verify results - expectedStats = createExpectedStatsMap(3, 0, 1, 1, 0); + expectedStats = createExpectedStatsMap(6, 0, 1, 1, 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "without_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_delete_ind/expected_pass3.csv"; // 1. 
Load staging table loadBasicStagingData(dataPass3); @@ -197,7 +199,7 @@ void testMilestoningWithDeleteIndicator() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_delete_ind/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithDeleteInd(dataPass3); @@ -246,7 +248,7 @@ void testMilestoningWithLessColumnsInStaging() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "less_columns_in_staging/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "less_columns_in_staging/expected_pass3.csv"; stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass3); @@ -299,7 +301,7 @@ void testMilestoningWithDeleteIndicatorWithCleanStagingData() throws Exception } @Test - void testMilestoningWithDataSplits() throws Exception + void testMilestoningAllVersionWithoutPerform() throws Exception { DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); String dataPass1 = basePathForInput + "with_data_splits/staging_data_pass1.csv"; @@ -309,7 +311,11 @@ void testMilestoningWithDataSplits() throws Exception UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) - .dataSplitField(dataSplitName) + .versioningStrategy(AllVersionsStrategy.builder() + .dataSplitFieldName(dataSplitName) + .versioningField(expiryDateName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false).build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInName) .batchIdOutName(batchIdOutName) @@ -333,9 +339,10 @@ void testMilestoningWithDataSplits() throws Exception expectedStatsList.add(expectedStatsSplit1); expectedStatsList.add(expectedStatsSplit2); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, dataSplitRanges); // ------------ Perform milestoning Pass2 ------------------------ + ingestMode = ingestMode.withDeduplicationStrategy(FilterDuplicates.builder().build()); String dataPass2 = basePathForInput + "with_data_splits/staging_data_pass2.csv"; stagingTable = TestUtils.getBasicCsvDatasetReferenceTableWithDataSplits(dataPass2); String expectedDataPass2 = basePathForExpected + "with_data_splits/expected_pass2.csv"; @@ -350,16 +357,203 @@ void testMilestoningWithDataSplits() throws Exception expectedStatsList.add(createExpectedStatsMap(1, 0, 0, 1, 0)); expectedStatsList.add(createExpectedStatsMap(1, 0, 0, 1, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, Datasets.of(mainTable, stagingTable), schema, expectedDataPass2, expectedStatsList, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, Datasets.of(mainTable, stagingTable), schema, expectedDataPass2, expectedStatsList, dataSplitRanges); // ------------ 
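
Data splits move under the same umbrella: instead of a bare dataSplitField(...), the split field now rides on AllVersionsStrategy together with the versioning field and resolver, which is what the testMilestoningAllVersionWithoutPerform hunk above configures. A sketch, harness constants assumed:

    UnitemporalDelta ingestMode = UnitemporalDelta.builder()
        .digestField(digestName)
        .versioningStrategy(AllVersionsStrategy.builder()
            .dataSplitFieldName(dataSplitName)
            .versioningField(expiryDateName)
            .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
            .performStageVersioning(false) // splits are taken as provided, not derived in the stage
            .build())
        .transactionMilestoning(BatchId.builder()
            .batchIdInName(batchIdInName)
            .batchIdOutName(batchIdOutName)
            .build())
        .build();
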
Perform milestoning Pass3 - Empty batch ------------------------ - String dataPass3 = basePathForInput + "with_data_splits/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; stagingTable = TestUtils.getBasicCsvDatasetReferenceTableWithDataSplits(dataPass3); String expectedDataPass3 = basePathForExpected + "with_data_splits/expected_pass3.csv"; // Execute plans and verify results dataSplitRanges = new ArrayList<>(); expectedStatsList = new ArrayList<>(); expectedStatsList.add(createExpectedStatsMap(0, 0, 0, 0, 0)); - executePlansAndVerifyResultsWithDataSplits(ingestMode, options, Datasets.of(mainTable, stagingTable), schema, expectedDataPass3, expectedStatsList, dataSplitRanges); + executePlansAndVerifyResultsWithSpecifiedDataSplits(ingestMode, options, Datasets.of(mainTable, stagingTable), schema, expectedDataPass3, expectedStatsList, dataSplitRanges); + } + + @Test + void testUniTemporalDeltaWithAllVersionGreaterThanAndStagingFilters() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingDataset = DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersionAndBatch) + .build(); + + createStagingTableWithoutPks(stagingDataset); + DerivedDataset stagingTable = DerivedDataset.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersion) + .addDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 1)) + .build(); + String path = basePathForInput + "with_all_version/data1.csv"; + TestDedupAndVersioning.loadDataIntoStagingTableWithVersionAndBatch(path); + + // Generate the milestoning object + UnitemporalDelta ingestMode = UnitemporalDelta.builder() + .digestField(digestName) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(true) + .build()) + .transactionMilestoning(BatchId.builder() + .batchIdInName(batchIdInName) + .batchIdOutName(batchIdOutName) + .build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; + + // ------------ Perform milestoning Pass1 ------------------------ + String expectedDataPass1 = basePathForExpected + "with_all_version/greater_than/expected_pass1.csv"; + // 2. 
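
Staging filters in the new tests are expressed by wrapping the physical staging table in a DerivedDataset and attaching a DatasetFilter; later passes swap the filter with the generated wither instead of rebuilding the dataset. A sketch reusing the names from the hunk above (mainTable and the harness constants are assumed):

    DerivedDataset stagingTable = DerivedDataset.builder()
        .group(testSchemaName)
        .name(stagingTableName)
        .schema(TestDedupAndVersioning.baseSchemaWithVersion)
        .addDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 1))
        .build();

    // Each subsequent pass re-points the filter at the next slice of staging data:
    stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2));
    Datasets datasets = Datasets.of(mainTable, stagingTable);
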
Execute plans and verify results + List<Map<String, Object>> expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats1 = createExpectedStatsMap(3,0,3,0,0); + Map<String, Object> expectedStats2 = createExpectedStatsMap(2,0,0,2,0); + Map<String, Object> expectedStats3 = createExpectedStatsMap(1,0,0,1,0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + expectedStatsList.add(expectedStats3); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform milestoning Pass2 Fail on Duplicates ------------------------ + ingestMode = ingestMode.withDeduplicationStrategy(FailOnDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + try + { + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } + + // ------------ Perform milestoning Pass2 Filter Duplicates ------------------------ + String expectedDataPass2 = basePathForExpected + "with_all_version/greater_than/expected_pass2.csv"; + expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats4 = createExpectedStatsMap(4,0,1,0,0); + Map<String, Object> expectedStats5 = createExpectedStatsMap(2,0,0,2,0); + expectedStatsList.add(expectedStats4); + expectedStatsList.add(expectedStats5); + + ingestMode = ingestMode.withDeduplicationStrategy(FilterDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform milestoning Pass3 Data Error ------------------------ + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 3)); + datasets = Datasets.of(mainTable, stagingTable); + + try + { + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch", e.getMessage()); + } + } + + @Test + void testUniTemporalDeltaWithAllVersionDigestBasedAndStagingFilters() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingDataset = DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersionAndBatch) + .build(); + + createStagingTableWithoutPks(stagingDataset); + DerivedDataset stagingTable = DerivedDataset.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(TestDedupAndVersioning.baseSchemaWithVersion) + .addDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 1)) + .build(); + String path = basePathForInput + "with_all_version/data1.csv"; + TestDedupAndVersioning.loadDataIntoStagingTableWithVersionAndBatch(path); + + // Generate the milestoning object + UnitemporalDelta ingestMode =
UnitemporalDelta.builder() + .digestField(digestName) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .transactionMilestoning(BatchId.builder() + .batchIdInName(batchIdInName) + .batchIdOutName(batchIdOutName) + .build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + String[] schema = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; + + // ------------ Perform milestoning Pass1 ------------------------ + String expectedDataPass1 = basePathForExpected + "with_all_version/digest_based/expected_pass1.csv"; + // 2. Execute plans and verify results + List<Map<String, Object>> expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats1 = createExpectedStatsMap(3,0,3,0,0); + Map<String, Object> expectedStats2 = createExpectedStatsMap(2,0,0,2,0); + Map<String, Object> expectedStats3 = createExpectedStatsMap(1,0,0,1,0); + expectedStatsList.add(expectedStats1); + expectedStatsList.add(expectedStats2); + expectedStatsList.add(expectedStats3); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform milestoning Pass2 Fail on Duplicates ------------------------ + ingestMode = ingestMode.withDeduplicationStrategy(FailOnDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + try + { + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass1, expectedStatsList, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } + + // ------------ Perform milestoning Pass2 Filter Duplicates ------------------------ + String expectedDataPass2 = basePathForExpected + "with_all_version/digest_based/expected_pass2.csv"; + expectedStatsList = new ArrayList<>(); + Map<String, Object> expectedStats4 = createExpectedStatsMap(4,0,1,1,0); + Map<String, Object> expectedStats5 = createExpectedStatsMap(2,0,0,2,0); + expectedStatsList.add(expectedStats4); + expectedStatsList.add(expectedStats5); + + ingestMode = ingestMode.withDeduplicationStrategy(FilterDuplicates.builder().build()); + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 2)); + datasets = Datasets.of(mainTable, stagingTable); + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + + // ------------ Perform milestoning Pass3 Data Error ------------------------ + stagingTable = stagingTable.withDatasetFilters(DatasetFilter.of("batch", FilterType.EQUAL_TO, 3)); + datasets = Datasets.of(mainTable, stagingTable); + + try + { + executePlansAndVerifyResultsWithDerivedDataSplits(ingestMode, options, datasets, schema, expectedDataPass2, expectedStatsList, fixedClock_2000_01_01); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch", e.getMessage()); + } + } + } diff --git
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchTimeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchTimeTest.java index d1934283bfe..fcc3eec3c89 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchTimeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalDeltaWithBatchTimeTest.java @@ -18,6 +18,7 @@ import org.finos.legend.engine.persistence.components.TestUtils; import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTime; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; @@ -58,7 +59,7 @@ void testMilestoning() throws Exception String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchTimeInName, batchTimeOutName}; // Create staging table - createStagingTable(stagingTable); + createStagingTableWithoutPks(stagingTable); UnitemporalDelta ingestMode = UnitemporalDelta.builder() .digestField(digestName) @@ -66,6 +67,7 @@ void testMilestoning() throws Exception .dateTimeInName(batchTimeInName) .dateTimeOutName(batchTimeOutName) .build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); @@ -93,13 +95,28 @@ void testMilestoning() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "without_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_delete_ind/expected_pass3.csv"; // 1. Load staging table loadBasicStagingData(dataPass3); // 2. Execute plans and verify results expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_03); + + // ------------ Perform Pass4 Fail on Duplicates ------------------------- + String dataPass4 = basePathForInput + "without_delete_ind/staging_data_pass4.csv"; + // 1. Load staging table + loadBasicStagingData(dataPass4); + // 2. 
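
The batch-time variant above is the same shape with TransactionDateTime in place of BatchId; pairing it with FailOnDuplicates produces the pass-4 failure asserted immediately below. A sketch, harness constants assumed:

    UnitemporalDelta ingestMode = UnitemporalDelta.builder()
        .digestField(digestName)
        .transactionMilestoning(TransactionDateTime.builder()
            .dateTimeInName(batchTimeInName)
            .dateTimeOutName(batchTimeOutName)
            .build())
        .deduplicationStrategy(FailOnDuplicates.builder().build())
        .build();
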
Execute plans and verify results + try + { + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_03); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy", e.getMessage()); + } } /* @@ -150,7 +167,7 @@ void testMilestoningWithDeleteIndicator() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "with_delete_ind/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_delete_ind/expected_pass3.csv"; // 1. Load staging table loadStagingDataWithDeleteInd(dataPass3); @@ -197,7 +214,7 @@ void testMilestoningWithLessColumnsInStaging() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets.withStagingDataset(stagingTable), schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); // ------------ Perform Pass3 empty batch (No Impact) ------------------------- - String dataPass3 = basePathForInput + "less_columns_in_staging/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "less_columns_in_staging/expected_pass3.csv"; stagingTable = TestUtils.getCsvDatasetRefWithLessColumnsThanMain(dataPass3); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotTest.java index 92126d34a4a..8ab15d09f9f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotTest.java @@ -18,9 +18,12 @@ import org.finos.legend.engine.persistence.components.TestUtils; import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshot; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.emptyhandling.FailEmptyBatch; import org.finos.legend.engine.persistence.components.ingestmode.emptyhandling.NoOp; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import 
org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.planner.PlannerOptions; @@ -69,8 +72,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ - String dataPass1 = basePathForInput + "without_partition/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "without_partition/expected_pass1.csv"; + String dataPass1 = basePathForInput + "without_partition/no_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "without_partition/no_version/expected_pass1.csv"; // 1. Load staging table loadBasicStagingData(dataPass1); // 2. Execute plans and verify results @@ -81,8 +84,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception Assertions.assertEquals(stagingTableList.size(), 3); // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ - String dataPass2 = basePathForInput + "without_partition/staging_data_pass2.csv"; - String expectedDataPass2 = basePathForExpected + "without_partition/expected_pass2.csv"; + String dataPass2 = basePathForInput + "without_partition/no_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "without_partition/no_version/expected_pass2.csv"; // 1. Load staging table loadBasicStagingData(dataPass2); // 2. Execute plans and verify results @@ -93,8 +96,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception options = options.withCleanupStagingData(true); - String dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; - String expectedDataPass3 = basePathForExpected + "without_partition/expected_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "without_partition/no_version/expected_pass3.csv"; // 1. Load Staging table loadBasicStagingData(dataPass3); // 2. Execute plans and verify results @@ -127,8 +130,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartitionWithCaseConversion() Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ - String dataPass1 = basePathForInput + "without_partition/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "without_partition/expected_pass1.csv"; + String dataPass1 = basePathForInput + "without_partition/no_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "without_partition/no_version/expected_pass1.csv"; // 1. Load staging table loadBasicStagingDataInUpperCase(dataPass1); // 2. Execute plans and verify results @@ -139,8 +142,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartitionWithCaseConversion() Assertions.assertEquals(stagingTableList.size(), 3); // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ - String dataPass2 = basePathForInput + "without_partition/staging_data_pass2.csv"; - String expectedDataPass2 = basePathForExpected + "without_partition/expected_pass2.csv"; + String dataPass2 = basePathForInput + "without_partition/no_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "without_partition/no_version/expected_pass2.csv"; // 1. 
Load staging table loadBasicStagingDataInUpperCase(dataPass2); // 2. Execute plans and verify results @@ -161,8 +164,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartitionWithCaseConversion() options = options.withCleanupStagingData(true); - String dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; - String expectedDataPass3 = basePathForExpected + "without_partition/expected_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "without_partition/no_version/expected_pass3.csv"; // 1. Load Staging table loadBasicStagingDataInUpperCase(dataPass3); // 2. Execute plans and verify results @@ -191,8 +194,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartitionWithCaseConversion() options = options.withCleanupStagingData(true); - dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; - expectedDataPass3 = basePathForExpected + "without_partition/expected_pass2.csv"; + dataPass3 = "src/test/resources/data/empty_file.csv"; + expectedDataPass3 = basePathForExpected + "without_partition/no_version/expected_pass2.csv"; // 1. Load Staging table loadBasicStagingDataInUpperCase(dataPass3); // 2. Execute plans and verify results @@ -203,8 +206,8 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartitionWithCaseConversion() // ------------ Perform unitemporal snapshot milestoning Pass6 (Empty Batch) Empty Data Handling = Skip ------------------------ options = options.withCleanupStagingData(true); - dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; - expectedDataPass3 = basePathForExpected + "without_partition/expected_pass4.csv"; + dataPass3 = "src/test/resources/data/empty_file.csv"; + expectedDataPass3 = basePathForExpected + "without_partition/no_version/expected_pass4.csv"; // 1. Load Staging table loadBasicStagingDataInUpperCase(dataPass3); // 2. Execute plans and verify results @@ -244,8 +247,8 @@ void testUnitemporalSnapshotMilestoningLogicWithPartition() throws Exception Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ - String dataPass1 = basePathForInput + "with_partition/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "with_partition/expected_pass1.csv"; + String dataPass1 = basePathForInput + "with_partition/no_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_partition/no_version/expected_pass1.csv"; // 1. Load staging table loadStagingDataForWithPartition(dataPass1); // 2. Execute plans and verify results @@ -253,8 +256,8 @@ void testUnitemporalSnapshotMilestoningLogicWithPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ - String dataPass2 = basePathForInput + "with_partition/staging_data_pass2.csv"; - String expectedDataPass2 = basePathForExpected + "with_partition/expected_pass2.csv"; + String dataPass2 = basePathForInput + "with_partition/no_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "with_partition/no_version/expected_pass2.csv"; // 1. Load staging table loadStagingDataForWithPartition(dataPass2); // 2. 
Execute plans and verify results @@ -265,8 +268,8 @@ void testUnitemporalSnapshotMilestoningLogicWithPartition() throws Exception options = options.withCleanupStagingData(true); - String dataPass3 = basePathForInput + "with_partition/staging_data_pass3.csv"; - String expectedDataPass3 = basePathForExpected + "with_partition/expected_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "with_partition/no_version/expected_pass3.csv"; // 1. Load Staging table loadStagingDataForWithPartition(dataPass3); // 2. Execute plans and verify results @@ -346,8 +349,8 @@ void testUnitemporalSnapshotMilestoningLogicWithPartitionWithCleanStagingDataWit Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform unitemporal snapshot milestoning With Clean Staging Table ------------------------ - String dataPass1 = basePathForInput + "with_partition/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "with_partition/expected_pass1.csv"; + String dataPass1 = basePathForInput + "with_partition/no_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_partition/no_version/expected_pass1.csv"; // 1. Load staging table loadStagingDataForWithPartition(dataPass1); // 2. Execute plans and verify results @@ -384,8 +387,8 @@ void testUnitemporalSnapshotMilestoningLogicWithPartitionWithoutCleanStagingData Datasets datasets = Datasets.of(mainTable, stagingTable); // ------------ Perform unitemporal snapshot milestoning With Clean Staging Table ------------------------ - String dataPass1 = basePathForInput + "with_partition/staging_data_pass1.csv"; - String expectedDataPass1 = basePathForExpected + "with_partition/expected_pass1.csv"; + String dataPass1 = basePathForInput + "with_partition/no_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_partition/no_version/expected_pass1.csv"; // 1. Load staging table loadStagingDataForWithPartition(dataPass1); // 2. 
Execute plans and verify results @@ -395,4 +398,194 @@ void testUnitemporalSnapshotMilestoningLogicWithPartitionWithoutCleanStagingData List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); Assertions.assertEquals(stagingTableList.size(), 6); } + + /* + Scenario: Test milestoning Logic with max version and without Partition when staging table pre-populated + */ + @Test + void testUnitemporalSnapshotMilestoningLogicMaxVersionWithoutPartitionAllowDuplicates() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestUtils.getStagingTableWithNonPkVersion(); + + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, versionName, batchIdInName, batchIdOutName, batchTimeInName, batchTimeOutName}; + + // Create staging table + createStagingTableWithoutPks(stagingTable); + + UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder() + .digestField(digestName) + .transactionMilestoning(BatchIdAndDateTime.builder() + .batchIdInName(batchIdInName) + .batchIdOutName(batchIdOutName) + .dateTimeInName(batchTimeInName) + .dateTimeOutName(batchTimeOutName) + .build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.builder().build()) + .performStageVersioning(true) + .build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ + String dataPass1 = basePathForInput + "without_partition/max_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "without_partition/max_version/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass1); + // 2. Execute plans and verify results + Map<String, Object> expectedStats = createExpectedStatsMap(6, 0, 3, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); + // 3. Assert that the staging table is NOT truncated + List<Map<String, Object>> stagingTableList = h2Sink.executeQuery("select * from \"TEST\".\"staging\""); + Assertions.assertEquals(stagingTableList.size(), 6); + + // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ + String dataPass2 = basePathForInput + "without_partition/max_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "without_partition/max_version/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataWithVersion(dataPass2); + // 2. Execute plans and verify results + expectedStats = createExpectedStatsMap(4, 0, 1, 2, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ + options = options.withCleanupStagingData(true); + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "without_partition/max_version/expected_pass3.csv"; + // 1. Load Staging table + loadStagingDataWithVersion(dataPass3); + // 2.
Execute plans and verify results + expectedStats = createExpectedStatsMap(0, 0, 0, 0, 4); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); + } + + /* + Scenario: Test milestoning Logic with max version and with Partition when staging table pre-populated + */ + @Test + void testUnitemporalSnapshotMilestoningLogicMaxVersionWithPartitionFilterDuplicates() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestUtils.getEntityPriceWithVersionStagingTable(); + + String[] schema = new String[]{dateName, entityName, priceName, volumeName, digestName, versionName, batchIdInName, batchIdOutName, batchTimeInName, batchTimeOutName}; + + // Create staging table + createStagingTableWithoutPks(stagingTable); + + UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder() + .digestField(digestName) + .transactionMilestoning(BatchIdAndDateTime.builder() + .batchIdInName(batchIdInName) + .batchIdOutName(batchIdOutName) + .dateTimeInName(batchTimeInName) + .dateTimeOutName(batchTimeOutName) + .build()) + .addAllPartitionFields(Collections.singletonList(dateName)) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.builder().build()) + .performStageVersioning(true) + .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ + String dataPass1 = basePathForInput + "with_partition/max_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_partition/max_version/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataForWithPartitionWithVersion(dataPass1); + // 2. Execute plans and verify results + Map<String, Object> expectedStats = createExpectedStatsMap(9, 0, 6, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ + String dataPass2 = basePathForInput + "with_partition/max_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "with_partition/max_version/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataForWithPartitionWithVersion(dataPass2); + // 2. Execute plans and verify results + expectedStats = createExpectedStatsMap(4, 0, 1, 1, 1); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ + options = options.withCleanupStagingData(true); + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "with_partition/max_version/expected_pass3.csv"; + // 1. Load Staging table + loadStagingDataForWithPartitionWithVersion(dataPass3); + // 2.
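
Worth decoding once: the five positional arguments of createExpectedStatsMap line up with the statistics keyed by StatisticName, as the removed HashMap-based version near the end of this diff shows. A hedged sketch of what the harness helper presumably reduces to; the third position is inferred to be ROWS_INSERTED from the (3, 0, 3, 0, 0) replacement of a map that had no inserted-rows entry:

    // Sketch only; the real helper lives in the test harness (BaseTest/TestUtils).
    static Map<String, Object> createExpectedStatsMap(int incoming, int deleted, int inserted, int updated, int terminated)
    {
        Map<String, Object> expectedStats = new HashMap<>();
        expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), incoming);
        expectedStats.put(StatisticName.ROWS_DELETED.name(), deleted);
        expectedStats.put(StatisticName.ROWS_INSERTED.name(), inserted); // assumption: inserted sits third
        expectedStats.put(StatisticName.ROWS_UPDATED.name(), updated);
        expectedStats.put(StatisticName.ROWS_TERMINATED.name(), terminated);
        return expectedStats;
    }
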
Execute plans and verify results + expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); + } + + /* + Scenario: Test milestoning Logic with max version and with Partition when staging table pre-populated with upper case + */ + @Test + void testUnitemporalSnapshotMilestoningLogicMaxVersionWithPartitionFilterDuplicatesUpperCase() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = TestUtils.getEntityPriceWithVersionStagingTable(); + + String[] schema = new String[]{dateName.toUpperCase(), entityName.toUpperCase(), priceName.toUpperCase(), volumeName.toUpperCase(), digestName.toUpperCase(), versionName.toUpperCase(), batchIdInName.toUpperCase(), batchIdOutName.toUpperCase(), batchTimeInName.toUpperCase(), batchTimeOutName.toUpperCase()}; + + // Create staging table + h2Sink.executeStatement("CREATE TABLE IF NOT EXISTS \"TEST\".\"STAGING\"(\"DATE\" DATE NOT NULL,\"ENTITY\" VARCHAR NOT NULL,\"PRICE\" DECIMAL(20,2),\"VOLUME\" BIGINT,\"DIGEST\" VARCHAR,\"VERSION\" INTEGER)"); + + UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder() + .digestField(digestName) + .transactionMilestoning(BatchIdAndDateTime.builder() + .batchIdInName(batchIdInName) + .batchIdOutName(batchIdOutName) + .dateTimeInName(batchTimeInName) + .dateTimeOutName(batchTimeOutName) + .build()) + .addAllPartitionFields(Collections.singletonList(dateName)) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(versionName) + .mergeDataVersionResolver(DigestBasedResolver.builder().build()) + .performStageVersioning(true) + .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); + + PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + + // ------------ Perform unitemporal snapshot milestoning Pass1 ------------------------ + String dataPass1 = basePathForInput + "with_partition/max_version/staging_data_pass1.csv"; + String expectedDataPass1 = basePathForExpected + "with_partition/max_version/expected_pass1.csv"; + // 1. Load staging table + loadStagingDataForWithPartitionWithVersionInUpperCase(dataPass1); + // 2. Execute plans and verify results + Map<String, Object> expectedStats = createExpectedStatsMap(9, 0, 6, 0, 0); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform unitemporal snapshot milestoning Pass2 ------------------------ + String dataPass2 = basePathForInput + "with_partition/max_version/staging_data_pass2.csv"; + String expectedDataPass2 = basePathForExpected + "with_partition/max_version/expected_pass2.csv"; + // 1. Load staging table + loadStagingDataForWithPartitionWithVersionInUpperCase(dataPass2); + // 2. Execute plans and verify results + expectedStats = createExpectedStatsMap(4, 0, 1, 1, 1); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_01); + + // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ + options = options.withCleanupStagingData(true); + String dataPass3 = "src/test/resources/data/empty_file.csv"; + String expectedDataPass3 = basePathForExpected + "with_partition/max_version/expected_pass3.csv"; + // 1.
Load Staging table + loadStagingDataForWithPartitionWithVersionInUpperCase(dataPass3); + // 2. Execute plans and verify results + expectedStats = createExpectedStatsMap(0, 0, 0, 0, 0); + executePlansAndVerifyForCaseConversion(ingestMode, options, datasets, schema, expectedDataPass3, expectedStats, fixedClock_2000_01_01); + } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchIdTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchIdTest.java index f29f4ed594c..56741c184ec 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchIdTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchIdTest.java @@ -102,7 +102,7 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ - String dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_partition/expected_pass3.csv"; // 1. Load Staging table loadBasicStagingData(dataPass3); @@ -159,7 +159,7 @@ void testUnitemporalSnapshotMilestoningLogicWithPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats); // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ - String dataPass3 = basePathForInput + "with_partition/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_partition/expected_pass3.csv"; // 1. Load Staging table loadStagingDataForWithPartition(dataPass3); @@ -218,7 +218,7 @@ void testUnitemporalSnapshotMilestoningLogicWithPartitionFilter() throws Excepti // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch - No Op) ------------------------ IngestMode ingestModeWithNoOpBatchHandling = ingestMode.withEmptyDatasetHandling(NoOp.builder().build()); - String dataPass3 = basePathForInput + "with_partition_filter/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_partition_filter/expected_pass2.csv"; // 1. 
Load Staging table loadStagingDataForWithPartition(dataPass3); @@ -228,7 +228,7 @@ void testUnitemporalSnapshotMilestoningLogicWithPartitionFilter() throws Excepti // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch - Delete target Data) ------------------------ IngestMode ingestModeWithDeleteTargetData = ingestMode.withEmptyDatasetHandling(DeleteTargetData.builder().build()); - dataPass3 = basePathForInput + "with_partition_filter/staging_data_pass3.csv"; + dataPass3 = "src/test/resources/data/empty_file.csv"; expectedDataPass3 = basePathForExpected + "with_partition_filter/expected_pass3.csv"; // 1. Load Staging table loadStagingDataForWithPartition(dataPass3); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchTimeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchTimeTest.java index 02b22293a2a..fe429653740 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchTimeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/unitemporal/UnitemporalSnapshotWithBatchTimeTest.java @@ -100,7 +100,7 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ - String dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "without_partition/expected_pass3.csv"; // 1. Load Staging table loadBasicStagingData(dataPass3); @@ -118,7 +118,7 @@ void testUnitemporalSnapshotMilestoningLogicWithoutPartition() throws Exception .emptyDatasetHandling(FailEmptyBatch.builder().build()) .build(); - dataPass3 = basePathForInput + "without_partition/staging_data_pass3.csv"; + dataPass3 = "src/test/resources/data/empty_file.csv"; expectedDataPass3 = basePathForExpected + "without_partition/expected_pass3.csv"; // 1. Load Staging table loadBasicStagingData(dataPass3); @@ -183,7 +183,7 @@ void testUnitemporalSnapshotMilestoningLogicWithPartition() throws Exception executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, fixedClock_2000_01_02); // ------------ Perform unitemporal snapshot milestoning Pass3 (Empty Batch) ------------------------ - String dataPass3 = basePathForInput + "with_partition/staging_data_pass3.csv"; + String dataPass3 = "src/test/resources/data/empty_file.csv"; String expectedDataPass3 = basePathForExpected + "with_partition/expected_pass3.csv"; // 1. 
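
Empty-batch behaviour for snapshots is pluggable via withEmptyDatasetHandling: NoOp leaves the target untouched (the test above reuses expected_pass2), DeleteTargetData retires the target's data (hence the separate expected_pass3), and FailEmptyBatch, used elsewhere in this diff, aborts the run. A sketch of the wither pattern shown above:

    // Empty-batch behaviour is swapped on the immutable ingest mode via generated withers.
    IngestMode noOpOnEmpty = ingestMode.withEmptyDatasetHandling(NoOp.builder().build());
    IngestMode deleteOnEmpty = ingestMode.withEmptyDatasetHandling(DeleteTargetData.builder().build());
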
Load Staging table loadStagingDataForWithPartition(dataPass3); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/logicalplan/operations/SchemaEvolutionTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/logicalplan/operations/SchemaEvolutionTest.java index 8501e66e418..b59eb9776c2 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/logicalplan/operations/SchemaEvolutionTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/logicalplan/operations/SchemaEvolutionTest.java @@ -19,6 +19,7 @@ import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.common.StatisticName; import org.finos.legend.engine.persistence.components.ingestmode.AppendOnly; +import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; @@ -36,23 +37,7 @@ import java.util.Map; import java.util.Set; -import static org.finos.legend.engine.persistence.components.TestUtils.assertTableColumnsEquals; -import static org.finos.legend.engine.persistence.components.TestUtils.assertUpdatedDataset; -import static org.finos.legend.engine.persistence.components.TestUtils.createDatasetWithUpdatedField; -import static org.finos.legend.engine.persistence.components.TestUtils.digestName; -import static org.finos.legend.engine.persistence.components.TestUtils.expiryDateName; -import static org.finos.legend.engine.persistence.components.TestUtils.getColumnDataTypeFromTable; -import static org.finos.legend.engine.persistence.components.TestUtils.getColumnDataTypeLengthFromTable; -import static org.finos.legend.engine.persistence.components.TestUtils.getColumnDataTypeScaleFromTable; -import static org.finos.legend.engine.persistence.components.TestUtils.getIsColumnNullableFromTable; -import static org.finos.legend.engine.persistence.components.TestUtils.idName; -import static org.finos.legend.engine.persistence.components.TestUtils.incomeName; -import static org.finos.legend.engine.persistence.components.TestUtils.mainTableName; -import static org.finos.legend.engine.persistence.components.TestUtils.name; -import static org.finos.legend.engine.persistence.components.TestUtils.nameName; -import static org.finos.legend.engine.persistence.components.TestUtils.startTimeName; -import static org.finos.legend.engine.persistence.components.TestUtils.testDatabaseName; -import static org.finos.legend.engine.persistence.components.TestUtils.testSchemaName; +import static org.finos.legend.engine.persistence.components.TestUtils.*; class SchemaEvolutionTest extends BaseTest { @@ -74,7 +59,7 @@ void testAddColumn() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) 
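+            // NOTE: the switch below from NoAuditing to DateTimeAuditing(batchUpdateTimeName) adds the
+            // audit column to every expected schema in this file; that is why each schema array now ends
+            // with batchUpdateTimeName and each executePlansAndVerifyResults call pins a fixedClock_*
+            // value, keeping the audited timestamps deterministic.
+            // createExpectedStatsMap(...) replaces the hand-built HashMap of StatisticName entries;
+            // judging from the values used here it takes (incomingRecordCount, rowsDeleted, rowsInserted,
+            // rowsUpdated, rowsTerminated), though the exact signature lives in the shared test utilities.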
.deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -82,7 +67,7 @@ void testAddColumn() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.ADD_COLUMN); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "add_column_data_pass1.csv"; @@ -90,12 +75,8 @@ void testAddColumn() throws Exception // 1. Load staging table loadBasicStagingData(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -110,12 +91,8 @@ void testAddColumn() throws Exception // 2. Load staging table loadBasicStagingData(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1, 0, 1, 0, 0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_02); } @Test @@ -134,7 +111,7 @@ void testDataTypeConversion() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -142,7 +119,7 @@ void testDataTypeConversion() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.DATA_TYPE_CONVERSION); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "data_type_conversion_data_pass1.csv"; @@ -150,12 +127,8 @@ void testDataTypeConversion() throws Exception // 1. Load staging table loadBasicStagingData(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3, 0, 3, 0, 0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -171,18 +144,14 @@ void testDataTypeConversion() throws Exception // 2. Load staging table loadBasicStagingData(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test void testDataTypeSizeChange() throws Exception { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); + DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); DatasetDefinition stagingTable = TestUtils.getSchemaEvolutionDataTypeSizeChangeStagingTable(); // Create staging table @@ -195,7 +164,7 @@ void testDataTypeSizeChange() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -203,7 +172,7 @@ void testDataTypeSizeChange() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.DATA_TYPE_SIZE_CHANGE); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "datatype_type_size_change_data_pass1.csv"; @@ -211,12 +180,8 @@ void testDataTypeSizeChange() throws Exception // 1. Load staging table loadStagingDataForIntIncome(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -234,18 +199,14 @@ void testDataTypeSizeChange() throws Exception // 2. Load staging table loadStagingDataForIntIncome(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test void testColumnNullabilityChange() throws Exception { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); + DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); DatasetDefinition stagingTable = TestUtils.getSchemaEvolutionColumnNullabilityChangeStagingTable(); // Create staging table @@ -258,7 +219,7 @@ void testColumnNullabilityChange() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -266,7 +227,7 @@ void testColumnNullabilityChange() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.COLUMN_NULLABILITY_CHANGE); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "column_nullability_change_data_pass1.csv"; @@ -274,12 +235,8 @@ void testColumnNullabilityChange() throws Exception // 1. Load staging table loadBasicStagingData(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -295,12 +252,8 @@ void testColumnNullabilityChange() throws Exception // 2. Load staging table loadBasicStagingData(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test @@ -319,7 +272,7 @@ void testDataTypeConversionAndColumnNullabilityChange() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -328,7 +281,7 @@ void testDataTypeConversionAndColumnNullabilityChange() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.COLUMN_NULLABILITY_CHANGE); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "data_type_conversion_and_column_nullability_change_data_pass1.csv"; @@ -336,12 +289,8 @@ void testDataTypeConversionAndColumnNullabilityChange() throws Exception // 1. Load staging table loadBasicStagingData(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -358,18 +307,14 @@ void testDataTypeConversionAndColumnNullabilityChange() throws Exception // 2. Load staging table loadBasicStagingData(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test void testDataTypeConversionAndDataTypeSizeChange() throws Exception { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); + DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); DatasetDefinition stagingTable = TestUtils.getSchemaEvolutionDataTypeConversionAndDataTypeSizeChangeStagingTable(); // Create staging table @@ -382,7 +327,7 @@ void testDataTypeConversionAndDataTypeSizeChange() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -391,7 +336,7 @@ void testDataTypeConversionAndDataTypeSizeChange() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.DATA_TYPE_SIZE_CHANGE); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "data_type_conversion_and_data_type_size_change_data_pass1.csv"; @@ -399,12 +344,8 @@ void testDataTypeConversionAndDataTypeSizeChange() throws Exception // 1. Load staging table loadStagingDataForDecimalIncome(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -422,18 +363,14 @@ void testDataTypeConversionAndDataTypeSizeChange() throws Exception // 2. Load staging table loadStagingDataForDecimalIncome(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test void testMakeMainColumnNullable() throws Exception { - DatasetDefinition mainTable = TestUtils.getBasicMainTable(); + DatasetDefinition mainTable = TestUtils.getMainTableWithBatchUpdateTimeField(); DatasetDefinition stagingTable = TestUtils.getSchemaEvolutionMakeMainColumnNullableStagingTable(); // Create staging table @@ -445,7 +382,7 @@ void testMakeMainColumnNullable() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -453,7 +390,7 @@ void testMakeMainColumnNullable() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.COLUMN_NULLABILITY_CHANGE); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 (Schema Evolution) ------------------------ String dataPass1 = basePathForInput + "make_main_column_nullable_data_pass1.csv"; @@ -461,12 +398,8 @@ void testMakeMainColumnNullable() throws Exception // 1. Load staging table loadStagingDataForWithoutName(dataPass1); // 2. Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_01); // 3. Verify schema changes in database List> actualTableData = h2Sink.executeQuery("select * from \"TEST\".\"main\""); assertTableColumnsEquals(Arrays.asList(schema), actualTableData); @@ -482,12 +415,8 @@ void testMakeMainColumnNullable() throws Exception // 2. Load staging table loadStagingDataForWithoutName(dataPass2); // 3. 
Execute plans and verify results - expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 1); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); - executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet); + expectedStats = createExpectedStatsMap(1,0,1,0,0); + executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass2, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); } @Test @@ -506,7 +435,7 @@ void testSchemaEvolutionFailPKTypeDifferent() throws Exception AppendOnly ingestMode = AppendOnly.builder() .digestField(digestName) .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeName).build()) .build(); PlannerOptions options = PlannerOptions.builder().cleanupStagingData(false).collectStatistics(true).enableSchemaEvolution(true).build(); @@ -514,7 +443,7 @@ void testSchemaEvolutionFailPKTypeDifferent() throws Exception schemaEvolutionCapabilitySet.add(SchemaEvolutionCapability.DATA_TYPE_CONVERSION); Datasets datasets = Datasets.of(mainTable, stagingTable); - String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName}; + String[] schema = new String[]{idName, nameName, incomeName, startTimeName, expiryDateName, digestName, batchUpdateTimeName}; // ------------ Perform Pass1 ------------------------ String dataPass1 = basePathForInput + "data_type_conversion_data_pass1.csv"; @@ -522,15 +451,11 @@ void testSchemaEvolutionFailPKTypeDifferent() throws Exception // 1. Load staging table loadBasicStagingData(dataPass1); // 2. 
Execute plans and verify results - Map expectedStats = new HashMap<>(); - expectedStats.put(StatisticName.INCOMING_RECORD_COUNT.name(), 3); - expectedStats.put(StatisticName.ROWS_DELETED.name(), 0); - expectedStats.put(StatisticName.ROWS_UPDATED.name(), 0); - expectedStats.put(StatisticName.ROWS_TERMINATED.name(), 0); + Map expectedStats = createExpectedStatsMap(3,0,3,0,0); try { - IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet); + IngestorResult result = executePlansAndVerifyResults(ingestMode, options, datasets, schema, expectedDataPass1, expectedStats, schemaEvolutionCapabilitySet, fixedClock_2000_01_03); Assertions.fail("Exception was not thrown"); } catch (IncompatibleSchemaChangeException e) diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/versioning/TestDedupAndVersioning.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/versioning/TestDedupAndVersioning.java new file mode 100644 index 00000000000..984e5ab7277 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/java/org/finos/legend/engine/persistence/components/versioning/TestDedupAndVersioning.java @@ -0,0 +1,664 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
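+
+// The tests below drive RelationalIngestor.dedupAndVersion(...) in isolation; the shared driver
+// (performDedupAndVersioining, defined near the bottom of this file) reduces to the following sequence:
+//
+//     RelationalIngestor ingestor = RelationalIngestor.builder()
+//             .ingestMode(ingestMode)
+//             .relationalSink(H2Sink.get())
+//             .build();
+//     Executor executor = ingestor.init(JdbcConnection.of(h2Sink.connection()));
+//     datasets = ingestor.create(datasets);
+//     datasets = ingestor.dedupAndVersion(datasets);
+//
+// Whether a temp staging table (named stagingTableName + "_" + TEMP_STAGING_DATASET_BASE_NAME) is
+// produced, and whether it carries the legend_persistence_count and DATA_SPLIT columns, depends on the
+// dedup/versioning combination enumerated in the scenario list inside the class.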
+ +package org.finos.legend.engine.persistence.components.versioning; + +import org.finos.legend.engine.persistence.components.BaseTest; +import org.finos.legend.engine.persistence.components.TestUtils; +import org.finos.legend.engine.persistence.components.common.Datasets; +import org.finos.legend.engine.persistence.components.executor.Executor; +import org.finos.legend.engine.persistence.components.ingestmode.AppendOnly; +import org.finos.legend.engine.persistence.components.ingestmode.IngestMode; +import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot; +import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.*; +import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; +import org.finos.legend.engine.persistence.components.relational.api.RelationalIngestor; +import org.finos.legend.engine.persistence.components.relational.h2.H2Sink; +import org.finos.legend.engine.persistence.components.relational.jdbc.JdbcConnection; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.finos.legend.engine.persistence.components.TestUtils.*; +import static org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategyAbstract.DATA_SPLIT; +import static org.finos.legend.engine.persistence.components.util.LogicalPlanUtils.TEMP_STAGING_DATASET_BASE_NAME; + +public class TestDedupAndVersioning extends BaseTest +{ + + /* Scenarios: + 1. No Dedup, NoVersion -> No tempStagingTable + 2. No Dedup, MaxVersion do not perform versioning -> No tempStagingTable + 3. No Dedup, MaxVersion with perform versioning -> tempStagingTable with only MaxVersioned Data [throw Error on Data errors] + 4. No Dedup, AllVersion do not perform versioning -> No tempStagingTable + 5. No Dedup, AllVersion with perform versioning -> tempStagingTable with Data splits [throw Error on Data errors] + + 6. Filter Dups, NoVersion -> tempStagingTable with count column + 7. Filter Dups, MaxVersion do not perform versioning -> tempStagingTable with count column + 8. Filter Dups, MaxVersion with perform versioning -> tempStagingTable with count column and only max version [throw Error on Data errors] + 9. Filter Dups, AllVersion do not perform versioning -> tempStagingTable with count column + 10. 
Filter Dups, AllVersion with perform versioning -> tempStagingTable with count column and Data splits [throw Error on Data errors] + + 11.Fail on Dups, NoVersion -> tempStagingTable with count column [Throw error on dups] + 12.Fail on Dups, MaxVersion do not perform versioning -> tempStagingTable with count column [Throw error on dups] + 13.Fail on Dups, MaxVersion with perform versioning -> tempStagingTable with count column and only max version [Throw error on dups, throw Error on Data errors] + 14.Fail on Dups, AllVersion do not perform versioning -> tempStagingTable with count column [Throw error on dups] + 15.Fail on Dups, AllVersion with perform versioning -> tempStagingTable with count column and Data splits [Throw error on dups, throw Error on Data errors] + */ + + private static Field name = Field.builder().name(nameName).type(FieldType.of(DataType.VARCHAR, 64, null)).nullable(false).primaryKey(true).fieldAlias(nameName).build(); + + // Base Schema : PK : id, name + public static SchemaDefinition baseSchemaWithoutVersion = + SchemaDefinition.builder() + .addFields(id) + .addFields(name) + .addFields(income) + .addFields(expiryDate) + .addFields(digest) + .build(); + + public static SchemaDefinition baseSchemaWithVersion = + SchemaDefinition.builder() + .addFields(id) + .addFields(name) + .addFields(version) + .addFields(income) + .addFields(expiryDate) + .addFields(digest) + .build(); + + public static SchemaDefinition baseSchemaWithVersionAndBatch = + SchemaDefinition.builder() + .addFields(id) + .addFields(name) + .addFields(version) + .addFields(income) + .addFields(expiryDate) + .addFields(digest) + .addFields(batch) + .build(); + + private static final String tempStagingTableName = stagingTableName + "_" + TEMP_STAGING_DATASET_BASE_NAME; + + String[] schemaWithCount = new String[]{idName, nameName, incomeName, expiryDateName, digestName, "legend_persistence_count"}; + String[] schemaWithVersion = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName}; + String[] schemaWithVersionAndCount = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName, "legend_persistence_count"}; + String[] schemaWithVersionCountAndDataSplit = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName, "legend_persistence_count", DATA_SPLIT}; + + String[] schemaWithVersionAndDataSplit = new String[]{idName, nameName, versionName, incomeName, expiryDateName, digestName, DATA_SPLIT}; + + + // Scenario 1 + @Test + void testNoDedupNoVersioning() + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithoutVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .build(); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + Assertions.assertEquals(false, h2Sink.doesTableExist(getTempStagingDataset())); + } + + // Scenario 2 + @Test + void testNoDedupMaxVersioningDoNotPerform() + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + 
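+                // Scenario 2 (see the list above): a MaxVersionStrategy is declared, but
+                // performStageVersioning(false) skips the stage-versioning pass, so no temp staging
+                // table should be created (doesTableExist(...) is asserted false below).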
.versioningStrategy(MaxVersionStrategy.builder().versioningField("version").performStageVersioning(false).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + Assertions.assertEquals(false, h2Sink.doesTableExist(getTempStagingDataset())); + } + + // Scenario 3 + @Test + void testNoDedupMaxVersioning() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_max_versioning.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + verifyResults(expectedDataPath, schemaWithVersion); + + // Data error scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch",e.getMessage()); + } + } + + // Scenario 4 + @Test + void testNoDedupAllVersioningDoNotPerform() + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version").performStageVersioning(false).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + Assertions.assertEquals(false, h2Sink.doesTableExist(getTempStagingDataset())); + } + + // Scenario 5 + @Test + void testNoDedupAllVersion() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version") + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(true).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = 
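+            // Scenario 5 expectation: AllVersionsStrategy with performStageVersioning(true) keeps every
+            // version in the temp staging table and assigns a DATA_SPLIT number per primary key in
+            // version order, hence schemaWithVersionAndDataSplit in the verification below: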
"src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_all_version.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + verifyResults(expectedDataPath, schemaWithVersionAndDataSplit); + + // Data error scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch",e.getMessage()); + } + } + + // Scenario 6 + @Test + void testFilterDupsNoVersioning() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithoutVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .build(); + createStagingTableWithoutVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv"; + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data1_filter_dups_no_versioning.csv"; + loadDataIntoStagingTableWithoutVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithCount); + } + + // Scenario 7 + @Test + void testFilterDupsMaxVersionDoNotPerform() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").performStageVersioning(false).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_no_versioning.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + verifyResults(expectedDataPath, schemaWithVersionAndCount); + } + + + // Scenario 8 + @Test + void testFilterDupsMaxVersion() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = 
"src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_max_versioning.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + verifyResults(expectedDataPath, schemaWithVersionAndCount); + + // Data error scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the batch",e.getMessage()); + } + } + + // Scenario 9 + @Test + void testFilterDupsAllVersionDoNotPerform() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version") + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(false).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_no_versioning.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionAndCount); + } + + // Scenario 10 + @Test + void testFilterDupsAllVersion() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version") + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(true).build()) + .build(); + + createStagingTableWithVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_all_version.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath); + + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionCountAndDataSplit); + + // Data error scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Data errors (same PK, same version but different data), hence failing the 
batch",e.getMessage()); + } + } + + // Scenario 11 + @Test + void testFailOnDupsNoVersioning() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithoutVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .build(); + createStagingTableWithoutVersion(); + String srcDataPath = "src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv"; + loadDataIntoStagingTableWithoutVersion(srcDataPath); + + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy",e.getMessage()); + } + } + + // Scenario 12 + @Test + void testFailOnDupsMaxVersionDoNotPerform() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").performStageVersioning(false).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + // Happy scenario + createStagingTableWithVersion(); + String srcDataPath1 = "src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath1); + + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv"; + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionAndCount); + + + // Duplicates scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy",e.getMessage()); + } + } + + // Scenario 13 + @Test + void testFailOnDupsMaxVersion() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").performStageVersioning(true).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .build(); + + // Happy scenario + createStagingTableWithVersion(); + String srcDataPath1 = "src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath1); + + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_max_versioin.csv"; + 
performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionAndCount); + + + // Duplicates scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy",e.getMessage()); + } + } + + + // Scenario 14 + @Test + void testFailOnDupsAllVersionDoNotPerform() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version") + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(false).build()) + .build(); + + // Happy scenario + createStagingTableWithVersion(); + String srcDataPath1 = "src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath1); + + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv"; + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionAndCount); + + + // Duplicates scenario, should throw error + String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy",e.getMessage()); + } + } + + // Scenario 15 + @Test + void testFailOnDupsAllVersion() throws Exception + { + DatasetDefinition mainTable = TestUtils.getDefaultMainTable(); + DatasetDefinition stagingTable = getStagingTableWithVersion(); + Datasets datasets = Datasets.of(mainTable, stagingTable); + IngestMode ingestMode = AppendOnly.builder() + .auditing(DateTimeAuditing.builder().dateTimeField("append_time").build()) + .digestField("digest") + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("version") + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(true).build()) + .build(); + + // Happy scenario + createStagingTableWithVersion(); + String srcDataPath1 = "src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath1); + + String expectedDataPath = "src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv"; + performDedupAndVersioining(datasets, ingestMode); + // Validate tempTableExists + verifyResults(expectedDataPath, schemaWithVersionCountAndDataSplit); + + + // Duplicates scenario, should throw error + 
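+            // (data2_with_dups_no_data_error.csv carries primary-key duplicates, so FailOnDuplicates
+            // should raise before any versioning work happens.)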
String srcDataPath2 = "src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv"; + loadDataIntoStagingTableWithVersion(srcDataPath2); + try + { + performDedupAndVersioining(datasets, ingestMode); + Assertions.fail("Should not succeed"); + } + catch (Exception e) + { + Assertions.assertEquals("Encountered Duplicates, Failing the batch as Fail on Duplicates is set as Deduplication strategy",e.getMessage()); + } + } + + + public static DatasetDefinition getStagingTableWithoutVersion() + { + return DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(baseSchemaWithoutVersion) + .build(); + } + + private Dataset getTempStagingDataset() + { + return DatasetReferenceImpl.builder() + .group(testSchemaName) + .name(tempStagingTableName) + .build(); + } + + public static DatasetDefinition getStagingTableWithVersion() + { + return DatasetDefinition.builder() + .group(testSchemaName) + .name(stagingTableName) + .schema(baseSchemaWithVersion) + .build(); + } + + + public static void createStagingTableWithoutVersion() + { + String createSql = "CREATE TABLE IF NOT EXISTS \"TEST\".\"staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR(64) NOT NULL," + + "\"income\" BIGINT," + + "\"expiry_date\" DATE," + + "\"digest\" VARCHAR)"; + h2Sink.executeStatement(createSql); + } + + public static void createStagingTableWithVersion() + { + String createSql = "CREATE TABLE IF NOT EXISTS \"TEST\".\"staging\"" + + "(\"id\" INTEGER NOT NULL," + + "\"name\" VARCHAR(64) NOT NULL," + + "\"version\" INTEGER," + + "\"income\" BIGINT," + + "\"expiry_date\" DATE," + + "\"digest\" VARCHAR)"; + h2Sink.executeStatement(createSql); + } + + private static void performDedupAndVersioining(Datasets datasets, IngestMode ingestMode) + { + RelationalIngestor ingestor = RelationalIngestor.builder() + .ingestMode(ingestMode) + .relationalSink(H2Sink.get()) + .build(); + + Executor executor = ingestor.init(JdbcConnection.of(h2Sink.connection())); + datasets = ingestor.create(datasets); + datasets = ingestor.dedupAndVersion(datasets); + } + + public static void loadDataIntoStagingTableWithoutVersion(String path) throws Exception + { + validateFileExists(path); + String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" + + "INSERT INTO \"TEST\".\"staging\"(id, name, income ,expiry_date, digest) " + + "SELECT CONVERT( \"id\",INT ), \"name\", CONVERT( \"income\", BIGINT), CONVERT( \"expiry_date\", DATE), digest" + + " FROM CSVREAD( '" + path + "', 'id, name, income, expiry_date, digest', NULL )"; + h2Sink.executeStatement(loadSql); + } + + public static void loadDataIntoStagingTableWithVersion(String path) throws Exception + { + validateFileExists(path); + String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" + + "INSERT INTO \"TEST\".\"staging\"(id, name, version, income ,expiry_date, digest) " + + "SELECT CONVERT( \"id\",INT ), \"name\", CONVERT( \"version\",INT ), CONVERT( \"income\", BIGINT), CONVERT( \"expiry_date\", DATE), digest" + + " FROM CSVREAD( '" + path + "', 'id, name, version, income, expiry_date, digest', NULL )"; + h2Sink.executeStatement(loadSql); + } + + public static void loadDataIntoStagingTableWithVersionAndBatch(String path) throws Exception + { + validateFileExists(path); + String loadSql = "TRUNCATE TABLE \"TEST\".\"staging\";" + + "INSERT INTO \"TEST\".\"staging\"(id, name, version, income ,expiry_date, digest, batch) " + + "SELECT CONVERT( \"id\",INT ), \"name\", CONVERT( \"version\",INT ), CONVERT( \"income\", BIGINT), CONVERT( 
\"expiry_date\", DATE), digest, CONVERT( \"batch\",INT )" + + " FROM CSVREAD( '" + path + "', 'id, name, version, income, expiry_date, digest, batch', NULL )"; + h2Sink.executeStatement(loadSql); + } + + private void verifyResults(String expectedDataPath, String [] schema) throws IOException + { + Assertions.assertEquals(true, h2Sink.doesTableExist(getTempStagingDataset())); + List> tableData = h2Sink.executeQuery(String.format("select * from \"TEST\".\"%s\"", tempStagingTableName)); + TestUtils.assertFileAndTableDataEquals(schema, expectedDataPath, tableData); + } + + +} diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv index bae437bf7b1..f699187454f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass1.csv @@ -1,2 +1,2 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass2.csv index 36b63f8e77c..7e300c3a802 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass2.csv @@ -1,4 +1,4 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 
-1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv index cacfb889f8c..e67bc9b93aa 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass3.csv @@ -1,5 +1,5 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv index ca96c022fce..97a06baa08c 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/expected_pass4.csv @@ -1,7 +1,7 @@ 
-1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,3 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,3 -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,4,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,3 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,3 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,4,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv index bae437bf7b1..f699187454f 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv @@ -1,2 +1,2 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv index 36b63f8e77c..7e300c3a802 100644 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv @@ -1,4 +1,4 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv index cacfb889f8c..e67bc9b93aa 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv @@ -1,5 +1,5 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv index cacfb889f8c..e67bc9b93aa 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv @@ -1,5 +1,5 @@ -1001,225000,DIGEST1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 -1001,229000,DIGEST3,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 -1001,290000,DIGEST2,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 +1001,225000,DIGEST1,1,2022-01-11 00:00:00.0,2022-02-24 00:00:00.0,1,999999999 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,1,1 +1001,229000,DIGEST3,1,2022-05-08 00:00:00.0,9999-12-31 23:59:59.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,2022-05-08 00:00:00.0,2,2 +1001,290000,DIGEST2,1,2022-02-24 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv index 79a2fb132e7..bc679dc2d35 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass1.csv @@ -1,2 +1,2 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 
23:59:59.0,1,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass2.csv index 6701330e84d..6e3151022e0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass2.csv @@ -1,4 +1,4 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass3.csv index 889b5acb9b5..19ff341087a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass3.csv @@ -1,5 +1,5 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 
00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv index 6d5ced14d6f..f6baa74cc08 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass4.csv @@ -1,6 +1,6 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 +39915188,110000,DIGEST5,3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv index 694efaebdd0..1c119448841 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass5.csv @@ -1,7 +1,7 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,4 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,5,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,4 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 +39915188,110000,DIGEST5,3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,5,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass6.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass6.csv deleted file mode 100644 index 07bc95699dc..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/expected_pass6.csv +++ /dev/null @@ -1,8 +0,0 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,4 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,5,5 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,6,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv index 79a2fb132e7..bc679dc2d35 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass1.csv @@ -1,2 +1,2 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv index 6701330e84d..6e3151022e0 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass2.csv @@ -1,4 +1,4 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv index 889b5acb9b5..19ff341087a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass3.csv @@ -1,5 +1,5 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv index 6d5ced14d6f..f6baa74cc08 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass4.csv @@ -1,6 +1,6 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 
-39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 +39915188,110000,DIGEST5,3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv index 6d5ced14d6f..f6baa74cc08 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass5.csv @@ -1,6 +1,6 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 +39915188,1250000,DIGEST1,1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 +39915188,120000,DIGEST3,1,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 +39915188,124000,DIGEST2,1,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 +39915188,122000,DIGEST4,2,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 +39915188,110000,DIGEST5,3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass6.csv 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass6.csv deleted file mode 100644 index 6d5ced14d6f..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/expected/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/expected_pass6.csv +++ /dev/null @@ -1,6 +0,0 @@ -39915188,1250000,DIGEST1,2022-04-30 00:00:00.0,2022-05-31 00:00:00.0,1,999999999 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,9999-12-31 23:59:59.0,1,1 -39915188,120000,DIGEST3,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,2,2 -39915188,124000,DIGEST2,2022-05-31 00:00:00.0,2022-09-30 00:00:00.0,2,999999999 -39915188,122000,DIGEST4,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,3,3 -39915188,110000,DIGEST5,2022-09-30 00:00:00.0,9999-12-31 23:59:59.0,4,999999999 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv index 8fb30d52548..cb737cd2666 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass1.csv @@ -1,2 +1,2 @@ -1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,5 -1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,5 +1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,1,5 +1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,1,5 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv index bc24ee76372..6963a2a181a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass2.csv @@ -1,2 +1,2 @@ -1001,2022-05-08 00:00:00.0,229000,DIGEST3,1,1 -1001,2022-05-08 00:00:00.0,123456,DIGEST4,0,2 \ No newline at end of file +1001,2022-05-08 00:00:00.0,229000,DIGEST3,1,1,1 +1001,2022-05-08 00:00:00.0,123456,DIGEST4,2,0,2 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv index 6323e4d32a2..0938248ca4e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_3_with_data_split/staging_data_pass3.csv @@ -1,2 +1,2 @@ -1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,70 -1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,70 \ No newline at end of file +1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,1,70 +1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,1,70 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv index 8fb30d52548..cb737cd2666 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv @@ -1,2 +1,2 @@ -1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,5 -1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,5 +1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,1,5 +1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,1,5 diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv index bc24ee76372..6963a2a181a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv @@ -1,2 +1,2 @@ -1001,2022-05-08 00:00:00.0,229000,DIGEST3,1,1 -1001,2022-05-08 00:00:00.0,123456,DIGEST4,0,2 \ No newline at end of file +1001,2022-05-08 00:00:00.0,229000,DIGEST3,1,1,1 +1001,2022-05-08 00:00:00.0,123456,DIGEST4,2,0,2 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv index 6323e4d32a2..0938248ca4e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/with_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv @@ -1,2 +1,2 @@ -1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,70 -1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,70 \ No newline at end of file +1001,2022-01-11 00:00:00.0,225000,DIGEST1,1,1,70 +1001,2022-02-24 00:00:00.0,290000,DIGEST2,1,1,70 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv index 4226bf7a05a..a7ef24a3b87 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass1.csv @@ -1,2 +1,2 @@ -39915188,2022-04-30 00:00:00.0,1250000,DIGEST1,1 -39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1 +39915188,2022-04-30 00:00:00.0,1250000,DIGEST1,1,1 +39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1,1 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv index 123606a3c79..e2b4e07e182 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass2.csv @@ -1,3 +1,3 @@ -39915188,2022-09-30 00:00:00.0,120000,DIGEST3,1 -39915188,2022-09-30 00:00:00.0,122000,DIGEST4,2 -39915188,2022-09-30 00:00:00.0,110000,DIGEST5,99 +39915188,2022-09-30 00:00:00.0,120000,DIGEST3,1,1 +39915188,2022-09-30 00:00:00.0,122000,DIGEST4,2,2 +39915188,2022-09-30 00:00:00.0,110000,DIGEST5,3,99 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv index c7ed61a2f09..478a83237e0 100644 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_3_with_data_split/staging_data_pass3.csv @@ -1,2 +1 @@ -39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1 -39915188,2022-05-31 00:00:00.0,124000,DIGEST2,2 +39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1,1 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv index 4226bf7a05a..a7ef24a3b87 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass1.csv @@ -1,2 +1,2 @@ -39915188,2022-04-30 00:00:00.0,1250000,DIGEST1,1 -39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1 +39915188,2022-04-30 00:00:00.0,1250000,DIGEST1,1,1 +39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1,1 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv index 123606a3c79..e2b4e07e182 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass2.csv @@ -1,3 
+1,3 @@
-39915188,2022-09-30 00:00:00.0,120000,DIGEST3,1
-39915188,2022-09-30 00:00:00.0,122000,DIGEST4,2
-39915188,2022-09-30 00:00:00.0,110000,DIGEST5,99
+39915188,2022-09-30 00:00:00.0,120000,DIGEST3,1,1
+39915188,2022-09-30 00:00:00.0,122000,DIGEST4,2,2
+39915188,2022-09-30 00:00:00.0,110000,DIGEST5,3,99
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv
index c7ed61a2f09..478a83237e0 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from/without_delete_ind/set_5_with_data_split_filter_duplicates/staging_data_pass3.csv
@@ -1,2 +1 @@
-39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1
-39915188,2022-05-31 00:00:00.0,124000,DIGEST2,2
+39915188,2022-05-31 00:00:00.0,124000,DIGEST2,1,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/less_columns_in_staging/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/less_columns_in_staging/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/less_columns_in_staging/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/with_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/with_delete_ind/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/with_delete_ind/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/without_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/without_delete_ind/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-incremental-milestoning/input/batch_id_based/source_specifies_from_and_through/without_delete_ind/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/with_partition/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/with_partition/staging_data_pass4.csv
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/without_partition/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/without_partition/staging_data_pass4.csv
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table1.csv
index b68e9aa646b..022020ba331 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table1.csv
@@ -1,3 +1,3 @@
-1,Andy,5.20,2022-01-11 00:00:00.0,xyz123,2000-01-01 00:00:00.0
-2,Bella,99.99,2022-01-12 00:00:00.0,xyz123,2000-01-01 00:00:00.0
-49,Sandy,123.45,2022-01-13 00:00:00.0,xyz123,2000-01-01 00:00:00.0
\ No newline at end of file
+1,Andy,5.20,2022-01-11 00:00:00.0,1,2000-01-01 00:00:00.0
+2,Bella,99.99,2022-01-12 00:00:00.0,1,2000-01-01 00:00:00.0
+49,Sandy,123.45,2022-01-13 00:00:00.0,1,2000-01-01 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table2.csv
index c807b1c4764..92b02b8f19c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table2.csv
@@ -1,3 +1,3 @@
-1,Andy,5.20,2022-01-11 00:00:00.0,xyz123
-2,Bella,99.99,2022-01-12 00:00:00.0,xyz123
-49,Sandy,123.45,2022-01-13 00:00:00.0,xyz123
\ No newline at end of file
+1,Andy,5.20,2022-01-11 00:00:00.0,1
+2,Bella,99.99,2022-01-12 00:00:00.0,1
+49,Sandy,123.45,2022-01-13 00:00:00.0,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table3.csv
index 8fc9ed0670f..b9421520b4a 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table3.csv
@@ -1,3 +1,3 @@
-1,Andy,5.20,2022-01-11 00:00:00.0,6366D6AFD9E8B991393E719A5A4E6D35,xyz123,2000-01-01 00:00:00.0
-2,Bella,99.99,2022-01-12 00:00:00.0,C556B5DC2B9F3A66000202DF9D98EC05,xyz123,2000-01-01 00:00:00.0
-49,Sandy,123.45,2022-01-13 00:00:00.0,051D68CF86951CDE0DF875915940AEC6,xyz123,2000-01-01 00:00:00.0
\ No newline at end of file
+1,Andy,5.20,2022-01-11 00:00:00.0,9fc62c73317227ab0760aed72f4fee17,1,2000-01-01 00:00:00.0
+2,Bella,99.99,2022-01-12 00:00:00.0,b0383f1a479eb2a6c5186f045af4c51f,1,2000-01-01 00:00:00.0
+49,Sandy,123.45,2022-01-13 00:00:00.0,dc170980c8540e2a667753e793dad94c,1,2000-01-01 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table4.csv
index 074bc2e251d..0b162ed75bd 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table4.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table4.csv
@@ -1,3 +1,3 @@
-1,Andy,5.20,2022-01-11 00:00:00.0,4B39799C7A1FB5EFC4BC328966A159E0,xyz123,2000-01-01 00:00:00.0
-2,Bella,99.99,2022-01-12 00:00:00.0,58467B440BCED7607369DC8A260B0607,xyz123,2000-01-01 00:00:00.0
-49,Sandy,123.45,2022-01-13 00:00:00.0,29B8C8A6CD28B069290372E6B54B6C72,xyz123,2000-01-01 00:00:00.0
\ No newline at end of file
+1,Andy,5.20,2022-01-11 00:00:00.0,e7dc92b208f2244b9ece45d706474f55,1,2000-01-01 00:00:00.0
+2,Bella,99.99,2022-01-12 00:00:00.0,278cf3ee2c2981bb8aeade81cc21e87a,1,2000-01-01 00:00:00.0
+49,Sandy,123.45,2022-01-13 00:00:00.0,e8ff35a6699515eaca0a798a7f989978,1,2000-01-01 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table5.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table5.csv
index 7d90d71c952..a20715af7c5 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table5.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bulk-load/expected/expected_table5.csv
@@ -1,3 +1,6 @@
-1,Andy,5.20,2022-01-11 00:00:00.0,4B39799C7A1FB5EFC4BC328966A159E0,2000-01-01 00:00:00.0,src/test/resources/data/bulk-load/input/staged_file5.csv
-2,Bella,99.99,2022-01-12 00:00:00.0,58467B440BCED7607369DC8A260B0607,2000-01-01 00:00:00.0,src/test/resources/data/bulk-load/input/staged_file5.csv
-49,Sandy,123.45,2022-01-13 00:00:00.0,29B8C8A6CD28B069290372E6B54B6C72,2000-01-01 00:00:00.0,src/test/resources/data/bulk-load/input/staged_file5.csv
\ No newline at end of file
+1,Andy,5.20,2022-01-11 00:00:00.0,1
+2,Bella,99.99,2022-01-12 00:00:00.0,1
+49,Sandy,123.45,2022-01-13 00:00:00.0,1
+1,Andy,5.20,2022-01-11 00:00:00.0,2
+2,Bella,99.99,2022-01-12 00:00:00.0,2
+49,Sandy,123.45,2022-01-13 00:00:00.0,2
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data1_filter_dups_no_versioning.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data1_filter_dups_no_versioning.csv
new file mode 100644
index 00000000000..f525044432d
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data1_filter_dups_no_versioning.csv
@@ -0,0 +1,3 @@
+1,Andy,1000,2012-01-01,digest1,3
+2,Becky,2000,2012-01-02,digest2,2
+3,Cathy,3000,2012-01-03,digest3,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_all_version.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_all_version.csv
new file mode 100644
index 00000000000..1b6ac7f8c1b
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_all_version.csv
@@ -0,0 +1,6 @@
+1,Andy,1,1000,2012-01-01,digest1,1
+1,Andy,2,2000,2012-01-02,digest2,2
+1,Andy,3,3000,2012-01-03,digest3,3
+2,Becky,1,4000,2012-01-04,digest4,1
+2,Becky,1,4000,2012-01-04,digest4,1
+3,Cathy,1,5000,2012-01-05,digest5,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_max_versioning.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_max_versioning.csv
new file mode 100644
index 00000000000..d49b849d319
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_allow_dups_max_versioning.csv
@@ -0,0 +1,4 @@
+1,Andy,3,3000,2012-01-03,digest3
+2,Becky,1,4000,2012-01-04,digest4
+2,Becky,1,4000,2012-01-04,digest4
+3,Cathy,1,5000,2012-01-05,digest5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_all_version.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_all_version.csv
new file mode 100644
index 00000000000..aff93491809
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_all_version.csv
@@ -0,0 +1,5 @@
+1,Andy,1,1000,2012-01-01,digest1,1,1
+1,Andy,2,2000,2012-01-02,digest2,1,2
+1,Andy,3,3000,2012-01-03,digest3,1,3
+2,Becky,1,4000,2012-01-04,digest4,2,1
+3,Cathy,1,5000,2012-01-05,digest5,1,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_max_versioning.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_max_versioning.csv
new file mode 100644
index 00000000000..caebf714b95
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_max_versioning.csv
@@ -0,0 +1,3 @@
+1,Andy,3,3000,2012-01-03,digest3,1
+2,Becky,1,4000,2012-01-04,digest4,2
+3,Cathy,1,5000,2012-01-05,digest5,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_no_versioning.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_no_versioning.csv
new file mode 100644
index 00000000000..1531598979a
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data2_filter_dups_no_versioning.csv
@@ -0,0 +1,5 @@
+1,Andy,1,1000,2012-01-01,digest1,1
+1,Andy,2,2000,2012-01-02,digest2,1
+1,Andy,3,3000,2012-01-03,digest3,1
+2,Becky,1,4000,2012-01-04,digest4,2
+3,Cathy,1,5000,2012-01-05,digest5,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_all_version.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_all_version.csv
new file mode 100644
index 00000000000..aa49707585d
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_all_version.csv
@@ -0,0 +1,5 @@
+1,Andy,1,1000,2012-01-01,digest1,1
+1,Andy,2,2000,2012-01-02,digest2,1
+1,Andy,3,3000,2012-01-03,digest3,1
+2,Becky,1,4000,2012-01-04,digest4,1
+3,Cathy,1,5000,2012-01-05,digest5,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_max_versioin.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_max_versioin.csv
new file mode 100644
index 00000000000..db3e9b770d6
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_max_versioin.csv
@@ -0,0 +1,3 @@
+1,Andy,3,3000,2012-01-03,digest3,1
+2,Becky,1,4000,2012-01-04,digest4,1
+3,Cathy,1,5000,2012-01-05,digest5,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv
new file mode 100644
index 00000000000..dab62a825e0
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/expected/expected_data4_fail_on_dups_no_versioning.csv
@@ -0,0 +1,5 @@
+1,Andy,1,1000,2012-01-01,digest1,1,1
+1,Andy,2,2000,2012-01-02,digest2,1,2
+1,Andy,3,3000,2012-01-03,digest3,1,3
+2,Becky,1,4000,2012-01-04,digest4,1,1
+3,Cathy,1,5000,2012-01-05,digest5,1,1
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv
new file mode 100644
index 00000000000..5d2b28c46a9
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data1_with_dups.csv
@@ -0,0 +1,6 @@
+1,Andy,1000,2012-01-01,digest1
+1,Andy,1000,2012-01-01,digest1
+1,Andy,1000,2012-01-01,digest1
+2,Becky,2000,2012-01-02,digest2
+2,Becky,2000,2012-01-02,digest2
+3,Cathy,3000,2012-01-03,digest3
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv
new file mode 100644
index 00000000000..24b93e93ed7
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data2_with_dups_no_data_error.csv
@@ -0,0 +1,6 @@
+1,Andy,1,1000,2012-01-01,digest1
+1,Andy,2,2000,2012-01-02,digest2
+1,Andy,3,3000,2012-01-03,digest3
+2,Becky,1,4000,2012-01-04,digest4
+2,Becky,1,4000,2012-01-04,digest4
+3,Cathy,1,5000,2012-01-05,digest5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv
new file mode 100644
index 00000000000..d2402f02f2e
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data3_with_dups_and_data_error.csv
@@ -0,0 +1,7 @@
+1,Andy,1,1000,2012-01-01,digest1
+1,Andy,2,2000,2012-01-02,digest2
+1,Andy,3,3000,2012-01-03,digest3
+2,Becky,1,4000,2012-01-04,digest4
+2,Becky,1,4000,2012-01-04,digest4
+3,Cathy,1,5000,2012-01-05,digest5
+3,Cathy,1,6000,2012-01-06,digest6
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv
new file mode 100644
index 00000000000..9f68e1a3650
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data4_without_dups_no_data_error.csv
@@ -0,0 +1,5 @@
+1,Andy,1,1000,2012-01-01,digest1
+1,Andy,2,2000,2012-01-02,digest2
+1,Andy,3,3000,2012-01-03,digest3
+2,Becky,1,4000,2012-01-04,digest4
+3,Cathy,1,5000,2012-01-05,digest5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data5_without_dups.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data5_without_dups.csv
new file mode 100644
index 00000000000..06a96f0cbc1
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/dedup-and-versioning/input/data5_without_dups.csv
@@ -0,0 +1,3 @@
+1,Andy,1000,2012-01-01,digest1
+2,Becky,2000,2012-01-02,digest2
+3,Cathy,3000,2012-01-03,digest3
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/has_from_time_only/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/empty_file.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/bitemporal-snapshot-milestoning/input/batch_id_based/has_from_time_only/staging_data_pass4.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/empty_file.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass1.csv
deleted file mode 100644
index a4b3d191440..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass1.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-HARRY,1000,2020-01-01
-ROBERT,2000,2020-01-02
-ANDY,3000,2020-01-03
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass2.csv
deleted file mode 100644
index aff27dff686..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/allow_duplicates/expected_pass2.csv
+++ /dev/null
@@ -1,6 +0,0 @@
-HARRY,1000,2020-01-01
-ROBERT,2000,2020-01-02
-ANDY,3000,2020-01-03
-HARRY,1000,2020-01-01
-ROBERT,2000,2020-01-02
-ANDY,3000,2020-01-03
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass1.csv
new file mode 100644
index 00000000000..e780c270f99
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass1.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:02.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:02.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,2000-01-01 00:00:02.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:03.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass2.csv
new file mode 100644
index 00000000000..b7f4ef8b6d5
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_filter_existing/expected_pass2.csv
@@ -0,0 +1,6 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:02.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:02.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,2000-01-01 00:00:02.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:03.0
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2,2000-01-01 00:00:06.0
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1,2000-01-01 00:00:06.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass1.csv
new file mode 100644
index 00000000000..e780c270f99
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass1.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:02.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:02.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,2000-01-01 00:00:02.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:03.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass2.csv
new file mode 100644
index 00000000000..1827545ff1d
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_all_version_filter_dup_no_filter_existing/expected_pass2.csv
@@ -0,0 +1,7 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:02.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:02.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,2000-01-01 00:00:02.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:03.0
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:06.0
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2,2000-01-01 00:00:06.0
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1,2000-01-01 00:00:06.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass1.csv
new file mode 100644
index 00000000000..8227e12357a
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass1.csv
@@ -0,0 +1,3 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:00.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass2.csv
new file mode 100644
index 00000000000..1f794ed0800
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_filter_existing/expected_pass2.csv
@@ -0,0 +1,5 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:00.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:00.0
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2,2000-01-02 00:00:00.123456
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1,2000-01-02 00:00:00.123456
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass1.csv
new file mode 100644
index 00000000000..8227e12357a
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass1.csv
@@ -0,0 +1,3 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:00.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass2.csv
new file mode 100644
index 00000000000..35c1fd7f597
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_max_version_filter_dup_no_filter_existing/expected_pass2.csv
@@ -0,0 +1,6 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2000-01-01 00:00:00.0
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2,2000-01-01 00:00:00.0
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2000-01-02 00:00:00.123456
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2,2000-01-02 00:00:00.123456
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1,2000-01-02 00:00:00.123456
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/with_update_timestamp_field/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_no_version_filter_dup_filter_existing/expected_pass1.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/with_update_timestamp_field/expected_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_no_version_filter_dup_filter_existing/expected_pass1.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_no_version_filter_dup_filter_existing/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_no_version_filter_dup_filter_existing/expected_pass2.csv
new file mode 100644
index 00000000000..c7b0757ee0f
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/auditing_no_version_filter_dup_filter_existing/expected_pass2.csv
@@ -0,0 +1,5 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.123456
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4,2000-01-02 00:00:00.123456
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass1.csv
new file mode 100644
index 00000000000..305d57e103e
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass1.csv
@@ -0,0 +1,5 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,ec557ebad89621a74ee47c6520bf7b74,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,45c518d26f7530c57290c3f609042d58,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,e030e08bd1d885dd79ff22ff40e77917,2000-01-01 00:00:00.0
+4,MICHEL,4000,2020-01-04 00:00:00.0,2022-12-04,9aa7c323d0ef36f5319cb0e1b3b5fb79,2000-01-01 00:00:00.0
+5,LIZA,5000,2020-01-05 00:00:00.0,2022-12-05,1169c3b3ca193e3b91d1af481d6030a7,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass2.csv
new file mode 100644
index 00000000000..baac02321a8
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/import_with_populate_digest/expected_pass2.csv
@@ -0,0 +1,6 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,ec557ebad89621a74ee47c6520bf7b74,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,45c518d26f7530c57290c3f609042d58,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,e030e08bd1d885dd79ff22ff40e77917,2000-01-01 00:00:00.0
+4,MICHEL,4000,2020-01-04 00:00:00.0,2022-12-04,9aa7c323d0ef36f5319cb0e1b3b5fb79,2000-01-01 00:00:00.0
+5,LIZA,5000,2020-01-05 00:00:00.0,2022-12-05,1169c3b3ca193e3b91d1af481d6030a7,2000-01-01 00:00:00.0
+6,BRAD,6000,2020-01-06 00:00:00.0,2022-12-06,c3c51a5f44766686c1ca456de687821c,2000-01-02 00:00:00.123456
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass1.csv
index 0cd30c5ef8e..8a7e93d21a6 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3
+1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass2.csv
index b873fc394be..261724f1d36 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/less_columns_in_staging/expected_pass2.csv
@@ -1,5 +1,6 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3
-3,ANDY,3100,2020-01-06 00:00:00.0,null,DIGEST3_UPDATED
-4,MATT,4000,2020-01-04 00:00:00.0,null,DIGEST4
\ No newline at end of file
+1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-02 00:00:00.123456
+3,ANDY,3100,2020-01-06 00:00:00.0,null,DIGEST3_UPDATED,2000-01-02 00:00:00.123456
+4,MATT,4000,2020-01-04 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.123456
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass1.csv
index 9d7b8b2d1b7..179b54e57d8 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+HARRY,1000,2022-12-01
+ROBERT,2000,2022-12-02
+ANDY,3000,2022-12-03
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass2.csv
index 4d3a4826e15..2c6727854a3 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/vanilla_case/expected_pass2.csv
@@ -1,5 +1,6 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-3,ANDY,3100,2020-01-06 00:00:00.0,2022-12-03,DIGEST3_UPDATED
-4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
\ No newline at end of file
+HARRY,1000,2022-12-01
+ROBERT,2000,2022-12-02
+ANDY,3000,2022-12-03
+ROBERT,2000,2022-12-02
+ANDY,3100,2022-12-03
+MATT,4000,2022-12-06
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/with_data_splits/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/with_data_splits/expected_pass1.csv
deleted file mode 100644
index ae342aa1142..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/expected/with_data_splits/expected_pass1.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-2000-01-01 00:00:02.0,1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2000-01-01 00:00:02.0,2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-2000-01-01 00:00:02.0,3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-2000-01-01 00:00:03.0,1,HARRISON,11000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4
-2000-01-01 00:00:03.0,2,ROBERTO,21000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass1.csv
deleted file mode 100644
index 351fdf33161..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass1.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-HARRY,1000,2020-01-01
-ROBERT,2000,2020-01-02
-ANDY,3000,2020-01-03
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass2.csv
deleted file mode 100644
index a4b3d191440..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/allow_duplicates/data_pass2.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-HARRY,1000,2020-01-01
-ROBERT,2000,2020-01-02
-ANDY,3000,2020-01-03
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/with_data_splits/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass1.csv
similarity index 58%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/with_data_splits/data_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass1.csv
index 26ff40ce596..35120edb7c4 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/with_data_splits/data_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass1.csv
@@ -1,5 +1,4 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1
-1,HARRISON,11000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2
-2,ROBERTO,21000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,2
\ No newline at end of file
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass2.csv
new file mode 100644
index 00000000000..9a2eb4ce32c
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_filter_existing/data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/input/with_data_splits/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass1.csv
similarity index 58%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/input/with_data_splits/data_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass1.csv
index 26ff40ce596..35120edb7c4 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/input/with_data_splits/data_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass1.csv
@@ -1,5 +1,4 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1
-1,HARRISON,11000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2
-2,ROBERTO,21000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,2
\ No newline at end of file
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass2.csv
new file mode 100644
index 00000000000..9a2eb4ce32c
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_all_version_filter_dup_no_filter_existing/data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass1.csv
new file mode 100644
index 00000000000..35120edb7c4
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass1.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass2.csv
new file mode 100644
index 00000000000..9a2eb4ce32c
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_filter_existing/data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass1.csv
new file mode 100644
index 00000000000..35120edb7c4
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass1.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1
+3,ANDY,4000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,2
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass2.csv
new file mode 100644
index 00000000000..9a2eb4ce32c
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_max_version_filter_dup_no_filter_existing/data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST5,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/with_update_timestamp_field/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_no_version_filter_dup_filter_existing/data_pass1.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/with_update_timestamp_field/data_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_no_version_filter_dup_filter_existing/data_pass1.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_no_version_filter_dup_filter_existing/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_no_version_filter_dup_filter_existing/data_pass2.csv
new file mode 100644
index 00000000000..cbfdfc4dd21
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/auditing_no_version_filter_dup_filter_existing/data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4
+4,SANDY,4000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass1.json b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass1.json
new file mode 100644
index 00000000000..d32d0bf5fd2
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass1.json
@@ -0,0 +1,37 @@
+[
+  {
+    "id": 1,
+    "name": "HARRY",
+    "income": 1000,
+    "start_time": "2020-01-01 00:00:00.0",
+    "expiry_date": "2022-12-01"
+  },
+  {
+    "id": 2,
+    "name": "ROBERT",
+    "income": 2000,
+    "start_time": "2020-01-02 00:00:00.0",
+    "expiry_date": "2022-12-02"
+  },
+  {
+    "id": 3,
+    "name": "ANDY",
+    "income": 3000,
+    "start_time": "2020-01-03 00:00:00.0",
+    "expiry_date": "2022-12-03"
+  },
+  {
+    "id": 4,
+    "name": "MICHEL",
+    "income": 4000,
+    "start_time": "2020-01-04 00:00:00.0",
+    "expiry_date": "2022-12-04"
+  },
+  {
+    "id": 5,
+    "name": "LIZA",
+    "income": 5000,
+    "start_time": "2020-01-05 00:00:00.0",
+    "expiry_date": "2022-12-05"
+  }
+]
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass2.json b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass2.json
new file mode 100644
index 00000000000..4e8e3c3c81a
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/import_with_populate_digest/data_pass2.json
@@ -0,0 +1,16 @@
+[
+  {
+    "id": 1,
+    "name": "HARRY",
+    "income": 1000,
+    "start_time": "2020-01-01 00:00:00.0",
+    "expiry_date": "2022-12-01"
+  },
+  {
+    "id": 6,
+    "name": "BRAD",
+    "income": 6000,
+    "start_time": "2020-01-06 00:00:00.0",
+    "expiry_date": "2022-12-06"
+  }
+]
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass1.csv
index 9d7b8b2d1b7..179b54e57d8 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+HARRY,1000,2022-12-01
+ROBERT,2000,2022-12-02
+ANDY,3000,2022-12-03
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass2.csv
index d7d8ccdcc2b..7bf2b920ce4 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-append-milestoning/input/vanilla_case/data_pass2.csv
@@ -1,3 +1,3 @@
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3100,2020-01-06 00:00:00.0,2022-12-03,DIGEST3_UPDATED
-4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
\ No newline at end of file
+ROBERT,2000,2022-12-02
+ANDY,3100,2022-12-03
+MATT,4000,2022-12-06
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass1.csv
new file mode 100644
index 00000000000..bc189099ff2
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass1.csv
@@ -0,0 +1,3 @@
+1,Andy,3,13000,2012-01-03,digest13
+2,Becky,1,21000,2012-02-01,digest21
+3,Cathy,1,31000,2012-03-01,digest31
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass2.csv
new file mode 100644
index 00000000000..b6e0765827d
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/digest_based/expected_pass2.csv
@@ -0,0 +1,4 @@
+1,Andy,2,12000,2012-01-02,digest12
+2,Becky,2,22000,2012-02-02,digest22
+3,Cathy,1,31000,2012-03-01,digest31
+4,Dexter,1,41000,2012-04-01,digest41
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass1.csv
new file mode 100644
index 00000000000..bc189099ff2
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass1.csv
@@ -0,0 +1,3 @@
+1,Andy,3,13000,2012-01-03,digest13
+2,Becky,1,21000,2012-02-01,digest21
+3,Cathy,1,31000,2012-03-01,digest31
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass2.csv
new file mode 100644
index 00000000000..a3af00b3312
--- /dev/null
+++
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/expected/with_staging_filter/with_all_version/greater_than/expected_pass2.csv @@ -0,0 +1,4 @@ +1,Andy,3,13000,2012-01-03,digest13 +2,Becky,2,22000,2012-02-02,digest22 +3,Cathy,1,31000,2012-03-01,digest31 +4,Dexter,1,41000,2012-04-01,digest41 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_duplicates/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_duplicates/data_pass1.csv new file mode 100644 index 00000000000..08f5d7a15ac --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_duplicates/data_pass1.csv @@ -0,0 +1,2 @@ +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/digest_based/data1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/digest_based/data1.csv new file mode 100644 index 00000000000..97a953361ef --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/digest_based/data1.csv @@ -0,0 +1,8 @@ +1,Andy,1,11000,2012-01-01,digest11,1 +1,Andy,2,12000,2012-01-02,digest12,1 +1,Andy,3,13000,2012-01-03,digest13,1 +2,Becky,1,21000,2012-02-01,digest21,1 +3,Cathy,1,31000,2012-03-01,digest31,1 +1,Andy,2,12000,2012-01-02,digest12,2 +2,Becky,2,22000,2012-02-02,digest22,2 +4,Dexter,1,41000,2012-04-01,digest41,2 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/greater_than/data1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/greater_than/data1.csv new file mode 100644 index 00000000000..64eae01aec9 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_all_version/greater_than/data1.csv @@ -0,0 +1,10 @@ +1,Andy,1,11000,2012-01-01,digest11,1 +1,Andy,2,12000,2012-01-02,digest12,1 +1,Andy,3,13000,2012-01-03,digest13,1 +2,Becky,1,21000,2012-02-01,digest21,1 +2,Becky,1,21000,2012-02-01,digest21,1 +3,Cathy,1,31000,2012-03-01,digest31,1 
+1,Andy,2,12000,2012-01-02,digest12,2
+1,Andy,2,12000,2012-01-02,digest12,2
+2,Becky,2,22000,2012-02-02,digest22,2
+4,Dexter,1,41000,2012-04-01,digest41,2
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than/with_dedup/data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than/with_dedup/data_pass3.csv
new file mode 100644
index 00000000000..fdf9e3fea09
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than/with_dedup/data_pass3.csv
@@ -0,0 +1,2 @@
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST47,7,7
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST47,7,7
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass1.csv
index 84ec7cae780..37ccb3a509c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass1.csv
@@ -4,3 +4,4 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,2
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2,2
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,5,2
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,5,2
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass2.csv
index 561a4e6f029..8748e2b4465 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/data_pass2.csv
@@ -12,5 +12,7 @@
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST23,3,3
 3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-03,DIGEST31,1,3
 3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-03,DIGEST33,5,4
+3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-03,DIGEST33,5,4
 4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST41,1,5
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST47,7,6
 4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST47,7,6
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_update_timestamp_field/data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_update_timestamp_field/data_pass1.csv
index 9d7b8b2d1b7..10814030657 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_update_timestamp_field/data_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/incremental-delta-milestoning/input/with_update_timestamp_field/data_pass1.csv
@@ -1,3 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass1.csv
index 9d7b8b2d1b7..e4921df1a11 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass2.csv
index 76b44d94f72..5ab3edf0db3 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/add_column_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.123456
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass1.csv
index 4e9e555440a..4d09b0772f5 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass2.csv
index 0e48a170f07..526c680fd9c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/column_nullability_change_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,null,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
\ No newline at end of file
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,null,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass1.csv
index c4c8a8fb29e..cbdc6309ace 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,null,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,null,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass2.csv
index 03a81404ca3..aa2c7d68969 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_column_nullability_change_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,null,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,MATT,null,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,null,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,MATT,null,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass1.csv
index caf2ced1aa5..9c90abbf159 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000.00,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000.00,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000.00,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,HARRY,1000.00,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000.00,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000.00,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass2.csv
index 457f71bc1f4..6676b07163f 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_and_data_type_size_change_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000.00,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000.00,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000.00,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,MATT,4000.00,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
+1,HARRY,1000.00,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000.00,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000.00,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,MATT,4000.00,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass1.csv
index 9d7b8b2d1b7..e4921df1a11 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass2.csv
index 76b44d94f72..f4da0dda441 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/data_type_conversion_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass1.csv
index 72cc5edbebc..f0ce511cc02 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
\ No newline at end of file
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass2.csv
index 61b3755ed1f..1d3e0db847b 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/datatype_type_size_change_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
\ No newline at end of file
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,MATT,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass1.csv
index 4ea8ae5aa6f..390bc643cdf 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass1.csv
@@ -1,3 +1,3 @@
-1,null,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,null,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
+1,null,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,null,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass2.csv
index 282156272f6..49917de2ef8 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/schema-evolution/expected/make_main_column_nullable_expected_pass2.csv
@@ -1,4 +1,4 @@
-1,null,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1
-2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2
-3,null,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-4,null,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4
+1,null,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0
+2,null,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0
+3,null,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0
+4,null,4000,2020-01-04 00:00:00.0,2022-12-06,DIGEST4,2000-01-03 00:00:00.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/max_version_filter_duplicates/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/max_version_filter_duplicates/expected_pass1.csv
new file mode 100644
index 00000000000..c8909eb5c9f
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/max_version_filter_duplicates/expected_pass1.csv
@@ -0,0 +1,3 @@
+1,Andy,3,3000,2012-01-03,digest3
+2,Becky,1,4000,2012-01-04,digest4
+3,Cathy,1,5000,2012-01-05,digest5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/with_data_splits/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/with_data_splits/expected_pass1.csv
deleted file mode 100644
index 296d0d6dd8c..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/snapshot-milestoning/expected/with_data_splits/expected_pass1.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3
-1,HARRISON,11000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4
-2,ROBERTO,21000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass1.csv
similarity index 80%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass1.csv
index 7237e360400..f1b14a6a58e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass1.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass1.csv
@@ -1,3 +1,3 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,5,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass2.csv
similarity index 62%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass2.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass2.csv
index 9166cca23da..a81247cc2d3 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass2.csv
@@ -1,7 +1,6 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,1,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,5,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST10,100,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST11,5,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST14,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST15,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass3.csv
similarity index 62%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass3.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass3.csv
index 9166cca23da..a81247cc2d3 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/expected_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/expected_pass3.csv
@@ -1,7 +1,6 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,1,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,5,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST10,100,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST11,5,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST14,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST15,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass1.csv
new file mode 100644
index 00000000000..8b835df00f3
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass1.csv
@@ -0,0 +1,6 @@
+1,Andy,1,11000,2012-01-01,digest11,1,1,1
+2,Becky,1,21000,2012-02-01,digest21,1,1,1
+3,Cathy,1,31000,2012-03-01,digest31,1,1,999999999
+1,Andy,2,12000,2012-01-02,digest12,1,2,2
+2,Becky,2,22000,2012-02-02,digest22,1,2,999999999
+1,Andy,3,13000,2012-01-03,digest13,1,3,999999999
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass2.csv
new file mode 100644
index 00000000000..39406f84aad
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/digest_based/expected_pass2.csv
@@ -0,0 +1,10 @@
+1,Andy,1,11000,2012-01-01,digest11,1,1,1
+2,Becky,1,21000,2012-02-01,digest21,1,1,1
+3,Cathy,1,31000,2012-03-01,digest31,1,1,999999999
+1,Andy,2,12000,2012-01-02,digest12,1,2,2
+2,Becky,2,22000,2012-02-02,digest22,1,2,4
+1,Andy,3,13000,2012-01-03,digest13,1,3,3
+1,Andy,2,12000,2012-01-02,digest12,2,4,999999999
+4,Dexter,1,41000,2012-04-01,digest41,2,4,4
+2,Becky,3,23000,2012-02-03,digest23,2,5,999999999
+4,Dexter,2,42000,2012-04-02,digest42,2,5,999999999
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass1.csv
new file mode 100644
index 00000000000..8b835df00f3
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass1.csv
@@ -0,0 +1,6 @@
+1,Andy,1,11000,2012-01-01,digest11,1,1,1
+2,Becky,1,21000,2012-02-01,digest21,1,1,1
+3,Cathy,1,31000,2012-03-01,digest31,1,1,999999999
+1,Andy,2,12000,2012-01-02,digest12,1,2,2
+2,Becky,2,22000,2012-02-02,digest22,1,2,999999999
+1,Andy,3,13000,2012-01-03,digest13,1,3,999999999
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass2.csv
new file mode 100644
index 00000000000..e88eb108eae
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/batch_id_based/with_all_version/greater_than/expected_pass2.csv
@@ -0,0 +1,9 @@
+1,Andy,1,11000,2012-01-01,digest11,1,1,1
+2,Becky,1,21000,2012-02-01,digest21,1,1,1
+3,Cathy,1,31000,2012-03-01,digest31,1,1,999999999
+1,Andy,2,12000,2012-01-02,digest12,1,2,2
+2,Becky,2,22000,2012-02-02,digest22,1,2,4
+1,Andy,3,13000,2012-01-03,digest13,1,3,999999999
+4,Dexter,1,41000,2012-04-01,digest41,2,4,4
+2,Becky,3,23000,2012-02-03,digest23,2,5,999999999
+4,Dexter,2,42000,2012-04-02,digest42,2,5,999999999
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv
index 015c2721e1c..6baa1086207 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,null,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+2,ROBERT,4000,2020-01-02 00:00:00.0,null,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass3.csv
index 015c2721e1c..6baa1086207 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/less_columns_in_staging/expected_pass3.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,null,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+2,ROBERT,4000,2020-01-02 00:00:00.0,null,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass2.csv
index 917080e73b3..7966d95c27e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass2.csv
@@ -1,6 +1,6 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-4,SAM,7000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-5,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST5,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+4,SAM,7000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+5,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST5,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass3.csv
index 917080e73b3..7966d95c27e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/with_delete_ind/expected_pass3.csv
@@ -1,6 +1,6 @@
-1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-4,SAM,7000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-5,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST5,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+4,SAM,7000,2020-01-04 00:00:00.0,2022-12-04,DIGEST4,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+5,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST5,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass2.csv
index 340cea0a74a..767bd85e86c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass2.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass3.csv
index 340cea0a74a..767bd85e86c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/expected/time_based/without_delete_ind/expected_pass3.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.0
\ No newline at end of file +2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 +4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/less_columns_in_staging/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/less_columns_in_staging/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/less_columns_in_staging/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/multi_table_ingestion/staging_dataset_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/multi_table_ingestion/staging_dataset_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/multi_table_ingestion/staging_dataset_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_delete_ind/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass1.csv index 6751bc6c308..0eed793beae 100644 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass1.csv @@ -1,3 +1,6 @@ 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2 +2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,5 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,5 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass4.csv new file mode 100644 index 00000000000..8eae7de4a66 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass4.csv @@ -0,0 +1,2 @@ +4,MATT,7000,2020-01-07 00:00:00.0,2022-12-07,DIGEST5,1 +4,MATT,7000,2020-01-07 00:00:00.0,2022-12-07,DIGEST5,1 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_optimization_filter/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_optimization_filter/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_optimization_filter/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass1.csv similarity index 83% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass1.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass1.csv index 2deae477a95..cf34912d6a6 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass1.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass1.csv @@ -3,4 +3,4 @@ 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,1 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,2 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,2 -3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,1,2 \ No newline at end of file +3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,5,2 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass2.csv similarity index 79% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass2.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass2.csv index bd59e145f6a..819b81a7600 100644 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/digest_based/staging_data_pass2.csv @@ -8,8 +8,8 @@ 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST8,2,3 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST9,3,3 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST10,100,3 -2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST11,5,3 -2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST12,4,3 -2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST13,3,3 +2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,5,3 +2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST4,4,3 +2,ROBERT,1000,2020-01-02 00:00:00.0,2022-12-02,DIGEST3,3,3 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST14,1,3 4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST15,1,3 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/with_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass1.csv index 4aaf96d9b8e..81428abcfa3 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass1.csv +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass1.csv @@ -2,5 +2,8 @@ 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,1 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,1,1 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,2 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,2 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST4,2,2 +2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,1,2 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST5,1,2 3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,1,2 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than/without_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/with_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass4.csv new file mode 100644 index 00000000000..25ba12fe9c4 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_max_versioning/greater_than_equal_to/without_dedup/staging_data_pass4.csv @@ -0,0 +1,2 @@ +4,MATT,7000,2020-01-07 00:00:00.0,2022-12-07,DIGEST8,1,4 +4,MATT,7000,2020-01-07 00:00:00.0,2022-12-07,DIGEST8,1,4 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_no_versioning/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_no_versioning/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/with_staging_filter/with_no_versioning/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/without_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/without_delete_ind/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_and_time_based/without_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/less_columns_in_staging/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/less_columns_in_staging/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/less_columns_in_staging/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_all_version/data1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_all_version/data1.csv new file mode 100644 index 00000000000..4802bef9db8 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_all_version/data1.csv @@ -0,0 +1,14 @@ +1,Andy,1,11000,2012-01-01,digest11,1 +1,Andy,2,12000,2012-01-02,digest12,1 +1,Andy,3,13000,2012-01-03,digest13,1 +2,Becky,1,21000,2012-02-01,digest21,1 +2,Becky,2,22000,2012-02-02,digest22,1 +3,Cathy,1,31000,2012-03-01,digest31,1 +1,Andy,2,12000,2012-01-02,digest12,2 +1,Andy,2,12000,2012-01-02,digest12,2 +2,Becky,2,22000,2012-02-02,digest22,2 +2,Becky,3,23000,2012-02-03,digest23,2 +4,Dexter,1,41000,2012-04-01,digest41,2 +4,Dexter,2,42000,2012-04-02,digest42,2 +5,Elena,1,51000,2012-05-01,digest51,3 +5,Elena,1,52000,2012-05-02,digest52,3 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_data_splits/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_data_splits/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_data_splits/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_delete_ind/staging_data_pass3.csv deleted 
file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/with_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass2.csv index 0d58c6909b0..1f269393f64 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass2.csv @@ -1,3 +1,6 @@ 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1 +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1 +2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED 2,ROBERT,4000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2_UPDATED 4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/batch_id_based/without_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/less_columns_in_staging/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/less_columns_in_staging/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/less_columns_in_staging/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/with_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/with_delete_ind/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/with_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass3.csv deleted file mode 100644 index 8b137891791..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass3.csv +++ /dev/null @@ -1 +0,0 @@ - diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass4.csv new file mode 100644 index 00000000000..a53cf448f1e --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-incremental-milestoning/input/time_based/without_delete_ind/staging_data_pass4.csv @@ -0,0 +1,2 @@ +4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4 +4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass1.csv new file mode 100644 index 00000000000..8d6910e9f83 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass1.csv @@ -0,0 +1,6 @@ +2021-12-01,GS,383.82,2476002,DIGEST3_UPDATED2,3,1,999999999,2000-01-01 00:00:00.0,9999-12-31 
23:59:59.0 +2021-12-01,IBM,116.92,5958300,DIGEST1,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-01,JPM,161.00,12253400,DIGEST2,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,GS,37800.00,3343700,DIGEST6,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,IBM,117.37,5267100,DIGEST4,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,JPMX,159.83,12969901,DIGEST5_UPDATED,2,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass2.csv new file mode 100644 index 00000000000..a51cfd15e9b --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass2.csv @@ -0,0 +1,8 @@ +2021-12-01,GS,383.82,2476002,DIGEST3_UPDATED2,3,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-01,IBM,116.92,5958300,DIGEST1,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-01,JPM,161.00,12253400,DIGEST2,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,GS,37800.00,3343700,DIGEST6,1,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2021-12-02,IBM,117.37,5267100,DIGEST4,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,JPMX,159.83,12969901,DIGEST5_UPDATED,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2021-12-02,GS,378.00,3343700,DIGEST8,2,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,JPM,159.83,12969900,DIGEST7,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass3.csv new file mode 100644 index 00000000000..a51cfd15e9b --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/max_version/expected_pass3.csv @@ -0,0 +1,8 @@ +2021-12-01,GS,383.82,2476002,DIGEST3_UPDATED2,3,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-01,IBM,116.92,5958300,DIGEST1,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-01,JPM,161.00,12253400,DIGEST2,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,GS,37800.00,3343700,DIGEST6,1,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2021-12-02,IBM,117.37,5267100,DIGEST4,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 
+2021-12-02,JPMX,159.83,12969901,DIGEST5_UPDATED,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2021-12-02,GS,378.00,3343700,DIGEST8,2,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,JPM,159.83,12969900,DIGEST7,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass1.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass1.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass1.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass2.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass2.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass2.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass3.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/expected_pass3.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/with_partition/no_version/expected_pass3.csv diff --git 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass1.csv new file mode 100644 index 00000000000..86afb1ffcf3 --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass1.csv @@ -0,0 +1,3 @@ +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST3,2,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +3,ANDY,3002,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,3,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass2.csv new file mode 100644 index 00000000000..b3d51119d7c --- /dev/null +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass2.csv @@ -0,0 +1,6 @@ +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,1,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST3,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +3,ANDY,3002,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,3,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST7,4,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 +4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST8,1,2,999999999,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass3.csv new file mode 100644 index 00000000000..053bdcfcfe9 --- /dev/null +++ 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/max_version/expected_pass3.csv @@ -0,0 +1,6 @@ +1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1,1,2,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST3,2,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +3,ANDY,3002,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,3,1,1,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1,2,2,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST7,4,2,2,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 +4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST8,1,2,2,2000-01-01 00:00:00.0,2000-01-01 00:00:00.0 \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass1.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass1.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass1.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass2.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass2.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass2.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass3.csv 
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass3.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass3.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass3.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass4.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass4.csv similarity index 100% rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/expected_pass4.csv rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/batch_id_and_time_based/without_partition/no_version/expected_pass4.csv diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv index 52ff36d9cec..5c99d4ae109 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/less_columns_in_staging/expected_pass2.csv @@ -1,5 +1,5 @@ 1,HARRY,1000,2020-01-01 00:00:00.0,null,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 2,ROBERT,2000,2020-01-02 00:00:00.0,null,DIGEST2,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 -3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0 -3,ANDY,3100,2020-01-03 00:00:00.0,null,DIGEST3_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0 -4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0 +3,ANDY,3000,2020-01-03 00:00:00.0,null,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456 +3,ANDY,3100,2020-01-03 
00:00:00.0,null,DIGEST3_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 +4,MATT,6000,2020-01-06 00:00:00.0,null,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass2.csv index e7232a18a95..2fe02593d27 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass2.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass2.csv @@ -2,7 +2,7 @@ 2021-12-01,JPM,161.00,12253400,DIGEST2,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 2021-12-01,GS,383.82,2476000,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 2021-12-02,IBM,117.37,5267100,DIGEST4,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 -2021-12-02,JPMX,159.83,12969900,DIGEST5,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0 -2021-12-02,GS,37800.00,3343700,DIGEST6,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0 -2021-12-02,JPM,159.83,12969900,DIGEST7,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0 -2021-12-02,GS,378.00,3343700,DIGEST8,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0 +2021-12-02,JPMX,159.83,12969900,DIGEST5,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456 +2021-12-02,GS,37800.00,3343700,DIGEST6,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456 +2021-12-02,JPM,159.83,12969900,DIGEST7,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 +2021-12-02,GS,378.00,3343700,DIGEST8,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass3.csv index e7232a18a95..2fe02593d27 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass3.csv +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/with_partition/expected_pass3.csv @@ -2,7 +2,7 @@ 2021-12-01,JPM,161.00,12253400,DIGEST2,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 2021-12-01,GS,383.82,2476000,DIGEST3,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 2021-12-02,IBM,117.37,5267100,DIGEST4,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0 -2021-12-02,JPMX,159.83,12969900,DIGEST5,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0 -2021-12-02,GS,37800.00,3343700,DIGEST6,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0 
-2021-12-02,JPM,159.83,12969900,DIGEST7,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-2021-12-02,GS,378.00,3343700,DIGEST8,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
+2021-12-02,JPMX,159.83,12969900,DIGEST5,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2021-12-02,GS,37800.00,3343700,DIGEST6,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+2021-12-02,JPM,159.83,12969900,DIGEST7,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+2021-12-02,GS,378.00,3343700,DIGEST8,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass2.csv
index 32412eb20f4..f0e990dd0eb 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass2.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass2.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST3_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST3_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass3.csv
index 32412eb20f4..f0e990dd0eb 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass3.csv
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/expected/time_based/without_partition/expected_pass3.csv
@@ -1,5 +1,5 @@
 1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
 2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,2000-01-01 00:00:00.0,9999-12-31 23:59:59.0
-3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.0
-3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST3_UPDATED,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
-4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.0,9999-12-31 23:59:59.0
\ No newline at end of file
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST3,2000-01-01 00:00:00.0,2000-01-02 00:00:00.123456
+3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST3_UPDATED,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
+4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST4,2000-01-02 00:00:00.123456,9999-12-31 23:59:59.0
\ No newline at end of file
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass1.csv
new file mode 100644
index 00000000000..aac15867141
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass1.csv
@@ -0,0 +1,9 @@
+2021-12-01,IBM,116.92,5958300,DIGEST1,1
+2021-12-01,JPM,161.00,12253400,DIGEST2,1
+2021-12-01,GS,383.82,2476000,DIGEST3,1
+2021-12-01,GS,383.82,2476001,DIGEST3_UPDATED1,2
+2021-12-01,GS,383.82,2476002,DIGEST3_UPDATED2,3
+2021-12-02,IBM,117.37,5267100,DIGEST4,1
+2021-12-02,JPMX,159.83,12969900,DIGEST5,1
+2021-12-02,JPMX,159.83,12969901,DIGEST5_UPDATED,2
+2021-12-02,GS,37800.00,3343700,DIGEST6,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass2.csv
new file mode 100644
index 00000000000..4110163a165
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/max_version/staging_data_pass2.csv
@@ -0,0 +1,4 @@
+2021-12-02,IBM,117.37,5267100,DIGEST4,1
+2021-12-02,JPM,159.83,12969900,DIGEST7,1
+2021-12-02,GS,378.00,3343700,DIGEST8,2
+2021-12-02,GS,378.00,3343700,DIGEST8,2
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/no_version/staging_data_pass1.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/no_version/staging_data_pass1.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/no_version/staging_data_pass2.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass2.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/no_version/staging_data_pass2.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/with_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass1.csv
new file mode 100644
index 00000000000..0f4408df251
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass1.csv
@@ -0,0 +1,6 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
+2,ROBERT,2001,2020-01-02 00:00:00.0,2022-12-02,DIGEST3,2
+3,ANDY,3000,2020-01-03 00:00:00.0,2022-12-03,DIGEST4,1
+3,ANDY,3001,2020-01-03 00:00:00.0,2022-12-03,DIGEST5,2
+3,ANDY,3002,2020-01-03 00:00:00.0,2022-12-03,DIGEST6,3
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass2.csv
new file mode 100644
index 00000000000..6cd3b417fe5
--- /dev/null
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/max_version/staging_data_pass2.csv
@@ -0,0 +1,4 @@
+1,HARRY,1000,2020-01-01 00:00:00.0,2022-12-01,DIGEST1,1
+2,ROBERT,2000,2020-01-02 00:00:00.0,2022-12-02,DIGEST2,1
+3,ANDY,3100,2020-01-03 00:00:00.0,2022-12-06,DIGEST7,4
+4,MATT,6000,2020-01-06 00:00:00.0,2022-12-06,DIGEST8,1
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass1.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/no_version/staging_data_pass1.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass1.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/no_version/staging_data_pass1.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass2.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/no_version/staging_data_pass2.csv
similarity index 100%
rename from legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass2.csv
rename to legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/no_version/staging_data_pass2.csv
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_and_time_based/without_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition_filter/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition_filter/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/with_partition_filter/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/without_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/without_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/batch_id_based/without_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/with_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/with_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/with_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/without_partition/staging_data_pass3.csv b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/without_partition/staging_data_pass3.csv
deleted file mode 100644
index 8b137891791..00000000000
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-h2/src/test/resources/data/unitemporal-snapshot-milestoning/input/time_based/without_partition/staging_data_pass3.csv
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/pom.xml
index 631f58c585b..35deaaa24ff 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/pom.xml
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/pom.xml
@@ -18,7 +18,7 @@
     <parent>
         <groupId>org.finos.legend.engine</groupId>
         <artifactId>legend-engine-xt-persistence-component</artifactId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java
index c79587954e9..20c714fa2a7 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/AppendOnlyTest.java
@@ -14,7 +14,6 @@
 package org.finos.legend.engine.persistence.components.ingestmode;
 
-import org.finos.legend.engine.persistence.components.AnsiTestArtifacts;
 import org.finos.legend.engine.persistence.components.common.StatisticName;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
 import org.finos.legend.engine.persistence.components.relational.api.DataSplitRange;
@@ -41,7 +40,7 @@ public RelationalSink getRelationalSink()
     }
 
     @Override
-    public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations)
+    public void verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -53,45 +52,36 @@ public void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations
         Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
 
         // Stats
-        verifyStats(operations);
-    }
-
-    @Override
-    public void verifyAppendOnlyAllowDuplicatesWithAuditing(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage)";
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQueryWithAuditAndNoPKs, preActionsSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
-
-        // Stats
-        verifyStats(operations);
+        Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
+        Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
+        Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED));
+        Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
+        Assertions.assertNull(operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED));
     }
 
     @Override
-    public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges)
+    public void verifyAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges)
     {
         String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage " +
-            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))";
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))";
 
         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, generatorResults.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, generatorResults.get(0).preActionsSql().get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, generatorResults.get(0).deduplicationAndVersioningSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, generatorResults.get(0).deduplicationAndVersioningSql().get(1));
+
         Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), generatorResults.get(0).ingestSql().get(0));
         Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), generatorResults.get(1).ingestSql().get(0));
         Assertions.assertEquals(2, generatorResults.size());
 
         // Stats
-        String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage " +
-            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
-        String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`staging` as stage " +
-            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
+        String incomingRecordCount = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
+        String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)";
 
         Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), generatorResults.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
         Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(1)), generatorResults.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
@@ -102,72 +92,23 @@ public void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List
-    @Override
-    public void verifyAppendOnlyFailOnDuplicatesNoAuditing(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage)";
-
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
-
-        // Stats
-        verifyStats(operations);
-    }
-
-    @Override
-    public void verifyAppendOnlyFailOnDuplicatesWithAuditing(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage)";
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableWithAuditNotPKCreateQuery, preActionsSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
-
-        // Stats
-        verifyStats(operations);
-    }
-
-    @Override
-    public void verifyAppendOnlyFilterDuplicatesNoAuditing(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `biz_date`, `digest`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
-            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
-            "(sink.`digest` = stage.`digest`))))";
-
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
-
-        // Stats
-        Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
-        Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
-        Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED));
-        Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
-    }
-
-    @Override
-    public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries)
+    public void verifyAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords(GeneratorResult queries)
     {
         List<String> preActionsSqlList = queries.preActionsSql();
         List<String> milestoningSqlList = queries.ingestSql();
+        List<String> deduplicationAndVersioningSql = queries.deduplicationAndVersioningSql();
 
         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' FROM `mydb`.`staging` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " +
-            "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " +
+            "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+
         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1));
+
         Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
 
         List<String> postActionsSql = queries.postActionsSql();
@@ -176,6 +117,7 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries
         assertIfListsAreSameIgnoringOrder(expectedSQL, postActionsSql);
 
         // Stats
+        String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
         String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)";
 
         Assertions.assertEquals(incomingRecordCount, queries.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
         Assertions.assertEquals(rowsUpdated, queries.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
@@ -185,26 +127,30 @@ public void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries
     }
 
     @Override
-    public void verifyAppendOnlyFilterDuplicatesWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage " +
-            "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
-            "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
-            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
-            "(sink.`digest` = stage.`digest`)))))";
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
+            "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
+            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
+            "(sink.`digest` = stage.`digest`)))))";
 
         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, operations.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit, operations.get(0).preActionsSql().get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, operations.get(0).deduplicationAndVersioningSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates, operations.get(0).deduplicationAndVersioningSql().get(1));
 
         Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0));
         Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0));
         Assertions.assertEquals(2, operations.size());
 
         // Stats
-        String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage " +
-            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
+        String incomingRecordCount = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
         String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)";
 
         Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCount, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
@@ -221,14 +167,13 @@ public void verifyAppendOnlyWithUpperCaseOptimizer(GeneratorResult operations)
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
-        String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`) " +
-            "(SELECT * FROM `MYDB`.`STAGING` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink " +
-            "WHERE ((sink.`ID` = stage.`ID`) " +
-            "AND (sink.`NAME` = stage.`NAME`)) " +
-            "AND (sink.`DIGEST` = stage.`DIGEST`))))";
+        String insertSql = "INSERT INTO `MYDB`.`MAIN` " +
+            "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_UPDATE_TIME`) " +
+            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,'2000-01-01 00:00:00.000000' FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " +
+            "WHERE NOT (EXISTS " +
+            "(SELECT * FROM `MYDB`.`MAIN` as sink WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQueryWithUpperCase, preActionsSqlList.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase, preActionsSqlList.get(0));
         Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
     }
 
@@ -238,23 +183,71 @@ public void verifyAppendOnlyWithLessColumnsInStaging(GeneratorResult operations)
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
 
-        String insertSql = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `digest`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
-            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
-            "(sink.`digest` = stage.`digest`))))";
+        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `digest`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`,'2000-01-01 00:00:00.000000' FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " +
+            "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
     }
 
-    private void verifyStats(GeneratorResult operations)
+    @Override
+    public void verifyAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords(GeneratorResult operations)
     {
+        List<String> preActionsSqlList = operations.preActionsSql();
+        List<String> milestoningSqlList = operations.ingestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+
+        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE ((sink.`id` = stage.`id`) AND " +
+            "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1));
+
+        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
+
+        // Stats
+        String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
+        String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)";
         Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
         Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
+        Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
+        Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED));
         Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED));
+    }
+
+    @Override
+    public void verifyAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords(GeneratorResult operations)
+    {
+        List<String> preActionsSqlList = operations.preActionsSql();
+        List<String> milestoningSqlList = operations.ingestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+
+        String insertSql = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' FROM `mydb`.`staging_legend_persistence_temp_staging` as stage)";
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTablePlusDigestWithCount, preActionsSqlList.get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1));
+
+        Assertions.assertEquals(insertSql, milestoningSqlList.get(0));
+
+        // Stats
+        String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
+        String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink WHERE sink.`batch_update_time` = (SELECT MAX(sink.`batch_update_time`) FROM `mydb`.`main` as sink)";
+        Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
+        Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
         Assertions.assertEquals(rowsDeleted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_DELETED));
         Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED));
+        Assertions.assertEquals(rowsTerminated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED));
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
index b47384fde55..38fd10f2401 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BitemporalDeltaSourceSpecifiesFromAndThroughTest.java
@@ -71,24 +71,24 @@ public void verifyBitemporalDeltaBatchIdDateTimeBasedNoDeleteIndWithDataSplits(L
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
-            "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " +
             "((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
             "((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
             "(sink.`validity_from_target` = stage.`validity_from_reference`) AND (sink.`digest` <> stage.`digest`)))";
 
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `validity_from_target`, " +
-            "`validity_through_target`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
+            "`validity_through_target`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`validity_through_reference`," +
-            "stage.`digest`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
-            "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "stage.`digest`,stage.`version`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
+            "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE " +
             "(sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
             "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
             "(sink.`validity_from_target` = stage.`validity_from_reference`)))) AND " +
             "((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')))";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
 
         Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0));
@@ -153,7 +153,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndNoDataSplits(Generator
     public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND " +
@@ -162,10 +162,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List
             "((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))";
 
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `validity_from_target`, `validity_through_target`, `digest`, " +
+            "(`id`, `name`, `amount`, `validity_from_target`, `validity_through_target`, `digest`, `version`, " +
             "`batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`," +
-            "stage.`validity_through_reference`,stage.`digest`,'2000-01-01 00:00:00'," +
+            "stage.`validity_through_reference`,stage.`digest`,stage.`version`,'2000-01-01 00:00:00.000000'," +
             "'9999-12-31 23:59:59' FROM `mydb`.`staging` as stage WHERE " +
             "((NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
             "AND (sink.`digest` = stage.`digest`) AND ((sink.`id` = stage.`id`) AND " +
@@ -173,7 +173,7 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List
             "((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) AND " +
             "(stage.`delete_indicator` NOT IN ('yes','1','true')))";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalMainTableWithDatetimeCreateQuery, operations.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery, operations.get(0).preActionsSql().get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
 
         Assertions.assertEquals(enrichSqlWithDataSplits(expectedMilestoneQuery, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0));
@@ -185,10 +185,10 @@ public void verifyBitemporalDeltaDatetimeBasedWithDeleteIndWithDataSplits(List
     {
         String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
-            "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
-            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+            "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
             "FROM " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
             "LEFT OUTER JOIN " +
             "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
             "FROM " +
@@ -132,10 +134,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplits(List
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`delete_indicator` NOT IN ('yes','1','true')) AND ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage WHERE (stage.`delete_indicator` NOT IN ('yes','1','true')) AND ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
             "LEFT OUTER JOIN " +
             "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
             "FROM " +
@@ -354,10 +360,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplits(List
             "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_through_target` > legend_persistence_x.`validity_from_target`) AND (legend_persistence_y.`delete_indicator` = 0) " +
             "WHERE legend_persistence_x.`delete_indicator` = 0 " +
-            "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
+            "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " +
             "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " +
             "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_through_target` > legend_persistence_x.`legend_persistence_start_date`) AND (legend_persistence_y.`validity_through_target` <= legend_persistence_x.`legend_persistence_end_date`) AND (legend_persistence_y.`delete_indicator` <> 0) " +
-            "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
+            "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
         Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2));
         Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3));
@@ -446,6 +452,9 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedStageToStageWithoutDuplicates = "INSERT INTO `mydb`.`stagingWithoutDuplicates` " +
-            "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `data_split`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`staging` as stage " +
+            "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `version`, `data_split`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`digest` = stage.`digest`) AND (sink.`batch_id_out` = 999999999))))";
 
         String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " +
-            "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
-            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+            "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
             "FROM " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`data_split` FROM `mydb`.`stagingWithoutDuplicates` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`data_split` FROM `mydb`.`stagingWithoutDuplicates` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as legend_persistence_x " +
             "LEFT OUTER JOIN " +
             "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " +
             "FROM " +
@@ -566,10 +575,10 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
             "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_x.`validity_from_reference` = legend_persistence_y.`legend_persistence_start_date`))";
 
         String expectedMainToTemp = "INSERT INTO `mydb`.`temp` " +
-            "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
-            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+            "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
             "FROM " +
-            "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
+            "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`version`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " +
             "INNER JOIN " +
             "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,legend_persistence_x.`legend_persistence_end_date` as `legend_persistence_end_date` " +
             "FROM " +
@@ -594,13 +603,13 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
             "AND (sink.`batch_id_out` = 999999999)";
 
         String expectedTempToMain = "INSERT INTO `mydb`.`main` " +
-            "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
-            "(SELECT temp.`id`,temp.`name`,temp.`amount`,temp.`digest`,temp.`batch_id_in`,temp.`batch_id_out`,temp.`validity_from_target`,temp.`validity_through_target` FROM `mydb`.`temp` as temp)";
+            "(`id`, `name`, `amount`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " +
+            "(SELECT temp.`id`,temp.`name`,temp.`amount`,temp.`digest`,temp.`version`,temp.`batch_id_in`,temp.`batch_id_out`,temp.`validity_from_target`,temp.`validity_through_target` FROM `mydb`.`temp` as temp)";
 
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, operations.get(0).preActionsSql().get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1));
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyTempTableCreateQuery, operations.get(0).preActionsSql().get(2));
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyTempTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(2));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery, operations.get(0).preActionsSql().get(3));
 
         Assertions.assertEquals(expectedStageToStageWithoutDuplicates, operations.get(0).ingestSql().get(0));
         Assertions.assertEquals(enrichSqlWithDataSplits(expectedStageToTemp, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1));
@@ -620,6 +629,9 @@ public void verifyBitemporalDeltaBatchIdBasedNoDeleteIndWithDataSplitsFilterDupl
         Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), operations.get(0).metadataIngestSql().get(0));
 
+        Assertions.assertEquals(getDropTempTableQuery("`mydb`.`temp`"), operations.get(0).postCleanupSql().get(0));
+        Assertions.assertEquals(getDropTempTableQuery("`mydb`.`stagingWithoutDuplicates`"), operations.get(0).postCleanupSql().get(1));
+
         Assertions.assertEquals(2, operations.size());
 
         String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
         verifyStats(operations.get(0), enrichSqlWithDataSplits(incomingRecordCount,dataSplitRanges.get(0)), rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
@@ -763,6 +775,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
             "`name` VARCHAR(256) NOT NULL," +
             "`amount` DOUBLE," +
             "`digest` VARCHAR(256)," +
+            "`version` INTEGER," +
             "`batch_id_in` INTEGER NOT NULL," +
             "`batch_id_out` INTEGER," +
             "`validity_from_target` DATETIME NOT NULL," +
@@ -774,6 +787,7 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
             "`name` VARCHAR(256) NOT NULL," +
             "`amount` DOUBLE," +
             "`digest` VARCHAR(256)," +
+            "`version` INTEGER," +
             "`batch_id_in` INTEGER NOT NULL," +
             "`batch_id_out` INTEGER," +
             "`validity_from_target` DATETIME NOT NULL," +
@@ -787,20 +801,21 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu
             "`amount` DOUBLE," +
             "`validity_from_reference` DATETIME NOT NULL," +
             "`digest` VARCHAR(256)," +
+            "`version` INTEGER," +
             "`delete_indicator` VARCHAR(256)," +
             "`data_split` BIGINT NOT NULL," +
             "PRIMARY KEY (`id`, `name`, `validity_from_reference`, `data_split`))";
 
         String expectedStageToStageWithoutDuplicates = "INSERT INTO " + stageWithoutDuplicatesName + " " +
-            "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `delete_indicator`, `data_split`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage " +
+            "(`id`, `name`, `amount`, `validity_from_reference`, `digest`, `version`, `delete_indicator`, `data_split`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest`,stage.`version`,stage.`delete_indicator`,stage.`data_split` FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`digest` = stage.`digest`) AND (sink.`batch_id_out` = 999999999))))";
 
         String expectedStageToTemp = "INSERT INTO " + tempName + " " +
-            "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
-            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+            "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " +
+            "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
             "FROM " +
-            "(SELECT legend_persistence_stageWithoutDuplicates.`id`,legend_persistence_stageWithoutDuplicates.`name`,legend_persistence_stageWithoutDuplicates.`amount`,legend_persistence_stageWithoutDuplicates.`validity_from_reference`,legend_persistence_stageWithoutDuplicates.`digest`,legend_persistence_stageWithoutDuplicates.`delete_indicator`,legend_persistence_stageWithoutDuplicates.`data_split` FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.`delete_indicator` NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " +
+            "(SELECT 
legend_persistence_stageWithoutDuplicates.`id`,legend_persistence_stageWithoutDuplicates.`name`,legend_persistence_stageWithoutDuplicates.`amount`,legend_persistence_stageWithoutDuplicates.`validity_from_reference`,legend_persistence_stageWithoutDuplicates.`digest`,legend_persistence_stageWithoutDuplicates.`version`,legend_persistence_stageWithoutDuplicates.`delete_indicator`,legend_persistence_stageWithoutDuplicates.`data_split` FROM " + stageWithoutDuplicatesName + " as legend_persistence_stageWithoutDuplicates WHERE (legend_persistence_stageWithoutDuplicates.`delete_indicator` NOT IN ('yes','1','true')) AND ((legend_persistence_stageWithoutDuplicates.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (legend_persistence_stageWithoutDuplicates.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}'))) as legend_persistence_x " + "LEFT OUTER JOIN " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`legend_persistence_start_date`),MIN(legend_persistence_x.`legend_persistence_end_date`)) as `legend_persistence_end_date` " + "FROM " + @@ -818,10 +833,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_x.`validity_from_reference` = legend_persistence_y.`legend_persistence_start_date`))"; String expectedMainToTemp = "INSERT INTO " + tempName + " " + - "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " + - "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " + + "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " + + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " + "FROM " + - "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " + + "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`version`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999) as legend_persistence_x " + "INNER JOIN " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`legend_persistence_start_date`,legend_persistence_x.`legend_persistence_end_date` as `legend_persistence_end_date` " + "FROM " + @@ -846,12 +861,12 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "AND 
(sink.`batch_id_out` = 999999999)"; String expectedTempToMain = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " + - "(SELECT legend_persistence_temp.`id`,legend_persistence_temp.`name`,legend_persistence_temp.`amount`,legend_persistence_temp.`digest`,legend_persistence_temp.`batch_id_in`,legend_persistence_temp.`batch_id_out`,legend_persistence_temp.`validity_from_target`,legend_persistence_temp.`validity_through_target` FROM " + tempName + " as legend_persistence_temp)"; + "(`id`, `name`, `amount`, `digest`, `version`, `batch_id_in`, `batch_id_out`, `validity_from_target`, `validity_through_target`) " + + "(SELECT legend_persistence_temp.`id`,legend_persistence_temp.`name`,legend_persistence_temp.`amount`,legend_persistence_temp.`digest`,legend_persistence_temp.`version`,legend_persistence_temp.`batch_id_in`,legend_persistence_temp.`batch_id_out`,legend_persistence_temp.`validity_from_target`,legend_persistence_temp.`validity_through_target` FROM " + tempName + " as legend_persistence_temp)"; String expectedMainToTempForDeletion = "INSERT INTO " + tempWithDeleteIndicatorName + " " + - "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `delete_indicator`) " + - "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_x.`validity_through_target` as `legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.`delete_indicator` IS NULL THEN 0 ELSE 1 END) " + + "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `delete_indicator`) " + + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_x.`validity_through_target` as `legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,(CASE WHEN legend_persistence_y.`delete_indicator` IS NULL THEN 0 ELSE 1 END) " + "FROM " + "(SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) " + "AND (EXISTS " + @@ -871,19 +886,19 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu "AND (sink.`batch_id_out` = 999999999)"; String expectedTempToMainForDeletion = "INSERT INTO `mydb`.`main` " + - "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " + - "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`legend_persistence_start_date` as `legend_persistence_start_date`,MAX(legend_persistence_y.`validity_through_target`) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` FROM " + - "(SELECT 
legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`validity_from_target`),'9999-12-31 23:59:59') as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` " + + "(`id`, `name`, `amount`, `version`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`) " + + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`legend_persistence_start_date` as `legend_persistence_start_date`,MAX(legend_persistence_y.`validity_through_target`) as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` FROM " + + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`version`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,COALESCE(MIN(legend_persistence_y.`validity_from_target`),'9999-12-31 23:59:59') as `legend_persistence_end_date`,legend_persistence_x.`batch_id_in`,legend_persistence_x.`batch_id_out` " + "FROM " + tempWithDeleteIndicatorName + " as legend_persistence_x " + "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " + "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_from_target` > legend_persistence_x.`validity_from_target`) AND (legend_persistence_y.`delete_indicator` = 0) " + "WHERE legend_persistence_x.`delete_indicator` = 0 " + - "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " + + "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`validity_from_target`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`) as legend_persistence_x " + "LEFT OUTER JOIN " + tempWithDeleteIndicatorName + " as legend_persistence_y " + "ON ((legend_persistence_x.`id` = legend_persistence_y.`id`) AND (legend_persistence_x.`name` = legend_persistence_y.`name`)) AND (legend_persistence_y.`validity_through_target` > legend_persistence_x.`legend_persistence_start_date`) AND (legend_persistence_y.`validity_through_target` <= legend_persistence_x.`legend_persistence_end_date`) AND (legend_persistence_y.`delete_indicator` <> 0) " + - "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)"; + "GROUP BY legend_persistence_x.`id`, legend_persistence_x.`name`, legend_persistence_x.`amount`, legend_persistence_x.`version`, legend_persistence_x.`digest`, legend_persistence_x.`legend_persistence_start_date`, legend_persistence_x.`batch_id_in`, legend_persistence_x.`batch_id_out`)"; - Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableCreateQuery, 
operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(MemsqlTestArtifacts.expectedBitemporalFromOnlyMainTableWithVersionCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, operations.get(0).preActionsSql().get(1)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableCreateQuery, operations.get(0).preActionsSql().get(2)); Assertions.assertEquals(expectedBitemporalFromOnlyDefaultTempTableWithDeleteIndicatorCreateQuery, operations.get(0).preActionsSql().get(3)); @@ -915,6 +930,10 @@ public void verifyBitemporalDeltaBatchIdBasedWithDeleteIndWithDataSplitsFilterDu Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), operations.get(0).metadataIngestSql().get(0)); + Assertions.assertEquals(getDropTempTableQuery("`mydb`.`main_legend_persistence_temp`"), operations.get(0).postCleanupSql().get(0)); + Assertions.assertEquals(getDropTempTableQuery("`mydb`.`main_legend_persistence_tempWithDeleteIndicator`"), operations.get(0).postCleanupSql().get(1)); + Assertions.assertEquals(getDropTempTableQuery("`mydb`.`staging_legend_persistence_stageWithoutDuplicates`"), operations.get(0).postCleanupSql().get(2)); + Assertions.assertEquals(2, operations.size()); String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`) AND (sink2.`validity_from_target` = sink.`validity_from_target`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))"; @@ -1005,7 +1024,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " + "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`," + - "legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "legend_persistence_y.`legend_persistence_end_date`,(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest` FROM `mydb`.`staging` as stage) as legend_persistence_x " + "LEFT OUTER JOIN " + @@ -1027,7 +1046,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedMainToTemp = "INSERT INTO `mydb`.`temp` " + "(`id`, 
`name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_id_in`,sink.`batch_id_out`,sink.`batch_time_in`," + "sink.`batch_time_out`,sink.`validity_from_target`,sink.`validity_through_target` FROM `mydb`.`main` as sink " + @@ -1051,7 +1070,7 @@ public void verifyBitemporalDeltaBatchIdAndTimeBasedNoDeleteIndNoDataSplits(Gene String expectedUpdateMain = "UPDATE `mydb`.`main` as sink SET " + "sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," + - "sink.`batch_time_out` = '2000-01-01 00:00:00' " + + "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " + "WHERE (EXISTS " + "(SELECT * FROM `mydb`.`temp` as temp WHERE ((sink.`id` = temp.`id`) AND (sink.`name` = temp.`name`)) " + "AND (sink.`validity_from_target` = temp.`validity_from_target`))) AND (sink.`batch_id_out` = 999999999)"; @@ -1086,7 +1105,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR String expectedStageToTemp = "INSERT INTO `mydb`.`temp` " + "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_time_in`, `batch_time_out`) " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`,legend_persistence_x.`validity_from_reference` as `legend_persistence_start_date`," + - "legend_persistence_y.`legend_persistence_end_date`,'2000-01-01 00:00:00','9999-12-31 23:59:59' " + + "legend_persistence_y.`legend_persistence_end_date`,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " + "FROM " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`validity_from_reference`,stage.`digest` FROM `mydb`.`staging` as stage) as legend_persistence_x " + "LEFT OUTER JOIN " + @@ -1109,7 +1128,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR "(`id`, `name`, `amount`, `digest`, `validity_from_target`, `validity_through_target`, `batch_time_in`, `batch_time_out`) " + "(SELECT legend_persistence_x.`id`,legend_persistence_x.`name`,legend_persistence_x.`amount`,legend_persistence_x.`digest`," + "legend_persistence_x.`validity_from_target` as `legend_persistence_start_date`,legend_persistence_y.`legend_persistence_end_date`," + - "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM (SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_time_in`," + + "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM (SELECT sink.`id`,sink.`name`,sink.`amount`,sink.`digest`,sink.`batch_time_in`," + "sink.`batch_time_out`,sink.`validity_from_target`,sink.`validity_through_target` " + "FROM `mydb`.`main` as sink WHERE 
sink.`batch_time_out` = '9999-12-31 23:59:59') as legend_persistence_x " + "INNER JOIN " + @@ -1132,7 +1151,7 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR "AND (legend_persistence_x.`validity_from_target` = legend_persistence_y.`legend_persistence_start_date`))"; String expectedUpdateMain = "UPDATE `mydb`.`main` as sink SET " + - "sink.`batch_time_out` = '2000-01-01 00:00:00' " + + "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " + "WHERE (EXISTS (SELECT * FROM `mydb`.`temp` as temp WHERE " + "((sink.`id` = temp.`id`) AND (sink.`name` = temp.`name`)) AND " + "(sink.`validity_from_target` = temp.`validity_from_target`))) AND (sink.`batch_time_out` = '9999-12-31 23:59:59')"; @@ -1154,8 +1173,8 @@ public void verifyBitemporalDeltaDateTimeBasedNoDeleteIndNoDataSplits(GeneratorR Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0)); String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; - String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00'"; - String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00') as `rowsInserted`"; + String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000'"; + String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000') as `rowsInserted`"; verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java index bd8903a0df6..4fcb9f97f5a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java @@ -54,7 +54,7 @@ public class IngestModeTest String[] partitionKeys = new String[]{"biz_date"}; HashMap<String, Set<String>> partitionFilter = new HashMap<String, Set<String>>() {{ - put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00", "2000-01-02 00:00:00"))); + put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00.000000", "2000-01-02 00:00:00"))); }}; // Base Columns: Primary keys : id, name @@ -167,9 +167,9 @@ public class IngestModeTest "`BATCH_STATUS` VARCHAR(32)," + "`TABLE_BATCH_ID` INTEGER)"; - protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) (SELECT 'main',(SELECT 
COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; - protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.`TABLE_NAME` = 'main'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`) (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.`TABLE_NAME` = 'main'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; String expectedStagingCleanupQuery = "DELETE FROM `mydb`.`staging` as stage"; diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/MemsqlTestArtifacts.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/MemsqlTestArtifacts.java index 81bf04a1f2e..12cb43e9b10 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/MemsqlTestArtifacts.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/MemsqlTestArtifacts.java @@ -62,6 +62,30 @@ public class MemsqlTestArtifacts "`version` INTEGER," + "PRIMARY KEY (`id`, `name`))"; + public static String expectedBaseTempStagingTableWithCount = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INTEGER NOT NULL," + + "`name` VARCHAR(256) NOT NULL," + + "`amount` DOUBLE," + + "`biz_date` DATE," + + "`legend_persistence_count` INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCount = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INTEGER NOT NULL," + + "`name` VARCHAR(256) NOT NULL," + + "`amount` DOUBLE," + + "`biz_date` DATE," + + "`digest` VARCHAR(256)," + + "`legend_persistence_count` INTEGER)"; + + public static String expectedBaseTempStagingTablePlusDigestWithCountAndDataSplit = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`staging_legend_persistence_temp_staging`" + + "(`id` INTEGER NOT NULL," + + "`name` VARCHAR(256) NOT NULL," + + "`amount` DOUBLE," + + "`biz_date` DATE," + + "`digest` VARCHAR(256)," + + "`legend_persistence_count` INTEGER," + + "`data_split` 
INTEGER NOT NULL)"; + public static String expectedBaseTablePlusDigestPlusVersionCreateQueryUpperCase = "CREATE REFERENCE TABLE IF NOT EXISTS `MYDB`.`MAIN`(" + "`ID` INTEGER NOT NULL," + "`NAME` VARCHAR(256) NOT NULL," + @@ -113,6 +137,15 @@ public class MemsqlTestArtifacts "`batch_update_time` DATETIME NOT NULL," + "PRIMARY KEY (`id`, `name`, `batch_update_time`))"; + public static String expectedBaseTablePlusDigestPlusUpdateTimestampCreateQueryUpperCase = "CREATE REFERENCE TABLE IF NOT EXISTS `MYDB`.`MAIN`(" + + "`ID` INTEGER NOT NULL," + + "`NAME` VARCHAR(256) NOT NULL," + + "`AMOUNT` DOUBLE," + + "`BIZ_DATE` DATE," + + "`DIGEST` VARCHAR(256)," + + "`BATCH_UPDATE_TIME` DATETIME NOT NULL," + + "PRIMARY KEY (`ID`, `NAME`, `BATCH_UPDATE_TIME`))"; + public static String expectedBaseTableWithAuditNotPKCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`(" + "`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT NULL," + @@ -131,6 +164,7 @@ public class MemsqlTestArtifacts "PRIMARY KEY (`id`, `name`, `batch_update_time`))"; public static String expectedStagingCleanupQuery = "DELETE FROM `mydb`.`staging` as stage"; + public static String expectedTempStagingCleanupQuery = "DELETE FROM `mydb`.`staging_legend_persistence_temp_staging` as stage"; public static String expectedDropTableQuery = "DROP TABLE IF EXISTS `mydb`.`staging` CASCADE"; @@ -162,10 +196,10 @@ public class MemsqlTestArtifacts "`BATCH_ID_IN` INTEGER NOT NULL,`BATCH_ID_OUT` INTEGER,PRIMARY KEY (`ID`, `NAME`, `BATCH_ID_IN`))"; public static String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`)" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; public static String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (`TABLE_NAME`, `TABLE_BATCH_ID`, `BATCH_START_TS_UTC`, `BATCH_END_TS_UTC`, `BATCH_STATUS`)" + - " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE')"; + " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE')"; public static String expectedMetadataTableIngestQueryWithPlaceHolders = "INSERT INTO batch_metadata (`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`) " + "(SELECT 'main',{BATCH_ID_PATTERN},'{BATCH_START_TS_PATTERN}','{BATCH_END_TS_PATTERN}','DONE')"; @@ -222,11 +256,12 @@ public class MemsqlTestArtifacts "`digest` VARCHAR(256)," + "PRIMARY KEY (`id`, `name`, `validity_from_reference`))"; - public static String expectedBitemporalMainTableWithBatchIdDatetimeCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`" + + public static String expectedBitemporalMainTableWithVersionWithBatchIdDatetimeCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`" + "(`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT 
NULL," + "`amount` DOUBLE," + "`digest` VARCHAR(256)," + + "`version` INTEGER," + "`batch_id_in` INTEGER NOT NULL," + "`batch_id_out` INTEGER," + "`batch_time_in` DATETIME," + @@ -235,11 +270,12 @@ public class MemsqlTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; - public static String expectedBitemporalMainTableWithDatetimeCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`" + + public static String expectedBitemporalMainTableWithVersionBatchDateTimeCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`" + "(`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT NULL," + "`amount` DOUBLE," + "`digest` VARCHAR(256)," + + "`version` INTEGER," + "`batch_time_in` DATETIME NOT NULL," + "`batch_time_out` DATETIME," + "`validity_from_target` DATETIME NOT NULL," + @@ -257,6 +293,18 @@ public class MemsqlTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; + public static String expectedBitemporalFromOnlyMainTableWithVersionCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`main`" + + "(`id` INTEGER NOT NULL," + + "`name` VARCHAR(256) NOT NULL," + + "`amount` DOUBLE," + + "`digest` VARCHAR(256)," + + "`version` INTEGER," + + "`batch_id_in` INTEGER NOT NULL," + + "`batch_id_out` INTEGER," + + "`validity_from_target` DATETIME NOT NULL," + + "`validity_through_target` DATETIME," + + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; + public static String expectedBitemporalFromOnlyStagingTableCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`staging`" + "(`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT NULL," + @@ -310,6 +358,18 @@ public class MemsqlTestArtifacts "`validity_through_target` DATETIME," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; + public static String expectedBitemporalFromOnlyTempTableWithVersionCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`temp`" + + "(`id` INTEGER NOT NULL," + + "`name` VARCHAR(256) NOT NULL," + + "`amount` DOUBLE," + + "`digest` VARCHAR(256)," + + "`version` INTEGER," + + "`batch_id_in` INTEGER NOT NULL," + + "`batch_id_out` INTEGER," + + "`validity_from_target` DATETIME NOT NULL," + + "`validity_through_target` DATETIME," + + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; + public static String expectedBitemporalFromOnlyTempTableBatchIdAndTimeBasedCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`temp`(" + "`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT NULL," + @@ -354,12 +414,13 @@ public class MemsqlTestArtifacts "`delete_indicator` VARCHAR(256)," + "PRIMARY KEY (`id`, `name`, `batch_id_in`, `validity_from_target`))"; - public static String expectedBitemporalFromOnlyStageWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`stagingWithoutDuplicates`" + + public static String expectedBitemporalFromOnlyStageWithVersionWithDataSplitWithoutDuplicatesTableCreateQuery = "CREATE REFERENCE TABLE IF NOT EXISTS `mydb`.`stagingWithoutDuplicates`" + "(`id` INTEGER NOT NULL," + "`name` VARCHAR(256) NOT NULL," + "`amount` DOUBLE," + "`validity_from_reference` DATETIME NOT NULL," + "`digest` VARCHAR(256)," + + "`version` INTEGER," + "`data_split` BIGINT NOT NULL," + "PRIMARY KEY (`id`, `name`, `validity_from_reference`, `data_split`))"; @@ -372,4 +433,60 @@ public class MemsqlTestArtifacts "`delete_indicator` VARCHAR(256)," + "PRIMARY KEY (`id`, 
`name`, `validity_from_reference`))"; + public static String expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`legend_persistence_count` as `legend_persistence_count` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`legend_persistence_count` as `legend_persistence_count`," + + "DENSE_RANK() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,COUNT(*) as `legend_persistence_count` FROM " + + "`mydb`.`staging` as stage GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`) as stage) " + + "as stage WHERE stage.`legend_persistence_rank` = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + + "COUNT(*) as `legend_persistence_count` FROM `mydb`.`staging` as stage " + + "GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count`,DENSE_RANK() OVER " + + "(PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,COUNT(*) as `legend_persistence_count` FROM " + + "`mydb`.`staging` as stage GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`) as stage) as stage " + + "WHERE stage.`legend_persistence_rank` = 1)"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`, `data_split`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`legend_persistence_count` as `legend_persistence_count`,DENSE_RANK() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` ASC) as `data_split` " + + "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,COUNT(*) as `legend_persistence_count` FROM `mydb`.`staging` as stage " + + "GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`) as stage)"; + + public static String maxDupsErrorCheckSql = "SELECT MAX(stage.`legend_persistence_count`) as `MAX_DUPLICATES` FROM " + + "`mydb`.`staging_legend_persistence_temp_staging` as stage"; + + public static String dataErrorCheckSql = "SELECT MAX(`legend_persistence_distinct_rows`) as `MAX_DATA_ERRORS` FROM " + + "(SELECT 
COUNT(DISTINCT(`digest`)) as `legend_persistence_distinct_rows` FROM " + + "`mydb`.`staging_legend_persistence_temp_staging` as stage GROUP BY `id`, `name`, `biz_date`) as stage"; + + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates = "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,DENSE_RANK() " + + "OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`biz_date` DESC) as `legend_persistence_rank` " + + "FROM `mydb`.`staging` as stage) as stage WHERE stage.`legend_persistence_rank` = 1)"; + + public static String expectedTempStagingCleanupQueryInUpperCase = "DELETE FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage"; + public static String expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicatesUpperCase = "INSERT INTO `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` " + + "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `LEGEND_PERSISTENCE_COUNT`) " + + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`LEGEND_PERSISTENCE_COUNT` as `LEGEND_PERSISTENCE_COUNT` " + + "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`," + + "stage.`LEGEND_PERSISTENCE_COUNT` as `LEGEND_PERSISTENCE_COUNT`," + + "DENSE_RANK() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`BIZ_DATE` DESC) as `LEGEND_PERSISTENCE_RANK` " + + "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,COUNT(*) as `LEGEND_PERSISTENCE_COUNT` " + + "FROM `MYDB`.`STAGING` as stage GROUP BY stage.`ID`, stage.`NAME`, stage.`AMOUNT`, stage.`BIZ_DATE`, stage.`DIGEST`) as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_RANK` = 1)"; + public static String dataErrorCheckSqlUpperCase = "SELECT MAX(`LEGEND_PERSISTENCE_DISTINCT_ROWS`) as `MAX_DATA_ERRORS` " + + "FROM (SELECT COUNT(DISTINCT(`DIGEST`)) as `LEGEND_PERSISTENCE_DISTINCT_ROWS` " + + "FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage GROUP BY `ID`, `NAME`, `BIZ_DATE`) as stage"; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java index a608045e934..e5daeed87f7 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaTest.java @@ -30,11 +30,18 @@ public class NontemporalDeltaTest extends NontemporalDeltaTestCases protected String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage"; protected String incomingRecordCountWithSplits = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage WHERE 
" + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + + protected String incomingRecordCountWithSplitsAndDuplicates = "SELECT COALESCE(SUM(stage.`legend_persistence_count`),0) as `incomingRecordCount` " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " + + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; + + protected String incomingRecordCountWithSplitsTempTable = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " + + "(stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')"; protected String rowsTerminated = "SELECT 0 as `rowsTerminated`"; protected String rowsDeleted = "SELECT 0 as `rowsDeleted`"; @Override - public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -50,7 +57,7 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio String insertSql = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`) " + - "(SELECT * FROM `mydb`.`staging` as stage " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))"; @@ -66,25 +73,25 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio } @Override - public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String updateSql = "UPDATE `mydb`.`main` as sink " + - "INNER JOIN `mydb`.`staging` as stage " + + "INNER JOIN `mydb`.`staging_legend_persistence_temp_staging` as stage " + "ON ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`) " + "SET sink.`id` = stage.`id`," + "sink.`name` = stage.`name`," + "sink.`amount` = stage.`amount`," + "sink.`biz_date` = stage.`biz_date`," + "sink.`digest` = stage.`digest`," + - "sink.`batch_update_time` = '2000-01-01 00:00:00'"; + "sink.`batch_update_time` = '2000-01-01 00:00:00.000000'"; String insertSql = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' " + - "FROM `mydb`.`staging` as stage " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))"; @@ -99,7 +106,39 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat } @Override - public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List operations, List dataSplitRanges) + public void 
@Override - public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) + { + String updateSql = "UPDATE `mydb`.`main` as sink " + + "INNER JOIN " + + "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as stage " + + "ON ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`) " + + "SET sink.`id` = stage.`id`," + + "sink.`name` = stage.`name`," + + "sink.`amount` = stage.`amount`," + + "sink.`biz_date` = stage.`biz_date`," + + "sink.`digest` = stage.`digest`"; + + String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`) " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + + "AND (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; + + Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(1)); + + Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(1)); + + // Stats + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempTable, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempTable, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsTerminated, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + } + + @Override + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String updateSql = "UPDATE `mydb`.`main` as sink " + "INNER JOIN " + @@ -122,7 +161,7 @@ public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) Assertions.assertEquals(enrichSqlWithDataSplits(updateSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(insertSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(1)); - + // Stats Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); @@ -131,21 +170,21 @@ }
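The NoDedupAllVersion pair above exercises all-version ingestion: each distinct version of a key gets a data_split number via DENSE_RANK ordered ascending (see expectedInsertIntoBaseTempStagingPlusDigestWithAllVersionAndFilterDuplicates), and the same UPDATE/INSERT SQL is replayed once per data-split range, oldest version first. A rough stand-alone illustration of that ranking; Row and the key encoding are assumptions for the sketch:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.stream.Collectors;

public class AllVersionSketch
{
    record Row(int id, String name, String bizDate) {}

    public static void main(String[] args)
    {
        List<Row> staging = List.of(
            new Row(1, "a", "2021-01-02"),
            new Row(1, "a", "2021-01-01"),
            new Row(2, "b", "2021-01-01"));

        // ~ DENSE_RANK() OVER (PARTITION BY id, name ORDER BY biz_date ASC) as data_split:
        // distinct versions per key, sorted ascending.
        Map<String, TreeSet<String>> versionsPerKey = staging.stream().collect(
            Collectors.groupingBy(r -> r.id() + "|" + r.name(),
                Collectors.mapping(Row::bizDate, Collectors.toCollection(TreeSet::new))));

        for (Row r : staging)
        {
            int dataSplit = new ArrayList<>(versionsPerKey.get(r.id() + "|" + r.name())).indexOf(r.bizDate()) + 1;
            System.out.println(r + " data_split=" + dataSplit); // batch 1 = oldest version, and so on
        }
    }
}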
@Override - public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) + public void verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) { String updateSql = "UPDATE `mydb`.`main` as sink " + "INNER JOIN " + - "(SELECT * FROM `mydb`.`staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as stage " + + "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) as stage " + "ON ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`) SET " + "sink.`id` = stage.`id`," + "sink.`name` = stage.`name`," + "sink.`amount` = stage.`amount`," + "sink.`biz_date` = stage.`biz_date`," + "sink.`digest` = stage.`digest`," + - "sink.`batch_update_time` = '2000-01-01 00:00:00'"; + "sink.`batch_update_time` = '2000-01-01 00:00:00.000000'"; String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_update_time`) " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00' FROM `mydb`.`staging` as stage " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,'2000-01-01 00:00:00.000000' FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + "AND (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; @@ -156,14 +195,14 @@ public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges) List<String> preActionsSqlList = operations.preActionsSql(); List<String> milestoningSqlList = operations.ingestSql(); @@ -184,8 +223,8 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(Gener "WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))"; String deleteSql = "DELETE FROM `mydb`.`main` as sink " + - "WHERE EXISTS (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` " + - "FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " + + "WHERE EXISTS (" + + "SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " + "AND (sink.`digest` = stage.`digest`) AND (stage.`delete_indicator` IN ('yes','1','true')))"; Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestCreateQuery, preActionsSqlList.get(0)); @@ -215,7 +254,7 @@ public void verifyNontemporalDeltaWithUpperCaseOptimizer(GeneratorResult operati "sink.`DIGEST` = stage.`DIGEST`"; String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`) " + - "(SELECT * FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink " + + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST` FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink " + "WHERE (sink.`ID` = stage.`ID`) " + "AND (sink.`NAME` = stage.`NAME`))))"; @@ -239,7 +278,7 @@ public void verifyNontemporalDeltaWithLessColumnsInStaging(GeneratorResult opera "sink.`digest` = stage.`digest`"; String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `digest`) "
+ - "(SELECT * FROM `mydb`.`staging` as stage " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest` FROM `mydb`.`staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))"; @@ -272,7 +311,7 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult String insertSql = "INSERT INTO `mydb`.`main` " + "(`id`, `name`, `amount`, `biz_date`, `digest`) " + - "(SELECT * FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest` FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS " + "(SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND " + "(sink.`name` = stage.`name`)))) AND ((stage.`biz_date` > '2020-01-01') AND (stage.`biz_date` < '2020-01-03')))"; @@ -288,23 +327,19 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(GeneratorResult operations) + public void verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String updateSql = "UPDATE `mydb`.`main` as sink " + "INNER JOIN " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`,ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` FROM `mydb`.`staging` as stage WHERE stage.`snapshot_id` > 18972) as stage WHERE stage.`legend_persistence_row_num` = 1) as stage " + + "`mydb`.`staging_legend_persistence_temp_staging` as stage " + "ON ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (stage.`version` > sink.`version`) " + "SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest`,sink.`version` = stage.`version`"; String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `version`) " + - "(SELECT * FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM " + - "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`,ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` FROM `mydb`.`staging` as stage " + - "WHERE stage.`snapshot_id` > 18972) as stage " + - "WHERE stage.`legend_persistence_row_num` = 1) as stage " + + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))"; Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQuery, preActionsSqlList.get(0)); @@ -319,7 +354,7 @@ public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(Ge } @Override - public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(GeneratorResult operations) { List 
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -332,7 +367,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene
         String insertSql = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `version`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM `mydb`.`staging` as stage WHERE (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))) " +
             "AND (stage.`snapshot_id` > 18972))";
@@ -348,7 +383,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene
     }

     @Override
-    public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations)
+    public void verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
@@ -359,7 +394,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(
             "SET sink.`id` = stage.`id`,sink.`name` = stage.`name`,sink.`amount` = stage.`amount`,sink.`biz_date` = stage.`biz_date`,sink.`digest` = stage.`digest`,sink.`version` = stage.`version`";

         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `version`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQuery, preActionsSqlList.get(0));
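The removed assertions in this file expected max-versioning to be computed inline with a ROW_NUMBER window over the raw staging table; the reworked generator stages that result in the temp table instead. For reference, the inline pattern the old strings encoded, in isolation (column names exactly as used in these tests):

public class MaxVersionRowNumberSketch
{
    public static void main(String[] args)
    {
        // Rank rows per primary key, newest version first, and keep only rank 1.
        String maxVersionPerKey =
            "SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` FROM " +
            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," +
            "ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` " +
            "FROM `mydb`.`staging` as stage) as stage " +
            "WHERE stage.`legend_persistence_row_num` = 1";
        System.out.println(maxVersionPerKey);
    }
}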
@@ -373,22 +408,19 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(
     }

     @Override
-    public void verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations)
+    public void verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();

         String updateSql = "UPDATE `MYDB`.`MAIN` as sink " +
             "INNER JOIN " +
-            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` FROM " +
-            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`,ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) as `LEGEND_PERSISTENCE_ROW_NUM` FROM `MYDB`.`STAGING` as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " +
+            "`MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " +
             "ON ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (stage.`VERSION` >= sink.`VERSION`) " +
             "SET sink.`ID` = stage.`ID`,sink.`NAME` = stage.`NAME`,sink.`AMOUNT` = stage.`AMOUNT`,sink.`BIZ_DATE` = stage.`BIZ_DATE`,sink.`DIGEST` = stage.`DIGEST`,sink.`VERSION` = stage.`VERSION`";

         String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `VERSION`) " +
-            "(SELECT * FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` FROM " +
-            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`,ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) as `LEGEND_PERSISTENCE_ROW_NUM` FROM `MYDB`.`STAGING` as stage) as stage " +
-            "WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " +
+            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`))))";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTablePlusDigestPlusVersionCreateQueryUpperCase, preActionsSqlList.get(0));
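The `staging_legend_persistence_temp_staging` table referenced throughout these files is populated by the deduplicationAndVersioningSql step asserted in the snapshot tests below: a cleanup, then a dedup/version insert. The exact generated SQL lives in MemsqlTestArtifacts constants not shown here; under the column assumptions of these tests, the filter-duplicates shape is roughly the following (a sketch, not the generator's verbatim output):

public class TempStagingSketch
{
    public static void main(String[] args)
    {
        // Step 1: the temp table is cleared before every batch.
        String cleanup = "DELETE FROM `mydb`.`staging_legend_persistence_temp_staging`";
        // Step 2 (filter-duplicates flavour): collapse identical rows and keep a
        // per-row count, which the MAX_DUPLICATES check later inspects.
        String dedupInsert =
            "INSERT INTO `mydb`.`staging_legend_persistence_temp_staging` " +
            "(`id`, `name`, `amount`, `biz_date`, `digest`, `legend_persistence_count`) " +
            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,COUNT(*) as `legend_persistence_count` " +
            "FROM `mydb`.`staging` as stage " +
            "GROUP BY stage.`id`, stage.`name`, stage.`amount`, stage.`biz_date`, stage.`digest`)";
        System.out.println(cleanup);
        System.out.println(dedupInsert);
    }
}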
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
index 0441c583f16..9b5db338c56 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalSnapshotTest.java
@@ -14,6 +14,7 @@
 package org.finos.legend.engine.persistence.components.ingestmode;

+import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics;
 import org.finos.legend.engine.persistence.components.common.StatisticName;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
 import org.finos.legend.engine.persistence.components.relational.SqlPlan;
@@ -24,23 +25,25 @@
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+
+import static org.finos.legend.engine.persistence.components.AnsiTestArtifacts.maxDupsErrorCheckSql;

 public class NontemporalSnapshotTest extends NontemporalSnapshotTestCases
 {
     String rowsDeleted = "SELECT COUNT(*) as `rowsDeleted` FROM `mydb`.`main` as sink";
-    String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
     String rowsUpdated = "SELECT 0 as `rowsUpdated`";
     String rowsInserted = "SELECT COUNT(*) as `rowsInserted` FROM `mydb`.`main` as sink";
     String rowsTerminated = "SELECT 0 as `rowsTerminated`";

     @Override
-    public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotNoAuditingNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();

         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date` FROM `mydb`.`staging` as stage)";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedStagingTableCreateQuery, preActionsSqlList.get(1));
@@ -48,66 +51,58 @@ public void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult opera
         Assertions.assertEquals(insertSql, milestoningSqlList.get(1));

         // Stats
-        verifyStats(operations);
+        verifyStats(operations, "staging");
     }

     @Override
-    public void verifyNontemporalSnapshotNoAuditingWithDataSplit(GeneratorResult operations)
-    {
-        List<String> preActionsSqlList = operations.preActionsSql();
-        List<String> milestoningSqlList = operations.ingestSql();
-
-        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date` FROM `mydb`.`staging` as stage " +
-            "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`staging` as stage_right WHERE " +
-            "(stage.`data_split` < stage_right.`data_split`) AND ((stage.`id` = stage_right.`id`) AND (stage.`name` = stage_right.`name`)))))";
-
-        Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
-        Assertions.assertEquals(MemsqlTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
-        Assertions.assertEquals(insertSql, milestoningSqlList.get(1));
-
-        // Stats
-        verifyStats(operations);
-    }
-
-    @Override
-    public void verifyNontemporalSnapshotWithAuditingNoDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotWithAuditingFilterDupsNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();

         String insertSql = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,'2000-01-01 00:00:00.000000' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage)";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableWithAuditPKCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
         Assertions.assertEquals(insertSql, milestoningSqlList.get(1));

         // Stats
-        verifyStats(operations);
+        verifyStats(operations, "staging");
     }

     @Override
-    public void verifyNontemporalSnapshotWithAuditingWithDataSplit(GeneratorResult operations)
+    public void verifyNontemporalSnapshotWithAuditingFailOnDupMaxVersion(GeneratorResult operations)
     {
         List<String> preActionsSqlList = operations.preActionsSql();
         List<String> milestoningSqlList = operations.ingestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+        Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql();

-        String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
-            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,'2000-01-01 00:00:00' " +
-            "FROM `mydb`.`staging` as stage WHERE NOT (EXISTS " +
-            "(SELECT * FROM `mydb`.`staging` as stage_right " +
-            "WHERE (stage.`data_split` < stage_right.`data_split`) AND ((stage.`id` = stage_right.`id`) AND " +
-            "(stage.`name` = stage_right.`name`)))))";
+        String insertSql = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `batch_update_time`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,'2000-01-01 00:00:00.000000' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage)";
+
+        String maxDataErrorCheckSql = "SELECT MAX(`legend_persistence_distinct_rows`) as `MAX_DATA_ERRORS` FROM " +
+ "(SELECT COUNT(DISTINCT(`amount`)) as `legend_persistence_distinct_rows` " + + "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage GROUP BY `id`, `name`, `biz_date`) as stage"; Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableWithAuditPKCreateQuery, preActionsSqlList.get(0)); + Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTempStagingTableWithCount, preActionsSqlList.get(1)); Assertions.assertEquals(MemsqlTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0)); Assertions.assertEquals(insertSql, milestoningSqlList.get(1)); + Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0)); + Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1)); + + Assertions.assertEquals(MemsqlTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES)); + Assertions.assertEquals(maxDataErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS)); + // Stats - verifyStats(operations); + verifyStats(operations, "staging"); } @Override @@ -117,7 +112,7 @@ public void verifyNontemporalSnapshotWithUpperCaseOptimizer(GeneratorResult quer List milestoningSqlList = queries.ingestSql(); String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`) " + - "(SELECT * FROM `MYDB`.`STAGING` as stage)"; + "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE` FROM `MYDB`.`STAGING` as stage)"; Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQueryWithUpperCase, preActionsSqlList.get(0)); Assertions.assertEquals(MemsqlTestArtifacts.cleanupMainTableSqlUpperCase, milestoningSqlList.get(0)); @@ -131,7 +126,7 @@ public void verifyNontemporalSnapshotWithLessColumnsInStaging(GeneratorResult op List milestoningSqlList = operations.ingestSql(); String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`) " + - "(SELECT * FROM `mydb`.`staging` as stage)"; + "(SELECT stage.`id`,stage.`name`,stage.`amount` FROM `mydb`.`staging` as stage)"; Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(MemsqlTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0)); @@ -148,9 +143,9 @@ public void verifyNontemporalSnapshotWithCleanStagingData(GeneratorResult operat } @Override - public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostActions) + public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostCleanup) { - List sqlsForPostActions = physicalPlanForPostActions.getSqlList(); + List sqlsForPostActions = physicalPlanForPostCleanup.getSqlList(); List expectedSQL = new ArrayList<>(); expectedSQL.add(MemsqlTestArtifacts.expectedDropTableQuery); assertIfListsAreSameIgnoringOrder(expectedSQL, sqlsForPostActions); @@ -162,12 +157,13 @@ public RelationalSink getRelationalSink() return MemSqlSink.get(); } - private void verifyStats(GeneratorResult operations) + private void verifyStats(GeneratorResult operations, String stageTableName) { // Pre stats: Assertions.assertEquals(rowsDeleted, operations.preIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); // Post Stats: + String incomingRecordCount = String.format("SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`%s` as stage", stageTableName); Assertions.assertEquals(incomingRecordCount, 

     @Override
@@ -117,7 +112,7 @@ public void verifyNontemporalSnapshotWithUpperCaseOptimizer(GeneratorResult quer
         List<String> milestoningSqlList = queries.ingestSql();

         String insertSql = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`) " +
-            "(SELECT * FROM `MYDB`.`STAGING` as stage)";
+            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE` FROM `MYDB`.`STAGING` as stage)";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQueryWithUpperCase, preActionsSqlList.get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.cleanupMainTableSqlUpperCase, milestoningSqlList.get(0));
@@ -131,7 +126,7 @@ public void verifyNontemporalSnapshotWithLessColumnsInStaging(GeneratorResult op
         List<String> milestoningSqlList = operations.ingestSql();

         String insertSql = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`) " +
-            "(SELECT * FROM `mydb`.`staging` as stage)";
+            "(SELECT stage.`id`,stage.`name`,stage.`amount` FROM `mydb`.`staging` as stage)";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedBaseTableCreateQuery, preActionsSqlList.get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.cleanUpMainTableSql, milestoningSqlList.get(0));
@@ -148,9 +143,9 @@ public void verifyNontemporalSnapshotWithCleanStagingData(GeneratorResult operat
     }

     @Override
-    public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostActions)
+    public void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostCleanup)
     {
-        List<String> sqlsForPostActions = physicalPlanForPostActions.getSqlList();
+        List<String> sqlsForPostActions = physicalPlanForPostCleanup.getSqlList();
         List<String> expectedSQL = new ArrayList<>();
         expectedSQL.add(MemsqlTestArtifacts.expectedDropTableQuery);
         assertIfListsAreSameIgnoringOrder(expectedSQL, sqlsForPostActions);
@@ -162,12 +157,13 @@ public RelationalSink getRelationalSink()
     {
         return MemSqlSink.get();
     }

-    private void verifyStats(GeneratorResult operations)
+    private void verifyStats(GeneratorResult operations, String stageTableName)
     {
         // Pre stats:
         Assertions.assertEquals(rowsDeleted, operations.preIngestStatisticsSql().get(StatisticName.ROWS_DELETED));

         // Post Stats:
+        String incomingRecordCount = String.format("SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`%s` as stage", stageTableName);
         Assertions.assertEquals(incomingRecordCount, operations.postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT));
         Assertions.assertEquals(rowsUpdated, operations.postIngestStatisticsSql().get(StatisticName.ROWS_UPDATED));
         Assertions.assertEquals(rowsInserted, operations.postIngestStatisticsSql().get(StatisticName.ROWS_INSERTED));
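Every expected timestamp literal in the files above and below gains microsecond precision: '2000-01-01 00:00:00' becomes '2000-01-01 00:00:00.000000'. The tests pin the value via a fixed batch time; purely as a sketch of the new shape, a formatter that produces it (the formatter itself is illustrative, not the generator's internal code):

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class BatchTimeFormatSketch
{
    public static void main(String[] args)
    {
        DateTimeFormatter microsecondFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");
        // Prints "2000-01-01 00:00:00.000000" -- the literal asserted throughout.
        System.out.println(LocalDateTime.of(2000, 1, 1, 0, 0).format(microsecondFormat));
    }
}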
"999999999 FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " + @@ -148,13 +148,13 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper } @Override - public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List operations, List dataSplitRanges) + public void verifyUnitemporalDeltaWithDeleteIndNoDedupAllVersion(List operations, List dataSplitRanges) { String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + "WHERE " + "(sink.`batch_id_out` = 999999999) AND " + - "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " + "AND ((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))"; @@ -162,7 +162,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " + @@ -180,7 +180,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -258,7 +258,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(G } @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -318,35 +318,24 @@ public void verifyUnitemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + - "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 " + - "FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " + - "WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS " + - "(SELECT * FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`,ROW_NUMBER() " + - "OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) as `legend_persistence_row_num` " + - "FROM `mydb`.`staging` as stage WHERE stage.`batch_id_in` > 5) 
as stage " + - "WHERE stage.`legend_persistence_row_num` = 1) as stage " + + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " + + "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 WHERE (sink.`batch_id_out` = 999999999) AND " + + "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (stage.`version` > sink.`version`)))"; - String expectedUpsertQuery = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, " + - "`digest`, `version`, `batch_id_in`, `batch_id_out`) " + + String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " + + "(`id`, `name`, `amount`, `biz_date`, `digest`, `version`, `batch_id_in`, `batch_id_out`) " + "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," + - "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 " + - "FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version` " + - "FROM (SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," + - "ROW_NUMBER() OVER (PARTITION BY stage.`id`,stage.`name` ORDER BY stage.`version` DESC) " + - "as `legend_persistence_row_num` FROM `mydb`.`staging` as stage WHERE stage.`batch_id_in` > 5) as stage " + - "WHERE stage.`legend_persistence_row_num` = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " + - "WHERE (sink.`batch_id_out` = 999999999) AND (stage.`version` <= sink.`version`) " + - "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; + "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE " + + "UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND " + + "(stage.`version` <= sink.`version`) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))"; Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQuery, preActionsSql.get(0)); Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1)); @@ -357,7 +346,7 @@ public void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(Gen } @Override - public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(GeneratorResult operations) + public void verifyUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -394,7 +383,7 @@ public void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(Generato } @Override - public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations) + public void verifyUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -402,7 +391,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT 
            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
             "(stage.`version` > sink.`version`)))";
@@ -411,7 +400,7 @@ public void verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`,stage.`version`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
             "999999999 " +
-            "FROM `mydb`.`staging` as stage " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (stage.`version` <= sink.`version`) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))";
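Every unitemporal milestone/upsert pair in this file shares one moving part: the next batch id, resolved from batch_metadata at execution time. Reduced to its shape, the two-phase pattern these assertions encode — close the changed keys, then open the incoming versions with the 999999999 sentinel as the open end (a composite of the queries asserted above, not a new query):

public class MilestoningShapeSketch
{
    public static void main(String[] args)
    {
        // The next batch id, resolved from the metadata table at execution time.
        String nextBatchId =
            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " +
            "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')";
        // Phase 1: close the current version of every changed key.
        String close = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " + nextBatchId + "-1 " +
            "WHERE (sink.`batch_id_out` = 999999999) AND (EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`)))";
        // Phase 2: open the incoming version, batch_id_in = next id, open-ended batch_id_out.
        String open = "INSERT INTO `mydb`.`main` (`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`) " +
            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," + nextBatchId + ",999999999 " +
            "FROM `mydb`.`staging` as stage WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
            "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
            "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)))))";
        System.out.println(close);
        System.out.println(open);
    }
}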
BATCH_METADATA " + - "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999 FROM " + - "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION` " + - "FROM (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,stage.`VERSION`," + - "ROW_NUMBER() OVER (PARTITION BY stage.`ID`,stage.`NAME` ORDER BY stage.`VERSION` DESC) as `LEGEND_PERSISTENCE_ROW_NUM` " + - "FROM `MYDB`.`STAGING` as stage) as stage WHERE stage.`LEGEND_PERSISTENCE_ROW_NUM` = 1) as stage " + - "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) " + - "AND (stage.`VERSION` < sink.`VERSION`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; + "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999 FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage " + + "WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND " + + "(stage.`VERSION` < sink.`VERSION`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableBatchIdAndVersionBasedCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); @@ -481,7 +463,7 @@ protected String getExpectedMetadataTableIngestQueryWithStagingFilters(String st "(`table_name`, `table_batch_id`, `batch_start_ts_utc`, `batch_end_ts_utc`, `batch_status`, `staging_filters`) " + "(SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata " + "WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," + - "'2000-01-01 00:00:00',CURRENT_TIMESTAMP(),'DONE'," + + "'2000-01-01 00:00:00.000000',CURRENT_TIMESTAMP(),'DONE'," + String.format("PARSE_JSON('%s'))", stagingFilters); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java index 53d5ea24e7e..02a21455a2b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java @@ -27,7 +27,7 @@ public class UnitemporalDeltaBatchIdDateTimeBasedTest extends UnitmemporalDeltaBatchIdDateTimeBasedTestCases { @Override - public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations) + public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations) { List preActionsSql = operations.preActionsSql(); List milestoningSql = operations.ingestSql(); @@ -35,7 +35,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT 
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java
index 53d5ea24e7e..02a21455a2b 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdDateTimeBasedTest.java
@@ -27,7 +27,7 @@
 public class UnitemporalDeltaBatchIdDateTimeBasedTest extends UnitmemporalDeltaBatchIdDateTimeBasedTestCases
 {
     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations)
+    public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -35,7 +35,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
@@ -45,7 +45,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
@@ -68,13 +68,13 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
     }

     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
             "(sink.`digest` <> stage.`digest`)))";
@@ -82,8 +82,8 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<Generator
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
             "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
@@ -101,7 +101,7 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<Generator
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
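The '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}' and '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}' markers in these expected strings are literal placeholders the caller substitutes per DataSplitRange before executing each statement. A hypothetical binding helper — the placeholder names are real, the helper and its value holder are assumptions for illustration (the library's DataSplitRange plays the Range role):

public class DataSplitBindingSketch
{
    // Hypothetical value holder standing in for the library's DataSplitRange.
    static final class Range
    {
        final long lower;
        final long upper;

        Range(long lower, long upper)
        {
            this.lower = lower;
            this.upper = upper;
        }
    }

    static String bind(String sql, Range range)
    {
        return sql
            .replace("{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}", String.valueOf(range.lower))
            .replace("{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}", String.valueOf(range.upper));
    }

    public static void main(String[] args)
    {
        String template = "SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
            "WHERE (stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') " +
            "AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')";
        System.out.println(bind(template, new Range(1, 1)));
    }
}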
@@ -119,7 +119,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE " +
             "(sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
@@ -131,7 +131,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator
             "`batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
             "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
             "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " +
@@ -154,7 +154,7 @@ public void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(Generator
     }

     @Override
-    public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalDeltaWithDeleteInd(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -162,7 +162,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET sink.`batch_id_out` = " +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE " +
             "(sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
@@ -174,7 +174,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
             "`batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
             "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
             "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " +
@@ -189,13 +189,13 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
     }

     @Override
-    public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
    {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
             "sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE " +
             "((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
             "((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
('yes','1','true')))))"; @@ -203,7 +203,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " + "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND " + "(sink.`digest` = stage.`digest`) AND ((sink.`id` = stage.`id`) AND " + @@ -236,8 +236,8 @@ public void verifyUnitemporalDeltaWithUpperCaseOptimizer(GeneratorResult operati List milestoningSql = operations.ingestSql(); List metadataIngestSql = operations.metadataIngestSql(); - String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00' WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; - String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; + String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00.000000' WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` <> stage.`DIGEST`)))"; + String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `MYDB`.`STAGING` as stage WHERE NOT (EXISTS (SELECT * FROM `MYDB`.`MAIN` as sink WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (sink.`DIGEST` = stage.`DIGEST`) AND ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)))))"; Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0)); Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1)); Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0)); @@ -254,7 +254,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " + "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE 
            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE " +
             "((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` <> stage.`digest`)))";
@@ -263,7 +263,7 @@ public void verifyUnitemporalDeltaWithLessColumnsInStaging(GeneratorResult opera
             "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) AND (sink.`digest` = stage.`digest`) " +
@@ -329,7 +329,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations)
         String expectedMilestoneQuery = "UPDATE `my_schema`.`main` as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `my_schema`.`staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
@@ -339,7 +339,7 @@ public void verifyUnitemporalDeltaWithOnlySchemaSet(GeneratorResult operations)
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `my_schema`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `my_schema`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
@@ -374,7 +374,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati
         String expectedMilestoneQuery = "UPDATE `mydb`.`my_schema`.`main` as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM `mydb`.`my_schema`.`staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
@@ -384,7 +384,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothSet(GeneratorResult operati
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`my_schema`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`my_schema`.`main` as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
@@ -419,7 +419,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper
         String expectedMilestoneQuery = "UPDATE main as sink " +
             "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1," +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) AND " +
             "(EXISTS (SELECT * FROM staging as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
@@ -429,7 +429,7 @@ public void verifyUnitemporalDeltaWithDbAndSchemaBothNotSet(GeneratorResult oper
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
             "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')," +
-            "999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM staging as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM main as sink " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
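In the purely time-based variant below, `batch_time_out = '9999-12-31 23:59:59'` plays the role the 999999999 batch-id sentinel plays above: it marks the open end of a row's validity window, so the "current" rows are exactly those carrying the sentinel end time. An illustrative reading of that convention — these two queries are my sketch, not output of the generator:

public class ValidityWindowSketch
{
    public static void main(String[] args)
    {
        String openEnd = "9999-12-31 23:59:59";
        // Current state of the table: rows whose window has not been closed yet.
        String currentRows = "SELECT * FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '" + openEnd + "'";
        // Point-in-time state: rows whose window covered the asked-for instant
        // (string comparison works here because the format is lexically ordered).
        String asOfRows = "SELECT * FROM `mydb`.`main` as sink " +
            "WHERE (sink.`batch_time_in` <= '2000-06-30 00:00:00.000000') " +
            "AND (sink.`batch_time_out` > '2000-06-30 00:00:00.000000')";
        System.out.println(currentRows);
        System.out.println(asOfRows);
    }
}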
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java
index 9d4aa02c54f..ec8d442fc5e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaDateTimeBasedTest.java
@@ -27,14 +27,14 @@
 public class UnitemporalDeltaDateTimeBasedTest extends UnitmemporalDeltaDateTimeBasedTestCases
 {
     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations)
+    public void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
             "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
@@ -43,7 +43,7 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
@@ -58,29 +58,29 @@ public void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operatio
         // Stats
         String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
-        String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00'";
+        String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000'";
         String rowsDeleted = "SELECT 0 as `rowsDeleted`";
-        String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00') as `rowsInserted`";
+        String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000') as `rowsInserted`";
         String rowsTerminated = "SELECT 0 as `rowsTerminated`";
         verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
     }

     @Override
-    public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND " +
             "(sink.`digest` <> stage.`digest`)))";

         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
-            "FROM `mydb`.`staging` as stage " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
             "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
@@ -98,23 +98,23 @@ public void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<Generator
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE " +
             "(sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
             "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
@@ -125,7 +125,7 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
             "(`id`, `name`, `amount`, `biz_date`, `digest`, " +
             "`batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
             "WHERE (NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND (sink.`digest` = stage.`digest`) " +
             "AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`))))) AND " +
@@ -140,21 +140,21 @@ public void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult oper
         // Stats
         String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
-        String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))";
+        String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))";
         String rowsDeleted = "SELECT 0 as `rowsDeleted`";
-        String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))) as `rowsInserted`";
-        String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))) as `rowsTerminated`";
+        String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))) as `rowsInserted`";
+        String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))) as `rowsTerminated`";
         verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
     }

     @Override
-    public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
+    public void verifyUnitemporalDeltaWithDeleteIndFilterDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges)
     {
         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE " +
             "(sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
-            "(EXISTS (SELECT * FROM `mydb`.`staging` as stage " +
+            "(EXISTS (SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) " +
             "AND ((sink.`digest` <> stage.`digest`) OR (stage.`delete_indicator` IN ('yes','1','true')))))";
@@ -162,7 +162,7 @@ public void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<Generator
             "WHERE ((stage.`data_split` >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.`data_split` <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) AND " +
             "(NOT (EXISTS (SELECT * FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND (sink.`digest` = stage.`digest`) " +
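The rowsUpdated/rowsInserted/rowsTerminated strings above encode simple set arithmetic: a key counts as updated when its old row was closed and a new row for the same key was opened in the same batch; everything else opened is an insert, everything else closed is a termination. In scalar form (the counts are stand-ins a hypothetical executor would obtain from the three COUNT(*) queries):

public class DerivedStatsSketch
{
    public static void main(String[] args)
    {
        long rowsOpenedThisBatch = 10;  // rows with batch_time_in  = the batch time
        long rowsClosedThisBatch = 4;   // rows with batch_time_out = the batch time
        long rowsUpdated = 3;           // closed rows whose key was re-opened this batch

        long rowsInserted = rowsOpenedThisBatch - rowsUpdated;    // 7 genuinely new keys
        long rowsTerminated = rowsClosedThisBatch - rowsUpdated;  // 1 key closed for good
        System.out.println("inserted=" + rowsInserted + ", terminated=" + rowsTerminated);
    }
}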
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java
index e1624d0c58d..81c42dab355 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdBasedTest.java
@@ -14,15 +14,15 @@
 package org.finos.legend.engine.persistence.components.ingestmode;

-import org.finos.legend.engine.persistence.components.AnsiTestArtifacts;
+import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
-import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink;
 import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult;
 import org.finos.legend.engine.persistence.components.relational.memsql.MemSqlSink;
 import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotBatchIdBasedTestCases;
 import org.junit.jupiter.api.Assertions;

 import java.util.List;
+import java.util.Map;

 public class UnitemporalSnapshotBatchIdBasedTest extends UnitmemporalSnapshotBatchIdBasedTestCases
 {
@@ -33,7 +33,7 @@ public class UnitemporalSnapshotBatchIdBasedTest extends UnitmemporalSnapshotBat
     String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1)-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))) as `rowsTerminated`";

     @Override
-    public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -62,6 +62,41 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul
         verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
     }

+    @Override
+    public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion(GeneratorResult operations)
+    {
+        List<String> preActionsSql = operations.preActionsSql();
+        List<String> milestoningSql = operations.ingestSql();
+        List<String> metadataIngestSql = operations.metadataIngestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+        Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql();
+
+        String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1 " +
+            "WHERE (sink.`batch_id_out` = 999999999) " +
+            "AND (NOT (EXISTS " +
+            "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+
+        String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999 " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))";
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableBatchIdBasedCreateQuery, preActionsSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedStagingTableWithDigestCreateQuery, preActionsSql.get(1));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(2));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithFilterDuplicates, deduplicationAndVersioningSql.get(1));
+        Assertions.assertEquals(MemsqlTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES));
+
+        Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0));
+        Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1));
+        Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0));
+        verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
+    }
+
     @Override
     public void verifyUnitemporalSnapshotWithoutPartitionWithNoOpEmptyBatchHandling(GeneratorResult operations)
     {
@@ -92,7 +127,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
@@ -122,7 +157,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();
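The FailOnDups flow above reads from `mydb`.`staging_legend_persistence_temp_staging` and pairs the ingest SQL with an error-check query keyed by DedupAndVersionErrorStatistics.MAX_DUPLICATES. A hedged sketch of how a caller might gate ingestion on that check result (the enum is mirrored locally; the threshold logic and wiring are assumptions, not the project's executor API):

import java.util.HashMap;
import java.util.Map;

// Hedged sketch: abort the batch when the MAX_DUPLICATES check finds any
// primary key occurring more than once in the staged data.
public final class DedupGuardSketch
{
    enum DedupAndVersionErrorStatistics { MAX_DUPLICATES, MAX_DATA_ERRORS }

    static void failIfDuplicates(Map<DedupAndVersionErrorStatistics, Long> checkResults)
    {
        Long maxDuplicates = checkResults.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES);
        if (maxDuplicates != null && maxDuplicates > 1)
        {
            // FAIL_ON_DUPLICATES semantics: any duplicated key aborts the batch.
            throw new IllegalStateException("Duplicates found in staging data: max count = " + maxDuplicates);
        }
    }

    public static void main(String[] args)
    {
        Map<DedupAndVersionErrorStatistics, Long> ok = new HashMap<>();
        ok.put(DedupAndVersionErrorStatistics.MAX_DUPLICATES, 1L);
        failIfDuplicates(ok); // passes: every key is unique after dedup

        Map<DedupAndVersionErrorStatistics, Long> bad = new HashMap<>();
        bad.put(DedupAndVersionErrorStatistics.MAX_DUPLICATES, 3L);
        try
        {
            failIfDuplicates(bad);
        }
        catch (IllegalStateException e)
        {
            System.out.println(e.getMessage());
        }
    }
}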
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java
index 22dfbe78a9f..2112ff4dedc 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotBatchIdDateTimeBasedTest.java
@@ -14,15 +14,15 @@
 package org.finos.legend.engine.persistence.components.ingestmode;

-import org.finos.legend.engine.persistence.components.AnsiTestArtifacts;
+import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
-import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink;
 import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult;
 import org.finos.legend.engine.persistence.components.relational.memsql.MemSqlSink;
 import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotBatchIdDateTimeBasedTestCases;
 import org.junit.jupiter.api.Assertions;

 import java.util.List;
+import java.util.Map;

 public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends UnitmemporalSnapshotBatchIdDateTimeBasedTestCases
 {
@@ -33,14 +33,14 @@ public class UnitemporalSnapshotBatchIdDateTimeBasedTest extends UnitmemporalSna
     String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1)-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1) AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_id_in` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'))))) as `rowsTerminated`";

     @Override
-    public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
@@ -48,7 +48,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))";
@@ -62,6 +62,42 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul
         verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
     }

+    @Override
+    public void verifyUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion(GeneratorResult operations)
+    {
+        List<String> preActionsSql = operations.preActionsSql();
+        List<String> milestoningSql = operations.ingestSql();
+        List<String> metadataIngestSql = operations.metadataIngestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+        Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql();
+
+        String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
+            "WHERE (sink.`batch_id_out` = 999999999) " +
+            "AND (NOT (EXISTS " +
+            "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+
+        String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))";
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1));
+
+        Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0));
+        Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1));
+        Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicates, deduplicationAndVersioningSql.get(1));
+        Assertions.assertEquals(MemsqlTestArtifacts.dataErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS));
+
+        verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
+    }
+
     @Override
     public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandling(GeneratorResult operations)
     {
@@ -69,7 +105,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa
         List<String> milestoningSql = operations.ingestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE sink.`batch_id_out` = 999999999";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableCreateQuery, preActionsSql.get(0));
@@ -78,31 +114,49 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBa
     }

     @Override
-    public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+        Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql();
+
+        String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = " +
+            "(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE " +
+            "UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00.000000' WHERE " +
+            "(sink.`BATCH_ID_OUT` = 999999999) AND (NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage WHERE " +
+            "((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))";
+
+        String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` " +
+            "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) " +
+            "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`," +
+            "(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA " +
+            "WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
+            "FROM `MYDB`.`STAGING_LEGEND_PERSISTENCE_TEMP_STAGING` as stage WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink " +
+            "WHERE sink.`BATCH_ID_OUT` = 999999999)))";

-        String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET sink.`BATCH_ID_OUT` = (SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN')-1,sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00' WHERE (sink.`BATCH_ID_OUT` = 999999999) AND (NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))";
-        String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` (`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_ID_IN`, `BATCH_ID_OUT`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) (SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`,(SELECT COALESCE(MAX(BATCH_METADATA.`TABLE_BATCH_ID`),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.`TABLE_NAME`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `MYDB`.`STAGING` as stage WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink WHERE sink.`BATCH_ID_OUT` = 999999999)))";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableCreateQueryWithUpperCase, preActionsSql.get(0));
         Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQueryWithUpperCase, preActionsSql.get(1));

+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQueryInUpperCase, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndAllowDuplicatesUpperCase, deduplicationAndVersioningSql.get(1));
+        Assertions.assertEquals(MemsqlTestArtifacts.dataErrorCheckSqlUpperCase, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS));
+
         Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0));
         Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1));
         Assertions.assertEquals(getExpectedMetadataTableIngestQueryWithUpperCase(), metadataIngestSql.get(0));
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " +
@@ -111,7 +165,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND (sink.`biz_date` = stage.`biz_date`))))";
@@ -139,14 +193,14 @@ public void verifyUnitemporalSnapshotWithPartitionWithDefaultEmptyDataHandling(G
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " +
@@ -155,7 +209,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_id_out` = 999999999) AND (sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))";
@@ -175,7 +229,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersWithDeleteTargetDataEmp
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00'))";
@@ -201,7 +255,7 @@ public void verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_id_out` = (SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN')-1,sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_id_out` = 999999999) " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
@@ -209,7 +263,7 @@ public void verifyUnitemporalSnapshotWithLessColumnsInStaging(GeneratorResult op
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `digest`, `batch_id_in`, `batch_id_out`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`digest`," +
-            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "(SELECT COALESCE(MAX(batch_metadata.`table_batch_id`),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.`table_name`) = 'MAIN'),999999999,'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_id_out` = 999999999)))";
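Every milestoning and upsert statement above embeds the same subquery to derive the next batch id from the batch metadata table. A small sketch that renders that subquery for a given table, following the backtick quoting used in these MemSQL tests (the helper itself is illustrative, not part of the project; identifiers are assumed pre-validated):

// Renders the "next batch id" subquery asserted throughout these tests.
public final class NextBatchIdSqlSketch
{
    static String nextTableBatchId(String metadataTable, String mainTableName)
    {
        return "(SELECT COALESCE(MAX(" + metadataTable + ".`table_batch_id`),0)+1 "
            + "FROM " + metadataTable + " as " + metadataTable + " "
            + "WHERE UPPER(" + metadataTable + ".`table_name`) = '" + mainTableName.toUpperCase() + "')";
    }

    public static void main(String[] args)
    {
        // Matches the subquery embedded in the expected milestone/upsert SQL above.
        System.out.println(nextTableBatchId("batch_metadata", "main"));
    }
}

COALESCE(MAX(...),0)+1 makes the very first batch resolve to 1 when the metadata table is still empty, which is why no seed row is needed.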
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java
index 6dee33f7e13..0f96eeb95b6 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-memsql/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalSnapshotDateTimeBasedTest.java
@@ -14,34 +14,34 @@
 package org.finos.legend.engine.persistence.components.ingestmode;

-import org.finos.legend.engine.persistence.components.AnsiTestArtifacts;
+import org.finos.legend.engine.persistence.components.common.DedupAndVersionErrorStatistics;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
-import org.finos.legend.engine.persistence.components.relational.ansi.AnsiSqlSink;
 import org.finos.legend.engine.persistence.components.relational.api.GeneratorResult;
 import org.finos.legend.engine.persistence.components.relational.memsql.MemSqlSink;
 import org.finos.legend.engine.persistence.components.testcases.ingestmode.unitemporal.UnitmemporalSnapshotDateTimeBasedTestCases;
 import org.junit.jupiter.api.Assertions;

 import java.util.List;
+import java.util.Map;

 public class UnitemporalSnapshotDateTimeBasedTest extends UnitmemporalSnapshotDateTimeBasedTestCases
 {
     String incomingRecordCount = "SELECT COUNT(*) as `incomingRecordCount` FROM `mydb`.`staging` as stage";
-    String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))";
+    String rowsUpdated = "SELECT COUNT(*) as `rowsUpdated` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))";
     String rowsDeleted = "SELECT 0 as `rowsDeleted`";
-    String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))) as `rowsInserted`";
-    String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00')))) as `rowsTerminated`";
+    String rowsInserted = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_in` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))) as `rowsInserted`";
+    String rowsTerminated = "SELECT (SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '2000-01-01 00:00:00.000000')-(SELECT COUNT(*) FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '2000-01-01 00:00:00.000000') AND (EXISTS (SELECT * FROM `mydb`.`main` as sink2 WHERE ((sink2.`id` = sink.`id`) AND (sink2.`name` = sink.`name`)) AND (sink2.`batch_time_in` = '2000-01-01 00:00:00.000000')))) as `rowsTerminated`";

     @Override
-    public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage " +
@@ -50,7 +50,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '9999-12-31 23:59:59')))";
@@ -63,6 +63,44 @@ public void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResul
         verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
     }

+    @Override
+    public void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion(GeneratorResult operations)
+    {
+        List<String> preActionsSql = operations.preActionsSql();
+        List<String> milestoningSql = operations.ingestSql();
+        List<String> metadataIngestSql = operations.metadataIngestSql();
+        List<String> deduplicationAndVersioningSql = operations.deduplicationAndVersioningSql();
+        Map<DedupAndVersionErrorStatistics, String> deduplicationAndVersioningErrorChecksSql = operations.deduplicationAndVersioningErrorChecksSql();
+
+        String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
+            "SET sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
+            "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
+            "AND (NOT (EXISTS " +
+            "(SELECT * FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`))))";
+
+        String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
+            "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
+            "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
+            "FROM `mydb`.`staging_legend_persistence_temp_staging` as stage " +
+            "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE sink.`batch_time_out` = '9999-12-31 23:59:59')))";
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedMetadataTableCreateQuery, preActionsSql.get(1));
+
+        Assertions.assertEquals(expectedMilestoneQuery, milestoningSql.get(0));
+        Assertions.assertEquals(expectedUpsertQuery, milestoningSql.get(1));
+        Assertions.assertEquals(getExpectedMetadataTableIngestQuery(), metadataIngestSql.get(0));
+        verifyStats(operations, incomingRecordCount, rowsUpdated, rowsDeleted, rowsInserted, rowsTerminated);
+
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedTempStagingCleanupQuery, deduplicationAndVersioningSql.get(0));
+        Assertions.assertEquals(MemsqlTestArtifacts.expectedInsertIntoBaseTempStagingPlusDigestWithMaxVersionAndFilterDuplicates, deduplicationAndVersioningSql.get(1));
+
+        Assertions.assertEquals(MemsqlTestArtifacts.maxDupsErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DUPLICATES));
+        Assertions.assertEquals(MemsqlTestArtifacts.dataErrorCheckSql, deduplicationAndVersioningErrorChecksSql.get(DedupAndVersionErrorStatistics.MAX_DATA_ERRORS));
+    }
+
     @Override
     public void verifyUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandling(GeneratorResult operations)
     {
@@ -71,7 +109,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandli
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE sink.`batch_time_out` = '9999-12-31 23:59:59'";

         Assertions.assertEquals(MemsqlTestArtifacts.expectedMainTableTimeBasedCreateQuery, preActionsSql.get(0));
@@ -89,7 +127,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `MYDB`.`MAIN` as sink SET " +
-            "sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00' " +
+            "sink.`BATCH_TIME_OUT` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`BATCH_TIME_OUT` = '9999-12-31 23:59:59') AND " +
             "(NOT (EXISTS (SELECT * FROM `MYDB`.`STAGING` as stage WHERE ((sink.`ID` = stage.`ID`) " +
             "AND (sink.`NAME` = stage.`NAME`)) AND (sink.`DIGEST` = stage.`DIGEST`))))";
@@ -97,7 +135,7 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene
         String expectedUpsertQuery = "INSERT INTO `MYDB`.`MAIN` " +
             "(`ID`, `NAME`, `AMOUNT`, `BIZ_DATE`, `DIGEST`, `BATCH_TIME_IN`, `BATCH_TIME_OUT`) " +
             "(SELECT stage.`ID`,stage.`NAME`,stage.`AMOUNT`,stage.`BIZ_DATE`,stage.`DIGEST`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `MYDB`.`STAGING` as stage " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `MYDB`.`STAGING` as stage " +
             "WHERE NOT (stage.`DIGEST` IN (SELECT sink.`DIGEST` FROM `MYDB`.`MAIN` as sink " +
             "WHERE sink.`BATCH_TIME_OUT` = '9999-12-31 23:59:59')))";
@@ -110,14 +148,14 @@ public void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(Gene
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink " +
-            "SET sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "SET sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') " +
             "AND (NOT (EXISTS " +
             "(SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND (sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) " +
@@ -126,7 +164,7 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' " +
             "FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND (sink.`biz_date` = stage.`biz_date`))))";
@@ -140,14 +178,14 @@ public void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult o
     }

     @Override
-    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations)
+    public void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations)
     {
         List<String> preActionsSql = operations.preActionsSql();
         List<String> milestoningSql = operations.ingestSql();
         List<String> metadataIngestSql = operations.metadataIngestSql();

         String expectedMilestoneQuery = "UPDATE `mydb`.`main` as sink SET " +
-            "sink.`batch_time_out` = '2000-01-01 00:00:00' " +
+            "sink.`batch_time_out` = '2000-01-01 00:00:00.000000' " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
             "(NOT (EXISTS (SELECT * FROM `mydb`.`staging` as stage WHERE ((sink.`id` = stage.`id`) AND " +
             "(sink.`name` = stage.`name`)) AND (sink.`digest` = stage.`digest`)))) AND " +
@@ -156,7 +194,7 @@ public void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorR
         String expectedUpsertQuery = "INSERT INTO `mydb`.`main` " +
             "(`id`, `name`, `amount`, `biz_date`, `digest`, `batch_time_in`, `batch_time_out`) " +
             "(SELECT stage.`id`,stage.`name`,stage.`amount`,stage.`biz_date`,stage.`digest`," +
-            "'2000-01-01 00:00:00','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
+            "'2000-01-01 00:00:00.000000','9999-12-31 23:59:59' FROM `mydb`.`staging` as stage " +
             "WHERE NOT (stage.`digest` IN (SELECT sink.`digest` FROM `mydb`.`main` as sink " +
             "WHERE (sink.`batch_time_out` = '9999-12-31 23:59:59') AND " +
             "(sink.`biz_date` IN ('2000-01-01 00:00:00','2000-01-02 00:00:00')))))";
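The rowsInserted and rowsTerminated expectations in these tests are derived by subtraction: rowsUpdated counts keys that were both closed and re-opened at the batch timestamp, so removing that overlap from the raw open/close counts leaves genuinely new and genuinely terminated keys. A sketch of the arithmetic:

// Mirrors the derivation encoded in the rowsInserted/rowsTerminated SQL above.
public final class UnitemporalStatsSketch
{
    // openedInBatch: rows with batch_time_in  = batch start timestamp
    // closedInBatch: rows with batch_time_out = batch start timestamp
    // rowsUpdated:   rows closed in this batch whose key was re-opened in it
    static long rowsInserted(long openedInBatch, long rowsUpdated)
    {
        return openedInBatch - rowsUpdated;
    }

    static long rowsTerminated(long closedInBatch, long rowsUpdated)
    {
        return closedInBatch - rowsUpdated;
    }

    public static void main(String[] args)
    {
        // e.g. 10 versions opened, 7 closed, 6 of the closes were updates:
        System.out.println(rowsInserted(10, 6));  // 4 brand-new keys
        System.out.println(rowsTerminated(7, 6)); // 1 key terminated outright
    }
}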
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/pom.xml
index ddbe1e3431e..72822a9a00e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/pom.xml
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/pom.xml
@@ -18,7 +18,7 @@
         <groupId>org.finos.legend.engine</groupId>
         <artifactId>legend-engine-xt-persistence-component</artifactId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>

     <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/main/java/org/finos/legend/engine/persistence/components/relational/snowflake/SnowflakeSink.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/main/java/org/finos/legend/engine/persistence/components/relational/snowflake/SnowflakeSink.java
index 3d438dd721f..46465426e97 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/main/java/org/finos/legend/engine/persistence/components/relational/snowflake/SnowflakeSink.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/main/java/org/finos/legend/engine/persistence/components/relational/snowflake/SnowflakeSink.java
@@ -85,6 +85,7 @@
 import java.util.Objects;
 import java.util.ArrayList;

+import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_ID_PATTERN;
 import static org.finos.legend.engine.persistence.components.relational.api.RelationalIngestorAbstract.BATCH_START_TS_PATTERN;

 public class SnowflakeSink extends AnsiSqlSink
@@ -109,6 +110,7 @@ public class SnowflakeSink extends AnsiSqlSink
         capabilities.add(Capability.ADD_COLUMN);
         capabilities.add(Capability.IMPLICIT_DATA_TYPE_CONVERSION);
         capabilities.add(Capability.DATA_TYPE_LENGTH_CHANGE);
+        capabilities.add(Capability.TRANSFORM_WHILE_COPY);
         CAPABILITIES = Collections.unmodifiableSet(capabilities);

         Map<Class<?>, LogicalPlanVisitor<?>> logicalPlanVisitorByClass = new HashMap<>();
@@ -254,29 +256,32 @@ public IngestorResult performBulkLoad(Datasets datasets, Executor
+        Map<StatisticName, Object> stats = new HashMap<>();
+        stats.put(StatisticName.ROWS_INSERTED, totalRowsLoaded);
+        stats.put(StatisticName.ROWS_WITH_ERRORS, totalRowsWithError);
+        stats.put(StatisticName.FILES_LOADED, totalFilesLoaded);
+
+        IngestorResult.Builder resultBuilder = IngestorResult.builder()
+            .updatedDatasets(datasets)
+            .putAllStatisticByName(stats)
+            .ingestionTimestampUTC(placeHolderKeyValues.get(BATCH_START_TS_PATTERN))
+            .batchId(Optional.ofNullable(placeHolderKeyValues.containsKey(BATCH_ID_PATTERN) ? Integer.valueOf(placeHolderKeyValues.get(BATCH_ID_PATTERN)) : null));
         IngestorResult result;
+
         if (dataFilePathsWithFailedBulkLoad.isEmpty())
         {
-            Map<StatisticName, Object> stats = new HashMap<>();
-            stats.put(StatisticName.ROWS_INSERTED, totalRowsLoaded);
-            stats.put(StatisticName.ROWS_WITH_ERRORS, totalRowsWithError);
-            stats.put(StatisticName.FILES_LOADED, totalFilesLoaded);
-            result = IngestorResult.builder()
-                .status(IngestStatus.SUCCEEDED)
-                .updatedDatasets(datasets)
-                .putAllStatisticByName(stats)
-                .ingestionTimestampUTC(placeHolderKeyValues.get(BATCH_START_TS_PATTERN))
-                .build();
+            result = resultBuilder
+                .status(IngestStatus.SUCCEEDED)
+                .build();
         }
         else
         {
             String errorMessage = String.format("Unable to bulk load these files: %s", String.join(",", dataFilePathsWithFailedBulkLoad));
-            result = IngestorResult.builder()
-                .status(IngestStatus.FAILED)
-                .message(errorMessage)
-                .updatedDatasets(datasets)
-                .ingestionTimestampUTC(placeHolderKeyValues.get(BATCH_START_TS_PATTERN))
-                .build();
+            result = resultBuilder
+                .status(IngestStatus.FAILED)
+                .message(errorMessage)
+                .build();
         }
         return result;
     }
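The SnowflakeSink change above hoists the fields shared by the success and failure outcomes onto a single resultBuilder and branches only on status and message. A simplified sketch of that builder-reuse pattern (the Result and Builder types below are stand-ins, not the project's immutable IngestorResult API):

import java.util.Collections;
import java.util.List;
import java.util.Optional;

// Sketch: set common fields once, then branch only on the outcome-specific ones.
public final class ResultBuilderReuseSketch
{
    enum IngestStatus { SUCCEEDED, FAILED }

    static final class Result
    {
        final IngestStatus status;
        final Optional<String> message;
        final Optional<Integer> batchId;

        Result(IngestStatus status, Optional<String> message, Optional<Integer> batchId)
        {
            this.status = status;
            this.message = message;
            this.batchId = batchId;
        }
    }

    static final class Builder
    {
        private IngestStatus status;
        private Optional<String> message = Optional.empty();
        private Optional<Integer> batchId = Optional.empty();

        Builder status(IngestStatus s) { this.status = s; return this; }
        Builder message(String m) { this.message = Optional.of(m); return this; }
        Builder batchId(Optional<Integer> id) { this.batchId = id; return this; }
        Result build() { return new Result(status, message, batchId); }
    }

    public static void main(String[] args)
    {
        List<String> failedFiles = Collections.emptyList();
        // Common fields applied once, mirroring resultBuilder in the diff above.
        Builder resultBuilder = new Builder().batchId(Optional.of(42));
        Result result = failedFiles.isEmpty()
            ? resultBuilder.status(IngestStatus.SUCCEEDED).build()
            : resultBuilder.status(IngestStatus.FAILED)
                           .message("Unable to bulk load these files: " + String.join(",", failedFiles))
                           .build();
        System.out.println(result.status);
    }
}

Deduplicating the shared builder calls keeps the two branches from drifting apart when a new common field (like batchId here) is added.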
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/SnowflakeTestArtifacts.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/SnowflakeTestArtifacts.java
index 616630967cb..1cb69388daf 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/SnowflakeTestArtifacts.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/SnowflakeTestArtifacts.java
@@ -17,10 +17,10 @@ public class SnowflakeTestArtifacts
 {
     public static String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\")" +
-        " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00',SYSDATE(),'DONE')";
+        " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000',SYSDATE(),'DONE')";

     public static String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\")" +
-        " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00',SYSDATE(),'DONE')";
+        " (SELECT 'MAIN',(SELECT COALESCE(MAX(BATCH_METADATA.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as BATCH_METADATA WHERE UPPER(BATCH_METADATA.\"TABLE_NAME\") = 'MAIN'),'2000-01-01 00:00:00.000000',SYSDATE(),'DONE')";

     public static String expectedMetadataTableCreateQuery = "CREATE TABLE IF NOT EXISTS batch_metadata" +
         "(\"table_name\" VARCHAR(255)," +
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadTest.java
index 32793a5c47d..70529e54d7e 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/BulkLoadTest.java
@@ -42,8 +42,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;

 import static org.finos.legend.engine.persistence.components.common.StatisticName.*;

@@ -54,7 +52,6 @@ public class BulkLoadTest
     private static Field col1 = Field.builder()
         .name("col_int")
         .type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty()))
-        .primaryKey(true)
         .build();
     private static Field col2 = Field.builder()
         .name("col_integer")
@@ -104,7 +101,8 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersDerived()
             .relationalSink(SnowflakeSink.get())
             .collectStatistics(true)
             .executionTimestampClock(fixedClock_2000_01_01)
-            .bulkLoadBatchIdValue("batch123")
+            .bulkLoadTaskIdValue("task123")
+            .batchIdPattern("{NEXT_BATCH_ID}")
             .build();

         GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset));
@@ -114,16 +112,16 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersDerived()
         List<String> metadataIngestSql = operations.metadataIngestSql();
         Map<StatisticName, String> statsSql = operations.postIngestStatisticsSql();

-        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_int\" INTEGER NOT NULL PRIMARY KEY,\"col_integer\" INTEGER,\"batch_id\" VARCHAR,\"append_time\" DATETIME)";
+        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_int\" INTEGER,\"col_integer\" INTEGER,\"batch_id\" INTEGER,\"append_time\" DATETIME)";

         String expectedIngestSql = "COPY INTO \"my_db\".\"my_name\" " +
             "(\"col_int\", \"col_integer\", \"batch_id\", \"append_time\") " +
             "FROM " +
-            "(SELECT legend_persistence_stage.$1 as \"col_int\",legend_persistence_stage.$2 as \"col_integer\",'batch123','2000-01-01 00:00:00' " +
+            "(SELECT legend_persistence_stage.$1 as \"col_int\",legend_persistence_stage.$2 as \"col_integer\",{NEXT_BATCH_ID},'2000-01-01 00:00:00.000000' " +
             "FROM my_location (FILE_FORMAT => 'my_file_format', PATTERN => '(/path/xyz/file1.csv)|(/path/xyz/file2.csv)') as legend_persistence_stage)" +
             " on_error = 'ABORT_STATEMENT'";

         String expectedMetadataIngestSql = "INSERT INTO bulk_load_batch_metadata (\"batch_id\", \"table_name\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\", \"batch_source_info\") " +
-            "(SELECT 'batch123','my_name','2000-01-01 00:00:00',SYSDATE(),'{BULK_LOAD_BATCH_STATUS_PLACEHOLDER}',PARSE_JSON('{\"files\":[\"/path/xyz/file1.csv\",\"/path/xyz/file2.csv\"]}'))";
+            "(SELECT {NEXT_BATCH_ID},'my_name','2000-01-01 00:00:00.000000',SYSDATE(),'{BULK_LOAD_BATCH_STATUS_PLACEHOLDER}',PARSE_JSON('{\"files\":[\"/path/xyz/file1.csv\",\"/path/xyz/file2.csv\"],\"task_id\":\"task123\"}'))";

         Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0));
         Assertions.assertEquals(expectedIngestSql, ingestSql.get(0));
@@ -132,7 +130,7 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersDerived()
         Assertions.assertEquals("SELECT 0 as \"rowsDeleted\"", statsSql.get(ROWS_DELETED));
         Assertions.assertEquals("SELECT 0 as \"rowsTerminated\"", statsSql.get(ROWS_TERMINATED));
         Assertions.assertEquals("SELECT 0 as \"rowsUpdated\"", statsSql.get(ROWS_UPDATED));
-        Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"my_db\".\"my_name\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED));
+        Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"my_db\".\"my_name\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00.000000'", statsSql.get(ROWS_INSERTED));
     }

     @Test
@@ -163,7 +161,7 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersProvided()
             .ingestMode(bulkLoad)
             .relationalSink(SnowflakeSink.get())
             .collectStatistics(true)
-            .bulkLoadBatchIdValue("batch123")
+            .bulkLoadTaskIdValue("task123")
             .build();

         GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset));
@@ -172,11 +170,11 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersProvided()
         List<String> ingestSql = operations.ingestSql();
         Map<StatisticName, String> statsSql = operations.postIngestStatisticsSql();

-        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_bigint\" BIGINT,\"col_variant\" VARIANT,\"batch_id\" VARCHAR)";
+        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_bigint\" BIGINT,\"col_variant\" VARIANT,\"batch_id\" INTEGER)";

         String expectedIngestSql = "COPY INTO \"my_db\".\"my_name\" " +
             "(\"col_bigint\", \"col_variant\", \"batch_id\") " +
             "FROM " +
-            "(SELECT t.$4 as \"col_bigint\",TO_VARIANT(PARSE_JSON(t.$5)) as \"col_variant\",'batch123' " +
+            "(SELECT t.$4 as \"col_bigint\",TO_VARIANT(PARSE_JSON(t.$5)) as \"col_variant\",(SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'MY_NAME') " +
             "FROM my_location (FILE_FORMAT => 'my_file_format', PATTERN => '(/path/xyz/file1.csv)|(/path/xyz/file2.csv)') as t) " +
             "on_error = 'ABORT_STATEMENT'";
@@ -189,7 +187,7 @@ public void testBulkLoadWithDigestNotGeneratedColumnNumbersProvided()
     }

     @Test
-    public void testBulkLoadWithUpperCaseConversionAndDefaultBatchId()
+    public void testBulkLoadWithUpperCaseConversionAndNoTaskId()
     {
         BulkLoad bulkLoad = BulkLoad.builder()
             .batchIdField("batch_id")
@@ -223,36 +221,33 @@ public void testBulkLoadWithUpperCaseConversionAndDefaultBatchId()

         List<String> preActionsSql = operations.preActionsSql();
         List<String> ingestSql = operations.ingestSql();
+        List<String> metadataIngestSql = operations.metadataIngestSql();
         Map<StatisticName, String> statsSql = operations.postIngestStatisticsSql();

-        // Extract the generated UUID
-        Pattern pattern = Pattern.compile("[a-f0-9]{8}(?:-[a-f0-9]{4}){4}[a-f0-9]{8}");
-        Matcher matcher = pattern.matcher(ingestSql.get(0));
-        String uuid = "";
-        if (matcher.find())
-        {
-            uuid = matcher.group();
-        }
-
-        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"MY_DB\".\"MY_NAME\"(\"COL_INT\" INTEGER NOT NULL PRIMARY KEY," +
-            "\"COL_INTEGER\" INTEGER,\"DIGEST\" VARCHAR,\"BATCH_ID\" VARCHAR,\"APPEND_TIME\" DATETIME)";
+        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"MY_DB\".\"MY_NAME\"(\"COL_INT\" INTEGER," +
+            "\"COL_INTEGER\" INTEGER,\"DIGEST\" VARCHAR,\"BATCH_ID\" INTEGER,\"APPEND_TIME\" DATETIME)";

         String expectedIngestSql = "COPY INTO \"MY_DB\".\"MY_NAME\" " +
             "(\"COL_INT\", \"COL_INTEGER\", \"DIGEST\", \"BATCH_ID\", \"APPEND_TIME\") " +
             "FROM " +
             "(SELECT legend_persistence_stage.$1 as \"COL_INT\",legend_persistence_stage.$2 as \"COL_INTEGER\"," +
             "LAKEHOUSE_MD5(OBJECT_CONSTRUCT('COL_INT',legend_persistence_stage.$1,'COL_INTEGER',legend_persistence_stage.$2))," +
-            "'%s','2000-01-01 00:00:00' " +
+            "(SELECT COALESCE(MAX(BULK_LOAD_BATCH_METADATA.\"BATCH_ID\"),0)+1 FROM BULK_LOAD_BATCH_METADATA as BULK_LOAD_BATCH_METADATA WHERE UPPER(BULK_LOAD_BATCH_METADATA.\"TABLE_NAME\") = 'MY_NAME'),'2000-01-01 00:00:00.000000' " +
             "FROM my_location (FILE_FORMAT => 'my_file_format', " +
             "PATTERN => '(/path/xyz/file1.csv)|(/path/xyz/file2.csv)') as legend_persistence_stage) " +
             "on_error = 'ABORT_STATEMENT'";

+        String expectedMetadataIngestSql = "INSERT INTO BULK_LOAD_BATCH_METADATA (\"BATCH_ID\", \"TABLE_NAME\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\", \"BATCH_SOURCE_INFO\") " +
+            "(SELECT (SELECT COALESCE(MAX(BULK_LOAD_BATCH_METADATA.\"BATCH_ID\"),0)+1 FROM BULK_LOAD_BATCH_METADATA as BULK_LOAD_BATCH_METADATA WHERE UPPER(BULK_LOAD_BATCH_METADATA.\"TABLE_NAME\") = 'MY_NAME')," +
+            "'MY_NAME','2000-01-01 00:00:00.000000',SYSDATE(),'{BULK_LOAD_BATCH_STATUS_PLACEHOLDER}',PARSE_JSON('{\"files\":[\"/path/xyz/file1.csv\",\"/path/xyz/file2.csv\"]}'))";
+
         Assertions.assertEquals(expectedCreateTableSql, preActionsSql.get(0));
-        Assertions.assertEquals(String.format(expectedIngestSql, uuid), ingestSql.get(0));
+        Assertions.assertEquals(expectedIngestSql, ingestSql.get(0));
+        Assertions.assertEquals(expectedMetadataIngestSql, metadataIngestSql.get(0));

         Assertions.assertEquals("SELECT 0 as \"ROWSDELETED\"", statsSql.get(ROWS_DELETED));
         Assertions.assertEquals("SELECT 0 as \"ROWSTERMINATED\"", statsSql.get(ROWS_TERMINATED));
         Assertions.assertEquals("SELECT 0 as \"ROWSUPDATED\"", statsSql.get(ROWS_UPDATED));
-        Assertions.assertEquals("SELECT COUNT(*) as \"ROWSINSERTED\" FROM \"MY_DB\".\"MY_NAME\" as my_alias WHERE my_alias.\"APPEND_TIME\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED));
+        Assertions.assertEquals("SELECT COUNT(*) as \"ROWSINSERTED\" FROM \"MY_DB\".\"MY_NAME\" as my_alias WHERE my_alias.\"APPEND_TIME\" = '2000-01-01 00:00:00.000000'", statsSql.get(ROWS_INSERTED));
     }

     @Test
@@ -317,7 +312,7 @@ public void testBulkLoadStagedFilesDatasetNotProvided()
             .relationalSink(SnowflakeSink.get())
             .collectStatistics(true)
             .executionTimestampClock(fixedClock_2000_01_01)
-            .bulkLoadBatchIdValue("batch123")
+            .bulkLoadTaskIdValue("batch123")
             .build();

         GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagingDataset));
@@ -357,7 +352,7 @@ public void testBulkLoadWithDigest()
             .relationalSink(SnowflakeSink.get())
             .collectStatistics(true)
             .executionTimestampClock(fixedClock_2000_01_01)
-            .bulkLoadBatchIdValue("batch123")
+            .bulkLoadTaskIdValue("task123")
             .build();

         GeneratorResult operations = generator.generateOperations(Datasets.of(mainDataset, stagedFilesDataset));
@@ -366,14 +361,14 @@ public void testBulkLoadWithDigest()
         List<String> ingestSql = operations.ingestSql();
         Map<StatisticName, String> statsSql = operations.postIngestStatisticsSql();

-        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_int\" INTEGER NOT NULL PRIMARY KEY,\"col_integer\" INTEGER,\"digest\" VARCHAR,\"batch_id\" VARCHAR,\"append_time\" DATETIME)";
+        String expectedCreateTableSql = "CREATE TABLE IF NOT EXISTS \"my_db\".\"my_name\"(\"col_int\" INTEGER,\"col_integer\" INTEGER,\"digest\" VARCHAR,\"batch_id\" INTEGER,\"append_time\" DATETIME)";

         String expectedIngestSql = "COPY INTO \"my_db\".\"my_name\" " +
             "(\"col_int\", \"col_integer\", \"digest\", \"batch_id\", \"append_time\") " +
             "FROM " +
             "(SELECT legend_persistence_stage.$1 as \"col_int\",legend_persistence_stage.$2 as \"col_integer\"," +
             "LAKEHOUSE_UDF(OBJECT_CONSTRUCT('col_int',legend_persistence_stage.$1,'col_integer',legend_persistence_stage.$2))," +
-            "'batch123','2000-01-01 00:00:00' " +
+            "(SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'MY_NAME'),'2000-01-01 00:00:00.000000' " +
             "FROM my_location (FILE_FORMAT => 'my_file_format', " +
             "PATTERN => '(/path/xyz/file1.csv)|(/path/xyz/file2.csv)') as legend_persistence_stage) " +
             "on_error = 'ABORT_STATEMENT'";
@@ -384,6 +379,6 @@ public void testBulkLoadWithDigest()
         Assertions.assertEquals("SELECT 0 as \"rowsDeleted\"", statsSql.get(ROWS_DELETED));
         Assertions.assertEquals("SELECT 0 as \"rowsTerminated\"", statsSql.get(ROWS_TERMINATED));
         Assertions.assertEquals("SELECT 0 as \"rowsUpdated\"", statsSql.get(ROWS_UPDATED));
-        Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"my_db\".\"my_name\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00'", statsSql.get(ROWS_INSERTED));
+        Assertions.assertEquals("SELECT COUNT(*) as \"rowsInserted\" FROM \"my_db\".\"my_name\" as my_alias WHERE my_alias.\"append_time\" = '2000-01-01 00:00:00.000000'", statsSql.get(ROWS_INSERTED));
     }
 }
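The TRANSFORM_WHILE_COPY capability registered on SnowflakeSink corresponds to COPY INTO statements whose FROM clause is a sub-select over positional staged columns ($1, $2, ...), as asserted throughout BulkLoadTest. A sketch that renders that statement shape for a simple column list (the table, location, and file-format names are placeholders, and identifiers are assumed pre-validated):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Renders a transform-while-copy COPY INTO: each target column is projected
// from a positional stage column in a sub-select.
public final class TransformWhileCopySketch
{
    static String copyInto(String targetTable, List<String> columns, String location, String fileFormat)
    {
        StringBuilder projection = new StringBuilder();
        for (int i = 0; i < columns.size(); i++)
        {
            if (i > 0)
            {
                projection.append(',');
            }
            projection.append("legend_persistence_stage.$").append(i + 1).append(" as \"").append(columns.get(i)).append('"');
        }
        String columnList = columns.stream().map(c -> "\"" + c + "\"").collect(Collectors.joining(", "));
        return "COPY INTO " + targetTable + " (" + columnList + ") FROM "
            + "(SELECT " + projection + " FROM " + location + " (FILE_FORMAT => '" + fileFormat + "') as legend_persistence_stage)"
            + " on_error = 'ABORT_STATEMENT'";
    }

    public static void main(String[] args)
    {
        System.out.println(copyInto("\"my_db\".\"my_name\"", Arrays.asList("col_int", "col_integer"), "my_location", "my_file_format"));
    }
}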
b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/IngestModeTest.java @@ -52,7 +52,7 @@ public class IngestModeTest String[] partitionKeys = new String[]{"biz_date"}; HashMap> partitionFilter = new HashMap>() {{ - put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00", "2000-01-02 00:00:00"))); + put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00.000000", "2000-01-02 00:00:00"))); }}; // Base Columns: Primary keys : id, name @@ -104,10 +104,10 @@ public class IngestModeTest "\"TABLE_BATCH_ID\" INTEGER)"; protected String expectedMetadataTableIngestQuery = "INSERT INTO batch_metadata (\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\")" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00',SYSDATE(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN'),'2000-01-01 00:00:00.000000',SYSDATE(),'DONE')"; protected String expectedMetadataTableIngestQueryWithUpperCase = "INSERT INTO BATCH_METADATA (\"TABLE_NAME\", \"TABLE_BATCH_ID\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\")" + - " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.\"TABLE_NAME\" = 'main'),'2000-01-01 00:00:00',SYSDATE(),'DONE')"; + " (SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"TABLE_BATCH_ID\"),0)+1 FROM BATCH_METADATA as batch_metadata WHERE batch_metadata.\"TABLE_NAME\" = 'main'),'2000-01-01 00:00:00.000000',SYSDATE(),'DONE')"; String expectedMainTableCreateQuery = "CREATE TABLE IF NOT EXISTS \"mydb\".\"main\"" + diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaMergeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaMergeTest.java index 0c915338f49..49ffe9bfc6e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaMergeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/NontemporalDeltaMergeTest.java @@ -35,7 +35,7 @@ public RelationalSink getRelationalSink() } @Override - public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operations) + public void verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -65,13 +65,13 @@ public void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operatio } @Override - public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult 
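Note: the '00:00:00' to '00:00:00.000000' churn across these expected strings is a single change — audit and batch timestamps are now rendered with microsecond precision. A self-contained sketch of how the fixed test clock yields exactly that literal; the formatter pattern is illustrative, since the generator's actual formatter is not shown in this patch:

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class MicrosecondTimestampSketch
{
    public static void main(String[] args)
    {
        // Six fractional digits -> '2000-01-01 00:00:00.000000', matching the new literals.
        DateTimeFormatter micros = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");
        System.out.println(LocalDateTime.of(2000, 1, 1, 0, 0, 0).format(micros));
    }
}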
operations) + public void verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String mergeSql = "MERGE INTO \"mydb\".\"main\" as sink " + - "USING \"mydb\".\"staging\" as stage " + + "USING \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "ON (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\") " + "WHEN MATCHED AND sink.\"digest\" <> stage.\"digest\" " + "THEN UPDATE SET " + @@ -80,10 +80,10 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat "sink.\"amount\" = stage.\"amount\"," + "sink.\"biz_date\" = stage.\"biz_date\"," + "sink.\"digest\" = stage.\"digest\"," + - "sink.\"batch_update_time\" = '2000-01-01 00:00:00' " + + "sink.\"batch_update_time\" = '2000-01-01 00:00:00.000000' " + "WHEN NOT MATCHED THEN INSERT " + "(\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "VALUES (stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00')"; + "VALUES (stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000')"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, preActionsSqlList.get(0)); Assertions.assertEquals(mergeSql, milestoningSqlList.get(0)); @@ -95,7 +95,31 @@ public void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operat } @Override - public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(List operations, List dataSplitRanges) + { + String mergeSql = "MERGE INTO \"mydb\".\"main\" as sink " + + "USING (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + + "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + + "as stage ON (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\") " + + "WHEN MATCHED AND sink.\"digest\" <> stage.\"digest\" " + + "THEN UPDATE SET sink.\"id\" = stage.\"id\",sink.\"name\" = stage.\"name\",sink.\"amount\" = stage.\"amount\",sink.\"biz_date\" = stage.\"biz_date\",sink.\"digest\" = stage.\"digest\" " + + "WHEN NOT MATCHED " + + "THEN INSERT (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\") " + + "VALUES (stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\")"; + + Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestCreateQuery, operations.get(0).preActionsSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); + Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); + + // Stats + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStagingTable, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsTempStagingTable, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(rowsTerminated, 
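Two patterns recur in the rewritten verifiers above: whenever deduplication or versioning is performed, the MERGE reads from "staging_legend_persistence_temp_staging" instead of the raw staging table, and under AllVersion a single MERGE template is instantiated once per data-split range by substituting the bound placeholders. BaseTest's real enrichSqlWithDataSplits(String, DataSplitRange) performs that substitution in the assertions; a plausible stand-in (not the actual implementation) looks like:

// Hypothetical stand-in for BaseTest.enrichSqlWithDataSplits: swap the two bound
// placeholders for the concrete range driving this pass over the template.
static String enrichSqlWithDataSplits(String sql, long lowerBound, long upperBound)
{
    return sql
        .replace("{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}", String.valueOf(lowerBound))
        .replace("{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}", String.valueOf(upperBound));
}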
operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); + Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); + } + + @Override + public void verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(List operations, List dataSplitRanges) { String mergeSql = "MERGE INTO \"mydb\".\"main\" as sink " + "USING (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage " + @@ -119,31 +143,31 @@ public void verifyNonTemporalDeltaNoAuditingWithDataSplit(List } @Override - public void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List operations, List dataSplitRanges) + public void verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(List operations, List dataSplitRanges) { String mergeSql = "MERGE INTO \"mydb\".\"main\" as sink " + - "USING (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging\" as stage " + + "USING (SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\" FROM \"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "WHERE (stage.\"data_split\" >= '{DATA_SPLIT_LOWER_BOUND_PLACEHOLDER}') AND (stage.\"data_split\" <= '{DATA_SPLIT_UPPER_BOUND_PLACEHOLDER}')) " + "as stage ON (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\") " + "WHEN MATCHED AND sink.\"digest\" <> stage.\"digest\" " + - "THEN UPDATE SET sink.\"id\" = stage.\"id\",sink.\"name\" = stage.\"name\",sink.\"amount\" = stage.\"amount\",sink.\"biz_date\" = stage.\"biz_date\",sink.\"digest\" = stage.\"digest\",sink.\"batch_update_time\" = '2000-01-01 00:00:00' " + + "THEN UPDATE SET sink.\"id\" = stage.\"id\",sink.\"name\" = stage.\"name\",sink.\"amount\" = stage.\"amount\",sink.\"biz_date\" = stage.\"biz_date\",sink.\"digest\" = stage.\"digest\",sink.\"batch_update_time\" = '2000-01-01 00:00:00.000000' " + "WHEN NOT MATCHED " + "THEN INSERT (\"id\", \"name\", \"amount\", \"biz_date\", \"digest\", \"batch_update_time\") " + - "VALUES (stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00')"; + "VALUES (stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",'2000-01-01 00:00:00.000000')"; Assertions.assertEquals(AnsiTestArtifacts.expectedBaseTablePlusDigestPlusUpdateTimestampCreateQuery, operations.get(0).preActionsSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(0)), operations.get(0).ingestSql().get(0)); Assertions.assertEquals(enrichSqlWithDataSplits(mergeSql, dataSplitRanges.get(1)), operations.get(1).ingestSql().get(0)); // Stats - Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); - Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplits, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsWithDuplicates, dataSplitRanges.get(0)), operations.get(0).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); + Assertions.assertEquals(enrichSqlWithDataSplits(incomingRecordCountWithSplitsWithDuplicates, dataSplitRanges.get(1)), operations.get(1).postIngestStatisticsSql().get(StatisticName.INCOMING_RECORD_COUNT)); 
Assertions.assertEquals(rowsTerminated, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_TERMINATED)); Assertions.assertEquals(rowsDeleted, operations.get(0).postIngestStatisticsSql().get(StatisticName.ROWS_DELETED)); } @Override - public void verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(GeneratorResult operations) + public void verifyNontemporalDeltaNoAuditingWithDeleteIndicatorNoDedupNoVersioning(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -242,15 +266,14 @@ public void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult } @Override - public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(GeneratorResult operations) + public void verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String mergeSql = "MERGE INTO \"mydb\".\"main\" as sink " + "USING " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\" FROM " + - "(SELECT stage.\"id\",stage.\"name\",stage.\"amount\",stage.\"biz_date\",stage.\"digest\",stage.\"version\",ROW_NUMBER() OVER (PARTITION BY stage.\"id\",stage.\"name\" ORDER BY stage.\"version\" DESC) as \"legend_persistence_row_num\" FROM \"mydb\".\"staging\" as stage WHERE stage.\"snapshot_id\" > 18972) as stage WHERE stage.\"legend_persistence_row_num\" = 1) as stage " + + "\"mydb\".\"staging_legend_persistence_temp_staging\" as stage " + "ON (sink.\"id\" = stage.\"id\") AND (sink.\"name\" = stage.\"name\") " + "WHEN MATCHED AND stage.\"version\" > sink.\"version\" " + "THEN UPDATE SET sink.\"id\" = stage.\"id\",sink.\"name\" = stage.\"name\",sink.\"amount\" = stage.\"amount\",sink.\"biz_date\" = stage.\"biz_date\",sink.\"digest\" = stage.\"digest\",sink.\"version\" = stage.\"version\" " + @@ -267,7 +290,7 @@ public void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(Ge } @Override - public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -291,7 +314,7 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(Gene } @Override - public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); @@ -314,15 +337,14 @@ public void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters( } @Override - public void verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations) + public void verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(GeneratorResult operations) { List preActionsSqlList = operations.preActionsSql(); List milestoningSqlList = operations.ingestSql(); String mergeSql = "MERGE INTO \"MYDB\".\"MAIN\" as sink " + "USING " + - "(SELECT stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\" FROM " + - "(SELECT 
stage.\"ID\",stage.\"NAME\",stage.\"AMOUNT\",stage.\"BIZ_DATE\",stage.\"DIGEST\",stage.\"VERSION\",ROW_NUMBER() OVER (PARTITION BY stage.\"ID\",stage.\"NAME\" ORDER BY stage.\"VERSION\" DESC) as \"LEGEND_PERSISTENCE_ROW_NUM\" FROM \"MYDB\".\"STAGING\" as stage) as stage WHERE stage.\"LEGEND_PERSISTENCE_ROW_NUM\" = 1) as stage " + + "\"MYDB\".\"STAGING_LEGEND_PERSISTENCE_TEMP_STAGING\" as stage " + "ON (sink.\"ID\" = stage.\"ID\") AND (sink.\"NAME\" = stage.\"NAME\") " + "WHEN MATCHED AND stage.\"VERSION\" >= sink.\"VERSION\" " + "THEN UPDATE SET sink.\"ID\" = stage.\"ID\",sink.\"NAME\" = stage.\"NAME\",sink.\"AMOUNT\" = stage.\"AMOUNT\",sink.\"BIZ_DATE\" = stage.\"BIZ_DATE\",sink.\"DIGEST\" = stage.\"DIGEST\",sink.\"VERSION\" = stage.\"VERSION\" " + diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java index c6025c62f8f..02e3c0512a8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/ingestmode/UnitemporalDeltaBatchIdBasedTest.java @@ -56,7 +56,7 @@ protected String getExpectedMetadataTableIngestQueryWithStagingFilters(String st "(\"table_name\", \"table_batch_id\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\", \"staging_filters\") " + "(SELECT 'main',(SELECT COALESCE(MAX(batch_metadata.\"table_batch_id\"),0)+1 FROM batch_metadata as batch_metadata " + "WHERE UPPER(batch_metadata.\"table_name\") = 'MAIN')," + - "'2000-01-01 00:00:00',SYSDATE(),'DONE'," + + "'2000-01-01 00:00:00.000000',SYSDATE(),'DONE'," + String.format("PARSE_JSON('%s'))", stagingFilters); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsSnowflakeTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsSnowflakeTest.java index 8ad9c6351ef..820d4783d74 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsSnowflakeTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-snowflake/src/test/java/org/finos/legend/engine/persistence/components/util/BulkLoadDatasetUtilsSnowflakeTest.java @@ -24,13 +24,13 @@ public String getExpectedSqlForMetadata() { return "INSERT INTO bulk_load_batch_metadata " + "(\"batch_id\", \"table_name\", \"batch_start_ts_utc\", \"batch_end_ts_utc\", \"batch_status\", 
\"batch_source_info\") " + - "(SELECT 'batch_id_123','appeng_log_table_name','2000-01-01 00:00:00',SYSDATE(),'',PARSE_JSON('my_lineage_value'))"; + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.\"batch_id\"),0)+1 FROM bulk_load_batch_metadata as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"table_name\") = 'APPENG_LOG_TABLE_NAME'),'appeng_log_table_name','2000-01-01 00:00:00.000000',SYSDATE(),'',PARSE_JSON('my_lineage_value'))"; } public String getExpectedSqlForMetadataUpperCase() { return "INSERT INTO BULK_LOAD_BATCH_METADATA (\"BATCH_ID\", \"TABLE_NAME\", \"BATCH_START_TS_UTC\", \"BATCH_END_TS_UTC\", \"BATCH_STATUS\", \"BATCH_SOURCE_INFO\") " + - "(SELECT 'batch_id_123','BULK_LOAD_TABLE_NAME','2000-01-01 00:00:00',SYSDATE(),'',PARSE_JSON('my_lineage_value'))"; + "(SELECT (SELECT COALESCE(MAX(bulk_load_batch_metadata.\"BATCH_ID\"),0)+1 FROM BULK_LOAD_BATCH_METADATA as bulk_load_batch_metadata WHERE UPPER(bulk_load_batch_metadata.\"TABLE_NAME\") = 'BULK_LOAD_TABLE_NAME'),'BULK_LOAD_TABLE_NAME','2000-01-01 00:00:00.000000',SYSDATE(),'',PARSE_JSON('my_lineage_value'))"; } public RelationalSink getRelationalSink() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/pom.xml index a98253b12b6..1e5352d9da8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/pom.xml @@ -15,7 +15,7 @@ org.finos.legend.engine legend-engine-xt-persistence-component - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/BaseTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/BaseTest.java index 7ba5fea6187..adf0d700745 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/BaseTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/BaseTest.java @@ -75,6 +75,7 @@ public class BaseTest protected String digestField = "digest"; protected String versionField = "version"; + protected String bizDateField = "biz_date"; protected String snapshotIdField = "snapshot_id"; protected String dataSplitField = "data_split"; protected String batchUpdateTimeField = "batch_update_time"; @@ -95,7 +96,6 @@ public class BaseTest {{ put("biz_date", new HashSet<>(Arrays.asList("2000-01-01 00:00:00", "2000-01-02 00:00:00"))); }}; - protected String[] bitemporalPartitionKeys = new String[]{validityFromReferenceField}; // Base Columns: Primary keys : id, name protected Field id = Field.builder().name("id").type(FieldType.of(DataType.INT, Optional.empty(), Optional.empty())).primaryKey(true).build(); @@ -154,48 +154,12 @@ public class BaseTest .addFields(bizDate) .build(); - 
protected SchemaDefinition baseTableSchemaWithDataSplit = SchemaDefinition.builder() - .addFields(id) - .addFields(name) - .addFields(amount) - .addFields(bizDate) - .addFields(dataSplit) - .build(); - protected SchemaDefinition baseTableShortenedSchema = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .build(); - protected SchemaDefinition stagingTableEvolvedSize = SchemaDefinition.builder() - .addFields(id) - .addFields(nameModified) - .addFields(amount) - .addFields(bizDate) - .build(); - - protected SchemaDefinition stagingTableImplicitDatatypeChange = SchemaDefinition.builder() - .addFields(id) - .addFields(name) - .addFields(floatAmount) - .addFields(bizDate) - .build(); - - protected SchemaDefinition stagingTableNonBreakingDatatypeChange = SchemaDefinition.builder() - .addFields(tinyIntId) - .addFields(name) - .addFields(amount) - .addFields(bizDate) - .build(); - - protected SchemaDefinition stagingTableBreakingDatatypeChange = SchemaDefinition.builder() - .addFields(tinyIntString) - .addFields(name) - .addFields(amount) - .addFields(bizDate) - .build(); - protected SchemaDefinition mainTableSchemaWithBatchIdAndTime = SchemaDefinition.builder() .addFields(id) .addFields(name) @@ -331,15 +295,6 @@ public class BaseTest .addFields(batchUpdateTime) .build(); - protected SchemaDefinition baseTableSchemaWithUpdateBatchTimeFieldNotPk = SchemaDefinition.builder() - .addFields(id) - .addFields(name) - .addFields(amount) - .addFields(bizDate) - .addFields(digest) - .addFields(batchUpdateTimeNonPK) - .build(); - protected SchemaDefinition stagingTableSchemaWithLimitedColumns = SchemaDefinition.builder() .addFields(id) .addFields(name) @@ -356,16 +311,6 @@ public class BaseTest .addFields(deleteIndicator) .build(); - protected SchemaDefinition stagingTableSchemaWithDeleteIndicatorWithDataSplit = SchemaDefinition.builder() - .addFields(id) - .addFields(name) - .addFields(amount) - .addFields(bizDate) - .addFields(digest) - .addFields(deleteIndicator) - .addFields(dataSplit) - .build(); - protected SchemaDefinition stagingTableSchemaWithBooleanDeleteIndicator = SchemaDefinition.builder() .addFields(id) .addFields(name) @@ -386,11 +331,12 @@ public class BaseTest .addFields(validityThroughTarget) .build(); - protected SchemaDefinition bitemporalMainTableSchemaWithBatchIdAndTime = SchemaDefinition.builder() + protected SchemaDefinition bitemporalMainTableSchemaWithVersionBatchIdAndTime = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(digest) + .addFields(version) .addFields(batchIdIn) .addFields(batchIdOut) .addFields(batchTimeInNonPrimary) @@ -399,11 +345,12 @@ public class BaseTest .addFields(validityThroughTarget) .build(); - protected SchemaDefinition bitemporalMainTableSchemaWithDateTime = SchemaDefinition.builder() + protected SchemaDefinition bitemporalMainTableSchemaWithVersionBatchDateTime = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(digest) + .addFields(version) .addFields(batchTimeIn) .addFields(batchTimeOut) .addFields(validityFromTarget) @@ -421,6 +368,18 @@ public class BaseTest .addFields(validityThroughTarget) .build(); + protected SchemaDefinition bitemporalFromOnlyMainTableWithVersionSchema = SchemaDefinition.builder() + .addFields(id) + .addFields(name) + .addFields(amount) + .addFields(digest) + .addFields(version) + .addFields(batchIdIn) + .addFields(batchIdOut) + .addFields(validityFromTarget) + .addFields(validityThroughTarget) + .build(); + protected 
SchemaDefinition bitemporalFromOnlyMainTableBatchIdAndTimeBasedSchema = SchemaDefinition.builder() .addFields(id) .addFields(name) @@ -454,13 +413,14 @@ public class BaseTest .addFields(digest) .build(); - protected SchemaDefinition bitemporalStagingTableSchemaWithDataSplit = SchemaDefinition.builder() + protected SchemaDefinition bitemporalStagingTableSchemaWithVersionWithDataSplit = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(validityFromReference) .addFields(validityThroughReference) .addFields(digest) + .addFields(version) .addFields(dataSplit) .build(); @@ -474,13 +434,14 @@ public class BaseTest .addFields(deleteIndicator) .build(); - protected SchemaDefinition bitemporalStagingTableSchemaWithDeleteIndicatorAndDataSplit = SchemaDefinition.builder() + protected SchemaDefinition bitemporalStagingTableSchemaWithDeleteIndicatorVersionAndDataSplit = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(validityFromReference) .addFields(validityThroughReference) .addFields(digest) + .addFields(version) .addFields(dataSplit) .addFields(deleteIndicator) .build(); @@ -493,12 +454,13 @@ public class BaseTest .addFields(digest) .build(); - protected SchemaDefinition bitemporalFromOnlyStagingTableSchemaWithDataSplit = SchemaDefinition.builder() + protected SchemaDefinition bitemporalFromOnlyStagingTableSchemaWithVersionWithDataSplit = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(validityFromReference) .addFields(digest) + .addFields(version) .addFields(dataSplit) .build(); @@ -511,12 +473,13 @@ public class BaseTest .addFields(deleteIndicator) .build(); - protected SchemaDefinition bitemporalFromOnlyStagingTableSchemaWithDeleteIndicatorWithDataSplit = SchemaDefinition.builder() + protected SchemaDefinition bitemporalFromOnlyStagingTableSchemaWithDeleteIndicatorWithVersionWithDataSplit = SchemaDefinition.builder() .addFields(id) .addFields(name) .addFields(amount) .addFields(validityFromReference) .addFields(digest) + .addFields(version) .addFields(deleteIndicator) .addFields(dataSplit) .build(); @@ -532,6 +495,18 @@ public class BaseTest .addFields(validityThroughTarget) .build(); + protected SchemaDefinition bitemporalFromOnlyTempTableWithVersionSchema = SchemaDefinition.builder() + .addFields(id) + .addFields(name) + .addFields(amount) + .addFields(digest) + .addFields(version) + .addFields(batchIdIn) + .addFields(batchIdOut) + .addFields(validityFromTarget) + .addFields(validityThroughTarget) + .build(); + protected SchemaDefinition bitemporalFromOnlyTempTableWithDeleteIndicatorSchema = SchemaDefinition.builder() .addFields(id) .addFields(name) @@ -652,21 +627,11 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(baseTableSchemaWithUpdateBatchTimeField) .build(); - protected Dataset mainTableWithBaseSchemaHavingAuditFieldNotPk = DatasetDefinition.builder() - .database(mainDbName).name(mainTableName).alias(mainTableAlias) - .schema(baseTableSchemaWithUpdateBatchTimeFieldNotPk) - .build(); - protected Dataset stagingTableWithBaseSchemaHavingDigestAndDataSplit = DatasetDefinition.builder() .database(stagingDbName).name(stagingTableName).alias(stagingTableAlias) .schema(baseTableSchemaWithDigestAndDataSplit) .build(); - protected Dataset stagingTableWithBaseSchemaHavingDataSplit = DatasetDefinition.builder() - .database(stagingDbName).name(stagingTableName).alias(stagingTableAlias) - .schema(baseTableSchemaWithDataSplit) - .build(); - 
protected Dataset mainTableWithBatchIdBasedSchema = DatasetDefinition.builder() .database(mainDbName).name(mainTableName).alias(mainTableAlias) .schema(mainTableBatchIdBasedSchema) @@ -692,13 +657,6 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(stagingTableSchemaWithBooleanDeleteIndicator) .build(); - protected Dataset stagingTableWithDeleteIndicatorWithDataSplit = DatasetDefinition.builder() - .database(stagingDbName) - .name(stagingTableName) - .alias(stagingTableAlias) - .schema(stagingTableSchemaWithDeleteIndicatorWithDataSplit) - .build(); - protected Dataset mainTableWithBatchIdAndTime = DatasetDefinition.builder() .database(mainDbName).name(mainTableName).alias(mainTableAlias) .schema(mainTableSchemaWithBatchIdAndTime) @@ -714,9 +672,9 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalMainTableSchema) .build(); - protected Dataset mainTableWithBitemporalSchemaWithDateTime = DatasetDefinition.builder() + protected Dataset mainTableWithBitemporalSchemaWithVersionBatchDateTime = DatasetDefinition.builder() .database(mainDbName).name(mainTableName).alias(mainTableAlias) - .schema(bitemporalMainTableSchemaWithDateTime) + .schema(bitemporalMainTableSchemaWithVersionBatchDateTime) .build(); protected Dataset stagingTableWithBitemporalSchema = DatasetDefinition.builder() @@ -731,21 +689,21 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalStagingTableSchemaWithDeleteIndicator) .build(); - protected Dataset stagingTableWithBitemporalSchemaWithDeleteIndicatorAndDataSplit = DatasetDefinition.builder() + protected Dataset stagingTableWithBitemporalSchemaWithDeleteIndicatorVersionAndDataSplit = DatasetDefinition.builder() .database(stagingDbName) .name(stagingTableName) .alias(stagingTableAlias) - .schema(bitemporalStagingTableSchemaWithDeleteIndicatorAndDataSplit) + .schema(bitemporalStagingTableSchemaWithDeleteIndicatorVersionAndDataSplit) .build(); - protected Dataset mainTableWithBitemporalSchemaWithBatchIdAndTime = DatasetDefinition.builder() + protected Dataset mainTableWithBitemporalSchemaWithVersionBatchIdAndTime = DatasetDefinition.builder() .database(mainDbName).name(mainTableName).alias(mainTableAlias) - .schema(bitemporalMainTableSchemaWithBatchIdAndTime) + .schema(bitemporalMainTableSchemaWithVersionBatchIdAndTime) .build(); - protected Dataset stagingTableWithBitemporalSchemaWithDataSplit = DatasetDefinition.builder() + protected Dataset stagingTableWithBitemporalSchemaWithVersionWithDataSplit = DatasetDefinition.builder() .database(stagingDbName).name(stagingTableName).alias(stagingTableAlias) - .schema(bitemporalStagingTableSchemaWithDataSplit) + .schema(bitemporalStagingTableSchemaWithVersionWithDataSplit) .build(); protected DatasetDefinition mainTableWithBitemporalFromOnlySchema = DatasetDefinition.builder() @@ -755,6 +713,13 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalFromOnlyMainTableSchema) .build(); + protected DatasetDefinition mainTableWithBitemporalFromOnlyWithVersionSchema = DatasetDefinition.builder() + .database(mainDbName) + .name(mainTableName) + .alias(mainTableAlias) + .schema(bitemporalFromOnlyMainTableWithVersionSchema) + .build(); + protected DatasetDefinition mainTableWithBitemporalFromOnlyWithBatchIdAndTimeBasedSchema = DatasetDefinition.builder() .database(mainDbName) .name(mainTableName) @@ -790,11 +755,11 @@ protected String 
enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalFromOnlyStagingTableSchema) .build(); - protected DatasetDefinition stagingTableWithBitemporalFromOnlySchemaWithDataSplit = DatasetDefinition.builder() + protected DatasetDefinition stagingTableWithBitemporalFromOnlySchemaWithVersionWithDataSplit = DatasetDefinition.builder() .database(stagingDbName) .name(stagingTableName) .alias(stagingTableAlias) - .schema(bitemporalFromOnlyStagingTableSchemaWithDataSplit) + .schema(bitemporalFromOnlyStagingTableSchemaWithVersionWithDataSplit) .build(); protected DatasetDefinition tempTableWithBitemporalFromOnlySchema = DatasetDefinition.builder() @@ -804,6 +769,13 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalFromOnlyTempTableSchema) .build(); + protected DatasetDefinition tempTableWithBitemporalFromOnlyWithVersionSchema = DatasetDefinition.builder() + .database(tempDbName) + .name(tempTableName) + .alias(tempTableAlias) + .schema(bitemporalFromOnlyTempTableWithVersionSchema) + .build(); + protected DatasetDefinition stagingTableWithBitemporalFromOnlySchemaWithDeleteInd = DatasetDefinition.builder() .database(stagingDbName) .name(stagingTableName) @@ -811,11 +783,11 @@ protected String enrichSqlWithDataSplits(String sql, DataSplitRange dataSplitRan .schema(bitemporalFromOnlyStagingTableSchemaWithDeleteIndicator) .build(); - protected DatasetDefinition stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithDataSplit = DatasetDefinition.builder() + protected DatasetDefinition stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithVersionWithDataSplit = DatasetDefinition.builder() .database(stagingDbName) .name(stagingTableName) .alias(stagingTableAlias) - .schema(bitemporalFromOnlyStagingTableSchemaWithDeleteIndicatorWithDataSplit) + .schema(bitemporalFromOnlyStagingTableSchemaWithDeleteIndicatorWithVersionWithDataSplit) .build(); protected DatasetDefinition stagingTableBitemporalWithoutDuplicates = DatasetDefinition.builder() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/AppendOnlyScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/AppendOnlyScenarios.java index 71a68981a46..3c71d88d913 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/AppendOnlyScenarios.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/AppendOnlyScenarios.java @@ -21,8 +21,10 @@ import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; - -import java.util.Optional; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import 
org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy; public class AppendOnlyScenarios extends BaseTest { @@ -31,123 +33,198 @@ public class AppendOnlyScenarios extends BaseTest Test Scenarios for Non-temporal Delta Variables: 1) Auditing: No Auditing, With Auditing - 2) DataSplit: Enabled, Disabled - 3) DeduplicationStrategy: Allow_Duplicates, Filter Duplicates, Fail on Duplicates - - Valid Combinations: - 1) Allow_Duplicates, No Auditing - 2) Allow_Duplicates, With Auditing - 3) Allow_Duplicates, With Auditing, With Data Splits + 2) Versioning: NoVersion, MaxVersion, AllVersion + 3) Deduplication: Allow Duplicates, Filter Duplicates, Fail on Duplicates + 4) filterExistingRecords: true / false - 4) Fail on Duplicates, No Auditing - 5) Fail on Duplicates, With Auditing - 6) Filter Duplicates, No Auditing - 7) Filter Duplicates, With Auditing - 8) Filter Duplicates, With Auditing, With Data Splits - - Invalid Combinations - 1) Any Deduplication Strategy, No Auditing, With Data Splits - 2) Fail on Duplicates, With Data Splits + Valid Combinations: + NoVersion: + 1) With Auditing, NoVersion, Allow Duplicates, true + 2) With Auditing, NoVersion, Filter Duplicates, true - tested (perform deduplication, auditing, filter existing) + 3) With Auditing, NoVersion, Fail on Duplicates, true + 4) No Auditing, NoVersion, Allow Duplicates, false - tested (the most basic case) + 5) With Auditing, NoVersion, Allow Duplicates, false + 6) No Auditing, NoVersion, Filter Duplicates, false + 7) With Auditing, NoVersion, Filter Duplicates, false + 8) No Auditing, NoVersion, Fail on Duplicates, false + 9) With Auditing, NoVersion, Fail on Duplicates, false + + MaxVersion: + 10) With Auditing, MaxVersion, Allow Duplicates, true + 11) With Auditing, MaxVersion, Filter Duplicates, true + 12) With Auditing, MaxVersion, Fail on Duplicates, true - tested (perform deduplication and versioning, auditing, filter existing) + 13) With Auditing, MaxVersion, Allow Duplicates, false + 14) With Auditing, MaxVersion, Filter Duplicates, false - tested (perform deduplication and versioning, auditing) + 15) With Auditing, MaxVersion, Fail on Duplicates, false + + AllVersion: + 16) With Auditing, AllVersion, Allow Duplicates, true + 17) With Auditing, AllVersion, Filter Duplicates, true - tested (perform deduplication and versioning, data split, auditing, filter existing) + 18) With Auditing, AllVersion, Fail on Duplicates, true + 19) With Auditing, AllVersion, Allow Duplicates, false + 20) With Auditing, AllVersion, Filter Duplicates, false + 21) With Auditing, AllVersion, Fail on Duplicates, false - tested (perform deduplication and versioning, data split, auditing) + + + Invalid Combinations: + NoAuditing + MaxVersion/AllVersion: + 22) No Auditing, MaxVersion, Allow Duplicates, true + 23) No Auditing, MaxVersion, Filter Duplicates, true + 24) No Auditing, MaxVersion, Fail on Duplicates, true + 25) No Auditing, MaxVersion, Allow Duplicates, false + 26) No Auditing, MaxVersion, Filter Duplicates, false + 27) No Auditing, MaxVersion, Fail on Duplicates, false + 28) No Auditing, AllVersion, Allow Duplicates, true + 29) No Auditing, AllVersion, Filter Duplicates, true + 30) No Auditing, AllVersion, Fail on Duplicates, true + 31) No Auditing, AllVersion, Allow Duplicates, false + 32) No Auditing, AllVersion, Filter Duplicates, false - tested + 33) No Auditing, AllVersion, Fail on Duplicates, false + + 
NoAuditing + filterExistingRecords + 34) No Auditing, NoVersion, Allow Duplicates, true - tested + 35) No Auditing, NoVersion, Filter Duplicates, true + 36) No Auditing, NoVersion, Fail on Duplicates, true */ - public TestScenario ALLOW_DUPLICATES_NO_AUDITING() + public TestScenario NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() .digestField(digestField) .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) .auditing(NoAuditing.builder().build()) + .filterExistingRecords(false) .build(); return new TestScenario(mainTableWithNoPrimaryKeys, stagingTableWithNoPrimaryKeys, ingestMode); } - public TestScenario ALLOW_DUPLICATES_NO_AUDITING_DERIVE_MAIN_SCHEMA() + public TestScenario NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS__DERIVE_MAIN_SCHEMA() { - TestScenario scenario = ALLOW_DUPLICATES_NO_AUDITING(); + TestScenario scenario = NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS(); scenario.setMainTable(mainTableWithNoFields); return scenario; } - public TestScenario ALLOW_DUPLICATES_WITH_AUDITING() + public TestScenario WITH_AUDITING__FILTER_DUPS__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestField) - .deduplicationStrategy(AllowDuplicates.builder().build()) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) - .build(); - return new TestScenario(mainTableWithNoPrimaryKeysHavingAuditField, stagingTableWithNoPrimaryKeys, ingestMode); + .digestField(digestField) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .filterExistingRecords(true) + .build(); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario ALLOW_DUPLICATES_WITH_AUDITING__WITH_DATASPLIT() + public TestScenario WITH_AUDITING__FAIL_ON_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() .digestField(digestField) - .deduplicationStrategy(AllowDuplicates.builder().build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(bizDateField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) - .dataSplitField(Optional.of(dataSplitField)) + .filterExistingRecords(false) .build(); - return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario FAIL_ON_DUPLICATES_NO_AUDITING() + // failure case + public TestScenario NO_AUDITING__FILTER_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() .digestField(digestField) - .deduplicationStrategy(FailOnDuplicates.builder().build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(bizDateField) + .dataSplitFieldName(dataSplitField) + 
.mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) .auditing(NoAuditing.builder().build()) + .filterExistingRecords(false) .build(); return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario FAIL_ON_DUPLICATES_WITH_AUDITING() + public TestScenario WITH_AUDITING__FILTER_DUPS__ALL_VERSION__WITH_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() .digestField(digestField) - .deduplicationStrategy(FailOnDuplicates.builder().build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(bizDateField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .filterExistingRecords(true) .build(); - return new TestScenario(mainTableWithBaseSchemaHavingAuditFieldNotPk, stagingTableWithBaseSchema, ingestMode); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario FILTER_DUPLICATES_NO_AUDITING() + public TestScenario WITH_AUDITING__FAIL_ON_DUPS__MAX_VERSION__WITH_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestField) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaAndDigest, ingestMode); + .digestField(digestField) + .deduplicationStrategy(FailOnDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(bizDateField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .filterExistingRecords(true) + .build(); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario FILTER_DUPLICATES_NO_AUDITING_WITH_DATA_SPLIT() + public TestScenario WITH_AUDITING__FILTER_DUPS__MAX_VERSION__NO_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(NoAuditing.builder().build()) - .build(); - return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode); + .digestField(digestField) + .deduplicationStrategy(FilterDuplicates.builder().build()) + .versioningStrategy(MaxVersionStrategy.builder() + .versioningField(bizDateField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(true) + .build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .filterExistingRecords(false) + .build(); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario FILTER_DUPLICATES_WITH_AUDITING() + // failure case + public TestScenario NO_AUDITING__NO_DEDUP__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestField) - 
.deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) - .build(); - return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); + .digestField(digestField) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(NoAuditing.builder().build()) + .filterExistingRecords(true) + .build(); + return new TestScenario(mainTableWithNoPrimaryKeys, stagingTableWithNoPrimaryKeys, ingestMode); } - public TestScenario FILTER_DUPLICATES_WITH_AUDITING_WITH_DATA_SPLIT() + public TestScenario WITH_AUDITING__ALLOW_DUPLICATES__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS() { AppendOnly ingestMode = AppendOnly.builder() - .digestField(digestField) - .deduplicationStrategy(FilterDuplicates.builder().build()) - .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) - .dataSplitField(Optional.of(dataSplitField)) - .build(); - return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode); + .digestField(digestField) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .versioningStrategy(NoVersioningStrategy.builder().build()) + .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .filterExistingRecords(false) + .build(); + return new TestScenario(mainTableWithNoPrimaryKeysHavingAuditField, stagingTableWithNoPrimaryKeys, ingestMode); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromAndThroughScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromAndThroughScenarios.java index 020a4b3d524..41bba4b65b6 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromAndThroughScenarios.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromAndThroughScenarios.java @@ -16,19 +16,19 @@ import org.finos.legend.engine.persistence.components.BaseTest; import org.finos.legend.engine.persistence.components.ingestmode.BitemporalDelta; -import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTime; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.ValidDateTime; import 
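Taken together, the AppendOnlyScenarios rewrite above renames every scenario to spell out its four axes (auditing, deduplication, versioning, filterExistingRecords) and keeps two deliberately invalid combinations as "failure case" fixtures. Where the rejection fires (builder validation versus plan generation) and the exception type are not visible in this patch, so the pinning test below is an assumption-laden sketch, not the project's actual test:

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class InvalidCombinationSketch
{
    @Test
    void noAuditingWithAllVersionIsRejected()
    {
        // Assumed: constructing the failure-case scenario (or generating its plan)
        // throws; the exception type here is deliberately broad.
        Assertions.assertThrows(Exception.class, () ->
            new AppendOnlyScenarios().NO_AUDITING__FILTER_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS());
    }
}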
org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromAndThruDateTime; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; import java.util.Arrays; -import java.util.Optional; public class BitemporalDeltaSourceSpecifiesFromAndThroughScenarios extends BaseTest { @@ -71,7 +71,12 @@ public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS() { BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchIdAndDateTime.builder() .batchIdInName(batchIdInField) .batchIdOutName(batchIdOutField) @@ -87,7 +92,7 @@ public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS() .build()) .build()) .build(); - return new TestScenario(mainTableWithBitemporalSchemaWithBatchIdAndTime, stagingTableWithBitemporalSchemaWithDataSplit, ingestMode); + return new TestScenario(mainTableWithBitemporalSchemaWithVersionBatchIdAndTime, stagingTableWithBitemporalSchemaWithVersionWithDataSplit, ingestMode); } public TestScenario BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS() @@ -118,7 +123,12 @@ public TestScenario DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS() { BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(dataSplitField) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(TransactionDateTime.builder() .dateTimeInName(batchTimeInField) .dateTimeOutName(batchTimeOutField) @@ -136,7 +146,7 @@ public TestScenario DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS() .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) .build()) .build(); - return new TestScenario(mainTableWithBitemporalSchemaWithDateTime, stagingTableWithBitemporalSchemaWithDeleteIndicatorAndDataSplit, ingestMode); + return new TestScenario(mainTableWithBitemporalSchemaWithVersionBatchDateTime, stagingTableWithBitemporalSchemaWithDeleteIndicatorVersionAndDataSplit, ingestMode); } public TestScenario BATCH_ID_BASED__VALIDITY_FIELDS_SAME_NAME() diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromOnlyScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromOnlyScenarios.java index 7cf1d886f6c..66a8808867b 100644 --- 
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromOnlyScenarios.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/BitemporalDeltaSourceSpecifiesFromOnlyScenarios.java @@ -17,19 +17,19 @@ import org.finos.legend.engine.persistence.components.BaseTest; import org.finos.legend.engine.persistence.components.common.Datasets; import org.finos.legend.engine.persistence.components.ingestmode.BitemporalDelta; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime; import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTime; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.ValidDateTime; import org.finos.legend.engine.persistence.components.ingestmode.validitymilestoning.derivation.SourceSpecifiesFromDateTime; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; import org.finos.legend.engine.persistence.components.logicalplan.datasets.SchemaDefinition; import java.util.Arrays; -import java.util.Optional; public class BitemporalDeltaSourceSpecifiesFromOnlyScenarios extends BaseTest { @@ -86,7 +86,12 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS() { BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInField) .batchIdOutName(batchIdOutField) @@ -102,9 +107,9 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS() TestScenario testScenario = new TestScenario(ingestMode); testScenario.setDatasets(Datasets.builder() - .mainDataset(mainTableWithBitemporalFromOnlySchema) - .stagingDataset(stagingTableWithBitemporalFromOnlySchemaWithDataSplit) - .tempDataset(tempTableWithBitemporalFromOnlySchema) + .mainDataset(mainTableWithBitemporalFromOnlyWithVersionSchema) + .stagingDataset(stagingTableWithBitemporalFromOnlySchemaWithVersionWithDataSplit) + .tempDataset(tempTableWithBitemporalFromOnlyWithVersionSchema) .build()); return testScenario; } @@ -143,7 +148,12 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS__USING_DEFAUL { BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder() + 
.versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInField) .batchIdOutName(batchIdOutField) @@ -160,7 +170,7 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS__USING_DEFAUL .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) .build()) .build(); - return new TestScenario(mainTableWithBitemporalFromOnlySchema, stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithDataSplit, ingestMode); + return new TestScenario(mainTableWithBitemporalFromOnlyWithVersionSchema, stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithVersionWithDataSplit, ingestMode); } public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__FILTER_DUPLICATES() @@ -178,7 +188,7 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__FILTER_DUPLICATE .sourceDateTimeFromField(validityFromReferenceField) .build()) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); TestScenario testScenario = new TestScenario(ingestMode); testScenario.setDatasets(Datasets.builder() @@ -194,7 +204,12 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS__FILTER_DUPLICA { BitemporalDelta ingestMode = BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInField) .batchIdOutName(batchIdOutField) @@ -206,7 +221,7 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS__FILTER_DUPLICA .sourceDateTimeFromField(validityFromReferenceField) .build()) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); TestScenario testScenario = new TestScenario(ingestMode); @@ -214,13 +229,13 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS__FILTER_DUPLICA .database(stagingWithoutDuplicatesDbName) .name(stagingTableWithoutDuplicatesName) .alias(stagingTableWithoutDuplicatesAlias) - .schema(bitemporalFromOnlyStagingTableSchemaWithDataSplit) + .schema(bitemporalFromOnlyStagingTableSchemaWithVersionWithDataSplit) .build(); testScenario.setDatasets(Datasets.builder() - .mainDataset(mainTableWithBitemporalFromOnlySchema) - .stagingDataset(stagingTableWithBitemporalFromOnlySchemaWithDataSplit) - .tempDataset(tempTableWithBitemporalFromOnlySchema) + .mainDataset(mainTableWithBitemporalFromOnlyWithVersionSchema) + .stagingDataset(stagingTableWithBitemporalFromOnlySchemaWithVersionWithDataSplit) + .tempDataset(tempTableWithBitemporalFromOnlyWithVersionSchema) .stagingDatasetWithoutDuplicates(stagingTableWithoutDuplicates) .build()); return testScenario; @@ -245,7 +260,7 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS__FILTER_DUPLICA .deleteField(deleteIndicatorField) .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); TestScenario testScenario = new TestScenario(ingestMode); @@ -269,7 +284,12 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS__FILTER_DUPLI { BitemporalDelta ingestMode = 
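The bitemporal scenario rewrites above all follow one recipe: the bare dataSplitField(...) option is replaced by an explicit AllVersionsStrategy that names the same split column but sets performStageVersioning(false) (the stage is taken as already versioned), and FilterDuplicates as a deduplication strategy is replaced by the narrower filterExistingRecords(true) flag. Condensed, the swap looks like this (trimmed — a real build also needs the transactionMilestoning and validityMilestoning calls shown in the scenarios above):

// Before (removed):
//   .dataSplitField(Optional.of(dataSplitField))
//   .deduplicationStrategy(FilterDuplicates.builder().build())
// After (added): the split column rides on an explicit versioning strategy, and
// filtering against already-ingested records becomes its own flag.
BitemporalDelta ingestMode = BitemporalDelta.builder()
    .digestField(digestField)
    .versioningStrategy(AllVersionsStrategy.builder()
        .versioningField(versionField)
        .dataSplitFieldName(dataSplitField)
        .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)
        .performStageVersioning(false)
        .build())
    .filterExistingRecords(true)
    // transaction/validity milestoning omitted here for brevity
    .build();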
BitemporalDelta.builder() .digestField(digestField) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder() + .versioningField(versionField) + .dataSplitFieldName(dataSplitField) + .mergeDataVersionResolver(DigestBasedResolver.INSTANCE) + .performStageVersioning(false) + .build()) .transactionMilestoning(BatchId.builder() .batchIdInName(batchIdInField) .batchIdOutName(batchIdOutField) @@ -285,10 +305,10 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS__FILTER_DUPLI .deleteField(deleteIndicatorField) .addAllDeleteValues(Arrays.asList(deleteIndicatorValues)) .build()) - .deduplicationStrategy(FilterDuplicates.builder().build()) + .filterExistingRecords(true) .build(); - return new TestScenario(mainTableWithBitemporalFromOnlySchema, stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithDataSplit, ingestMode); + return new TestScenario(mainTableWithBitemporalFromOnlyWithVersionSchema, stagingTableWithBitemporalFromOnlySchemaWithDeleteIndWithVersionWithDataSplit, ingestMode); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NonTemporalDeltaScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NonTemporalDeltaScenarios.java index 4029791bbbb..5fb0be6ba7a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NonTemporalDeltaScenarios.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NonTemporalDeltaScenarios.java @@ -20,11 +20,15 @@ import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; -import java.util.Optional; - -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningComparator; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.*; import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver; public class NonTemporalDeltaScenarios extends BaseTest { @@ -33,12 +37,12 @@ public class NonTemporalDeltaScenarios extends BaseTest Test Scenarios for Non-temporal Delta Variables: 1) Auditing: No Auditing, With Auditing - 2) 
DataSplit: Enabled, Disabled - 3) MergeStrategy: No MergeStrategy, With Delete Indicator - 4) DerivedDataset with different InterBatchDedupStrategy + 2) MergeStrategy: No MergeStrategy, With Delete Indicator + 3) Deduplication: Allow duplicates, Filter duplicates, Fail on duplicates + 4) Versioning: No Versioning, Max Versioning, All Versioning */ - public TestScenario NO_AUDTING__NO_DATASPLIT() + public TestScenario NO_AUDTING__NO_DEDUP__NO_VERSIONING() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) @@ -47,7 +51,7 @@ public TestScenario NO_AUDTING__NO_DATASPLIT() return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario NO_AUDTING__NO_DATASPLIT__WITH_DELETE_INDICATOR() + public TestScenario NO_AUDTING__WITH_DELETE_INDICATOR__NO_DEDUP__NO_VERSIONING() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) @@ -61,33 +65,47 @@ public TestScenario NO_AUDTING__NO_DATASPLIT__WITH_DELETE_INDICATOR() return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaAndDigestAndDeleteIndicator, ingestMode); } - public TestScenario NO_AUDTING__WITH_DATASPLIT() + public TestScenario NO_AUDTING__NO_DEDUP__ALL_VERSION() + { + NontemporalDelta ingestMode = NontemporalDelta.builder() + .digestField(digestField) + .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").dataSplitFieldName(dataSplitField).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) + .build(); + return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaAndDigest, ingestMode); + } + + public TestScenario NO_AUDTING__NO_DEDUP__ALL_VERSION_WITHOUT_PERFORM() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(NoAuditing.builder().build()) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").dataSplitFieldName(dataSplitField).performStageVersioning(false).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) .build(); return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode); } - public TestScenario WITH_AUDTING__NO_DATASPLIT() + public TestScenario WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } - public TestScenario WITH_AUDTING__WITH_DATASPLIT() + public TestScenario WITH_AUDTING__FAIL_ON_DUPS__ALL_VERSION() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build()) - .dataSplitField(Optional.of(dataSplitField)) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).build()) + .deduplicationStrategy(FailOnDuplicates.builder().build()) .build(); - return new 
TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode); + return new TestScenario(mainTableWithBaseSchemaHavingDigestAndAuditField, stagingTableWithBaseSchemaAndDigest, ingestMode); } public TestScenario NO_VERSIONING__WITH_STAGING_FILTER() @@ -99,58 +117,61 @@ public TestScenario NO_VERSIONING__WITH_STAGING_FILTER() return new TestScenario(mainTableWithBaseSchemaAndDigest, stagingTableWithFilters, ingestMode); } - public TestScenario MAX_VERSIONING_WITH_GREATER_THAN__DEDUP__WITH_STAGING_FILTER() + public TestScenario FILTER_DUPS__MAX_VERSION__WITH_STAGING_FILTER() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(NoAuditing.builder().build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(version.name()) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.builder().versionComparator(VersionComparator.GREATER_THAN).build()) + .performStageVersioning(true) .build()) + .deduplicationStrategy(FilterDuplicates.builder().build()) .build(); return new TestScenario(mainTableWithVersion, stagingTableWithVersionAndSnapshotId, ingestMode); } - public TestScenario MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITH_STAGING_FILTER() + public TestScenario NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM__WITH_STAGING_FILTER() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(NoAuditing.builder().build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(version.name()) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) .build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) .build(); return new TestScenario(mainTableWithVersion, stagingTableWithVersionAndSnapshotId, ingestMode); } - public TestScenario MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITHOUT_STAGING_FILTER() + public TestScenario NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(NoAuditing.builder().build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(version.name()) - .versioningComparator(VersioningComparator.GREATER_THAN) - .performDeduplication(false) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)) + .performStageVersioning(false) .build()) .build(); return new TestScenario(mainTableWithVersion, stagingTableWithVersion, ingestMode); } - public TestScenario MAX_VERSIONING_WITH_GREATER_THAN_EQUAL__DEDUP__WITHOUT_STAGING_FILTER() + public TestScenario NO_DEDUP__MAX_VERSION() { NontemporalDelta ingestMode = NontemporalDelta.builder() .digestField(digestField) .auditing(NoAuditing.builder().build()) .versioningStrategy(MaxVersionStrategy.builder() .versioningField(version.name()) - .versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO) - .performDeduplication(true) + .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)) + .performStageVersioning(true) .build()) + .deduplicationStrategy(AllowDuplicates.builder().build()) .build(); return new TestScenario(mainTableWithVersion, stagingTableWithVersion, ingestMode); } diff --git 
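The same migration pattern repeats in every scenario class this PR touches: the old dataSplitField(Optional.of(...)) hint is removed, and versioning and deduplication become two explicit, independent axes. As a rough before/after sketch, using only the BaseTest fields and builder calls visible in this diff (chain order is immaterial):

    // Before: data splits were a single optional field on the ingest mode.
    NontemporalDelta before = NontemporalDelta.builder()
        .digestField(digestField)
        .auditing(NoAuditing.builder().build())
        .dataSplitField(Optional.of(dataSplitField))   // removed API
        .build();

    // After: versioning and deduplication are separate, explicit strategies.
    NontemporalDelta after = NontemporalDelta.builder()
        .digestField(digestField)
        .auditing(NoAuditing.builder().build())
        .versioningStrategy(AllVersionsStrategy.builder()
            .versioningField("biz_date")                             // column that orders versions
            .dataSplitFieldName(dataSplitField)                      // where the split index lands
            .mergeDataVersionResolver(DigestBasedResolver.INSTANCE)  // how competing versions resolve
            .performStageVersioning(false)                           // keep splits, skip re-versioning
            .build())
        .deduplicationStrategy(AllowDuplicates.builder().build())
        .build();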
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NontemporalSnapshotTestScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NontemporalSnapshotTestScenarios.java
index 04a9bcb372a..fb5a61bff51 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NontemporalSnapshotTestScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/NontemporalSnapshotTestScenarios.java
@@ -18,6 +18,10 @@
 import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot;
 import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing;
 import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy;
 
 public class NontemporalSnapshotTestScenarios extends BaseTest
 {
@@ -26,41 +30,41 @@ public class NontemporalSnapshotTestScenarios extends BaseTest
     Test Scenarios of Non-temporal Snapshot
     Variables:
     1) Auditing: No Auditing, With Auditing
-    2) DataSplit: Enabled, Disabled
+    2) Deduplication: Allow duplicates, Filter duplicates, Fail on duplicates
+    3) Versioning: No Versioning, Max Versioning
+
+    Valid Scenarios:
+    1. No Auditing, Allow Dups, No Versioning
+    2. With Auditing, Filter Dups, No Versioning
+    3. With Auditing, Fail on duplicates, Max version
+
+    Invalid Scenario:
+    1. All Versioning
     */
 
-    public TestScenario NO_AUDTING__NO_DATASPLIT()
+    public TestScenario NO_AUDTING__NO_DEDUP__NO_VERSIONING()
     {
         NontemporalSnapshot ingestMode = NontemporalSnapshot.builder().auditing(NoAuditing.builder().build()).build();
         return new TestScenario(mainTableWithBaseSchema, stagingTableWithBaseSchema, ingestMode);
     }
 
-    public TestScenario NO_AUDTING__WITH_DATASPLIT()
-    {
-        NontemporalSnapshot ingestMode = NontemporalSnapshot.builder()
-            .auditing(NoAuditing.builder().build())
-            .dataSplitField(dataSplitField)
-            .build();
-        return new TestScenario(mainTableWithBaseSchema, stagingTableWithBaseSchemaHavingDataSplit, ingestMode);
-    }
-
-    public TestScenario WITH_AUDTING__NO_DATASPLIT()
+    public TestScenario WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING()
     {
         NontemporalSnapshot ingestMode = NontemporalSnapshot.builder()
             .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build())
+            .versioningStrategy(NoVersioningStrategy.builder().build())
+            .deduplicationStrategy(FilterDuplicates.builder().build())
             .build();
         return new TestScenario(mainTableWithBaseSchemaHavingAuditField, stagingTableWithBaseSchema, ingestMode);
     }
 
-    public TestScenario WITH_AUDTING__WITH_DATASPLIT()
+    public TestScenario WITH_AUDTING__FAIL_ON_DUP__MAX_VERSION()
     {
         NontemporalSnapshot ingestMode = NontemporalSnapshot.builder()
             .auditing(DateTimeAuditing.builder().dateTimeField(batchUpdateTimeField).build())
-            .dataSplitField("data_split")
+            .versioningStrategy(MaxVersionStrategy.builder().versioningField(bizDateField).build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
             .build();
-        return new TestScenario(mainTableWithBaseSchemaHavingAuditField, stagingTableWithBaseSchemaHavingDataSplit, ingestMode);
+        return new TestScenario(mainTableWithBaseSchemaHavingAuditField, stagingTableWithBaseSchema, ingestMode);
     }
-
-
-
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/TestScenario.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/TestScenario.java
index 1891b57d339..b8dbd83de98 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/TestScenario.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/TestScenario.java
@@ -65,6 +65,6 @@ public void setDatasets(Datasets datasets)
 
     public void setMainTable(Dataset dataset)
     {
-        this.mainTable = mainTable;
+        this.mainTable = dataset;
     }
 }
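The TestScenario change just above is worth calling out: the old setter was a self-assignment, so the argument was silently discarded and the field kept its previous value (typically null). Self-assignments compile without complaint unless a static-analysis check flags them:

    // Before (bug): assigns the field to itself; the dataset parameter is never stored.
    public void setMainTable(Dataset dataset)
    {
        this.mainTable = mainTable;
    }

    // After (fix): stores the parameter.
    public void setMainTable(Dataset dataset)
    {
        this.mainTable = dataset;
    }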
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdBasedScenarios.java
index e21e0869862..49b3f90aa94 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdBasedScenarios.java
@@ -17,34 +17,32 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.common.OptimizationFilter;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta;
-import org.finos.legend.engine.persistence.components.ingestmode.deduplication.MaxVersionStrategy;
-import org.finos.legend.engine.persistence.components.ingestmode.deduplication.NoVersioningStrategy;
-import org.finos.legend.engine.persistence.components.ingestmode.deduplication.VersioningComparator;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.*;
 import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.NoVersioningStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.VersionColumnBasedResolver;
 
 import java.util.Arrays;
-import java.util.Optional;
 
 public class UnitemporalDeltaBatchIdBasedScenarios extends BaseTest
 {
-
     /*
     Test Scenarios for Non-temporal Delta
     Variables:
     1) transactionMilestoning = BatchId
     2) deleteIndicator : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-
-    Valid Combinations:
-    1) No Delete Ind, No Data Splits
-    2) No Delete Ind, With Data Splits
-    3) With Delete Ind, No Data Splits
-    4) With Delete Ind, With Data Splits
-    5) No Delete Ind, No Data Splits, With Filter Predicates
+    3) Deduplication: Allow duplicates, Filter duplicates, Fail on duplicates
+    4) Versioning: No Versioning, Max Versioning, All Versioning
     */
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS()
+
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -56,21 +54,20 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__ALL_VERSION_WITHOUT_PERFORM()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").dataSplitFieldName(dataSplitField).mergeDataVersionResolver(DigestBasedResolver.INSTANCE).performStageVersioning(false).build())
             .build();
-
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITH_DEL_IND__FILTER_DUPS__NO_VERSIONING()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -82,15 +79,15 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
                 .deleteField(deleteIndicatorField)
                 .addAllDeleteValues(Arrays.asList(deleteIndicatorValues))
                 .build())
+            .deduplicationStrategy(FilterDuplicates.builder().build())
             .build();
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithDeleteIndicator, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITH_DEL_IND__NO_DEDUP__ALL_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(BatchId.builder()
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
@@ -99,12 +96,13 @@ public TestScenario BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
                 .deleteField(deleteIndicatorField)
                 .addAllDeleteValues(Arrays.asList(deleteIndicatorValues))
                 .build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).build())
             .build();
-        return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithDeleteIndicatorWithDataSplit, ingestMode);
+        return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithDeleteIndicator, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTERS()
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTERS()
     {
         OptimizationFilter filter = OptimizationFilter.of("id", "{ID_LOWER_BOUND}", "{ID_UPPER_BOUND}");
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
@@ -118,7 +116,7 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATIO
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTERS__INCLUDES_NULL_VALUES()
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTERS__INCLUDES_NULL_VALUES()
     {
         OptimizationFilter filter = OptimizationFilter.of("id", "{ID_LOWER_BOUND}", "{ID_UPPER_BOUND}").withIncludesNullValues(true);
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
@@ -132,7 +130,7 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATIO
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_MISSING_OPTIMIZATION_FILTER()
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_MISSING_OPTIMIZATION_FILTER()
     {
         OptimizationFilter filter = OptimizationFilter.of("unknown_column", "{ID_LOWER_BOUND}", "{ID_UPPER_BOUND}");
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
@@ -146,7 +144,7 @@ public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_MISSING_OPT
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTER_UNSUPPORTED_DATATYPE()
+    public TestScenario BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTER_UNSUPPORTED_DATATYPE()
     {
         OptimizationFilter filter = OptimizationFilter.of("name", "{NAME_LOWER_BOUND}", "{NAME_UPPER_BOUND}");
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
@@ -173,7 +171,7 @@ public TestScenario BATCH_ID_BASED__NO_VERSIONING__WITH_STAGING_FILTER()
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithFilter, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__DEDUP__WITH_STAGING_FILTER()
+    public TestScenario BATCH_ID_BASED__FILTER_DUPS__MAX_VERSION__WITH_STAGING_FILTER()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -181,12 +179,13 @@ public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__DEDUP__WIT
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .build())
-            .versioningStrategy(MaxVersionStrategy.builder().performDeduplication(true).versioningField(version.name()).versioningComparator(VersioningComparator.GREATER_THAN).build())
+            .versioningStrategy(MaxVersionStrategy.builder().performStageVersioning(true).versioningField(version.name()).mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)).build())
+            .deduplicationStrategy(FilterDuplicates.builder().build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndVersionBasedSchema, stagingTableWithFilterAndVersion, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITH_STAGING_FILTER()
+    public TestScenario BATCH_ID_BASED__NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM__WITH_STAGING_FILTER()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -194,12 +193,12 @@ public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .build())
-            .versioningStrategy(MaxVersionStrategy.builder().performDeduplication(false).versioningField(version.name()).versioningComparator(VersioningComparator.GREATER_THAN).build())
+            .versioningStrategy(MaxVersionStrategy.builder().performStageVersioning(false).versioningField(version.name()).mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)).build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndVersionBasedSchema, stagingTableWithFilterAndVersion, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITHOUT_STAGING_FILTER()
+    public TestScenario BATCH_ID_BASED__FAIL_ON_DUPS__MAX_VERSIONING_WITHOUT_PERFORM__NO_STAGING_FILTER()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -207,12 +206,13 @@ public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .build())
-            .versioningStrategy(MaxVersionStrategy.builder().performDeduplication(false).versioningField(version.name()).versioningComparator(VersioningComparator.GREATER_THAN).build())
+            .versioningStrategy(MaxVersionStrategy.builder().performStageVersioning(false).versioningField(version.name()).mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN)).build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndVersionBasedSchema, stagingTableWithVersion, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN_EQUAL__DEDUP__WITHOUT_STAGING_FILTER()
+    public TestScenario BATCH_ID_BASED__NO_DEDUP__MAX_VERSIONING__NO_STAGING_FILTER()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -220,7 +220,7 @@ public TestScenario BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN_EQUAL__DEDU
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .build())
-            .versioningStrategy(MaxVersionStrategy.builder().performDeduplication(true).versioningField(version.name()).versioningComparator(VersioningComparator.GREATER_THAN_EQUAL_TO).build())
+            .versioningStrategy(MaxVersionStrategy.builder().performStageVersioning(true).versioningField(version.name()).mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN_EQUAL_TO)).build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndVersionBasedSchema, stagingTableWithVersion, ingestMode);
     }
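The MaxVersionStrategy changes in this file are mechanical renames: versioningComparator(VersioningComparator.X) becomes mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.X)), performDeduplication becomes performStageVersioning, and duplicate handling moves to an explicit deduplicationStrategy on the ingest mode. A minimal sketch of the mapping, using only identifiers that appear in this diff:

    // Old API (removed):
    //     .versioningStrategy(MaxVersionStrategy.builder()
    //         .versioningField(version.name())
    //         .versioningComparator(VersioningComparator.GREATER_THAN)
    //         .performDeduplication(true)
    //         .build())

    // New API (added):
    UnitemporalDelta ingestMode = UnitemporalDelta.builder()
        .digestField(digestField)
        .transactionMilestoning(BatchId.builder()
            .batchIdInName(batchIdInField)
            .batchIdOutName(batchIdOutField)
            .build())
        .versioningStrategy(MaxVersionStrategy.builder()
            .versioningField(version.name())
            .mergeDataVersionResolver(VersionColumnBasedResolver.of(VersionComparator.GREATER_THAN))
            .performStageVersioning(true)
            .build())
        .deduplicationStrategy(FilterDuplicates.builder().build())
        .build();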
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdDateTimeBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdDateTimeBasedScenarios.java
index 5a1f593df3a..283aa64a426 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdDateTimeBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaBatchIdDateTimeBasedScenarios.java
@@ -16,11 +16,14 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
 
 import java.util.Arrays;
-import java.util.Optional;
 
 public class UnitemporalDeltaBatchIdDateTimeBasedScenarios extends BaseTest
 {
@@ -30,16 +33,11 @@ public class UnitemporalDeltaBatchIdDateTimeBasedScenarios extends BaseTest
     Variables:
     1) transactionMilestoning = BatchIdAndDateTime
     2) deleteIndicator : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-
-    Valid Combinations:
-    1) No Delete Ind, No Data Splits
-    2) No Delete Ind, With Data Splits
-    3) With Delete Ind, No Data Splits
-    4) With Delete Ind, With Data Splits
+    3) Deduplication: Allow duplicates, Filter duplicates, Fail on duplicates
+    4) Versioning: No Versioning, Max Versioning, All Versioning
     */
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -53,23 +51,24 @@ public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__NO_DEL_IND__FILTER_DUPS__ALL_VERSION_WITHOUT_PERFORM()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(BatchIdAndDateTime.builder()
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
                 .dateTimeInName(batchTimeInField)
                 .dateTimeOutName(batchTimeOutField)
                 .build())
+            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).performStageVersioning(false).build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND_MULTI_VALUES__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND_MULTI_VALUES__NO_DEDUP_NO_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -88,7 +87,7 @@ public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND_MULTI_VALUES__NO_DATA_
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithDeleteIndicator, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -107,11 +106,10 @@ public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBooleanDeleteIndicator, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__FAIL_ON_DUP__ALL_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(BatchIdAndDateTime.builder()
                 .batchIdInName(batchIdInField)
                 .batchIdOutName(batchIdOutField)
@@ -122,9 +120,11 @@ public TestScenario BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
                 .deleteField(deleteIndicatorField)
                 .addAllDeleteValues(Arrays.asList(deleteIndicatorValues))
                 .build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).performStageVersioning(true).build())
             .build();
-        return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithDeleteIndicatorWithDataSplit, ingestMode);
+        return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithDeleteIndicator, ingestMode);
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaDateTimeBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaDateTimeBasedScenarios.java
index af8e8fb7b33..35f2028680c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaDateTimeBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalDeltaDateTimeBasedScenarios.java
@@ -16,11 +16,14 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalDelta;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.merge.DeleteIndicatorMergeStrategy;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTime;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
 
 import java.util.Arrays;
-import java.util.Optional;
 
 public class UnitemporalDeltaDateTimeBasedScenarios extends BaseTest
 {
@@ -30,16 +33,11 @@ public class UnitemporalDeltaDateTimeBasedScenarios extends BaseTest
     Variables:
     1) transactionMilestoning = DateTime
     2) deleteIndicator : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-
-    Valid Combinations:
-    1) No Delete Ind, No Data Splits
-    2) No Delete Ind, With Data Splits
-    3) With Delete Ind, No Data Splits
-    4) With Delete Ind, With Data Splits
+    3) Deduplication: Allow duplicates, Filter duplicates, Fail on duplicates
+    4) Versioning: No Versioning, Max Versioning, All Versioning
     */
 
-    public TestScenario DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -51,21 +49,22 @@ public TestScenario DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithDateTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__NO_DEL_IND__FAIL_ON_DUPS__ALL_VERSION_WITHOUT_PERFORM()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(TransactionDateTime.builder()
                 .dateTimeInName(batchTimeInField)
                 .dateTimeOutName(batchTimeOutField)
                 .build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).performStageVersioning(false).build())
             .build();
         return new TestScenario(mainTableWithDateTime, stagingTableWithBaseSchemaHavingDigestAndDataSplit, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
@@ -81,11 +80,10 @@ public TestScenario DATETIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithDateTime, stagingTableWithDeleteIndicator, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITH_DEL_IND__FILTER_DUPS__ALL_VERSION()
     {
         UnitemporalDelta ingestMode = UnitemporalDelta.builder()
             .digestField(digestField)
-            .dataSplitField(Optional.of(dataSplitField))
             .transactionMilestoning(TransactionDateTime.builder()
                 .dateTimeInName(batchTimeInField)
                 .dateTimeOutName(batchTimeOutField)
@@ -94,8 +92,10 @@ public TestScenario DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS()
                 .deleteField(deleteIndicatorField)
                 .addAllDeleteValues(Arrays.asList(deleteIndicatorValues))
                 .build())
+            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .versioningStrategy(AllVersionsStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).dataSplitFieldName(dataSplitField).performStageVersioning(true).build())
             .build();
-        return new TestScenario(mainTableWithDateTime, stagingTableWithDeleteIndicatorWithDataSplit, ingestMode);
+        return new TestScenario(mainTableWithDateTime, stagingTableWithDeleteIndicator, ingestMode);
     }
 }
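Both UnitemporalDelta scenario files above now vary the same two new axes instead of a data-split flag. Assuming the three concrete classes in the ingestmode.deduplication package share a common DeduplicationStrategy interface (an assumption; this diff only shows the concrete builders), the test matrix reads roughly as:

    // Deduplication axis (behavior as the names suggest):
    DeduplicationStrategy allowDups  = AllowDuplicates.builder().build();   // pass staging rows through as-is
    DeduplicationStrategy filterDups = FilterDuplicates.builder().build();  // collapse exact duplicate rows
    DeduplicationStrategy failOnDups = FailOnDuplicates.builder().build();  // error out when duplicates exist

    // Versioning axis: NoVersioningStrategy, MaxVersionStrategy (keep only the latest
    // version per key), or AllVersionsStrategy (process every version, via data splits).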
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdBasedScenarios.java
index 5162e437250..7f0991b20d4 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdBasedScenarios.java
@@ -16,6 +16,7 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshot;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.emptyhandling.NoOp;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId;
 
@@ -29,18 +30,18 @@ public class UnitemporalSnapshotBatchIdBasedScenarios extends BaseTest
     Variables:
     1) transactionMilestoning = BatchId
     2) partition : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-    4) partitionValuesByField: Enabled, Disabled
+    3) partitionValuesByField: Enabled, Disabled
+    4) Versioning: NoVersioning, MaxVersioning
+    5) Deduplication: AllowDups, FailOnDups, FilterDups
 
     Valid Combinations:
-    1) Without Partition, No Data Splits
-    2) Without Partition, With Data Splits -> TBD
-    3) With Partition, No Data Splits
-    4) With Partition, With Data Splits -> TBD
-    5) Without Partition, No Data Splits, Partition Filter
+    1) Without Partition, No Dedup No Versioning
+    2) Without Partition, FailOnDups No Versioning
+    3) With Partition, No Dedup No Versioning
+    4) With Partition Filter, No Dedup No Versioning
     */
 
-    public TestScenario BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -53,12 +54,21 @@ public TestScenario BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__WITHOUT_PARTITIONS__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUPS__NO_VERSION()
     {
-        return null;
+        UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
+            .digestField(digestField)
+            .transactionMilestoning(BatchId.builder()
+                .batchIdInName(batchIdInField)
+                .batchIdOutName(batchIdOutField)
+                .build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
+            .emptyDatasetHandling(NoOp.builder().build())
+            .build();
+        return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -71,13 +81,7 @@ public TestScenario BATCH_ID_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithBatchIdBasedSchema, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_BASED__WITH_PARTITIONS__WITH_DATA_SPLITS()
-    {
-        return null;
-    }
-
-
-    public TestScenario BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdDateTimeBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdDateTimeBasedScenarios.java
index 57419aa4c90..dd7359eb530 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdDateTimeBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotBatchIdDateTimeBasedScenarios.java
@@ -16,8 +16,12 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshot;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.emptyhandling.DeleteTargetData;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchIdAndDateTime;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy;
 
 import java.util.Arrays;
 
@@ -29,18 +33,19 @@ public class UnitemporalSnapshotBatchIdDateTimeBasedScenarios extends BaseTest
     Variables:
     1) transactionMilestoning = BatchIdAndDateTimeBased
     2) partition : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-    4) partitionValuesByField: Enabled, Disabled
+    3) partitionValuesByField: Enabled, Disabled
+    4) Versioning: NoVersioning, MaxVersioning
+    5) Deduplication: AllowDups, FailOnDups, FilterDups
 
     Valid Combinations:
-    1) Without Partition, No Data Splits
-    2) Without Partition, With Data Splits -> TBD
-    3) With Partition, No Data Splits
-    4) With Partition, With Data Splits -> TBD
-    5) Without Partition, No Data Splits, Partition Filter
+    1) Without Partition, No Dedup No Versioning
+    2) Without Partition, No Dedup MaxVersioning
+    3) Without Partition, Filter Dups MaxVersioning
+    4) With Partition, No Dedup No Versioning
+    5) With Partition Filter, No Dedup No Versioning
     */
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -55,12 +60,24 @@ public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS(
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__MAX_VERSION()
     {
-        return null;
+        UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
+            .digestField(digestField)
+            .transactionMilestoning(BatchIdAndDateTime.builder()
+                .batchIdInName(batchIdInField)
+                .batchIdOutName(batchIdOutField)
+                .dateTimeInName(batchTimeInField)
+                .dateTimeOutName(batchTimeOutField)
+                .build())
+            .deduplicationStrategy(AllowDuplicates.builder().build())
+            .versioningStrategy(MaxVersionStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build())
+            .emptyDatasetHandling(DeleteTargetData.builder().build())
+            .build();
+        return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__FILTER_DUPS__MAX_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -70,18 +87,29 @@ public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
                 .dateTimeInName(batchTimeInField)
                 .dateTimeOutName(batchTimeOutField)
                 .build())
-            .addAllPartitionFields(Arrays.asList(partitionKeys))
+            .deduplicationStrategy(FilterDuplicates.builder().build())
+            .versioningStrategy(MaxVersionStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build())
+            .emptyDatasetHandling(DeleteTargetData.builder().build())
             .build();
         return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__WITH_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
-        return null;
+        UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
+            .digestField(digestField)
+            .transactionMilestoning(BatchIdAndDateTime.builder()
+                .batchIdInName(batchIdInField)
+                .batchIdOutName(batchIdOutField)
+                .dateTimeInName(batchTimeInField)
+                .dateTimeOutName(batchTimeOutField)
+                .build())
+            .addAllPartitionFields(Arrays.asList(partitionKeys))
+            .build();
+        return new TestScenario(mainTableWithBatchIdAndTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-
-    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS()
+    public TestScenario BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotDateTimeBasedScenarios.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotDateTimeBasedScenarios.java
index 08d653bb416..87b6b09ed74 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotDateTimeBasedScenarios.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/scenarios/UnitemporalSnapshotDateTimeBasedScenarios.java
@@ -16,7 +16,10 @@
 import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshot;
+import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.TransactionDateTime;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.DigestBasedResolver;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.MaxVersionStrategy;
 
 import java.util.Arrays;
 
@@ -28,18 +31,18 @@ public class UnitemporalSnapshotDateTimeBasedScenarios extends BaseTest
     Variables:
     1) transactionMilestoning = Datetime based
     2) partition : Enabled, Disabled
-    3) DataSplit: Enabled, Disabled
-    4) partitionValuesByField: Enabled, Disabled
+    3) partitionValuesByField: Enabled, Disabled
+    4) Versioning: NoVersioning, MaxVersioning
+    5) Deduplication: AllowDups, FailOnDups, FilterDups
 
     Valid Combinations:
-    1) Without Partition, No Data Splits
-    2) Without Partition, With Data Splits -> TBD
-    3) With Partition, No Data Splits
-    4) With Partition, With Data Splits -> TBD
-    5) Without Partition, No Data Splits, Partition Filter
+    1) Without Partition, No Dedup No Versioning
+    2) Without Partition, FailOnDups MaxVersioning
+    3) With Partition, No Dedup No Versioning
+    4) With Partition Filter, No Dedup No Versioning
     */
 
-    public TestScenario DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -51,12 +54,21 @@ public TestScenario DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithDateTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__WITHOUT_PARTITIONS__WITH_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUP__MAX_VERSION()
     {
-        return null;
+        UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
+            .digestField(digestField)
+            .transactionMilestoning(TransactionDateTime.builder()
+                .dateTimeInName(batchTimeInField)
+                .dateTimeOutName(batchTimeOutField)
+                .build())
+            .versioningStrategy(MaxVersionStrategy.builder().versioningField("biz_date").mergeDataVersionResolver(DigestBasedResolver.INSTANCE).build())
+            .deduplicationStrategy(FailOnDuplicates.builder().build())
+            .build();
+        return new TestScenario(mainTableWithDateTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
@@ -69,13 +81,7 @@ public TestScenario DATETIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS()
         return new TestScenario(mainTableWithDateTime, stagingTableWithBaseSchemaAndDigest, ingestMode);
     }
 
-    public TestScenario DATETIME_BASED__WITH_PARTITIONS__WITH_DATA_SPLITS()
-    {
-        return null;
-    }
-
-
-    public TestScenario DATETIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS()
+    public TestScenario DATETIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION()
     {
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
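The abstract test-case classes below consume these scenarios through RelationalGenerator, in the shape visible throughout this diff. Roughly, assuming fixedClock_2000_01_01 and dataSplitRangesOneToTwo come from BaseTest as the surrounding code suggests:

    TestScenario scenario = scenarios.WITH_AUDITING__FAIL_ON_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS();
    RelationalGenerator generator = RelationalGenerator.builder()
        .ingestMode(scenario.getIngestMode())
        .relationalSink(getRelationalSink())
        .collectStatistics(true)
        .executionTimestampClock(fixedClock_2000_01_01)
        .build();

    // Single-pass modes produce one result set of SQL operations...
    GeneratorResult operations = generator.generateOperations(scenario.getDatasets());

    // ...all-version modes produce one result per data-split range.
    List<GeneratorResult> perSplit = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo);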
scenarios.NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -49,13 +49,13 @@ void testAppendOnlyAllowDuplicatesNoAuditing() .executionTimestampClock(fixedClock_2000_01_01) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyAllowDuplicatesNoAuditing(operations); + verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(operations); } @Test - void testAppendOnlyAllowDuplicatesNoAuditingDeriveMainSchema() + void testAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema() { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_NO_AUDITING_DERIVE_MAIN_SCHEMA(); + TestScenario scenario = scenarios.NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS__DERIVE_MAIN_SCHEMA(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -65,31 +65,15 @@ void testAppendOnlyAllowDuplicatesNoAuditingDeriveMainSchema() .executionTimestampClock(fixedClock_2000_01_01) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyAllowDuplicatesNoAuditing(operations); + verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(operations); } - public abstract void verifyAppendOnlyAllowDuplicatesNoAuditing(GeneratorResult operations); + public abstract void verifyAppendOnlyNoAuditingNoDedupNoVersioningNoFilterExistingRecordsDeriveMainSchema(GeneratorResult operations); @Test - void testAppendOnlyAllowDuplicatesWithAuditing() + void testAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords() { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_WITH_AUDITING(); - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(scenario.getIngestMode()) - .relationalSink(getRelationalSink()) - .collectStatistics(true) - .executionTimestampClock(fixedClock_2000_01_01) - .build(); - GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyAllowDuplicatesWithAuditing(operations); - } - - public abstract void verifyAppendOnlyAllowDuplicatesWithAuditing(GeneratorResult operations); - - @Test - void testAppendOnlyAllowDuplicatesWithAuditingWithDataSplits() - { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_WITH_AUDITING__WITH_DATASPLIT(); + TestScenario scenario = scenarios.WITH_AUDITING__FAIL_ON_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -97,64 +81,15 @@ void testAppendOnlyAllowDuplicatesWithAuditingWithDataSplits() .executionTimestampClock(fixedClock_2000_01_01) .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo); - verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(operations, dataSplitRangesOneToTwo); - } - - public abstract void verifyAppendOnlyAllowDuplicatesWithAuditingWithDataSplits(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges); - - @Test - void testAppendOnlyFailOnDuplicatesNoAuditing() - { - TestScenario scenario = scenarios.FAIL_ON_DUPLICATES_NO_AUDITING(); - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(scenario.getIngestMode()) -
.relationalSink(getRelationalSink()) - .collectStatistics(true) - .build(); - - GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyFailOnDuplicatesNoAuditing(operations); + verifyAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords(operations, dataSplitRangesOneToTwo); } - public abstract void verifyAppendOnlyFailOnDuplicatesNoAuditing(GeneratorResult operations); + public abstract void verifyAppendOnlyWithAuditingFailOnDuplicatesAllVersionNoFilterExistingRecords(List<GeneratorResult> generatorResults, List<DataSplitRange> dataSplitRanges); @Test - void testAppendOnlyFailOnDuplicatesWithAuditing() + void testAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords() { - TestScenario scenario = scenarios.FAIL_ON_DUPLICATES_WITH_AUDITING(); - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(scenario.getIngestMode()) - .relationalSink(getRelationalSink()) - .executionTimestampClock(fixedClock_2000_01_01) - .collectStatistics(true) - .build(); - - GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyFailOnDuplicatesWithAuditing(operations); - } - - public abstract void verifyAppendOnlyFailOnDuplicatesWithAuditing(GeneratorResult operations); - - @Test - void testAppendOnlyFilterDuplicatesNoAuditing() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING(); - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(scenario.getIngestMode()) - .relationalSink(getRelationalSink()) - .collectStatistics(true) - .build(); - - GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyFilterDuplicatesNoAuditing(operations); - } - - public abstract void verifyAppendOnlyFilterDuplicatesNoAuditing(GeneratorResult operations); - - @Test - void testAppendOnlyFilterDuplicatesWithAuditing() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_WITH_AUDITING(); + TestScenario scenario = scenarios.WITH_AUDITING__FILTER_DUPS__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -164,15 +99,15 @@ void testAppendOnlyFilterDuplicatesWithAuditing() .build(); GeneratorResult queries = generator.generateOperations(scenario.getDatasets()); - verifyAppendOnlyFilterDuplicatesWithAuditing(queries); + verifyAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords(queries); } - public abstract void verifyAppendOnlyFilterDuplicatesWithAuditing(GeneratorResult queries); + public abstract void verifyAppendOnlyWithAuditingFilterDuplicatesNoVersioningWithFilterExistingRecords(GeneratorResult queries); @Test - void testAppendOnlyFilterDuplicatesNoAuditingWithDataSplit() + void testAppendOnlyNoAuditingValidation() { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING_WITH_DATA_SPLIT(); + TestScenario scenario = scenarios.NO_AUDITING__FILTER_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -184,14 +119,14 @@ void testAppendOnlyFilterDuplicatesNoAuditingWithDataSplit() } catch (Exception e) { - Assertions.assertEquals("DataSplits not supported for NoAuditing mode", e.getMessage()); + Assertions.assertEquals("NoAuditing not allowed when there are primary keys", e.getMessage()); } } @Test - public void
testAppendOnlyFilterDuplicatesWithAuditingWithDataSplit() + public void testAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords() { - TestScenario scenario = scenarios.FILTER_DUPLICATES_WITH_AUDITING_WITH_DATA_SPLIT(); + TestScenario scenario = scenarios.WITH_AUDITING__FILTER_DUPS__ALL_VERSION__WITH_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -200,19 +135,20 @@ public void testAppendOnlyFilterDuplicatesWithAuditingWithDataSplit() .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo); - verifyAppendOnlyFilterDuplicatesWithAuditingWithDataSplit(operations, dataSplitRangesOneToTwo); + verifyAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords(operations, dataSplitRangesOneToTwo); } - public abstract void verifyAppendOnlyFilterDuplicatesWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + public abstract void verifyAppendOnlyWithAuditingFilterDuplicatesAllVersionWithFilterExistingRecords(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); @Test void testAppendOnlyWithUpperCaseOptimizer() { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING(); + TestScenario scenario = scenarios.WITH_AUDITING__FILTER_DUPS__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) .caseConversion(CaseConversion.TO_UPPER) + .executionTimestampClock(fixedClock_2000_01_01) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); @@ -224,13 +160,14 @@ void testAppendOnlyWithUpperCaseOptimizer() @Test void testAppendOnlyWithLessColumnsInStaging() { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING(); + TestScenario scenario = scenarios.WITH_AUDITING__FILTER_DUPS__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS(); Dataset stagingTable = scenario.getStagingTable().withSchema(stagingTableSchemaWithLimitedColumns); Datasets datasets = Datasets.of(scenario.getMainTable(), stagingTable); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) + .executionTimestampClock(fixedClock_2000_01_01) .build(); GeneratorResult operations = generator.generateOperations(datasets); @@ -240,28 +177,39 @@ void testAppendOnlyWithLessColumnsInStaging() public abstract void verifyAppendOnlyWithLessColumnsInStaging(GeneratorResult operations); @Test - void testAppendOnlyValidationPkFieldsMissing() + void testAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords() { - TestScenario testScenario = scenarios.FILTER_DUPLICATES_NO_AUDITING(); - // Staging table has no pks - Dataset stagingTable = testScenario.getStagingTable().withSchema(baseTableSchemaWithNoPrimaryKeys); - Datasets datasets = Datasets.of(testScenario.getMainTable(), stagingTable); - try - { - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(testScenario.getIngestMode()) - .relationalSink(getRelationalSink()) - .executionTimestampClock(fixedClock_2000_01_01) - .build(); - GeneratorResult queries = generator.generateOperations(datasets); - Assertions.fail("Exception was not thrown"); - } - catch (Exception e) - { - Assertions.assertEquals("Primary key list must not be empty", e.getMessage()); - } + TestScenario scenario =
scenarios.WITH_AUDITING__FAIL_ON_DUPS__MAX_VERSION__WITH_FILTER_EXISTING_RECORDS(); + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(scenario.getIngestMode()) + .relationalSink(getRelationalSink()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); + verifyAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords(operations); } + public abstract void verifyAppendOnlyWithAuditingFailOnDuplicatesMaxVersionWithFilterExistingRecords(GeneratorResult operations); + + @Test + void testAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords() + { + TestScenario scenario = scenarios.WITH_AUDITING__FILTER_DUPS__MAX_VERSION__NO_FILTER_EXISTING_RECORDS(); + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(scenario.getIngestMode()) + .relationalSink(getRelationalSink()) + .collectStatistics(true) + .executionTimestampClock(fixedClock_2000_01_01) + .build(); + + GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); + verifyAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords(operations); + } + + public abstract void verifyAppendOnlyWithAuditingFilterDupsMaxVersionNoFilterExistingRecords(GeneratorResult operations); + @Test void testAppendOnlyValidationDateTimeFieldMissing() { @@ -280,5 +228,24 @@ void testAppendOnlyValidationDateTimeFieldMissing() } } + @Test + void testAppendOnlyNoAuditingFilterExistingRecords() + { + TestScenario scenario = scenarios.NO_AUDITING__NO_DEDUP__NO_VERSIONING__WITH_FILTER_EXISTING_RECORDS(); + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(scenario.getIngestMode()) + .relationalSink(getRelationalSink()) + .collectStatistics(true) + .build(); + try + { + List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo); + } + catch (Exception e) + { + Assertions.assertEquals("Primary keys and digest are mandatory for filterExistingRecords", e.getMessage()); + } + } + public abstract RelationalSink getRelationalSink(); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalDeltaTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalDeltaTestCases.java index 87a69bf5da6..87ad65d7f20 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalDeltaTestCases.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalDeltaTestCases.java @@ -36,9 +36,9 @@ public abstract class NontemporalDeltaTestCases extends BaseTest NonTemporalDeltaScenarios scenarios = new NonTemporalDeltaScenarios(); @Test - void testNontemporalDeltaNoAuditingNoDataSplit() + void testNontemporalDeltaNoAuditingNoDedupNoVersioning() { - TestScenario testScenario =
scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -49,15 +49,15 @@ void testNontemporalDeltaNoAuditingNoDataSplit() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaNoAuditingNoDataSplit(operations); + verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(operations); } - public abstract void verifyNontemporalDeltaNoAuditingNoDataSplit(GeneratorResult operations); + public abstract void verifyNontemporalDeltaNoAuditingNoDedupNoVersioning(GeneratorResult operations); @Test - void testNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator() + void testNontemporalDeltaNoAuditingWithDeleteIndicatorNoDedupNoVersioning() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT__WITH_DELETE_INDICATOR(); + TestScenario testScenario = scenarios.NO_AUDTING__WITH_DELETE_INDICATOR__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -65,15 +65,15 @@ void testNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(operations); + verifyNontemporalDeltaNoAuditingWithDeleteIndicatorNoDedupNoVersioning(operations); } - public abstract void verifyNontemporalDeltaNoAuditingNoDataSplitWithDeleteIndicator(GeneratorResult operations); + public abstract void verifyNontemporalDeltaNoAuditingWithDeleteIndicatorNoDedupNoVersioning(GeneratorResult operations); @Test - void testNontemporalDeltaWithAuditingNoDataSplit() + void testNontemporalDeltaWithAuditingFilterDupsNoVersioning() { - TestScenario testScenario = scenarios.WITH_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -82,15 +82,15 @@ void testNontemporalDeltaWithAuditingNoDataSplit() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaWithAuditingNoDataSplit(operations); + verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(operations); } - public abstract void verifyNontemporalDeltaWithAuditingNoDataSplit(GeneratorResult operations); + public abstract void verifyNontemporalDeltaWithAuditingFilterDupsNoVersioning(GeneratorResult operations); @Test - void testNonTemporalDeltaNoAuditingWithDataSplit() + void testNonTemporalDeltaNoAuditingNoDedupAllVersion() { - TestScenario testScenario = scenarios.NO_AUDTING__WITH_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__ALL_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -98,15 +98,31 @@ void testNonTemporalDeltaNoAuditingWithDataSplit() .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(testScenario.getDatasets(), dataSplitRangesOneToTwo); - verifyNonTemporalDeltaNoAuditingWithDataSplit(operations, dataSplitRangesOneToTwo); + verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(operations, dataSplitRangesOneToTwo); } -
public abstract void verifyNonTemporalDeltaNoAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + public abstract void verifyNonTemporalDeltaNoAuditingNoDedupAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); @Test - void testNonTemporalDeltaWithWithAuditingWithDataSplit() + void testNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform() { - TestScenario testScenario = scenarios.WITH_AUDTING__WITH_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__ALL_VERSION_WITHOUT_PERFORM(); + RelationalGenerator generator = RelationalGenerator.builder() + .ingestMode(testScenario.getIngestMode()) + .relationalSink(getRelationalSink()) + .collectStatistics(true) + .build(); + + List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(testScenario.getDatasets(), dataSplitRangesOneToTwo); + verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(operations, dataSplitRangesOneToTwo); + } + + public abstract void verifyNonTemporalDeltaNoAuditingNoDedupAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + + @Test + void testNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion() + { + TestScenario testScenario = scenarios.WITH_AUDTING__FAIL_ON_DUPS__ALL_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -115,15 +131,15 @@ void testNonTemporalDeltaWithWithAuditingWithDataSplit() .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(testScenario.getDatasets(), dataSplitRangesOneToTwo); - verifyNonTemporalDeltaWithWithAuditingWithDataSplit(operations, dataSplitRangesOneToTwo); + verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(operations, dataSplitRangesOneToTwo); } - public abstract void verifyNonTemporalDeltaWithWithAuditingWithDataSplit(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + public abstract void verifyNonTemporalDeltaWithWithAuditingFailOnDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); @Test void testNontemporalDeltaWithUpperCaseOptimizer() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -139,7 +155,7 @@ void testNontemporalDeltaWithUpperCaseOptimizer() @Test void testNontemporalDeltaWithLessColumnsInStaging() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); Dataset stagingTable = testScenario.getStagingTable().withSchema(stagingTableSchemaWithLimitedColumns); Datasets datasets = Datasets.of(testScenario.getMainTable(), stagingTable); @@ -157,7 +173,7 @@ void testNontemporalDeltaWithLessColumnsInStaging() @Test void testNontemporalDeltaValidationPkFieldsMissing() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); // Staging table has no pks Dataset stagingTable = testScenario.getStagingTable().withSchema(baseTableSchemaWithNoPrimaryKeys); Datasets datasets = Datasets.of(testScenario.getMainTable(), stagingTable); @@ -198,7 +214,7 @@ void testNontemporalDeltaValidationDateTimeFieldMissing() @Test public void testNontemporalDeltaPostActionSqlAndCleanStagingData() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario =
scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -228,9 +244,9 @@ void testNontemporalDeltaWithNoVersionAndStagingFilter() public abstract void verifyNontemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult operations); @Test - void testNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup() + void testNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters() { - TestScenario testScenario = scenarios.MAX_VERSIONING_WITH_GREATER_THAN__DEDUP__WITH_STAGING_FILTER(); + TestScenario testScenario = scenarios.FILTER_DUPS__MAX_VERSION__WITH_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -238,15 +254,15 @@ void testNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(operations); + verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(operations); } - public abstract void verifyNontemporalDeltaWithMaxVersioningAndStagingFiltersWithDedup(GeneratorResult operations); + public abstract void verifyNontemporalDeltaWithFilterDupsMaxVersionWithStagingFilters(GeneratorResult operations); @Test - void testNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters() + void testNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters() { - TestScenario testScenario = scenarios.MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITH_STAGING_FILTER(); + TestScenario testScenario = scenarios.NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM__WITH_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -254,15 +270,15 @@ void testNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(operations); + verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(operations); } - public abstract void verifyNontemporalDeltaWithMaxVersioningNoDedupAndStagingFilters(GeneratorResult operations); + public abstract void verifyNontemporalDeltaWithNoDedupMaxVersioningWithoutPerformWithStagingFilters(GeneratorResult operations); @Test - void testNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters() + void testNontemporalDeltaNoDedupMaxVersionWithoutPerform() { - TestScenario testScenario = scenarios.MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITHOUT_STAGING_FILTER(); + TestScenario testScenario = scenarios.NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -270,15 +286,15 @@ void testNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(operations); + verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(operations); } - public abstract void verifyNontemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations); + public abstract void 
verifyNontemporalDeltaNoDedupMaxVersionWithoutPerform(GeneratorResult operations); @Test - void testNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters() + void testNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase() { - TestScenario testScenario = scenarios.MAX_VERSIONING_WITH_GREATER_THAN_EQUAL__DEDUP__WITHOUT_STAGING_FILTER(); + TestScenario testScenario = scenarios.NO_DEDUP__MAX_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -287,10 +303,10 @@ void testNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutSta .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(operations); + verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(operations); } - public abstract void verifyNontemporalDeltaWithWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations); + public abstract void verifyNontemporalDeltaAllowDuplicatesMaxVersionWithUpperCase(GeneratorResult operations); public abstract RelationalSink getRelationalSink(); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalSnapshotTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalSnapshotTestCases.java index 4a81ebce332..695253d295e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalSnapshotTestCases.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/NontemporalSnapshotTestCases.java @@ -19,6 +19,8 @@ import org.finos.legend.engine.persistence.components.common.Resources; import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot; import org.finos.legend.engine.persistence.components.ingestmode.audit.DateTimeAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; +import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy; import org.finos.legend.engine.persistence.components.logicalplan.LogicalPlan; import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset; import org.finos.legend.engine.persistence.components.logicalplan.datasets.DatasetDefinition; @@ -41,9 +43,9 @@ public abstract class NontemporalSnapshotTestCases extends BaseTest NontemporalSnapshotTestScenarios scenarios = new NontemporalSnapshotTestScenarios(); @Test - void testNontemporalSnapshotNoAuditingNoDataSplit() + void testNontemporalSnapshotNoAuditingNoDedupNoVersioning() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() 
.ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -54,31 +56,15 @@ void testNontemporalSnapshotNoAuditingNoDataSplit() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalSnapshotNoAuditingNoDataSplit(operations); + verifyNontemporalSnapshotNoAuditingNoDedupNoVersioning(operations); } - public abstract void verifyNontemporalSnapshotNoAuditingNoDataSplit(GeneratorResult operations); + public abstract void verifyNontemporalSnapshotNoAuditingNoDedupNoVersioning(GeneratorResult operations); @Test - void testNontemporalSnapshotNoAuditingWithDataSplit() + void testNontemporalSnapshotWithAuditingFilterDupsNoVersioning() { - TestScenario testScenario = scenarios.NO_AUDTING__WITH_DATASPLIT(); - RelationalGenerator generator = RelationalGenerator.builder() - .ingestMode(testScenario.getIngestMode()) - .relationalSink(getRelationalSink()) - .collectStatistics(true) - .build(); - - GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalSnapshotNoAuditingWithDataSplit(operations); - } - - public abstract void verifyNontemporalSnapshotNoAuditingWithDataSplit(GeneratorResult operations); - - @Test - void testNontemporalSnapshotWithAuditingNoDataSplit() - { - TestScenario testScenario = scenarios.WITH_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -87,15 +73,15 @@ void testNontemporalSnapshotWithAuditingNoDataSplit() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalSnapshotWithAuditingNoDataSplit(operations); + verifyNontemporalSnapshotWithAuditingFilterDupsNoVersioning(operations); } - public abstract void verifyNontemporalSnapshotWithAuditingNoDataSplit(GeneratorResult operations); + public abstract void verifyNontemporalSnapshotWithAuditingFilterDupsNoVersioning(GeneratorResult operations); @Test - void testNontemporalSnapshotWithAuditingWithDataSplit() + void testNontemporalSnapshotWithAuditingFailOnDupMaxVersion() { - TestScenario testScenario = scenarios.WITH_AUDTING__WITH_DATASPLIT(); + TestScenario testScenario = scenarios.WITH_AUDTING__FAIL_ON_DUP__MAX_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -104,15 +90,15 @@ void testNontemporalSnapshotWithAuditingWithDataSplit() .build(); GeneratorResult operations = generator.generateOperations(testScenario.getDatasets()); - verifyNontemporalSnapshotWithAuditingWithDataSplit(operations); + verifyNontemporalSnapshotWithAuditingFailOnDupMaxVersion(operations); } - public abstract void verifyNontemporalSnapshotWithAuditingWithDataSplit(GeneratorResult operations); + public abstract void verifyNontemporalSnapshotWithAuditingFailOnDupMaxVersion(GeneratorResult operations); @Test void testNontemporalSnapshotWithUpperCaseOptimizer() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -128,7 +114,7 @@ void testNontemporalSnapshotWithUpperCaseOptimizer() @Test void 
testNontemporalSnapshotWithLessColumnsInStaging() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); Dataset stagingTable = testScenario.getStagingTable().withSchema(baseTableShortenedSchema); Datasets datasets = Datasets.of(testScenario.getMainTable(), stagingTable); @@ -165,6 +151,23 @@ void testNontemporalSnapshotMandatoryDatasetMissing() } } + @Test + void testNontemporalSnapshotAllVersionValidation() + { + try + { + NontemporalSnapshot.builder() + .auditing(NoAuditing.builder().build()) + .versioningStrategy(AllVersionsStrategy.builder().versioningField("xyz").build()) + .build(); + Assertions.fail("Exception was not thrown"); + } + catch (Exception e) + { + Assertions.assertEquals("Cannot build NontemporalSnapshot, AllVersionsStrategy not supported", e.getMessage()); + } + } + @Test void testNontemporalSnapshotDateTimeAuditingValidation() { @@ -184,7 +187,7 @@ void testNontemporalSnapshotDateTimeAuditingValidation() @Test public void testNontemporalSnapshotWithCleanStagingData() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(testScenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -200,19 +203,19 @@ public void testNontemporalSnapshotWithCleanStagingData() @Test public void testNontemporalSnapshotWithDropStagingData() { - TestScenario testScenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario testScenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); PlannerOptions options = PlannerOptions.builder().collectStatistics(true).build(); Resources resources = Resources.builder().externalDatasetImported(true).build(); - Planner planner = Planners.get(testScenario.getDatasets(), testScenario.getIngestMode(), options); + Planner planner = Planners.get(testScenario.getDatasets(), testScenario.getIngestMode(), options, getRelationalSink().capabilities()); RelationalTransformer transformer = new RelationalTransformer(getRelationalSink()); // post actions - LogicalPlan postActionsLogicalPlan = planner.buildLogicalPlanForPostActions(resources); - SqlPlan physicalPlanForPostActions = transformer.generatePhysicalPlan(postActionsLogicalPlan); - verifyNontemporalSnapshotWithDropStagingData(physicalPlanForPostActions); + LogicalPlan postCleanupLogicalPlan = planner.buildLogicalPlanForPostCleanup(resources); + SqlPlan physicalPlanForPostCleanup = transformer.generatePhysicalPlan(postCleanupLogicalPlan); + verifyNontemporalSnapshotWithDropStagingData(physicalPlanForPostCleanup); } - public abstract void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostActions); + public abstract void verifyNontemporalSnapshotWithDropStagingData(SqlPlan physicalPlanForPostCleanup); public abstract RelationalSink getRelationalSink(); } \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/AppendOnlyBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/AppendOnlyBasedDerivationTest.java index 
6725f7e0a5d..29aa39969cd 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/AppendOnlyBasedDerivationTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/AppendOnlyBasedDerivationTest.java @@ -20,7 +20,6 @@ import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.AllowDuplicates; import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FailOnDuplicates; -import org.finos.legend.engine.persistence.components.ingestmode.deduplication.FilterDuplicates; import org.finos.legend.engine.persistence.components.scenarios.AppendOnlyScenarios; import org.finos.legend.engine.persistence.components.scenarios.TestScenario; import org.junit.jupiter.api.Assertions; @@ -33,9 +32,9 @@ public class AppendOnlyBasedDerivationTest AppendOnlyScenarios scenarios = new AppendOnlyScenarios(); @Test - void testAppendOnlyAllowDuplicatesNoAuditing() + void testAppendOnlyAllowDuplicatesNoAuditingNoVersioningNoFilterExistingRecords() { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_NO_AUDITING(); + TestScenario scenario = scenarios.NO_AUDITING__NO_DEDUP__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS(); assertDerivedMainDataset(scenario); AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertEquals("DIGEST", mode.digestField().get()); @@ -44,22 +43,10 @@ void testAppendOnlyAllowDuplicatesNoAuditing() } @Test - void testAppendOnlyAllowDuplicatesWithAuditing() + void testAppendOnlyFailOnDuplicatesWithAuditingAllVersionNoFilterExistingRecords() { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_WITH_AUDITING(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); - DateTimeAuditing auditing = (DateTimeAuditing) mode.auditing(); - Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof AllowDuplicates); - } - - @Test - void testAppendOnlyAllowDuplicatesWithAuditingWithDataSplit() - { - TestScenario scenario = scenarios.ALLOW_DUPLICATES_WITH_AUDITING__WITH_DATASPLIT(); + // Auditing column is a PK + TestScenario scenario = scenarios.WITH_AUDITING__FAIL_ON_DUPS__ALL_VERSION__NO_FILTER_EXISTING_RECORDS(); assertDerivedMainDataset(scenario); AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertEquals("DIGEST", mode.digestField().get()); @@ -67,79 +54,20 @@ void testAppendOnlyAllowDuplicatesWithAuditingWithDataSplit() Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); DateTimeAuditing auditing = (DateTimeAuditing) mode.auditing(); Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof AllowDuplicates); - } - - @Test - void testAppendOnlyFailOnDuplicatesNoAuditing() - 
{ - TestScenario scenario = scenarios.FAIL_ON_DUPLICATES_NO_AUDITING(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertTrue(mode.auditing() instanceof NoAuditing); Assertions.assertTrue(mode.deduplicationStrategy() instanceof FailOnDuplicates); } @Test - void testAppendOnlyFailOnDuplicatesWithAuditing() + void testAppendOnlyAllowDuplicatesWithAuditingNoVersioningNoFilterExistingRecords() { - TestScenario scenario = scenarios.FAIL_ON_DUPLICATES_WITH_AUDITING(); + // Auditing column is not a PK + TestScenario scenario = scenarios.WITH_AUDITING__ALLOW_DUPLICATES__NO_VERSIONING__NO_FILTER_EXISTING_RECORDS(); assertDerivedMainDataset(scenario); AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertEquals("DIGEST", mode.digestField().get()); Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); DateTimeAuditing auditing = (DateTimeAuditing) mode.auditing(); Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof FailOnDuplicates); - } - - @Test - void testAppendOnlyFilterDuplicatesNoAuditing() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertTrue(mode.auditing() instanceof NoAuditing); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof FilterDuplicates); - } - - @Test - void testAppendOnlyFilterDuplicatesNoAuditingWithDataSplit() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_NO_AUDITING_WITH_DATA_SPLIT(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertEquals("DATA_SPLIT", mode.dataSplitField().get()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof FilterDuplicates); - } - - @Test - void testAppendOnlyFilterDuplicatesWithAuditing() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_WITH_AUDITING(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); - DateTimeAuditing auditing = (DateTimeAuditing) mode.auditing(); - Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof FilterDuplicates); - } - - @Test - void testAppendOnlyFilterDuplicatesWithAuditingWithDataSplit() - { - TestScenario scenario = scenarios.FILTER_DUPLICATES_WITH_AUDITING_WITH_DATA_SPLIT(); - assertDerivedMainDataset(scenario); - AppendOnly mode = (AppendOnly) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertEquals("DIGEST", mode.digestField().get()); - Assertions.assertEquals("DATA_SPLIT", mode.dataSplitField().get()); - Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); - DateTimeAuditing auditing = 
(DateTimeAuditing) mode.auditing(); - Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertTrue(mode.deduplicationStrategy() instanceof FilterDuplicates); + Assertions.assertTrue(mode.deduplicationStrategy() instanceof AllowDuplicates); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalDeltaBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalDeltaBasedDerivationTest.java index 82880a126bc..8ad56b8c1c8 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalDeltaBasedDerivationTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalDeltaBasedDerivationTest.java @@ -34,7 +34,7 @@ public class NontemporalDeltaBasedDerivationTest @Test void testNontemporalDeltaNoAuditingNoDataSplit() { - TestScenario scenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario scenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); assertDerivedMainDataset(scenario); NontemporalDelta mode = (NontemporalDelta) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertEquals("DIGEST", mode.digestField()); @@ -44,7 +44,7 @@ void testNontemporalDeltaNoAuditingNoDataSplit() @Test void testNontemporalDeltaNoAuditingNoDataSplitWithDeleteInd() { - TestScenario scenario = scenarios.NO_AUDTING__NO_DATASPLIT__WITH_DELETE_INDICATOR(); + TestScenario scenario = scenarios.NO_AUDTING__WITH_DELETE_INDICATOR__NO_DEDUP__NO_VERSIONING(); assertDerivedMainDataset(scenario); NontemporalDelta mode = (NontemporalDelta) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertEquals("DIGEST", mode.digestField()); @@ -57,7 +57,7 @@ void testNontemporalDeltaNoAuditingNoDataSplitWithDeleteInd() @Test void testNontemporalDeltaNoAuditingWithDataSplit() { - TestScenario scenario = scenarios.NO_AUDTING__WITH_DATASPLIT(); + TestScenario scenario = scenarios.NO_AUDTING__NO_DEDUP__ALL_VERSION(); assertDerivedMainDataset(scenario); NontemporalDelta mode = (NontemporalDelta) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof NoAuditing); @@ -68,7 +68,7 @@ void testNontemporalDeltaNoAuditingWithDataSplit() @Test void testNontemporalDeltaWithAuditingNoDataSplit() { - TestScenario scenario = scenarios.WITH_AUDTING__NO_DATASPLIT(); + TestScenario scenario = scenarios.WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING(); assertDerivedMainDataset(scenario); NontemporalDelta mode = (NontemporalDelta) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); @@ -80,7 +80,7 @@ void testNontemporalDeltaWithAuditingNoDataSplit() @Test void 
testNontemporalSnapshotWithAuditingWithDataSplit() { - TestScenario scenario = scenarios.WITH_AUDTING__WITH_DATASPLIT(); + TestScenario scenario = scenarios.WITH_AUDTING__FAIL_ON_DUPS__ALL_VERSION(); assertDerivedMainDataset(scenario); NontemporalDelta mode = (NontemporalDelta) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalSnapshotBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalSnapshotBasedDerivationTest.java index c00bcb73d69..e2b8b8b9061 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalSnapshotBasedDerivationTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/nontemporal/derivation/NontemporalSnapshotBasedDerivationTest.java @@ -31,28 +31,18 @@ public class NontemporalSnapshotBasedDerivationTest NontemporalSnapshotTestScenarios scenarios = new NontemporalSnapshotTestScenarios(); @Test - void testNontemporalSnapshotNoAuditingNoDataSplit() + void testNontemporalSnapshotNoAuditingNoDedupNoVersioning() { - TestScenario scenario = scenarios.NO_AUDTING__NO_DATASPLIT(); + TestScenario scenario = scenarios.NO_AUDTING__NO_DEDUP__NO_VERSIONING(); assertDerivedMainDataset(scenario); NontemporalSnapshot mode = (NontemporalSnapshot) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof NoAuditing); } @Test - void testNontemporalSnapshotNoAuditingWithDataSplit() + void testNontemporalSnapshotWithAuditingFilterDupsNoVersioning() { - TestScenario scenario = scenarios.NO_AUDTING__WITH_DATASPLIT(); - assertDerivedMainDataset(scenario); - NontemporalSnapshot mode = (NontemporalSnapshot) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); - Assertions.assertTrue(mode.auditing() instanceof NoAuditing); - Assertions.assertEquals("DATA_SPLIT", mode.dataSplitField().get()); - } - - @Test - void testNontemporalSnapshotWithAuditingNoDataSplit() - { - TestScenario scenario = scenarios.WITH_AUDTING__NO_DATASPLIT(); + TestScenario scenario = scenarios.WITH_AUDTING__FILTER_DUPLICATES__NO_VERSIONING(); assertDerivedMainDataset(scenario); NontemporalSnapshot mode = (NontemporalSnapshot) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); @@ -61,15 +51,14 @@ void testNontemporalSnapshotWithAuditingNoDataSplit() } @Test - void testNontemporalSnapshotWithAuditingWithDataSplit() + void testNontemporalSnapshotWithAuditingFailOnDupMaxVersion() { - TestScenario scenario = scenarios.WITH_AUDTING__WITH_DATASPLIT(); + TestScenario scenario = 
scenarios.WITH_AUDTING__FAIL_ON_DUP__MAX_VERSION(); assertDerivedMainDataset(scenario); NontemporalSnapshot mode = (NontemporalSnapshot) scenario.getIngestMode().accept(new IngestModeCaseConverter(String::toUpperCase)); Assertions.assertTrue(mode.auditing() instanceof DateTimeAuditing); DateTimeAuditing auditing = (DateTimeAuditing) mode.auditing(); Assertions.assertEquals("BATCH_UPDATE_TIME", auditing.dateTimeField()); - Assertions.assertEquals("DATA_SPLIT", mode.dataSplitField().get()); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdBasedTestCases.java index fd0a9b3593c..63a34113aa7 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdBasedTestCases.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdBasedTestCases.java @@ -39,9 +39,9 @@ public abstract class UnitmemporalDeltaBatchIdBasedTestCases extends BaseTest UnitemporalDeltaBatchIdBasedScenarios scenarios = new UnitemporalDeltaBatchIdBasedScenarios(); @Test - void testUnitemporalDeltaNoDeleteIndNoDataSplits() + void testUnitemporalDeltaNoDeleteIndNoDedupNoVersion() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -51,15 +51,15 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplits() .enableConcurrentSafety(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyUnitemporalDeltaNoDeleteIndNoAuditing(operations); + verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(operations); } - public abstract void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations); @Test - void testUnitemporalDeltaNoDeleteIndWithDataSplits() + void testUnitemporalDeltaNoDeleteIndNoDedupAllVersionsWithoutPerform() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__ALL_VERSION_WITHOUT_PERFORM(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -67,15 +67,15 @@ void testUnitemporalDeltaNoDeleteIndWithDataSplits() .collectStatistics(true) .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo); - verifyUnitemporalDeltaNoDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo); +
verifyUnitemporalDeltaNoDeleteIndNoDedupAllVersionsWithoutPerform(operations, dataSplitRangesOneToTwo); } - public abstract void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + public abstract void verifyUnitemporalDeltaNoDeleteIndNoDedupAllVersionsWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); @Test - void testUnitemporalDeltaWithDeleteIndNoDataSplits() + void testUnitemporalDeltaWithDeleteIndFilterDupsNoVersion() { - TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__FILTER_DUPS__NO_VERSIONING(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -83,15 +83,15 @@ void testUnitemporalDeltaWithDeleteIndNoDataSplits() .collectStatistics(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyUnitemporalDeltaWithDeleteIndNoDataSplits(operations); + verifyUnitemporalDeltaWithDeleteIndFilterDupsNoVersion(operations); } - public abstract void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaWithDeleteIndFilterDupsNoVersion(GeneratorResult operations); @Test - void testUnitemporalDeltaWithDeleteIndWithDataSplits() + void testUnitemporalDeltaWithDeleteIndNoDedupAllVersion() { - TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__NO_DEDUP__ALL_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -99,15 +99,15 @@ void testUnitemporalDeltaWithDeleteIndWithDataSplits() .collectStatistics(true) .build(); List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo); - verifyUnitemporalDeltaWithDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo); + verifyUnitemporalDeltaWithDeleteIndNoDedupAllVersion(operations, dataSplitRangesOneToTwo); } - public abstract void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); + public abstract void verifyUnitemporalDeltaWithDeleteIndNoDedupAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges); @Test void testUnitemporalDeltaWithUpperCaseOptimizer() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -125,7 +125,7 @@ void testUnitemporalDeltaWithUpperCaseOptimizer() @Test void testUnitemporalDeltaWithCleanStagingData() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -139,9 +139,9 @@ void testUnitemporalDeltaWithCleanStagingData() public abstract void verifyUnitemporalDeltaWithCleanStagingData(GeneratorResult operations); @Test - void testUnitemporalDeltaNoDeleteIndNoDataSplitsWithOptimizationFilters() + void testUnitemporalDeltaNoDeleteIndWithOptimizationFilters() { - TestScenario scenario =
scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTERS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTERS(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -149,15 +149,15 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplitsWithOptimizationFilters() .collectStatistics(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(operations); + verifyUnitemporalDeltaNoDeleteIndWithOptimizationFilters(operations); } - public abstract void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFilters(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFilters(GeneratorResult operations); @Test - void testUnitemporalDeltaNoDeleteIndNoDataSplitsWithOptimizationFiltersIncludesNullValues() + void testUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTERS__INCLUDES_NULL_VALUES(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTERS__INCLUDES_NULL_VALUES(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -165,10 +165,10 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplitsWithOptimizationFiltersIncludesN .collectStatistics(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFiltersIncludesNullValues(operations); + verifyUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues(operations); } - public abstract void verifyUnitemporalDeltaNoDeleteIndNoAuditingWithOptimizationFiltersIncludesNullValues(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaNoDeleteIndWithOptimizationFiltersIncludesNullValues(GeneratorResult operations); @Test void testUnitemporalDeltaValidationBatchIdOutMissing() @@ -197,7 +197,7 @@ void testUnitemporalDeltaValidationBatchIdOutMissing() @Test void testUnitemporalDeltaValidationBatchIdInNotPrimaryKey() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -222,7 +222,7 @@ void testUnitemporalDeltaValidationBatchIdInNotPrimaryKey() @Test void testUnitemporalDeltaValidationOptimizationColumnsNotPresent() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_MISSING_OPTIMIZATION_FILTER(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_MISSING_OPTIMIZATION_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -243,7 +243,7 @@ void testUnitemporalDeltaValidationOptimizationColumnUnsupportedDataType() { - TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS__WITH_OPTIMIZATION_FILTER_UNSUPPORTED_DATATYPE(); + TestScenario scenario =
scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_OPTIMIZATION_FILTER_UNSUPPORTED_DATATYPE(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -278,9 +278,9 @@ void testUnitemporalDeltaWithNoVersioningAndStagingFilters() public abstract void verifyUnitemporalDeltaWithNoVersionAndStagingFilter(GeneratorResult operations); @Test - void testUnitemporalDeltaWithMaxVersioningDedupEnabledAndStagingFiltersWithDedup() + void testUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter() { - TestScenario scenario = scenarios.BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__DEDUP__WITH_STAGING_FILTER(); + TestScenario scenario = scenarios.BATCH_ID_BASED__FILTER_DUPS__MAX_VERSION__WITH_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) @@ -289,15 +289,15 @@ void testUnitemporalDeltaWithMaxVersioningDedupEnabledAndStagingFiltersWithDedup .cleanupStagingData(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - this.verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(operations); + this.verifyUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter(operations); } - public abstract void verifyUnitemporalDeltaWithMaxVersionDedupEnabledAndStagingFilter(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaWithFilterDupsMaxVersionWithStagingFilter(GeneratorResult operations); @Test - void testUnitemporalDeltaWithMaxVersioningNoDedupAndStagingFilters() + void testUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters() { - TestScenario scenario = scenarios.BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITH_STAGING_FILTER(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEDUP__MAX_VERSION_WITHOUT_PERFORM__WITH_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) @@ -307,15 +307,15 @@ void testUnitemporalDeltaWithMaxVersioningNoDedupAndStagingFilters() .collectStatistics(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - this.verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(operations); + this.verifyUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters(operations); } - public abstract void verifyUnitemporalDeltaWithMaxVersionNoDedupAndStagingFilter(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaWithNoDedupMaxVersionWithoutPerformAndStagingFilters(GeneratorResult operations); @Test - void testUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters() + void testUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform() { - TestScenario scenario = scenarios.BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN__NO_DEDUP__WITHOUT_STAGING_FILTER(); + TestScenario scenario = scenarios.BATCH_ID_BASED__FAIL_ON_DUPS__MAX_VERSIONING_WITHOUT_PERFORM__NO_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) @@ -324,15 +324,15 @@ void testUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters() .cleanupStagingData(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - this.verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(operations); + this.verifyUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform(operations); } - public abstract void 
verifyUnitemporalDeltaWithMaxVersioningNoDedupWithoutStagingFilters(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaWithFailOnDupsMaxVersioningWithoutPerform(GeneratorResult operations); @Test - void testUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters() + void testUnitemporalDeltaWithNoDedupMaxVersioningAndUpperCaseWithoutStagingFilters() { - TestScenario scenario = scenarios.BATCH_ID_BASED__MAX_VERSIONING_WITH_GREATER_THAN_EQUAL__DEDUP__WITHOUT_STAGING_FILTER(); + TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEDUP__MAX_VERSIONING__NO_STAGING_FILTER(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) @@ -342,10 +342,10 @@ void testUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStaging .caseConversion(CaseConversion.TO_UPPER) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - this.verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(operations); + this.verifyUnitemporalDeltaWithNoDedupMaxVersioningAndUpperCaseWithoutStagingFilters(operations); } - public abstract void verifyUnitemporalDeltaWithMaxVersioningDedupEnabledAndUpperCaseWithoutStagingFilters(GeneratorResult operations); + public abstract void verifyUnitemporalDeltaWithNoDedupMaxVersioningAndUpperCaseWithoutStagingFilters(GeneratorResult operations); public abstract RelationalSink getRelationalSink(); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java index 15a203fe743..47aa50fbc7b 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java @@ -38,9 +38,9 @@ public abstract class UnitmemporalDeltaBatchIdDateTimeBasedTestCases extends Bas UnitemporalDeltaBatchIdDateTimeBasedScenarios scenarios = new UnitemporalDeltaBatchIdDateTimeBasedScenarios(); @Test - void testUnitemporalDeltaNoDeleteIndNoDataSplits() + void testUnitemporalDeltaNoDeleteIndNoDedupNoVersion() { - TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION(); RelationalGenerator generator = RelationalGenerator.builder() .ingestMode(scenario.getIngestMode()) .relationalSink(getRelationalSink()) @@ -48,15 +48,15 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplits() .collectStatistics(true) .build(); GeneratorResult operations = generator.generateOperations(scenario.getDatasets()); - verifyUnitemporalDeltaNoDeleteIndNoAuditing(operations); + 
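Note: the renames in this file replace the old NO_DATA_SPLITS / WITH_DATA_SPLITS scenario suffixes with names that spell out the deduplication strategy (NO_DEDUP, FILTER_DUPS, FAIL_ON_DUPS) and the versioning strategy (NO_VERSION, MAX_VERSION or ALL_VERSION, optionally WITHOUT_PERFORM). As a minimal sketch of what a FILTER_DUPS__MAX_VERSION scenario plausibly wires up — the builder option names below are assumptions based on the public ingest-mode API, not taken from this diff:

    // Sketch only: a unitemporal delta with explicit dedup and versioning strategies.
    UnitemporalDelta ingestMode = UnitemporalDelta.builder()
        .digestField("digest")
        .transactionMilestoning(BatchId.builder()
            .batchIdInName("batch_id_in")
            .batchIdOutName("batch_id_out")
            .build())
        .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").build()) // assumed option names
        .deduplicationStrategy(FilterDuplicates.builder().build()) // assumed option names
        .build();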
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java
index 15a203fe743..47aa50fbc7b 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaBatchIdDateTimeBasedTestCases.java
@@ -38,9 +38,9 @@ public abstract class UnitmemporalDeltaBatchIdDateTimeBasedTestCases extends Bas
     UnitemporalDeltaBatchIdDateTimeBasedScenarios scenarios = new UnitemporalDeltaBatchIdDateTimeBasedScenarios();
     @Test
-    void testUnitemporalDeltaNoDeleteIndNoDataSplits()
+    void testUnitemporalDeltaNoDeleteIndNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -48,15 +48,15 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalDeltaNoDeleteIndNoAuditing(operations);
+        verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations);
+    public abstract void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersion(GeneratorResult operations);
     @Test
-    void testUnitemporalDeltaNoDeleteIndWithDataSplits()
+    void testUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__FILTER_DUPS__ALL_VERSION_WITHOUT_PERFORM();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -64,15 +64,15 @@ void testUnitemporalDeltaNoDeleteIndWithDataSplits()
             .collectStatistics(true)
             .build();
         List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo);
-        verifyUnitemporalDeltaNoDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo);
+        verifyUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform(operations, dataSplitRangesOneToTwo);
     }
-    public abstract void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
+    public abstract void verifyUnitemporalDeltaNoDeleteIndFilterDupsAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
     @Test
-    void testUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits()
+    void testUnitemporalDeltaWithDeleteIndMultiValuesNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND_MULTI_VALUES__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND_MULTI_VALUES__NO_DEDUP_NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -80,15 +80,15 @@ void testUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(operations);
+        verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalDeltaWithDeleteIndMultiValuesNoDedupNoVersion(GeneratorResult operations);
     @Test
-    void testUnitemporalDeltaWithDeleteIndNoDataSplits()
+    void testUnitemporalDeltaWithDeleteInd()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -96,15 +96,15 @@ void testUnitemporalDeltaWithDeleteIndNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalDeltaWithDeleteIndNoDataSplits(operations);
+        verifyUnitemporalDeltaWithDeleteInd(operations);
     }
-    public abstract void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalDeltaWithDeleteInd(GeneratorResult operations);
     @Test
-    void testUnitemporalDeltaWithDeleteIndWithDataSplits()
+    void testUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__FAIL_ON_DUP__ALL_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -112,15 +112,15 @@ void testUnitemporalDeltaWithDeleteIndWithDataSplits()
             .collectStatistics(true)
             .build();
         List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo);
-        verifyUnitemporalDeltaWithDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo);
+        verifyUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion(operations, dataSplitRangesOneToTwo);
     }
-    public abstract void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
+    public abstract void verifyUnitemporalDeltaWithDeleteIndFailOnDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
     @Test
     void testUnitemporalDeltaWithUpperCaseOptimizer()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -138,7 +138,7 @@ void testUnitemporalDeltaWithUpperCaseOptimizer()
     @Test
     void testUnitemporalDeltaWithLessColumnsInStaging()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         Dataset stagingDataset = scenario.getStagingTable().withSchema(stagingTableSchemaWithLimitedColumns);
         Datasets datasets = Datasets.of(scenario.getMainTable(), stagingDataset);
@@ -157,7 +157,7 @@ void testUnitemporalDeltaWithLessColumnsInStaging()
     @Test
     void testUnitemporalDeltaWithPlaceholders()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -176,7 +176,7 @@ void testUnitemporalDeltaWithPlaceholders()
     @Test
     void testUnitemporalDeltaWithOnlySchemaSet()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         Dataset mainTable = getMainDatasetWithOnlySchemaSet(scenario.getMainTable().schema());
         Dataset stagingTable = getStagingDatasetWithOnlySchemaSet(scenario.getStagingTable().schema());
         Datasets datasets = Datasets.of(mainTable, stagingTable);
@@ -197,7 +197,7 @@ void testUnitemporalDeltaWithOnlySchemaSet()
     @Test
     void testUnitemporalDeltaWithDbAndSchemaBothSet()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         Dataset mainTable = getMainDatasetWithDbAndSchemaBothSet(scenario.getMainTable().schema());
         Dataset stagingTable = getStagingDatasetWithDbAndSchemaBothSet(scenario.getStagingTable().schema());
         Datasets datasets = Datasets.of(mainTable, stagingTable);
@@ -218,7 +218,7 @@ void testUnitemporalDeltaWithDbAndSchemaBothNotSet()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         Dataset mainTable = getMainDatasetWithDbAndSchemaBothNotSet(scenario.getMainTable().schema());
         Dataset stagingTable = getStagingDatasetWithDbAndSchemaBothNotSet(scenario.getStagingTable().schema());
         Datasets datasets = Datasets.of(mainTable, stagingTable);
@@ -239,7 +239,7 @@ void testUnitemporalDeltaWithCleanStagingData()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -357,7 +357,7 @@ void testUnitemporalDeltaValidationDeleteIndicatorValuesMissing()
     @Test
     void testUnitemporalDeltaValidationBatchIdInNotPrimaryKey()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
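Note: scenarios whose new names end in ALL_VERSION (optionally WITHOUT_PERFORM) are the successors of the old WITH_DATA_SPLITS scenarios; they still drive generateOperationsWithDataSplits, which — as the paired verify methods suggest — produces one GeneratorResult per DataSplitRange. A hedged sketch of the invariant the verifiers rely on (the assertion below is illustrative, not part of the PR):

    // One operation set per data-split range, verified pairwise against expected SQL.
    List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(
        scenario.getDatasets(), dataSplitRangesOneToTwo);
    Assertions.assertEquals(dataSplitRangesOneToTwo.size(), operations.size()); // illustrative check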
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaDateTimeBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaDateTimeBasedTestCases.java
index 548fce115fa..cf3d7e63ca1 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaDateTimeBasedTestCases.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalDeltaDateTimeBasedTestCases.java
@@ -36,9 +36,9 @@ public abstract class UnitmemporalDeltaDateTimeBasedTestCases extends BaseTest
     UnitemporalDeltaDateTimeBasedScenarios scenarios = new UnitemporalDeltaDateTimeBasedScenarios();
     @Test
-    void testUnitemporalDeltaNoDeleteIndNoDataSplits()
+    void testUnitemporalDeltaNoDeleteIndNoDedupNoVersioning()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -46,15 +46,15 @@ void testUnitemporalDeltaNoDeleteIndNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalDeltaNoDeleteIndNoAuditing(operations);
+        verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersioning(operations);
     }
-    public abstract void verifyUnitemporalDeltaNoDeleteIndNoAuditing(GeneratorResult operations);
+    public abstract void verifyUnitemporalDeltaNoDeleteIndNoDedupNoVersioning(GeneratorResult operations);
     @Test
-    void testUnitemporalDeltaNoDeleteIndWithDataSplits()
+    void testUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__FAIL_ON_DUPS__ALL_VERSION_WITHOUT_PERFORM();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -62,15 +62,15 @@ void testUnitemporalDeltaNoDeleteIndWithDataSplits()
             .collectStatistics(true)
             .build();
         List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo);
-        verifyUnitemporalDeltaNoDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo);
+        verifyUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform(operations, dataSplitRangesOneToTwo);
     }
-    public abstract void verifyUnitemporalDeltaNoDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
+    public abstract void verifyUnitemporalDeltaNoDeleteIndFailOnDupsAllVersionWithoutPerform(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
     @Test
-    void testUnitemporalDeltaWithDeleteIndNoDataSplits()
+    void testUnitemporalDeltaWithDeleteIndNoDedupNoVersioning()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -78,15 +78,15 @@ void testUnitemporalDeltaWithDeleteIndNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalDeltaWithDeleteIndNoDataSplits(operations);
+        verifyUnitemporalDeltaWithDeleteIndNoDedupNoVersioning(operations);
     }
-    public abstract void verifyUnitemporalDeltaWithDeleteIndNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalDeltaWithDeleteIndNoDedupNoVersioning(GeneratorResult operations);
     @Test
-    void testUnitemporalDeltaWithDeleteIndWithDataSplits()
+    void testUnitemporalDeltaWithDeleteIndFilterDupsAllVersion()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__FILTER_DUPS__ALL_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -94,15 +94,15 @@ void testUnitemporalDeltaWithDeleteIndWithDataSplits()
             .collectStatistics(true)
             .build();
         List<GeneratorResult> operations = generator.generateOperationsWithDataSplits(scenario.getDatasets(), dataSplitRangesOneToTwo);
-        verifyUnitemporalDeltaWithDeleteIndWithDataSplits(operations, dataSplitRangesOneToTwo);
+        verifyUnitemporalDeltaWithDeleteIndFilterDupsAllVersion(operations, dataSplitRangesOneToTwo);
     }
-    public abstract void verifyUnitemporalDeltaWithDeleteIndWithDataSplits(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
+    public abstract void verifyUnitemporalDeltaWithDeleteIndFilterDupsAllVersion(List<GeneratorResult> operations, List<DataSplitRange> dataSplitRanges);
     @Test
     void testUnitemporalDeltaWithUpperCaseOptimizer()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -119,7 +119,7 @@ void testUnitemporalDeltaWithUpperCaseOptimizer()
     @Test
     void testUnitemporalDeltaWithCleanStagingData()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -155,7 +155,7 @@ void testUnitemporalDeltaValidationBatchTimeInMissing()
     @Test
     void testUnitemporalDeltaValidationBatchTimeInNotPrimaryKey()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
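Note: the DATETIME_BASED scenarios above milestone rows with transaction timestamps rather than batch ids. A rough sketch of the milestoning such a scenario configures — the column names below are assumptions:

    // Sketch only: time-based transaction milestoning for a unitemporal delta.
    UnitemporalDelta ingestMode = UnitemporalDelta.builder()
        .digestField("digest")
        .transactionMilestoning(TransactionDateTime.builder()
            .dateTimeInName("batch_time_in")   // assumed column name
            .dateTimeOutName("batch_time_out") // assumed column name
            .build())
        .build();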
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdBasedTestCases.java
index 1930004c250..29617a200a8 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdBasedTestCases.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdBasedTestCases.java
@@ -16,8 +16,11 @@ import org.finos.legend.engine.persistence.components.BaseTest;
 import org.finos.legend.engine.persistence.components.common.Datasets;
+import org.finos.legend.engine.persistence.components.ingestmode.NontemporalSnapshot;
 import org.finos.legend.engine.persistence.components.ingestmode.UnitemporalSnapshot;
+import org.finos.legend.engine.persistence.components.ingestmode.audit.NoAuditing;
 import org.finos.legend.engine.persistence.components.ingestmode.transactionmilestoning.BatchId;
+import org.finos.legend.engine.persistence.components.ingestmode.versioning.AllVersionsStrategy;
 import org.finos.legend.engine.persistence.components.logicalplan.datasets.Dataset;
 import org.finos.legend.engine.persistence.components.relational.CaseConversion;
 import org.finos.legend.engine.persistence.components.relational.RelationalSink;
@@ -36,9 +39,9 @@ public abstract class UnitmemporalSnapshotBatchIdBasedTestCases extends BaseTest
     UnitemporalSnapshotBatchIdBasedScenarios scenarios = new UnitemporalSnapshotBatchIdBasedScenarios();
     @Test
-    void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -48,15 +51,34 @@ void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
             .enableConcurrentSafety(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations);
+
+    @Test
+    void testUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion()
+    {
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUPS__NO_VERSION();
+        RelationalGenerator generator = RelationalGenerator.builder()
+            .ingestMode(scenario.getIngestMode())
+            .relationalSink(getRelationalSink())
+            .executionTimestampClock(fixedClock_2000_01_01)
+            .collectStatistics(true)
+            .createStagingDataset(true)
+            .enableConcurrentSafety(true)
+            .build();
+        GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
+        verifyUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion(operations);
+    }
+
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsNoVersion(GeneratorResult operations);
+
     @Test
     void testUnitemporalSnapshotWithoutPartitionWithNoOpEmptyBatchHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -72,7 +94,7 @@ void testUnitemporalSnapshotWithoutPartitionWithNoOpEmptyBatchHandling()
     @Test
     void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -87,9 +109,9 @@ void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
     public abstract void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -97,15 +119,15 @@ void testUnitemporalSnapshotWithPartitionNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -113,15 +135,15 @@ void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations);
     @Test
     void testUnitemporalSnapshotWithCleanStagingData()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -160,7 +182,7 @@ void testUnitemporalSnasphotValidationBatchIdInMissing()
     @Test
     void testUnitemporalSnapshotValidationBatchIdInNotPrimaryKey()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -197,6 +219,27 @@ void testUnitemporalSnapshotValidationMainDatasetMissing()
         }
     }
+
+    @Test
+    void testUnitemporalSnapshotAllVersionValidation()
+    {
+        try
+        {
+            UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
+                .digestField(digestField)
+                .transactionMilestoning(BatchId.builder()
+                    .batchIdInName(batchIdInField)
+                    .batchIdOutName(batchIdOutField)
+                    .build())
+                .versioningStrategy(AllVersionsStrategy.builder().versioningField("xyz").build())
+                .build();
+            Assertions.fail("Exception was not thrown");
+        }
+        catch (Exception e)
+        {
+            Assertions.assertEquals("Cannot build UnitemporalSnapshot, AllVersionsStrategy not supported", e.getMessage());
+        }
+    }
+
     public abstract RelationalSink getRelationalSink();
 }
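Note: the new testUnitemporalSnapshotAllVersionValidation above pins down that UnitemporalSnapshot rejects AllVersionsStrategy at build time. The try/fail/catch pattern matches the file's existing validation tests; JUnit 5's assertThrows would express the same check more compactly, as a suggestion rather than what the PR uses:

    // Equivalent negative test using org.junit.jupiter.api.Assertions.assertThrows.
    Exception e = Assertions.assertThrows(Exception.class, () -> UnitemporalSnapshot.builder()
        .digestField(digestField)
        .transactionMilestoning(BatchId.builder()
            .batchIdInName(batchIdInField)
            .batchIdOutName(batchIdOutField)
            .build())
        .versioningStrategy(AllVersionsStrategy.builder().versioningField("xyz").build())
        .build());
    Assertions.assertEquals("Cannot build UnitemporalSnapshot, AllVersionsStrategy not supported", e.getMessage());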
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdDateTimeBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdDateTimeBasedTestCases.java
index f012b3bec1a..3bde27e3b8c 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdDateTimeBasedTestCases.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotBatchIdDateTimeBasedTestCases.java
@@ -38,9 +38,9 @@ public abstract class UnitmemporalSnapshotBatchIdDateTimeBasedTestCases extends
     UnitemporalSnapshotBatchIdDateTimeBasedScenarios scenarios = new UnitemporalSnapshotBatchIdDateTimeBasedScenarios();
     @Test
-    void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -48,15 +48,31 @@ void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersioning(GeneratorResult operations);
+
+    @Test
+    void testUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion()
+    {
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__MAX_VERSION();
+        RelationalGenerator generator = RelationalGenerator.builder()
+            .ingestMode(scenario.getIngestMode())
+            .relationalSink(getRelationalSink())
+            .executionTimestampClock(fixedClock_2000_01_01)
+            .collectStatistics(true)
+            .build();
+        GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
+        verifyUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion(operations);
+    }
+
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion(GeneratorResult operations);
     @Test
     void testUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -70,9 +86,9 @@ void testUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandli
     public abstract void verifyUnitemporalSnapshotWithoutPartitionWithDeleteTargetDataEmptyBatchHandling(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
+    void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__FILTER_DUPS__MAX_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -81,15 +97,15 @@ void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
             .caseConversion(CaseConversion.TO_UPPER)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(operations);
+        verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizerFilterDupsMaxVersion(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionNoDedupNoVersioning()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -97,15 +113,15 @@ void testUnitemporalSnapshotWithPartitionNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionNoDedupNoVersioning(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersioning(GeneratorResult operations);
     @Test
     void testUnitemporalSnapshotWithPartitionWithDefaultEmptyDataHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -121,7 +137,7 @@ void testUnitemporalSnapshotWithPartitionWithDefaultEmptyDataHandling()
     @Test
     void testUnitemporalSnapshotWithPartitionWithNoOpEmptyBatchHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
             .transactionMilestoning(BatchIdAndDateTime.builder()
@@ -145,9 +161,9 @@ void testUnitemporalSnapshotWithPartitionWithNoOpEmptyBatchHandling()
     }
     @Test
-    void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -155,15 +171,15 @@ void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersioning(GeneratorResult operations);
     @Test
     void testUnitemporalSnapshotWithPartitionFiltersWithDeleteTargetDataEmptyDataHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -179,7 +195,7 @@ void testUnitemporalSnapshotWithPartitionFiltersWithDeleteTargetDataEmptyDataHan
     @Test
     void testUnitemporalSnapshotWithPartitionFiltersWithNoOpEmptyDataHandling()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
             .transactionMilestoning(BatchIdAndDateTime.builder()
@@ -206,7 +222,7 @@ void testUnitemporalSnapshotWithPartitionFiltersWithNoOpEmptyDataHandling()
     @Test
     void testUnitemporalSnapshotWithCleanStagingData()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -223,7 +239,7 @@ void testUnitemporalSnapshotWithCleanStagingData()
     @Test
     void testUnitemporalSnapshotWithLessColumnsInStaging()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         Dataset stagingDataset = scenario.getStagingTable().withSchema(stagingTableSchemaWithLimitedColumns);
         Datasets datasets = Datasets.of(scenario.getMainTable(), stagingDataset);
@@ -242,7 +258,7 @@ void testUnitemporalSnapshotWithLessColumnsInStaging()
     @Test
     void testUnitemporalSnapshotWithPlaceholders()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -282,7 +298,7 @@ void testUnitemporalSnasphotValidationBatchIdInMissing()
     @Test
     void testUnitemporalSnapshotValidationBatchIdInNotPrimaryKey()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -331,7 +347,7 @@ void testUnitemporalSnapshotPartitionKeysValidation()
     @Test
     void testUnitemporalSnapshotFailOnEmptyBatch()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
             .digestField(digestField)
             .transactionMilestoning(BatchIdAndDateTime.builder()
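Note: the FILTER_DUPS__MAX_VERSION snapshot scenarios introduced above pair snapshot milestoning with duplicate filtering and max-version resolution. A minimal sketch of the shape such a scenario plausibly builds — the versioning/dedup option names are assumptions:

    // Sketch only: unitemporal snapshot with dedup + max-version strategies.
    UnitemporalSnapshot ingestMode = UnitemporalSnapshot.builder()
        .digestField(digestField)
        .transactionMilestoning(BatchIdAndDateTime.builder()
            .batchIdInName(batchIdInField)
            .batchIdOutName(batchIdOutField)
            .dateTimeInName(batchTimeInField)   // assumed BaseTest field
            .dateTimeOutName(batchTimeOutField) // assumed BaseTest field
            .build())
        .versioningStrategy(MaxVersionStrategy.builder().versioningField("version").build()) // assumed
        .deduplicationStrategy(FilterDuplicates.builder().build()) // assumed
        .build();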
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotDateTimeBasedTestCases.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotDateTimeBasedTestCases.java
index bd79faf2246..13bc1fca81d 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotDateTimeBasedTestCases.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/UnitmemporalSnapshotDateTimeBasedTestCases.java
@@ -36,9 +36,9 @@ public abstract class UnitmemporalSnapshotDateTimeBasedTestCases extends BaseTes
     UnitemporalSnapshotDateTimeBasedScenarios scenarios = new UnitemporalSnapshotDateTimeBasedScenarios();
     @Test
-    void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -46,15 +46,32 @@ void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionNoDedupNoVersion(GeneratorResult operations);
+
+    @Test
+    void testUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion()
+    {
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUP__MAX_VERSION();
+        RelationalGenerator generator = RelationalGenerator.builder()
+            .ingestMode(scenario.getIngestMode())
+            .relationalSink(getRelationalSink())
+            .executionTimestampClock(fixedClock_2000_01_01)
+            .collectStatistics(true)
+            .build();
+        GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
+        verifyUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion(operations);
+    }
+
+    public abstract void verifyUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion(GeneratorResult operations);
+
     @Test
     void testUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandling()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -70,7 +87,7 @@ void testUnitemporalSnapshotWithoutPartitionWithDefaultEmptyBatchHandling()
     @Test
     void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -85,9 +102,9 @@ void testUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer()
     public abstract void verifyUnitemporalSnapshotWithoutPartitionWithUpperCaseOptimizer(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -95,15 +112,15 @@ void testUnitemporalSnapshotWithPartitionNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionNoDedupNoVersion(GeneratorResult operations);
     @Test
-    void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -111,15 +128,15 @@ void testUnitemporalSnapshotWithPartitionFiltersNoDataSplits()
             .collectStatistics(true)
             .build();
         GeneratorResult operations = generator.generateOperations(scenario.getDatasets());
-        verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(operations);
+        verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(operations);
     }
-    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDataSplits(GeneratorResult operations);
+    public abstract void verifyUnitemporalSnapshotWithPartitionFiltersNoDedupNoVersion(GeneratorResult operations);
     @Test
     void testUnitemporalSnapshotWithCleanStagingData()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
@@ -158,7 +175,7 @@ void testUnitemporalSnasphotValidationBatchTimeInMissing()
     @Test
     void testUnitemporalSnapshotValidationBatchTimeInNotPrimaryKey()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         RelationalGenerator generator = RelationalGenerator.builder()
             .ingestMode(scenario.getIngestMode())
             .relationalSink(getRelationalSink())
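Note: the derivation tests that follow do not generate SQL; each scenario reduces to a single assertDerivedMainDataset call that checks the main dataset's schema can be derived from the staging schema plus the ingest mode (milestoning columns added, keys adjusted). Conceptually it does something like the following — the helper name in this sketch is hypothetical:

    // Hypothetical shape of the shared assertion used by the derivation tests.
    Dataset derivedMain = deriveMainDatasetFromStaging(scenario.getIngestMode(), scenario.getStagingTable()); // hypothetical helper
    Assertions.assertEquals(scenario.getMainTable().schema(), derivedMain.schema());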
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdBasedDerivationTest.java
index acf6fe217a0..20621a15953 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdBasedDerivationTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdBasedDerivationTest.java
@@ -28,28 +28,28 @@ public class UnitemporalDeltaBatchIdBasedDerivationTest
     @Test
     void testUnitemporalDeltaNoDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaNoDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__NO_DEL_IND__NO_DEDUP__ALL_VERSION_WITHOUT_PERFORM();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__FILTER_DUPS__NO_VERSIONING();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_DEL_IND__NO_DEDUP__ALL_VERSION();
         assertDerivedMainDataset(scenario);
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdDateTimeBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdDateTimeBasedDerivationTest.java
index 3fb34706ea5..4d760207656 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdDateTimeBasedDerivationTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaBatchIdDateTimeBasedDerivationTest.java
@@ -28,28 +28,28 @@ public class UnitemporalDeltaBatchIdDateTimeBasedDerivationTest
     @Test
     void testUnitemporalDeltaNoDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaNoDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__NO_DEL_IND__FILTER_DUPS__ALL_VERSION_WITHOUT_PERFORM();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_DEL_IND__FAIL_ON_DUP__ALL_VERSION();
         assertDerivedMainDataset(scenario);
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaDateTimeBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaDateTimeBasedDerivationTest.java
index ffd318b803b..3380781c9e4 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaDateTimeBasedDerivationTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalDeltaDateTimeBasedDerivationTest.java
@@ -28,28 +28,28 @@ public class UnitemporalDeltaDateTimeBasedDerivationTest
     @Test
     void testUnitemporalDeltaNoDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__NO_DEDUP__NO_VERSIONING();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaNoDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__NO_DEL_IND__FAIL_ON_DUPS__ALL_VERSION_WITHOUT_PERFORM();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndNoDataSplits()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
     void testUnitemporalDeltaWithDeleteIndWithDataSplits()
     {
-        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__WITH_DATA_SPLITS();
+        TestScenario scenario = scenarios.DATETIME_BASED__WITH_DEL_IND__FILTER_DUPS__ALL_VERSION();
         assertDerivedMainDataset(scenario);
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdBasedDerivationTest.java
index 75d4d978f8f..905e3c2faa8 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdBasedDerivationTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdBasedDerivationTest.java
@@ -26,23 +26,30 @@ public class UnitemporalSnapshotBatchIdBasedDerivationTest
     UnitemporalSnapshotBatchIdBasedScenarios scenarios = new UnitemporalSnapshotBatchIdBasedScenarios();
     @Test
-    void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
-    void testUnitemporalSnapshotWithPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionNoDedupNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
-    void testUnitemporalSnapshotWithPartitionFilterNoDataSplits()
+    void testUnitemporalSnapshotWithPartitionFailOnDupsNoVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUPS__NO_VERSION();
+        assertDerivedMainDataset(scenario);
+    }
+
+    @Test
+    void testUnitemporalSnapshotWithPartitionFilterNoDedupNoVersion()
+    {
+        TestScenario scenario = scenarios.BATCH_ID_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
 }
diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdDateTimeBasedDerivationTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdDateTimeBasedDerivationTest.java
index 2dce5f71f0c..f6bd9419e32 100644
--- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdDateTimeBasedDerivationTest.java
+++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotBatchIdDateTimeBasedDerivationTest.java
@@ -26,23 +26,37 @@ public class UnitemporalSnapshotBatchIdDateTimeBasedDerivationTest
     UnitemporalSnapshotBatchIdDateTimeBasedScenarios scenarios = new UnitemporalSnapshotBatchIdDateTimeBasedScenarios();
     @Test
-    void testUnitemporalSnapshotWithoutPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersion()
    {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
-    void testUnitemporalSnapshotWithPartitionNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionNoDedupMaxVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__MAX_VERSION();
         assertDerivedMainDataset(scenario);
     }
     @Test
-    void testUnitemporalSnapshotWithPartitionFilterNoDataSplits()
+    void testUnitemporalSnapshotWithoutPartitionFilterDupsMaxVersion()
     {
-        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS();
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITHOUT_PARTITIONS__FILTER_DUPS__MAX_VERSION();
+        assertDerivedMainDataset(scenario);
+    }
+
+    @Test
+    void testUnitemporalSnapshotWithPartitionNoDedupNoVersion()
+    {
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION();
+        assertDerivedMainDataset(scenario);
+    }
+
+    @Test
+    void testUnitemporalSnapshotWithPartitionFilterNoDedupNoVersion()
+    {
+        TestScenario scenario = scenarios.BATCH_ID_AND_TIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION();
         assertDerivedMainDataset(scenario);
     }
 }
a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotDateTimeBasedDerivationTest.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/legend-engine-xt-persistence-component-relational-test/src/test/java/org/finos/legend/engine/persistence/components/testcases/ingestmode/unitemporal/derivation/UnitemporalSnapshotDateTimeBasedDerivationTest.java @@ -26,23 +26,30 @@ public class UnitemporalSnapshotDateTimeBasedDerivationTest UnitemporalSnapshotDateTimeBasedScenarios scenarios = new UnitemporalSnapshotDateTimeBasedScenarios(); @Test - void testUnitemporalSnapshotWithoutPartitionNoDataSplits() + void testUnitemporalSnapshotWithoutPartitionNoDedupNoVersion() { - TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__NO_DEDUP__NO_VERSION(); assertDerivedMainDataset(scenario); } @Test - void testUnitemporalSnapshotWithPartitionNoDataSplits() + void testUnitemporalSnapshotWithoutPartitionFailOnDupsMaxVersion() { - TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITIONS__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.DATETIME_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUP__MAX_VERSION(); assertDerivedMainDataset(scenario); } @Test - void testUnitemporalSnapshotWithPartitionFilterNoDataSplits() + void testUnitemporalSnapshotWithPartitionNoDedupNoVersion() { - TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITION_FILTER__NO_DATA_SPLITS(); + TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITIONS__NO_DEDUP__NO_VERSION(); + assertDerivedMainDataset(scenario); + } + + @Test + void testUnitemporalSnapshotWithPartitionFilterNoDedupNoVersion() + { + TestScenario scenario = scenarios.DATETIME_BASED__WITH_PARTITION_FILTER__NO_DEDUP__NO_VERSION(); assertDerivedMainDataset(scenario); } } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/pom.xml index 29dfea691f4..f1d7764c511 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-component/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-component/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-grammar/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-grammar/pom.xml index 308dea15132..7e38d0fb124 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-grammar/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-grammar/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-protocol/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-protocol/pom.xml index 7c6f4c0c516..cdac2629896 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-protocol/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-protocol/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
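[Editor's note: illustrative sketch, not part of the patch] The renames in the three snapshot derivation test classes above replace the old single "data splits" axis with two explicit axes: deduplication (NO_DEDUP, FILTER_DUPS, FAIL_ON_DUPS) and versioning (NO_VERSION, MAX_VERSION, ALL_VERSION). A hypothetical helper (not in the codebase) that decodes the scenario-name convention the renamed tests follow:

    // Hypothetical sketch: scenario names read MILESTONING__PARTITIONING__DEDUP__VERSIONING,
    // e.g. BATCH_ID_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUPS__NO_VERSION.
    public class ScenarioNames
    {
        enum Dedup { NO_DEDUP, FILTER_DUPS, FAIL_ON_DUPS }
        enum Versioning { NO_VERSION, MAX_VERSION, ALL_VERSION }

        static String name(String milestoning, String partitioning, Dedup dedup, Versioning versioning)
        {
            // name("BATCH_ID_BASED", "WITHOUT_PARTITIONS", Dedup.FAIL_ON_DUPS, Versioning.NO_VERSION)
            // -> "BATCH_ID_BASED__WITHOUT_PARTITIONS__FAIL_ON_DUPS__NO_VERSION"
            return String.join("__", milestoning, partitioning, dedup.name(), versioning.name());
        }
    }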
a/legend-engine-xts-persistence/legend-engine-xt-persistence-pure/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-pure/pom.xml index 5d660f97d43..05140ff3d40 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-pure/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-grammar/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-grammar/pom.xml index 1a7c1bd7bda..f8118f3ddea 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-grammar/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-grammar/pom.xml @@ -3,7 +3,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-protocol/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-protocol/pom.xml index 606b708d0f2..82a2fd0790a 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-protocol/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-protocol/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-pure/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-pure/pom.xml index 162a1b0e3c5..120867dcec1 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-pure/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-target-relational-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/pom.xml b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/pom.xml index 71af3c9ac2a..63f19b54c07 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/pom.xml +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-persistence - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/extension/PersistenceTestRunner.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/extension/PersistenceTestRunner.java index 4c20e8cdee6..68989b4fdc9 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/extension/PersistenceTestRunner.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/extension/PersistenceTestRunner.java @@ -194,7 +194,7 @@ private IngestorResult invokePersistence(Dataset targetDataset, Persistence pers .enableSchemaEvolution(SCHEMA_EVOLUTION_DEFAULT) .build(); - IngestorResult result = 
ingestor.performFullIngestion(JdbcConnection.of(connection), enrichedDatasets); + IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(connection), enrichedDatasets).get(0); return result; } @@ -212,7 +212,7 @@ private IngestorResult invokePersistence(Dataset targetDataset, ServiceOutputTar .enableSchemaEvolution(SCHEMA_EVOLUTION_DEFAULT) .build(); - IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(connection), enrichedDatasets); + IngestorResult result = ingestor.performFullIngestion(JdbcConnection.of(connection), enrichedDatasets).get(0); return result; } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/AppendOnlyMapper.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/AppendOnlyMapper.java index 0ba21b06a7b..7e97be5066e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/AppendOnlyMapper.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/AppendOnlyMapper.java @@ -30,12 +30,9 @@ public class AppendOnlyMapper { public static org.finos.legend.engine.persistence.components.ingestmode.AppendOnly from(AppendOnly appendOnly) { - DeduplicationStrategy deduplicationStrategy = appendOnly.filterDuplicates ? - FilterDuplicates.builder().build() : AllowDuplicates.builder().build(); - return org.finos.legend.engine.persistence.components.ingestmode.AppendOnly.builder() .digestField(DIGEST_FIELD_DEFAULT) - .deduplicationStrategy(deduplicationStrategy) + .filterExistingRecords(appendOnly.filterDuplicates) .auditing(appendOnly.auditing.accept(MappingVisitors.MAP_TO_COMPONENT_AUDITING)) .build(); } @@ -47,22 +44,16 @@ public static org.finos.legend.engine.persistence.components.ingestmode.AppendOn { temporality.auditing = new NoAuditing(); } + boolean filterExistingRecords = false; org.finos.legend.engine.protocol.pure.v1.model.packageableElement.persistence.relational.temporality.updatesHandling.AppendOnly appendOnlyHandling = (org.finos.legend.engine.protocol.pure.v1.model.packageableElement.persistence.relational.temporality.updatesHandling.AppendOnly) temporality.updatesHandling; if (appendOnlyHandling.appendStrategy instanceof org.finos.legend.engine.protocol.pure.v1.model.packageableElement.persistence.relational.temporality.updatesHandling.appendStrategy.FilterDuplicates) { - deduplicationStrategy = FilterDuplicates.builder().build(); - } - else if (appendOnlyHandling.appendStrategy instanceof org.finos.legend.engine.protocol.pure.v1.model.packageableElement.persistence.relational.temporality.updatesHandling.appendStrategy.FailOnDuplicates) - { - deduplicationStrategy = FailOnDuplicates.builder().build(); - } - else - { - deduplicationStrategy = AllowDuplicates.builder().build(); + filterExistingRecords = true; } + return org.finos.legend.engine.persistence.components.ingestmode.AppendOnly.builder() .digestField(DIGEST_FIELD_DEFAULT) - .deduplicationStrategy(deduplicationStrategy) + .filterExistingRecords(filterExistingRecords) .auditing(temporality.auditing.accept(org.finos.legend.engine.testable.persistence.mapper.v2.MappingVisitors.MAP_TO_COMPONENT_NONTEMPORAL_AUDITING)) .build(); } diff --git 
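[Editor's note: illustrative sketch, not part of the patch] Two API shifts drive the hunks above: performFullIngestion now returns one IngestorResult per ingested batch (hence the .get(0) in PersistenceTestRunner), and AppendOnly models "skip rows already present in the target" as a boolean filterExistingRecords instead of a DeduplicationStrategy. A non-authoritative sketch, assuming the builder API as used in this patch; ingestor, connection, and enrichedDatasets come from the surrounding test-runner code:

    // AppendOnly: boolean flag replaces the old DeduplicationStrategy.
    AppendOnly ingestMode = AppendOnly.builder()
        .digestField("DIGEST")
        .filterExistingRecords(true) // was: .deduplicationStrategy(FilterDuplicates.builder().build())
        .auditing(NoAuditing.builder().build())
        .build();

    // performFullIngestion: one result per batch; single-batch callers take the first.
    List<IngestorResult> results = ingestor.performFullIngestion(JdbcConnection.of(connection), enrichedDatasets);
    IngestorResult result = results.get(0);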
a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/DeriveDatasets.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/DeriveDatasets.java index 20482a67222..aeb17585d55 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/DeriveDatasets.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/DeriveDatasets.java @@ -82,7 +82,8 @@ public Datasets visit(AppendOnly appendOnly) { enrichMainSchemaWithDigest(); } - appendOnly.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema)); + boolean baseSchemaHasPks = baseSchema.fields().stream().anyMatch(field -> field.primaryKey()); + appendOnly.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema, baseSchemaHasPks)); Dataset enrichedMainDataset = mainDatasetDefinitionBuilder.schema(mainSchemaDefinitionBuilder.build()).build(); return Datasets.of(enrichedMainDataset, stagingDataset); @@ -123,7 +124,7 @@ public Datasets visit(NontemporalDelta nontemporalDelta) Dataset stagingDataset = stagingDatasetBuilder.schema(stagingSchemaDefinitionBuilder.build()).build(); enrichMainSchemaWithDigest(); - nontemporalDelta.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema)); + nontemporalDelta.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema, true)); Dataset enrichedMainDataset = mainDatasetDefinitionBuilder.schema(mainSchemaDefinitionBuilder.build()).build(); return Datasets.of(enrichedMainDataset, stagingDataset); @@ -133,8 +134,8 @@ public Datasets visit(NontemporalDelta nontemporalDelta) public Datasets visit(NontemporalSnapshot nontemporalSnapshot) { Dataset stagingDataset = stagingDatasetBuilder.schema(stagingSchemaDefinitionBuilder.build()).build(); - - nontemporalSnapshot.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema)); + boolean baseSchemaHasPks = baseSchema.fields().stream().anyMatch(field -> field.primaryKey()); + nontemporalSnapshot.auditing.accept(new MappingVisitors.EnrichSchemaWithAuditing(mainSchemaDefinitionBuilder, baseSchema, baseSchemaHasPks)); Dataset enrichedMainDataset = mainDatasetDefinitionBuilder.schema(mainSchemaDefinitionBuilder.build()).build(); return Datasets.of(enrichedMainDataset, stagingDataset); diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/MappingVisitors.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/MappingVisitors.java index ead0fbc76f0..0b61881f9ab 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/MappingVisitors.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/main/java/org/finos/legend/engine/testable/persistence/mapper/v1/MappingVisitors.java @@ -154,10 +154,13 @@ public static class EnrichSchemaWithAuditing implements AuditingVisitor private 
SchemaDefinition.Builder schemaDefinitionBuilder; private SchemaDefinition baseSchema; - public EnrichSchemaWithAuditing(SchemaDefinition.Builder schemaDefinitionBuilder, SchemaDefinition baseSchema) + private boolean isPk; + + public EnrichSchemaWithAuditing(SchemaDefinition.Builder schemaDefinitionBuilder, SchemaDefinition baseSchema, boolean isPk) { this.schemaDefinitionBuilder = schemaDefinitionBuilder; this.baseSchema = baseSchema; + this.isPk = isPk; } @Override @@ -175,6 +178,7 @@ public Void visit(DateTimeAuditing auditing) Field auditDateTime = Field.builder() .name(auditing.dateTimeName) .type(FieldType.of(DataType.TIMESTAMP, Optional.empty(), Optional.empty())) + .primaryKey(isPk) .build(); schemaDefinitionBuilder.addFields(auditDateTime); } diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/ingestmode/appendonly/TestAppendOnlyWithFilterDuplicates.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/ingestmode/appendonly/TestAppendOnlyWithFilterDuplicates.java index 36eef2df357..a28c89e100c 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/ingestmode/appendonly/TestAppendOnlyWithFilterDuplicates.java +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/ingestmode/appendonly/TestAppendOnlyWithFilterDuplicates.java @@ -25,18 +25,6 @@ public class TestAppendOnlyWithFilterDuplicates extends TestPersistenceBase { - @Test - public void testAppendOnlyWithNoAuditing() throws Exception - { - String path = "src/test/resources/append-only/filter_duplicates/persistence_no_audit.txt"; - String persistenceSpec = readPureCode(path); - TestResult result = testPersistence(persistenceSpec).results.get(0); - - assertTrue(result instanceof TestExecuted); - Assert.assertEquals(TestExecutionStatus.PASS, ((TestExecuted) result).testExecutionStatus); - Assert.assertEquals("test::TestPersistence", result.testable); - } - @Test public void testAppendOnlyWithDateTimeAuditing() throws Exception { @@ -50,19 +38,6 @@ public void testAppendOnlyWithDateTimeAuditing() throws Exception } // v2 tests - - @Test - public void testAppendOnlyWithNoAuditingV2() throws Exception - { - String path = "src/test/resources/v2/append-only/filter_duplicates/persistence_no_audit.txt"; - String persistenceSpec = readPureCode(path); - TestResult result = testPersistence(persistenceSpec).results.get(0); - - assertTrue(result instanceof TestExecuted); - Assert.assertEquals(TestExecutionStatus.PASS, ((TestExecuted) result).testExecutionStatus); - Assert.assertEquals("test::TestPersistence", result.testable); - } - @Test public void testAppendOnlyWithDateTimeAuditingV2() throws Exception { diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/mapper/IngestModeMapperTest.java b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/mapper/IngestModeMapperTest.java index 077e8bfa41f..0d6245a757d 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/mapper/IngestModeMapperTest.java +++ 
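[Editor's note: illustrative sketch, not part of the patch] The new isPk flag threads through EnrichSchemaWithAuditing so that the derived audit-time column joins the primary key exactly when the base schema itself declares primary keys; this is why BATCH_TIME_IN gains PRIMARY KEY in the v2 filter-duplicates resource below. Rebuilt from the builder calls shown in this patch:

    // Audit-column derivation with the new primary-key flag.
    boolean baseSchemaHasPks = baseSchema.fields().stream().anyMatch(field -> field.primaryKey());
    Field auditDateTime = Field.builder()
        .name("BATCH_TIME_IN") // auditing.dateTimeName in the visitor above
        .type(FieldType.of(DataType.TIMESTAMP, Optional.empty(), Optional.empty()))
        .primaryKey(baseSchemaHasPks)
        .build();
    schemaDefinitionBuilder.addFields(auditDateTime);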
b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/java/org/finos/legend/engine/testable/persistence/mapper/IngestModeMapperTest.java @@ -84,7 +84,7 @@ public void testMapperForAppendOnly() throws Exception AppendOnly appendOnly = (AppendOnly) componentIngestMode; Assert.assertEquals("DIGEST", appendOnly.digestField().get()); Assert.assertTrue(appendOnly.auditing() instanceof NoAuditing); - Assert.assertTrue(appendOnly.deduplicationStrategy() instanceof AllowDuplicates); + Assert.assertFalse(appendOnly.filterExistingRecords()); ingestMode = getAppendOnlyNoAuditingWithFilteringDuplicates(); persistence = getPersistence(ingestMode); @@ -95,7 +95,7 @@ public void testMapperForAppendOnly() throws Exception appendOnly = (AppendOnly) componentIngestMode; Assert.assertEquals("DIGEST", appendOnly.digestField().get()); Assert.assertTrue(appendOnly.auditing() instanceof NoAuditing); - Assert.assertTrue(appendOnly.deduplicationStrategy() instanceof FilterDuplicates); + Assert.assertTrue(appendOnly.filterExistingRecords()); ingestMode = getAppendOnlyDatetimeAuditingNoFilteringDuplicates(); persistence = getPersistence(ingestMode); @@ -108,7 +108,7 @@ public void testMapperForAppendOnly() throws Exception Assert.assertTrue(appendOnly.auditing() instanceof DateTimeAuditing); DateTimeAuditing dateTimeAuditing = (DateTimeAuditing) appendOnly.auditing(); Assert.assertEquals("AUDIT_TIME", dateTimeAuditing.dateTimeField()); - Assert.assertTrue(appendOnly.deduplicationStrategy() instanceof AllowDuplicates); + Assert.assertFalse(appendOnly.filterExistingRecords()); ingestMode = getAppendOnlyDatetimeAuditingWithFilteringDuplicates(); persistence = getPersistence(ingestMode); @@ -121,7 +121,7 @@ public void testMapperForAppendOnly() throws Exception Assert.assertTrue(appendOnly.auditing() instanceof DateTimeAuditing); dateTimeAuditing = (DateTimeAuditing) appendOnly.auditing(); Assert.assertEquals("AUDIT_TIME", dateTimeAuditing.dateTimeField()); - Assert.assertTrue(appendOnly.deduplicationStrategy() instanceof FilterDuplicates); + Assert.assertTrue(appendOnly.filterExistingRecords()); } @Test diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/append-only/filter_duplicates/persistence_no_audit.txt b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/append-only/filter_duplicates/persistence_no_audit.txt deleted file mode 100644 index 0a2f54a8d73..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/append-only/filter_duplicates/persistence_no_audit.txt +++ /dev/null @@ -1,134 +0,0 @@ -###Pure -Class test::Person -{ - name: String[1]; -} - -Class test::ServiceResult -{ - ID: String[1]; - NAME: String[1]; -} - -###Mapping -Mapping test::Mapping () - -###Relational -Database test::TestDatabase -( - Table personTable - ( - ID INTEGER PRIMARY KEY, - NAME VARCHAR(100) - ) -) -###Service -Service test::Service -{ - pattern : 'test'; - documentation : 'test'; - autoActivateUpdates: true; - execution: Single - { - query: src: test::Person[1]|$src.name; - mapping: test::Mapping; - runtime: - #{ - connections: []; - }#; - } - test: Single - { - data: 'test'; - asserts: []; - } -} - -###Persistence -Persistence test::TestPersistence -{ - doc: 'This is test documentation.'; - trigger: Manual; - service: test::Service; - persister: Batch - { - sink: Relational - { - database: test::TestDatabase; - } - ingestMode: AppendOnly - { - auditing: None; - 
filterDuplicates: true; - } - targetShape: Flat - { - targetName: 'personTable'; - modelClass: test::ServiceResult; - } - } - tests: - [ - test1: - { - testBatches: - [ - testBatch1: - { - data: - { - connection: - { - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"}]'; - }# - } - } - asserts: - [ - assert1: - EqualToJson - #{ - expected: - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"}]'; - }#; - }# - ] - }, - testBatch2: - { - data: - { - connection: - { - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":2, "NAME":"BRAD"},{"ID":3, "NAME":"CATHY"}]'; - }# - } - } - asserts: - [ - assert1: - EqualToJson - #{ - expected: - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"},{"ID":3, "NAME":"CATHY"}]'; - }#; - }# - ] - } - ] - isTestDataFromServiceOutput: true; - } - ] -} \ No newline at end of file diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_date_time_auditing.txt b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_date_time_auditing.txt index 4c90c627db5..fc50f93b78e 100644 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_date_time_auditing.txt +++ b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_date_time_auditing.txt @@ -20,7 +20,7 @@ Database test::TestDatabase ( ID INTEGER PRIMARY KEY, NAME VARCHAR(100), - BATCH_TIME_IN TIMESTAMP + BATCH_TIME_IN TIMESTAMP PRIMARY KEY ) ) ###Service diff --git a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_no_audit.txt b/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_no_audit.txt deleted file mode 100644 index 58068e34a3c..00000000000 --- a/legend-engine-xts-persistence/legend-engine-xt-persistence-test-runner/src/test/resources/v2/append-only/filter_duplicates/persistence_no_audit.txt +++ /dev/null @@ -1,143 +0,0 @@ -###Pure -Class test::Person -{ - name: String[1]; -} - -Class test::ServiceResult -{ - ID: String[1]; - NAME: String[1]; -} - -###Mapping -Mapping test::Mapping () - -###Relational -Database test::TestDatabase -( - Table personTable - ( - ID INTEGER PRIMARY KEY, - NAME VARCHAR(100) - ) -) -###Service -Service test::Service -{ - pattern : 'test'; - documentation : 'test'; - autoActivateUpdates: true; - execution: Single - { - query: src: test::Person[1]|$src.name; - mapping: test::Mapping; - runtime: - #{ - connections: []; - }#; - } - test: Single - { - data: 'test'; - asserts: []; - } -} - -###Persistence -Persistence test::TestPersistence -{ - doc: 'This is test documentation.'; - trigger: Manual; - service: test::Service; - serviceOutputTargets: - [ - TDS - { - keys: [ID] - deduplication: None; - datasetType: Delta - { - actionIndicator: None; - } - } - -> - Relational - #{ - table: personTable; - database: test::TestDatabase; - temporality: None - { - auditing: None; - updatesHandling: AppendOnly - { - appendStrategy: FilterDuplicates; - } - } - }# - ]; - tests: - [ - test1: - { - 
testBatches: - [ - testBatch1: - { - data: - { - connection: - { - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"}]'; - }# - } - } - asserts: - [ - assert1: - EqualToJson - #{ - expected: - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"}]'; - }#; - }# - ] - }, - testBatch2: - { - data: - { - connection: - { - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":2, "NAME":"BRAD"},{"ID":3, "NAME":"CATHY"}]'; - }# - } - } - asserts: - [ - assert1: - EqualToJson - #{ - expected: - ExternalFormat - #{ - contentType: 'application/json'; - data: '[{"ID":1, "NAME":"ANDY"},{"ID":2, "NAME":"BRAD"},{"ID":3, "NAME":"CATHY"}]'; - }#; - }# - ] - } - ] - isTestDataFromServiceOutput: true; - } - ] -} \ No newline at end of file diff --git a/legend-engine-xts-persistence/pom.xml b/legend-engine-xts-persistence/pom.xml index b438b169025..135091e59a9 100644 --- a/legend-engine-xts-persistence/pom.xml +++ b/legend-engine-xts-persistence/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-grammar/pom.xml b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-grammar/pom.xml index 12292f5e5d2..bae302a3221 100644 --- a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-grammar/pom.xml +++ b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-protobuf - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-protobuf-grammar diff --git a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-protocol/pom.xml b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-protocol/pom.xml index 4516e69fa0b..74221f43181 100644 --- a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-protocol/pom.xml +++ b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-protobuf - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-protobuf-protocol diff --git a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-pure/pom.xml b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-pure/pom.xml index 05fe28adcf3..fe770ee85f1 100644 --- a/legend-engine-xts-protobuf/legend-engine-xt-protobuf-pure/pom.xml +++ b/legend-engine-xts-protobuf/legend-engine-xt-protobuf-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-protobuf - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-protobuf/legend-engine-xt-protobuf/pom.xml b/legend-engine-xts-protobuf/legend-engine-xt-protobuf/pom.xml index f6e8a518ee4..4d3a003dfe0 100644 --- a/legend-engine-xts-protobuf/legend-engine-xt-protobuf/pom.xml +++ b/legend-engine-xts-protobuf/legend-engine-xt-protobuf/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-protobuf - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -57,7 +57,7 @@ org.finos.legend.engine legend-engine-protocol-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT org.finos.legend.pure diff --git a/legend-engine-xts-protobuf/pom.xml b/legend-engine-xts-protobuf/pom.xml index 4a4241f2955..994167c3f78 100644 --- a/legend-engine-xts-protobuf/pom.xml +++ b/legend-engine-xts-protobuf/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-analytics/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-analytics/pom.xml index dd036aabade..de8db33ef42 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-analytics/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-analytics/pom.xml @@ -19,7 +19,7 @@ legend-engine-xt-relationalStore-analytics org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-pure/pom.xml index 3d1355e1a0d..008a77e2f89 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/legend-engine-xt-relationalStore-store-entitlement-pure/pom.xml @@ -19,7 +19,7 @@ legend-engine-xt-relationalStore-analytics org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/pom.xml index 91fae8f4668..94952e826c3 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-analytics/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-relationalStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/pom.xml index a02a3111a95..4ad10b16753 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-relationalStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -35,7 +35,11 @@ org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory + + + org.finos.legend.engine + legend-engine-xt-connection-protocol org.finos.legend.engine diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/HACKY__RelationalDatabaseConnectionAdapter.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/HACKY__RelationalDatabaseConnectionAdapter.java index 172bdd2a8d2..a924ad9b89a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/HACKY__RelationalDatabaseConnectionAdapter.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/HACKY__RelationalDatabaseConnectionAdapter.java @@ -14,7 +14,7 @@ 
package org.finos.legend.connection; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection; import org.finos.legend.engine.shared.core.identity.Identity; @@ -29,12 +29,12 @@ public interface HACKY__RelationalDatabaseConnectionAdapter class ConnectionFactoryMaterial { - public final StoreInstance storeInstance; + public final Connection connection; public final AuthenticationConfiguration authenticationConfiguration; - public ConnectionFactoryMaterial(StoreInstance storeInstance, AuthenticationConfiguration authenticationConfiguration) + public ConnectionFactoryMaterial(Connection connection, AuthenticationConfiguration authenticationConfiguration) { - this.storeInstance = storeInstance; + this.connection = connection; this.authenticationConfiguration = authenticationConfiguration; } } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseManager.java similarity index 95% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseManager.java index d7320f0a503..7ff111b7394 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/DatabaseManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseManager.java @@ -17,7 +17,7 @@ import java.util.List; import java.util.Properties; -public interface DatabaseManager +public interface RelationalDatabaseManager { List getIds(); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseStoreSupport.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseStoreSupport.java deleted file mode 100644 index b81cb654a97..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/RelationalDatabaseStoreSupport.java +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
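[Editor's note: illustrative sketch, not part of the patch] DatabaseManager is renamed to RelationalDatabaseManager and is discovered via ServiceLoader (see the META-INF/services rename later in this patch). A hypothetical third-party implementation, sketched from the three methods the patch's JDBCConnectionManager actually calls; the Postgres class name, driver class, and URL format are assumptions:

    // Registered via META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
    public class PostgresRelationalDatabaseManager implements RelationalDatabaseManager
    {
        @Override
        public List<String> getIds()
        {
            return Lists.mutable.with(RelationalDatabaseType.POSTGRES.getIdentifier());
        }

        @Override
        public String getDriver()
        {
            return "org.postgresql.Driver"; // assumed: stock Postgres JDBC driver
        }

        @Override
        public String buildURL(String host, int port, String databaseName, Properties properties)
        {
            return String.format("jdbc:postgresql://%s:%d/%s", host, port, databaseName);
        }
    }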
- -package org.finos.legend.connection; - -import org.eclipse.collections.api.factory.Lists; - -import java.util.List; -import java.util.Objects; - -public class RelationalDatabaseStoreSupport extends StoreSupport -{ - private final Database database; - - private RelationalDatabaseStoreSupport(String identifier, Database database, List authenticationMechanismConfigurations) - { - super(identifier, authenticationMechanismConfigurations); - this.database = Objects.requireNonNull(database, "Relational database store support database type is missing"); - } - - public Database getDatabase() - { - return database; - } - - public static RelationalDatabaseStoreSupport cast(StoreSupport storeSupport) - { - return cast(storeSupport, null); - } - - public static RelationalDatabaseStoreSupport cast(StoreSupport storeSupport, Database database) - { - if (!(storeSupport instanceof RelationalDatabaseStoreSupport)) - { - throw new RuntimeException("Expected store support for relational databases"); - } - RelationalDatabaseStoreSupport relationalDatabaseStoreSupport = (RelationalDatabaseStoreSupport) storeSupport; - if (database != null && !database.equals(relationalDatabaseStoreSupport.getDatabase())) - { - - throw new RuntimeException(String.format("Expected relational database store support for '%s'", database.getLabel())); - } - return relationalDatabaseStoreSupport; - } - - public static class Builder - { - private final Database database; - private String identifier; - private final List authenticationMechanismConfigurations = Lists.mutable.empty(); - - public Builder(Database database) - { - this.database = database; - } - - public Builder withIdentifier(String identifier) - { - this.identifier = identifier; - return this; - } - - public Builder withAuthenticationMechanismConfiguration(AuthenticationMechanismConfiguration authenticationMechanismConfiguration) - { - this.authenticationMechanismConfigurations.add(authenticationMechanismConfiguration); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(List authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(authenticationMechanismConfigurations); - return this; - } - - public Builder withAuthenticationMechanismConfigurations(AuthenticationMechanismConfiguration... 
authenticationMechanismConfigurations) - { - this.authenticationMechanismConfigurations.addAll(Lists.mutable.of(authenticationMechanismConfigurations)); - return this; - } - - public RelationalDatabaseStoreSupport build() - { - return new RelationalDatabaseStoreSupport( - this.identifier, - this.database, - this.authenticationMechanismConfigurations - ); - } - } -} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2DatabaseManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2RelationalDatabaseManager.java similarity index 83% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2DatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2RelationalDatabaseManager.java index 44fdf427975..6a44059a547 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2DatabaseManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/H2RelationalDatabaseManager.java @@ -15,18 +15,17 @@ package org.finos.legend.connection.impl; import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.connection.DatabaseManager; -import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.RelationalDatabaseManager; import java.util.List; import java.util.Properties; -public class H2DatabaseManager implements DatabaseManager +public class H2RelationalDatabaseManager implements RelationalDatabaseManager { @Override public List getIds() { - return Lists.mutable.with(DatabaseType.H2.name()); + return Lists.mutable.with(RelationalDatabaseType.H2.name()); } @Override diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/JDBCConnectionBuilder.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionBuilder.java similarity index 89% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/JDBCConnectionBuilder.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionBuilder.java index 0c09de0784e..d380bb07a9c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/JDBCConnectionBuilder.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionBuilder.java @@ -12,10 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package org.finos.legend.connection; +package org.finos.legend.connection.impl; +import org.finos.legend.connection.ConnectionBuilder; import org.finos.legend.connection.impl.JDBCConnectionManager; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; import org.finos.legend.engine.shared.core.identity.Credential; import java.sql.Connection; diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionManager.java index 8b96bf0aa7a..3fd59cf54fd 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/JDBCConnectionManager.java @@ -21,18 +21,17 @@ import org.eclipse.collections.impl.map.mutable.ConcurrentHashMap; import org.finos.legend.connection.Authenticator; import org.finos.legend.connection.ConnectionManager; -import org.finos.legend.connection.Database; -import org.finos.legend.connection.DatabaseManager; +import org.finos.legend.connection.DatabaseType; +import org.finos.legend.connection.RelationalDatabaseManager; import org.finos.legend.connection.LegendEnvironment; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.protocol.AuthenticationConfiguration; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.connection.Connection; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.AuthenticationConfiguration; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import javax.sql.DataSource; import java.io.PrintWriter; -import java.sql.Connection; import java.sql.Driver; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; @@ -50,7 +49,7 @@ public class JDBCConnectionManager implements ConnectionManager private static final int HIKARICP_DEFAULT_MAX_POOL_SIZE = 100; private static final int HIKARICP_DEFAULT_MIN_POOL_SIZE = 0; - private static final ConcurrentHashMap managerByName = ConcurrentHashMap.newMap(); + private static final ConcurrentHashMap managerByName = ConcurrentHashMap.newMap(); private static final AtomicBoolean isInitialized = new AtomicBoolean(); private static JDBCConnectionManager INSTANCE; @@ -79,7 +78,7 @@ private static void setup() { if (!isInitialized.get()) { - for (DatabaseManager manager : ServiceLoader.load(DatabaseManager.class)) + for (RelationalDatabaseManager manager : ServiceLoader.load(RelationalDatabaseManager.class)) { manager.getIds().forEach(i -> managerByName.put(i, manager)); } @@ -95,24 +94,24 @@ public void initialize(LegendEnvironment environment) JDBCConnectionManager.setup(); } - public Connection getConnection(Database database, - String host, - int port, - String databaseName, - Properties connectionProperties, - ConnectionPoolConfig connectionPoolConfig, - Function authenticationPropertiesSupplier, - Authenticator authenticator, - Identity 
identity + public java.sql.Connection getConnection(DatabaseType databaseType, + String host, + int port, + String databaseName, + Properties connectionProperties, + ConnectionPoolConfig connectionPoolConfig, + Function authenticationPropertiesSupplier, + Authenticator authenticator, + Identity identity ) throws SQLException { - StoreInstance storeInstance = authenticator.getStoreInstance(); - ConnectionSpecification connectionSpecification = storeInstance.getConnectionSpecification(); + Connection connection = authenticator.getConnection(); + ConnectionSpecification connectionSpecification = connection.getConnectionSpecification(); AuthenticationConfiguration authenticationConfiguration = authenticator.getAuthenticationConfiguration(); String poolName = getPoolName(identity, connectionSpecification, authenticationConfiguration); // TODO: @akphi - this is simplistic, we need to handle concurrency and errors - Supplier dataSourceSupplier = () -> this.buildDataSource(database, host, port, databaseName, connectionProperties, connectionPoolConfig, authenticationPropertiesSupplier, authenticator, identity); + Supplier dataSourceSupplier = () -> this.buildDataSource(databaseType, host, port, databaseName, connectionProperties, connectionPoolConfig, authenticationPropertiesSupplier, authenticator, identity); Function0 connectionPoolSupplier = () -> new ConnectionPool(dataSourceSupplier.get()); ConnectionPool connectionPool = this.poolIndex.getIfAbsentPut(poolName, connectionPoolSupplier); @@ -147,7 +146,7 @@ public Connection getConnection(Database database, } protected HikariDataSource buildDataSource( - Database database, + DatabaseType databaseType, String host, int port, String databaseName, @@ -158,16 +157,16 @@ protected HikariDataSource buildDataSource( Identity identity ) { - StoreInstance storeInstance = authenticator.getStoreInstance(); - ConnectionSpecification connectionSpecification = storeInstance.getConnectionSpecification(); + Connection connection = authenticator.getConnection(); + ConnectionSpecification connectionSpecification = connection.getConnectionSpecification(); AuthenticationConfiguration authenticationConfiguration = authenticator.getAuthenticationConfiguration(); - DatabaseManager databaseManager = getManagerForDatabase(database); + RelationalDatabaseManager relationalDatabaseManager = getManagerForDatabase(databaseType); - String jdbcUrl = databaseManager.buildURL(host, port, databaseName, connectionProperties); + String jdbcUrl = relationalDatabaseManager.buildURL(host, port, databaseName, connectionProperties); String poolName = getPoolName(identity, connectionSpecification, authenticationConfiguration); HikariConfig jdbcConfig = new HikariConfig(); - jdbcConfig.setDriverClassName(databaseManager.getDriver()); + jdbcConfig.setDriverClassName(relationalDatabaseManager.getDriver()); jdbcConfig.setPoolName(poolName); jdbcConfig.setJdbcUrl(jdbcUrl); @@ -184,7 +183,7 @@ protected HikariDataSource buildDataSource( jdbcConfig.addDataSourceProperty("prepStmtCacheSqlLimit", 0); jdbcConfig.addDataSourceProperty("useServerPrepStmts", false); - jdbcConfig.setDataSource(new DataSourceWrapper(jdbcUrl, connectionProperties, databaseManager, authenticationPropertiesSupplier, authenticator, identity)); + jdbcConfig.setDataSource(new DataSourceWrapper(jdbcUrl, connectionProperties, relationalDatabaseManager, authenticationPropertiesSupplier, authenticator, identity)); return new HikariDataSource(jdbcConfig); } @@ -213,16 +212,16 @@ public static String getPoolName(Identity identity, 
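[Editor's note: illustrative sketch, not part of the patch] buildDataSource above wires the renamed manager into a HikariCP pool. Reduced to its essentials, with placeholder values where the real code derives them from the connection specification and authenticator:

    HikariConfig jdbcConfig = new HikariConfig();
    jdbcConfig.setDriverClassName("org.h2.Driver"); // relationalDatabaseManager.getDriver()
    jdbcConfig.setJdbcUrl("jdbc:h2:mem:demo");      // relationalDatabaseManager.buildURL(...)
    jdbcConfig.setPoolName("demo-pool");            // getPoolName(identity, spec, authConfig)
    jdbcConfig.setMaximumPoolSize(100);             // HIKARICP_DEFAULT_MAX_POOL_SIZE
    jdbcConfig.setMinimumIdle(0);                   // HIKARICP_DEFAULT_MIN_POOL_SIZE
    // Statement caching stays disabled, matching the patch:
    jdbcConfig.addDataSourceProperty("prepStmtCacheSqlLimit", 0);
    jdbcConfig.addDataSourceProperty("useServerPrepStmts", false);
    HikariDataSource dataSource = new HikariDataSource(jdbcConfig);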
ConnectionSpecification conn ); } - private static DatabaseManager getManagerForDatabase(Database database) + private static RelationalDatabaseManager getManagerForDatabase(DatabaseType databaseType) { if (!isInitialized.get()) { throw new IllegalStateException("JDBC connection manager has not been configured properly"); } - DatabaseManager manager = managerByName.get(database.getLabel()); + RelationalDatabaseManager manager = managerByName.get(databaseType.getIdentifier()); if (manager == null) { - throw new RuntimeException(String.format("Can't find any matching managers for database type '%s'", database.getLabel())); + throw new RuntimeException(String.format("Can't find any matching managers for database type '%s'", databaseType.getIdentifier())); } return manager; } @@ -334,7 +333,7 @@ private static class DataSourceWrapper implements DataSource public DataSourceWrapper( String url, Properties connectionProperties, - DatabaseManager databaseManager, + RelationalDatabaseManager relationalDatabaseManager, Function authenticationPropertiesSupplier, Authenticator authenticator, Identity identity @@ -344,7 +343,7 @@ public DataSourceWrapper( this.connectionProperties = connectionProperties; try { - this.driver = (Driver) Class.forName(databaseManager.getDriver()).getDeclaredConstructor().newInstance(); + this.driver = (Driver) Class.forName(relationalDatabaseManager.getDriver()).getDeclaredConstructor().newInstance(); } catch (Exception e) { @@ -356,7 +355,7 @@ public DataSourceWrapper( } @Override - public Connection getConnection() throws SQLException + public java.sql.Connection getConnection() throws SQLException { Properties properties = new Properties(); properties.putAll(this.connectionProperties); @@ -377,7 +376,7 @@ public Connection getConnection() throws SQLException } @Override - public Connection getConnection(String username, String password) throws SQLException + public java.sql.Connection getConnection(String username, String password) throws SQLException { throw new RuntimeException(); } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalConnectionExtension.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalConnectionExtension.java new file mode 100644 index 00000000000..138e56e5506 --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalConnectionExtension.java @@ -0,0 +1,31 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package org.finos.legend.connection.impl; + +import org.eclipse.collections.api.factory.Lists; +import org.finos.legend.connection.AuthenticationMechanismType; +import org.finos.legend.connection.ConnectionExtension; +import org.finos.legend.connection.DatabaseType; + +import java.util.List; + +public class RelationalConnectionExtension implements ConnectionExtension +{ + @Override + public List getExtraDatabaseTypes() + { + return Lists.mutable.of(RelationalDatabaseType.values()); + } +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalDatabaseType.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalDatabaseType.java new file mode 100644 index 00000000000..e0cdd1811f8 --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/RelationalDatabaseType.java @@ -0,0 +1,38 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.connection.impl; + +import org.finos.legend.connection.DatabaseType; + +public enum RelationalDatabaseType implements DatabaseType +{ + H2("H2"), + POSTGRES("Postgres"), + BIG_QUERY("BigQuery"), + SNOWFLAKE("Snowflake"); + + private final String identifier; + + private RelationalDatabaseType(String identifier) + { + this.identifier = identifier; + } + + @Override + public String getIdentifier() + { + return this.identifier; + } +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/StaticJDBCConnectionBuilder.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/StaticJDBCConnectionBuilder.java index 1cbe12abf4f..6d12df9cdae 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/StaticJDBCConnectionBuilder.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/impl/StaticJDBCConnectionBuilder.java @@ -15,15 +15,13 @@ package org.finos.legend.connection.impl; import org.finos.legend.connection.Authenticator; -import org.finos.legend.connection.JDBCConnectionBuilder; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.protocol.StaticJDBCConnectionSpecification; +import org.finos.legend.connection.Connection; +import org.finos.legend.connection.DatabaseSupport; +import org.finos.legend.engine.protocol.pure.v1.model.connection.StaticJDBCConnectionSpecification; import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import 
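[Editor's note: illustrative sketch, not part of the patch] The new RelationalConnectionExtension and RelationalDatabaseType replace the old hard-coded DatabaseType enum: database types are now contributed through ConnectionExtension implementations. A sketch of how a consumer might index them; the registry map is an assumption, while getExtraDatabaseTypes and getIdentifier both appear in this patch:

    Map<String, DatabaseType> databaseTypeByIdentifier = new HashMap<>();
    for (ConnectionExtension extension : ServiceLoader.load(ConnectionExtension.class))
    {
        for (DatabaseType databaseType : extension.getExtraDatabaseTypes())
        {
            databaseTypeByIdentifier.put(databaseType.getIdentifier(), databaseType); // "H2", "Postgres", ...
        }
    }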
org.finos.legend.engine.shared.core.identity.credential.PlaintextUserPasswordCredential; -import java.sql.Connection; import java.util.Properties; import java.util.function.Function; @@ -31,10 +29,10 @@ public class StaticJDBCConnectionBuilder { public static class WithPlaintextUsernamePassword extends JDBCConnectionBuilder { - public Connection getConnection(StaticJDBCConnectionSpecification connectionSpecification, Authenticator authenticator, Identity identity) throws Exception + public java.sql.Connection getConnection(StaticJDBCConnectionSpecification connectionSpecification, Authenticator authenticator, Identity identity) throws Exception { - StoreInstance storeInstance = authenticator.getStoreInstance(); - RelationalDatabaseStoreSupport storeSupport = RelationalDatabaseStoreSupport.cast(storeInstance.getStoreSupport()); + Connection connection = authenticator.getConnection(); + DatabaseSupport databaseSupport = connection.getDatabaseSupport(); Properties connectionProperties = new Properties(); Function authenticationPropertiesSupplier = cred -> @@ -46,7 +44,7 @@ public Connection getConnection(StaticJDBCConnectionSpecification connectionSpec return properties; }; - return this.getConnectionManager().getConnection(storeSupport.getDatabase(), connectionSpecification.host, connectionSpecification.port, connectionSpecification.databaseName, connectionProperties, this.getConnectionPoolConfig(), authenticationPropertiesSupplier, authenticator, identity); + return this.getConnectionManager().getConnection(databaseSupport.getDatabaseType(), connectionSpecification.host, connectionSpecification.port, connectionSpecification.databaseName, connectionProperties, this.getConnectionPoolConfig(), authenticationPropertiesSupplier, authenticator, identity); } } } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.ConnectionExtension b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.ConnectionExtension new file mode 100644 index 00000000000..460c36f956e --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.ConnectionExtension @@ -0,0 +1 @@ +org.finos.legend.connection.impl.RelationalConnectionExtension \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager deleted file mode 100644 index 069422f1bf1..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager +++ /dev/null @@ -1 +0,0 @@ -org.finos.legend.connection.impl.H2DatabaseManager diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager new file mode 100644 index 00000000000..8a0c42480c5 --- /dev/null +++ 
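[Editor's note: illustrative sketch, not part of the patch] StaticJDBCConnectionBuilder.WithPlaintextUsernamePassword hands the connection manager a function mapping the resolved credential to JDBC properties. The property keys ("user", "password") are assumptions for a typical JDBC driver; the credential accessors come from PlaintextUserPasswordCredential:

    Function<Credential, Properties> authenticationPropertiesSupplier = cred ->
    {
        Properties properties = new Properties();
        if (cred instanceof PlaintextUserPasswordCredential)
        {
            PlaintextUserPasswordCredential credential = (PlaintextUserPasswordCredential) cred;
            properties.put("user", credential.getUser());         // assumed key
            properties.put("password", credential.getPassword()); // assumed key
        }
        return properties;
    };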
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
new file mode 100644
index 00000000000..8a0c42480c5
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
@@ -0,0 +1 @@
+org.finos.legend.connection.impl.H2RelationalDatabaseManager
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution-tests/pom.xml index 0bf4f64c441..9c93f6bd6ef 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution-tests/pom.xml @@ -3,7 +3,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-athena - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution/pom.xml index 3b449ef884b..ef796e2f6da 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-athena - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-grammar/pom.xml index b3747e68f50..9656a62ff83 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-athena - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-protocol/pom.xml index 8743d43226b..6a5470e853e 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-protocol/pom.xml +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-athena - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/pom.xml index 28e889f26a1..d4ba94bcd77 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-athena - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena.definition.json b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena.definition.json index 4199d545c10..e78ea742726 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena.definition.json +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena.definition.json @@ -1,6 +1,6 @@ { "name": "core_relational_athena", - "pattern": "(meta::relational::functions::sqlQueryToString::athena|meta::relational::tests::sqlQueryToString::athena|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure)(::.*)?", + "pattern": "(meta::relational::functions::sqlQueryToString::athena|meta::relational::tests::sqlQueryToString::athena|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure|meta::relational::tests::connEquality)(::.*)?", "dependencies": [ "platform", "platform_functions", diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/connectionEqualityTest.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/connectionEqualityTest.pure new file mode 100644 index 00000000000..85071259587 --- /dev/null +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/connectionEqualityTest.pure
@@ -0,0 +1,46 @@
+// Copyright 2021 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import meta::relational::metamodel::execute::tests::*;
+import meta::external::store::relational::runtime::*;
+import meta::pure::runtime::*;
+import meta::relational::translation::*;
+import meta::pure::extension::*;
+import meta::relational::extension::*;
+import meta::relational::runtime::*;
+import meta::relational::tests::csv::*;
+import meta::relational::metamodel::execute::*;
+import meta::relational::metamodel::*;
+import meta::pure::mapping::*;
+
+function <<test.Test>> meta::relational::tests::connEquality::testConnectionEqualityAllSameAthena() : Boolean[1]
+{
+  let c1 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Athena,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::AthenaDatasourceSpecification(awsRegion='awsR', s3OutputLocation='s3OL', databaseName='db'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  let c2 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Athena,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::AthenaDatasourceSpecification(awsRegion='awsR', s3OutputLocation='s3OL', databaseName='db'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  assert(runRelationalRouterExtensionConnectionEquality($c1, $c2));
+
+}
+
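These connEquality tests assert structural equality of connections: two RelationalDatabaseConnection values compare equal when their type, datasource specification and authentication strategy agree field by field, the relevant fields being the key-stereotyped properties on the specification classes (the <<equality.Key>> markers restored in the metamodel hunks below). In Java terms the datasource-specification side behaves roughly like a value class whose equals/hashCode cover exactly the key fields; the mirror class here is hypothetical, for illustration only:

import java.util.Objects;

// Hypothetical mirror of AthenaDatasourceSpecification: equality is defined
// over the key-annotated properties, which is what the connEquality tests assert.
final class AthenaDatasourceSpec
{
    final String awsRegion;
    final String s3OutputLocation;
    final String databaseName;

    AthenaDatasourceSpec(String awsRegion, String s3OutputLocation, String databaseName)
    {
        this.awsRegion = awsRegion;
        this.s3OutputLocation = s3OutputLocation;
        this.databaseName = databaseName;
    }

    @Override
    public boolean equals(Object o)
    {
        if (!(o instanceof AthenaDatasourceSpec))
        {
            return false;
        }
        AthenaDatasourceSpec that = (AthenaDatasourceSpec) o;
        return Objects.equals(this.awsRegion, that.awsRegion)
                && Objects.equals(this.s3OutputLocation, that.s3OutputLocation)
                && Objects.equals(this.databaseName, that.databaseName);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(this.awsRegion, this.s3OutputLocation, this.databaseName);
    }
}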
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/metamodel.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/metamodel.pure
index 5af0eea4fad..3d5185442bd 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/metamodel.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/main/resources/core_relational_athena/relational/connection/metamodel.pure
@@ -14,7 +14,7 @@
 Class meta::pure::alloy::connections::alloy::specification::AthenaDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-  awsRegion: String[1];
-  s3OutputLocation: String[1];
-  databaseName: String[1];
+  <<equality.Key>> awsRegion: String[1];
+  <<equality.Key>> s3OutputLocation: String[1];
+  <<equality.Key>> databaseName: String[1];
 }
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
new file mode 100644
index 00000000000..c072278365f
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/legend-engine-xt-relationalStore-athena-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
@@ -0,0 +1,31 @@
+// Copyright 2022 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.pure.code.core;
+
+import junit.framework.TestSuite;
+import org.finos.legend.pure.m3.execution.test.PureTestBuilder;
+import org.finos.legend.pure.m3.execution.test.TestCollection;
+import org.finos.legend.pure.runtime.java.compiled.execution.CompiledExecutionSupport;
+import org.finos.legend.pure.runtime.java.compiled.testHelper.PureTestBuilderCompiled;
+
+public class Test_Pure_Relational_ConnectionEquality
+{
+    public static TestSuite suite()
+    {
+        String testPackage = "meta::relational::tests::connEquality";
+        CompiledExecutionSupport executionSupport = PureTestBuilderCompiled.getClassLoaderExecutionSupport();
+        return PureTestBuilderCompiled.buildSuite(TestCollection.collectTests(testPackage, executionSupport.getProcessorSupport(), ci -> PureTestBuilder.satisfiesConditions(ci, executionSupport.getProcessorSupport())), executionSupport);
+    }
+}
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/pom.xml index 92c900c899d..2accf2b9803 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-athena/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0
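Test_Pure_Relational_ConnectionEquality (added above, and repeated per database module below) follows the JUnit 3 static suite() convention, so the collected Pure tests can be driven from any JUnit 3-compatible runner. A minimal usage sketch; the RunConnectionEqualitySuite class is hypothetical:

import junit.framework.TestSuite;
import junit.textui.TestRunner;

import org.finos.legend.pure.code.core.Test_Pure_Relational_ConnectionEquality;

public class RunConnectionEqualitySuite
{
    public static void main(String[] args)
    {
        // suite() collects every <<test.Test>> function under
        // meta::relational::tests::connEquality and wraps it as a JUnit test.
        TestSuite suite = Test_Pure_Relational_ConnectionEquality.suite();
        TestRunner.run(suite);
    }
}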
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution-tests/pom.xml index 4d4ca89a640..6943097b517 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution-tests/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-bigquery org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution/pom.xml index 5995b72d51c..5973fe05bba 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-bigquery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-grammar/pom.xml index 956161185bf..8c0efb7ed16 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-bigquery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-protocol/pom.xml index 6bf0cb36e03..5c3f0cd17a3 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-bigquery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/pom.xml index 772eb8eebd3..ce19e3be1f5 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-bigquery - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery.definition.json b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery.definition.json index c1f607ba686..5b06468c642 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery.definition.json +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery.definition.json @@ -1,6 +1,6 @@ { "name": "core_relational_bigquery", - "pattern": "(meta::relational::functions::sqlQueryToString::bigQuery|meta::relational::tests::sqlQueryToString::bigQuery|meta::relational::bigQuery::tests|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure)(::.*)?", + "pattern": "(meta::relational::functions::sqlQueryToString::bigQuery|meta::relational::tests::sqlQueryToString::bigQuery|meta::relational::bigQuery::tests|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure|meta::relational::tests::connEquality)(::.*)?", "dependencies": [ "platform", "platform_functions", diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/bigQuerySpecification.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/bigQuerySpecification.pure index 2a26ad1a66c..b2fd2efd681 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/bigQuerySpecification.pure +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/bigQuerySpecification.pure
@@ -14,8 +14,8 @@
 Class meta::pure::alloy::connections::alloy::specification::BigQueryDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-  projectId:String[1];
-  defaultDataset:String[1];
-  proxyHost: String[0..1];
-  proxyPort: String[0..1];
+  <<equality.Key>> projectId:String[1];
+  <<equality.Key>> defaultDataset:String[1];
+  <<equality.Key>> proxyHost: String[0..1];
+  <<equality.Key>> proxyPort: String[0..1];
 }
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/connectionEqualityTest.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/connectionEqualityTest.pure
new file mode 100644
index 00000000000..1a3cbd31418
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/main/resources/core_relational_bigquery/relational/runtime/connection/connectionEqualityTest.pure
@@ -0,0 +1,46 @@
+// Copyright 2021 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import meta::relational::metamodel::execute::tests::*;
+import meta::external::store::relational::runtime::*;
+import meta::pure::runtime::*;
+import meta::relational::translation::*;
+import meta::pure::extension::*;
+import meta::relational::extension::*;
+import meta::relational::runtime::*;
+import meta::relational::tests::csv::*;
+import meta::relational::metamodel::execute::*;
+import meta::relational::metamodel::*;
+import meta::pure::mapping::*;
+
+function <<test.Test>> meta::relational::tests::connEquality::testConnectionEqualityAllSameBigQuery() : Boolean[1]
+{
+  let c1 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.BigQuery,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::BigQueryDatasourceSpecification(projectId='project', defaultDataset='defDs', proxyHost='ph', proxyPort='8080'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  let c2 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.BigQuery,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::BigQueryDatasourceSpecification(projectId='project', defaultDataset='defDs', proxyHost='ph', proxyPort='8080'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  assert(runRelationalRouterExtensionConnectionEquality($c1, $c2));
+
+}
+
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
new file mode 100644
index 00000000000..c072278365f
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/legend-engine-xt-relationalStore-bigquery-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
@@ -0,0 +1,31 @@
+// Copyright 2022 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package org.finos.legend.pure.code.core; + +import junit.framework.TestSuite; +import org.finos.legend.pure.m3.execution.test.PureTestBuilder; +import org.finos.legend.pure.m3.execution.test.TestCollection; +import org.finos.legend.pure.runtime.java.compiled.execution.CompiledExecutionSupport; +import org.finos.legend.pure.runtime.java.compiled.testHelper.PureTestBuilderCompiled; + +public class Test_Pure_Relational_ConnectionEquality +{ + public static TestSuite suite() + { + String testPackage = "meta::relational::tests::connEquality"; + CompiledExecutionSupport executionSupport = PureTestBuilderCompiled.getClassLoaderExecutionSupport(); + return PureTestBuilderCompiled.buildSuite(TestCollection.collectTests(testPackage, executionSupport.getProcessorSupport(), ci -> PureTestBuilder.satisfiesConditions(ci, executionSupport.getProcessorSupport())), executionSupport); + } +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/pom.xml index ab117934571..56ccac48f13 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-bigquery/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution-tests/pom.xml index 81e894dd1b7..2592b5f5d03 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-databricks - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution/pom.xml index f82546b0207..a39350fd32a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-databricks - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-grammar/pom.xml index 4266d8e7fc8..8e151982bc3 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-databricks - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-protocol/pom.xml index 4bdb231f29d..ab584d88f62 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-databricks - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/pom.xml index 0236a5704f7..144037e552f 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-databricks - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks.definition.json b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks.definition.json index 878f243e236..9f1b5e9e0ba 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks.definition.json +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks.definition.json @@ -1,5 +1,5 @@ { "name" : "core_relational_databricks", - "pattern" : "(meta::relational::functions::sqlQueryToString::databricks|meta::relational::tests::sqlQueryToString::databricks|meta::relational::databricks::tests|meta::relational::tests::functions::sqlstring::databricks|meta::pure::alloy::connections|meta::protocols::pure)(::.*)?", + "pattern" : "(meta::relational::functions::sqlQueryToString::databricks|meta::relational::tests::sqlQueryToString::databricks|meta::relational::tests::connEquality|meta::relational::databricks::tests|meta::relational::tests::functions::sqlstring::databricks|meta::pure::alloy::connections|meta::protocols::pure)(::.*)?", "dependencies" : ["platform", "platform_functions", "platform_store_relational", "platform_dsl_mapping", "core_functions", "core", "core_relational"] } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/connectionEqualityTest.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/connectionEqualityTest.pure new file mode 100644 index 00000000000..d60490a6cf9 --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/connectionEqualityTest.pure @@ -0,0 +1,47 @@ +// Copyright 2021 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+import meta::relational::metamodel::execute::tests::*;
+import meta::external::store::relational::runtime::*;
+import meta::pure::runtime::*;
+import meta::relational::translation::*;
+import meta::pure::extension::*;
+import meta::relational::extension::*;
+import meta::relational::runtime::*;
+import meta::relational::tests::csv::*;
+import meta::relational::metamodel::execute::*;
+import meta::relational::metamodel::*;
+import meta::pure::mapping::*;
+
+function <<test.Test>> meta::relational::tests::connEquality::testConnectionEqualityAllSameDataBricks() : Boolean[1]
+{
+  let c1 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Databricks,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::DatabricksDatasourceSpecification(hostname='host', port='8080', protocol='http', httpPath='http://path'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  let c2 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Databricks,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::DatabricksDatasourceSpecification(hostname='host', port='8080', protocol='http', httpPath='http://path'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  assert(runRelationalRouterExtensionConnectionEquality($c1, $c2));
+
+}
+
+
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/metamodel.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/metamodel.pure
index 6c3138786f6..fec225e36c7 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/metamodel.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/main/resources/core_relational_databricks/relational/connection/metamodel.pure
@@ -14,8 +14,8 @@
 Class meta::pure::alloy::connections::alloy::specification::DatabricksDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-  hostname:String[1];
-  port:String[1];
-  protocol:String[1];
-  httpPath:String[1];
+  <<equality.Key>> hostname:String[1];
+  <<equality.Key>> port:String[1];
+  <<equality.Key>> protocol:String[1];
+  <<equality.Key>> httpPath:String[1];
 }
\ No newline at end of file
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
new file mode 100644
index 00000000000..c072278365f
--- 
/dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/legend-engine-xt-relationalStore-databricks-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java @@ -0,0 +1,31 @@ +// Copyright 2022 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.pure.code.core; + +import junit.framework.TestSuite; +import org.finos.legend.pure.m3.execution.test.PureTestBuilder; +import org.finos.legend.pure.m3.execution.test.TestCollection; +import org.finos.legend.pure.runtime.java.compiled.execution.CompiledExecutionSupport; +import org.finos.legend.pure.runtime.java.compiled.testHelper.PureTestBuilderCompiled; + +public class Test_Pure_Relational_ConnectionEquality +{ + public static TestSuite suite() + { + String testPackage = "meta::relational::tests::connEquality"; + CompiledExecutionSupport executionSupport = PureTestBuilderCompiled.getClassLoaderExecutionSupport(); + return PureTestBuilderCompiled.buildSuite(TestCollection.collectTests(testPackage, executionSupport.getProcessorSupport(), ci -> PureTestBuilder.satisfiesConditions(ci, executionSupport.getProcessorSupport())), executionSupport); + } +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/pom.xml index e9810e038c3..70f05d06f75 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-databricks/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/pom.xml index 5eee2575af5..809d786e3df 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-dbExtension org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__.definition.json b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__.definition.json index 2c16a1c0f72..7be070a1dbf 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__.definition.json +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__.definition.json @@ -1,5 +1,5 @@ { "name" : "core_relational_${dbtype}", - "pattern" : "(meta::relational::functions::sqlQueryToString::${dbType}|meta::relational::tests::sqlQueryToString::${dbType}|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure)(::.*)?", + "pattern" : "(meta::relational::functions::sqlQueryToString::${dbType}|meta::relational::tests::sqlQueryToString::${dbType}|meta::pure::alloy::connections|meta::external::store::relational::runtime|meta::protocols::pure|meta::relational::tests::connEquality)(::.*)?", "dependencies" : ["platform", "platform_functions", "platform_store_relational", "core", "core_relational"] } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__/relational/connection/connectionEqualityTest.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__/relational/connection/connectionEqualityTest.pure new file mode 100644 index 00000000000..84b2d65cd54 --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/main/resources/core_relational___dbtype__/relational/connection/connectionEqualityTest.pure @@ -0,0 +1,47 @@ +// Copyright 2021 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+import meta::relational::metamodel::execute::tests::*;
+import meta::external::store::relational::runtime::*;
+import meta::pure::runtime::*;
+import meta::relational::translation::*;
+import meta::pure::extension::*;
+import meta::relational::extension::*;
+import meta::relational::runtime::*;
+import meta::relational::tests::csv::*;
+import meta::relational::metamodel::execute::*;
+import meta::relational::metamodel::*;
+import meta::pure::mapping::*;
+
+
+function <<test.Test>> meta::relational::tests::connEquality::testConnectionEqualityAllSame__dbtype__() : Boolean[1]
+{
+  let c1 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.__dbtype__,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::__dbtype__DatasourceSpecification(),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  let c2 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.__dbtype__,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::__dbtype__DatasourceSpecification(),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token')
+  );
+
+  assert(runRelationalRouterExtensionConnectionEquality($c1, $c2));
+
+}
+
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
new file mode 100644
index 00000000000..c072278365f
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-dbExtension-archetype/src/main/resources/archetype-resources/legend-engine-xt-relationalStore-__dbtype__-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
@@ -0,0 +1,31 @@
+// Copyright 2022 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package org.finos.legend.pure.code.core; + +import junit.framework.TestSuite; +import org.finos.legend.pure.m3.execution.test.PureTestBuilder; +import org.finos.legend.pure.m3.execution.test.TestCollection; +import org.finos.legend.pure.runtime.java.compiled.execution.CompiledExecutionSupport; +import org.finos.legend.pure.runtime.java.compiled.testHelper.PureTestBuilderCompiled; + +public class Test_Pure_Relational_ConnectionEquality +{ + public static TestSuite suite() + { + String testPackage = "meta::relational::tests::connEquality"; + CompiledExecutionSupport executionSupport = PureTestBuilderCompiled.getClassLoaderExecutionSupport(); + return PureTestBuilderCompiled.buildSuite(TestCollection.collectTests(testPackage, executionSupport.getProcessorSupport(), ci -> PureTestBuilder.satisfiesConditions(ci, executionSupport.getProcessorSupport())), executionSupport); + } +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/legend-engine-xt-relationalStore-hive-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/legend-engine-xt-relationalStore-hive-pure/pom.xml index 69a735cf579..e74d9de4f0c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/legend-engine-xt-relationalStore-hive-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/legend-engine-xt-relationalStore-hive-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-hive - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/pom.xml index d771fc231d0..74439f2d708 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-hive/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/pom.xml index 806be46c5f8..4687f0cab38 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-memsql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLDatabaseManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLRelationalDatabaseManager.java similarity index 90% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLDatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLRelationalDatabaseManager.java index a9a81b97815..a4dda5f09e3 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLDatabaseManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/java/org/finos/legend/connection/jdbc/driver/MemSQLRelationalDatabaseManager.java @@ -15,13 +15,13 @@ package org.finos.legend.connection.jdbc.driver; import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.connection.DatabaseManager; +import org.finos.legend.connection.RelationalDatabaseManager; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.DatabaseType; import java.util.List; import java.util.Properties; -public class MemSQLDatabaseManager implements DatabaseManager +public class MemSQLRelationalDatabaseManager implements RelationalDatabaseManager { @Override public List getIds() diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager deleted file mode 100644 index 29c0a912ba4..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager +++ /dev/null @@ -1 +0,0 @@ -org.finos.legend.connection.jdbc.driver.MemSQLDatabaseManager diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
new file mode 100644
index 00000000000..e83f44fb392
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
@@ -0,0 +1 @@
+org.finos.legend.connection.jdbc.driver.MemSQLRelationalDatabaseManager
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/pom.xml index 90c49a13f5d..eea1b95281e 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/pom.xml @@ -19,7 +19,7 @@ legend-engine-xt-relationalStore-memsql org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/tests/api/dynamicTestConnections/MemSQLTestContainer.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/tests/api/dynamicTestConnections/MemSQLTestContainer.java
index 61e719cb061..a5b0c09f0c6 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/tests/api/dynamicTestConnections/MemSQLTestContainer.java
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution-tests/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/tests/api/dynamicTestConnections/MemSQLTestContainer.java
@@ -51,6 +51,7 @@ public void setup()
              Statement statement = connection.createStatement())
         {
             statement.execute("create schema if not exists " + DATABASE_NAME + ";");
+            statement.execute("SET GLOBAL maximum_blob_cache_size_mb = 1024");
         }
         catch (Exception e)
         {
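For readers unfamiliar with the idiom, the setup() hunk above uses try-with-resources, so both the JDBC Connection and the Statement are closed automatically even if the SET GLOBAL statement fails. A self-contained sketch of the same pattern; the MemSqlSetupSketch class and its jdbcUrl parameter are hypothetical, for illustration only:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class MemSqlSetupSketch
{
    static void setup(String jdbcUrl, String databaseName) throws Exception
    {
        // Both resources are closed in reverse order when the block exits.
        try (Connection connection = DriverManager.getConnection(jdbcUrl);
             Statement statement = connection.createStatement())
        {
            statement.execute("create schema if not exists " + databaseName + ";");
            // Mirrors the diff: raise SingleStore's blob cache ceiling for the tests.
            statement.execute("SET GLOBAL maximum_blob_cache_size_mb = 1024");
        }
    }
}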
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution/pom.xml index c55623b1adb..3f7fcd87630 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-execution/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-memsql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-relationalStore-memsql-execution diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/pom.xml index b50fbb9c384..92b41259806 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-memsql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT legend-engine-xt-relationalStore-memsql-pure diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/sqlQueryToString/memSQLExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/sqlQueryToString/memSQLExtension.pure index 973cef0e638..c1f1adda84d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/sqlQueryToString/memSQLExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/sqlQueryToString/memSQLExtension.pure @@ -81,9 +81,10 @@ function <> meta::relational::functions::sqlQueryToString::memsq dynaFnToSql('concat', $allStates, ^ToSql(format='concat%s', transform={p:String[*]|$p->joinStrings('(', ', ', ')')})), dynaFnToSql('convertDate', $allStates, ^ToSql(format='%s', transform={p:String[*] | $p->convertToDateMemSQL()})), dynaFnToSql('convertVarchar128', $allStates, ^ToSql(format='convert(%s, CHAR)')), - dynaFnToSql('dateDiff', $allStates, ^ToSql(format='%s', transform={p:String[*]|generateDateDiffExpressionForMemSQL ([$p->at(1), $p->at(0), $p->at(2)->replace('\'', '')])})), + dynaFnToSql('dateDiff', $allStates, ^ToSql(format='%s', transform={p:String[*]|generateDateDiffExpressionForMemSQL ([$p->at(0), $p->at(1), $p->at(2)->replace('\'', '')])})), dynaFnToSql('datePart', $allStates, 
^ToSql(format='date(%s)')), dynaFnToSql('dayOfMonth', $allStates, ^ToSql(format='day(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='dayofweek(%s)')), dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='dayofyear(%s)')), dynaFnToSql('decodeBase64', $allStates, ^ToSql(format='cast(from_base64(%s) as char)')), @@ -103,6 +104,7 @@ function <> meta::relational::functions::sqlQueryToString::memsq dynaFnToSql('minute', $allStates, ^ToSql(format='minute(%s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='monthname(%s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='adddate(%s, INTERVAL case when %s - dayofweek(%s) > 0 then %s - dayofweek(%s) - 7 else %s - dayofweek(%s) end DAY)', transform={p:String[1..2] | $p->formatMostRecentMemSQL('curdate()')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='now()')), @@ -169,18 +171,31 @@ function <> meta::relational::functions::sqlQueryToString::mems let dbSpecificUnits = [ - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';}, - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';}, - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';}, {| format('(%s)', [ - 'datediff(%s , %s)' + 'timestampdiff(YEAR, %s , %s)' ])}, - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';}, - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';}, {| format('(%s)', [ - 'time_to_sec(timediff(%s , %s))' + 'timestampdiff(MONTH, %s , %s)' ])}, - {| fail('The DurationUnit \''+$params->at(2)+'\' is not supported yet!');'';} + {| format('(%s)', [ + 'timestampdiff(WEEK, %s , %s)' + ])}, + {| format('(%s)', [ + 'timestampdiff(DAY, %s , %s)' + ])}, + {| format('(%s)', [ + 'timestampdiff(HOUR, %s , %s)' + ])}, + {| format('(%s)', [ + 'timestampdiff(MINUTE, %s , %s)' + ])}, + {| format('(%s)', [ + 'timestampdiff(SECOND, %s , %s)' + ])}, + {| format('(%s)', [ + // Since SingleStore's timestampdiff doesn't support a MILLISECOND unit, compute the diff in microseconds and divide by 1000 to obtain the required value (https://docs.singlestore.com/cloud/reference/sql-reference/date-and-time-functions/timestampdiff/) + 'floor(timestampdiff(MICROSECOND, %s , %s)/1000)' + ])} ]; format($dbSpecificUnits->at($indexOfDiff)->eval(), [$params->at(0), $params->at(1)]); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/tests/mapping/sqlFunction/testSqlFunctionsInMapping.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/tests/mapping/sqlFunction/testSqlFunctionsInMapping.pure index ec9eb4ca7ea..a02ba4f6546 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/tests/mapping/sqlFunction/testSqlFunctionsInMapping.pure +++
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/legend-engine-xt-relationalStore-memsql-pure/src/main/resources/core_relational_memsql/relational/tests/mapping/sqlFunction/testSqlFunctionsInMapping.pure @@ -79,7 +79,7 @@ function <> meta::relational::memsql::tests::mapping::sqlFunction::pa let s = toSQLString(|SqlFunctionDemo.all()->project([s | dateDiff($da, $db, DurationUnit.DAYS)], ['dateDiff']), testMapping, meta::relational::runtime::DatabaseType.MemSQL, meta::relational::extension::relationalExtensions()); - assertEquals('select (datediff(\'2017-04-01\' , \'2017-03-01\')) as `dateDiff` from dataTable as `root`',$s); + assertEquals('select (timestampdiff(DAY, \'2017-03-01\' , \'2017-04-01\')) as `dateDiff` from dataTable as `root`',$s); } function <> meta::relational::memsql::tests::mapping::sqlFunction::parseInteger::testToSQLStringDateDiffInSeconds_MemSQL():Boolean[1] @@ -90,7 +90,7 @@ function <> meta::relational::memsql::tests::mapping::sqlFunction::pa testMapping, meta::relational::runtime::DatabaseType.MemSQL, meta::relational::extension::relationalExtensions()); - assertEquals('select (time_to_sec(timediff(\'2017-03-01 20:08:08\' , \'2017-03-01 19:09:20\'))) as `dateDiff` from dataTable as `root`',$s); + assertEquals('select (timestampdiff(SECOND, \'2017-03-01 19:09:20\' , \'2017-03-01 20:08:08\')) as `dateDiff` from dataTable as `root`',$s); } function <> meta::relational::memsql::tests::mapping::sqlFunction::parseInteger::testToSQLStringConvertVarchar128_MemSQL():Boolean[1] diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/pom.xml index 6c59e3b3798..9fae3e44975 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-memsql/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/pom.xml index a74494a1a6e..338e10a6e25 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-postgres - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresDatabaseManager.java 
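The two memSQLExtension.pure hunks above change dateDiff in tandem. The transform now passes $p->at(0) before $p->at(1) because timestampdiff(unit, from, to) computes to - from, whereas the old datediff(a, b) computed a - b; the updated test expectations just above reflect the same swap. Every DurationUnit now lowers to timestampdiff, with milliseconds derived as floor(microseconds / 1000). A minimal Java sketch of that lowering, using hypothetical helper names (renderDateDiff, singular) rather than this PR's API:

public final class MemSQLDateDiffSketch
{
    // timestampdiff(unit, from, to) returns (to - from); the old datediff(a, b)
    // returned (a - b), hence the swapped argument order in the transform.
    static String renderDateDiff(String from, String to, String durationUnit)
    {
        if ("MILLISECONDS".equals(durationUnit))
        {
            // SingleStore's timestampdiff has no MILLISECOND unit, so take the
            // diff in microseconds and scale it down by 1000.
            return String.format("(floor(timestampdiff(MICROSECOND, %s , %s)/1000))", from, to);
        }
        return String.format("(timestampdiff(%s, %s , %s))", singular(durationUnit), from, to);
    }

    // DurationUnit names are plural (YEARS, DAYS, ...); the SQL unit tokens are singular.
    static String singular(String durationUnit)
    {
        return durationUnit.endsWith("S") ? durationUnit.substring(0, durationUnit.length() - 1) : durationUnit;
    }

    public static void main(String[] args)
    {
        // Prints (timestampdiff(DAY, '2017-03-01' , '2017-04-01')), the inner
        // expression asserted in the updated testSqlFunctionsInMapping.pure.
        System.out.println(renderDateDiff("'2017-03-01'", "'2017-04-01'", "DAYS"));
    }
}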
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresRelationalDatabaseManager.java similarity index 81% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresDatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresRelationalDatabaseManager.java index 26565642f82..12947ba478c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresDatabaseManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/java/org/finos/legend/connection/impl/PostgresRelationalDatabaseManager.java @@ -15,18 +15,17 @@ package org.finos.legend.connection.impl; import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.connection.DatabaseType; -import org.finos.legend.connection.DatabaseManager; +import org.finos.legend.connection.RelationalDatabaseManager; import java.util.List; import java.util.Properties; -public class PostgresDatabaseManager implements DatabaseManager +public class PostgresRelationalDatabaseManager implements RelationalDatabaseManager { @Override public List getIds() { - return Lists.mutable.with("PostgreSQL", DatabaseType.POSTGRES.getLabel()); + return Lists.mutable.with("PostgreSQL", RelationalDatabaseType.POSTGRES.getIdentifier()); } @Override diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager deleted file mode 100644 index 29f38d7a5c6..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager +++ /dev/null @@ -1 +0,0 @@ -org.finos.legend.connection.impl.PostgresDatabaseManager diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager new file 
mode 100644 index 00000000000..ed38b2b933e --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager @@ -0,0 +1 @@ +org.finos.legend.connection.impl.PostgresRelationalDatabaseManager diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution-tests/pom.xml index 08d34b2d2b7..f462fb57ff6 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-postgres - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution/pom.xml index 2c0295f7306..be9121c570f 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-postgres - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/pom.xml index df533361f06..367c3fe3476 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-postgres - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/src/main/resources/core_relational_postgres/relational/sqlQueryToString/postgresExtension.pure 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/src/main/resources/core_relational_postgres/relational/sqlQueryToString/postgresExtension.pure index c66d916ebdb..6b2051e742c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/src/main/resources/core_relational_postgres/relational/sqlQueryToString/postgresExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-pure/src/main/resources/core_relational_postgres/relational/sqlQueryToString/postgresExtension.pure @@ -24,7 +24,7 @@ function <> meta::relational::functions::sqlQueryToString::postg ^DbExtension( isBooleanLiteralSupported = true, - isDbReservedIdentifier = {str:String[1]| $str->in($reservedWords)}, + isDbReservedIdentifier = {str:String[1]| $str->toLower()->in($reservedWords)}, literalProcessor = $literalProcessor, joinStringsProcessor = processJoinStringsOperationForPostgres_JoinStrings_1__SqlGenerationContext_1__String_1_, selectSQLQueryProcessor = processSelectSQLQueryForPostgres_SelectSQLQuery_1__SqlGenerationContext_1__Boolean_1__String_1_, @@ -36,7 +36,110 @@ function <> meta::relational::functions::sqlQueryToString::postg function <> meta::relational::functions::sqlQueryToString::postgres::postgresReservedWords():String[*] { - []; + // https://www.postgresql.org/docs/current/sql-keywords-appendix.html + [ + 'all', + 'analyse', + 'analyze', + 'and', + 'any', + 'array', + 'as', + 'asc', + 'asymmetric', + 'authorization', + 'binary', + 'both', + 'case', + 'cast', + 'check', + 'collate', + 'collation', + 'column', + 'concurrently', + 'constraint', + 'create', + 'cross', + 'current_catalog', + 'current_date', + 'current_role', + 'current_schema', + 'current_time', + 'current_timestamp', + 'current_user', + 'default', + 'deferrable', + 'desc', + 'distinct', + 'do', + 'else', + 'end', + 'except', + 'false', + 'fetch', + 'for', + 'foreign', + 'freeze', + 'from', + 'full', + 'grant', + 'group', + 'having', + 'ilike', + 'in', + 'initially', + 'inner', + 'intersect', + 'into', + 'is', + 'isnull', + 'join', + 'lateral', + 'leading', + 'left', + 'like', + 'limit', + 'localtime', + 'localtimestamp', + 'natural', + 'not', + 'notnull', + 'null', + 'offset', + 'on', + 'only', + 'or', + 'order', + 'outer', + 'overlaps', + 'placing', + 'primary', + 'references', + 'returning', + 'right', + 'select', + 'session_user', + 'similar', + 'some', + 'symmetric', + 'system_user', + 'table', + 'tablesample', + 'then', + 'to', + 'trailing', + 'true', + 'union', + 'unique', + 'user', + 'using', + 'variadic', + 'verbose', + 'when', + 'where', + 'window', + 'with' + ]; } function <> meta::relational::functions::sqlQueryToString::postgres::getLiteralProcessorsForPostgres():Map[1] @@ -63,6 +166,7 @@ function <> meta::relational::functions::sqlQueryToString::postg dynaFnToSql('dateDiff', $allStates, ^ToSql(format='%s', transform={p:String[*]|generateDateDiffExpressionForPostgres($p)})), dynaFnToSql('datePart', $allStates, ^ToSql(format='Date(%s)')), dynaFnToSql('dayOfMonth', $allStates, ^ToSql(format='date_part(\'day\', %s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='to_char(%s, \'FMDay\')')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='date_part(\'dow\', %s)')), 
dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='date_part(\'doy\', %s)')), dynaFnToSql('firstDayOfMonth', $allStates, ^ToSql(format='date_trunc(\'month\', %s)')), @@ -82,6 +186,7 @@ function <> meta::relational::functions::sqlQueryToString::postg dynaFnToSql('length', $allStates, ^ToSql(format='char_length(%s)')), dynaFnToSql('minute', $allStates, ^ToSql(format='date_part(\'minute\', %s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='to_char(%s, \'FMMonth\')')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='date_part(\'month\', %s)')), dynaFnToSql('now', $allStates, ^ToSql(format='now()')), dynaFnToSql('parseDecimal', $allStates, ^ToSql(format='cast(%s as decimal)')), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-test-support/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-test-support/pom.xml index d2017fa2ad4..2ed974bc997 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-test-support/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/legend-engine-xt-relationalStore-postgres-test-support/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-postgres - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/pom.xml index 12feefb3d03..67695dbdf9a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-postgres/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/pom.xml index d0ad6061fc3..94fa54ca6a5 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-presto - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/src/main/resources/core_relational_presto/relational/sqlQueryToString/prestoExtension.pure 
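postgresReservedWords() above goes from an empty list to the full keyword appendix, and isDbReservedIdentifier now lowercases the identifier before the membership test: the list is stored lowercase, so a case-sensitive check would let SELECT or Select through unquoted. A minimal sketch of the same check, with hypothetical names (PostgresIdentifiersSketch and quoteIfReserved are not this PR's API) and only an excerpt of the list:

import java.util.Set;

final class PostgresIdentifiersSketch
{
    // Excerpt of the lowercase reserved-word list added above.
    private static final Set<String> RESERVED_WORDS =
            Set.of("all", "select", "table", "user", "where", "window");

    // Mirrors the new isDbReservedIdentifier lambda: lowercase first, then
    // test membership against the lowercase list.
    static boolean isReserved(String identifier)
    {
        return RESERVED_WORDS.contains(identifier.toLowerCase());
    }

    // Reserved identifiers must be double-quoted when rendered into SQL.
    static String quoteIfReserved(String identifier)
    {
        return isReserved(identifier) ? "\"" + identifier + "\"" : identifier;
    }
}

With this check, isReserved("SELECT"), isReserved("Select"), and isReserved("select") all return true, where the previous empty list would have returned false for every identifier.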
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/src/main/resources/core_relational_presto/relational/sqlQueryToString/prestoExtension.pure index 5ade44da7fe..92c28c34b4a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/src/main/resources/core_relational_presto/relational/sqlQueryToString/prestoExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/legend-engine-xt-relationalStore-presto-pure/src/main/resources/core_relational_presto/relational/sqlQueryToString/prestoExtension.pure @@ -58,6 +58,7 @@ function <> meta::relational::functions::sqlQueryToString::prest dynaFnToSql('convertVarchar128', $allStates, ^ToSql(format='cast(%s as VARCHAR(128))')), dynaFnToSql('dateDiff', $allStates, ^ToSql(format='date_diff(%s,%s,%s)', transform={p:String[*]|[$p->at(2)->replace('\'', '')->processDateDiffDurationUnitForPresto(),$p->at(0),$p->at(1)]})), dynaFnToSql('datePart', $allStates, ^ToSql(format='Date(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='date_format(%s, \'%W\')')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='day_of_week(%s)')), dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='day_of_year(%s)')), dynaFnToSql('firstDayOfMonth', $allStates, ^ToSql(format='date_trunc(\'month\', %s)')), @@ -79,6 +80,7 @@ function <> meta::relational::functions::sqlQueryToString::prest dynaFnToSql('minute', $allStates, ^ToSql(format='minute(%s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='date_format(%s, \'%M\')')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='date_add(\'day\', case when %s - day_of_week(%s) > 0 then %s - day_of_week(%s) - 7 else %s - day_of_week(%s) end, %s)', transform={p:String[1..2] | $p->formatMostRecentPresto('current_date')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='current_timestamp')), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/pom.xml index 76b70da91f5..43e2bee48a0 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-presto/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution-tests/pom.xml index 20fb01d5f9f..79c8ad0887e 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-redshift - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution/pom.xml index b2c9dd23ef8..9b991a6274f 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-redshift - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-grammar/pom.xml index 3fa7c830dfb..55c80d01dc1 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-redshift - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-protocol/pom.xml index f319632f4e2..2876d1ec4ff 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-redshift - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/pom.xml 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/pom.xml index a8684018722..d9b014e2a07 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-redshift - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/connection/metamodel.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/connection/metamodel.pure index e7f45966730..49c2e1c7f73 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/connection/metamodel.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/connection/metamodel.pure @@ -14,10 +14,10 @@ Class {doc.doc ='Specification for the AWS redshift database'} meta::pure::legend::connections::legend::specification::RedshiftDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification { - {doc.doc ='clusterID'} clusterID:String[1]; - {doc.doc ='The aws region'} region:String[1]; - {doc.doc ='the full host url'} host:String[1]; - {doc.doc ='database name'} databaseName:String[1]; - {doc.doc ='port'} port:Integer[1]; - {doc.doc ='Optional URL used for redshift service execution'} endpointURL:String[0..1]; + <> {doc.doc ='clusterID'} clusterID:String[1]; + <> {doc.doc ='The aws region'} region:String[1]; + <> {doc.doc ='the full host url'} host:String[1]; + <> {doc.doc ='database name'} databaseName:String[1]; + <> {doc.doc ='port'} port:Integer[1]; + <> {doc.doc ='Optional URL used for redshift service execution'} endpointURL:String[0..1]; } \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/sqlQueryToString/redshiftExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/sqlQueryToString/redshiftExtension.pure index f161fc6ed01..6d03e9c5220 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/sqlQueryToString/redshiftExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/legend-engine-xt-relationalStore-redshift-pure/src/main/resources/core_relational_redshift/relational/sqlQueryToString/redshiftExtension.pure @@ -52,12 +52,14 @@ function <> meta::relational::functions::sqlQueryToString::redsh dynaFnToSql('atan2', $allStates, ^ToSql(format='atan2(%s,%s)')), dynaFnToSql('concat', $allStates, ^ToSql(format='%s', transform={p:String[*]|$p->joinStrings(' + ')})), dynaFnToSql('datePart', $allStates, ^ToSql(format='trunc(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='to_char(%s, \'FMDay\')')), dynaFnToSql('hour', $allStates, ^ToSql(format='date_part(hour, %s)')), dynaFnToSql('joinStrings', $allStates, ^ToSql(format='listagg(%s, %s)')), dynaFnToSql('log10', $allStates, ^ToSql(format='log(%s)')), dynaFnToSql('minute', $allStates, ^ToSql(format='extract(minute from %s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='extract(month from %s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='to_char(%s, \'FMMonth\')')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='extract(month from %s)')), dynaFnToSql('now', $allStates, ^ToSql(format='now()')), dynaFnToSql('parseDecimal', $allStates, ^ToSql(format='cast(%s as decimal)')), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/pom.xml index e0c9170f489..ff63e076668 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-redshift/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/pom.xml index 903c4c38542..bd87a1ee8ef 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-snowflake - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -35,7 +35,11 @@ org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory + + + org.finos.legend.engine + legend-engine-xt-connection-protocol org.finos.legend.engine diff --git 
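The dayOfWeek and monthName dynaFns introduced above fill in the same two functions for each dialect; only the rendering template differs (the FM prefix in the Postgres and Redshift to_char patterns is "fill mode", which trims the blank-padding that 'Day' and 'Month' would otherwise produce). Collected as data, with the templates copied verbatim from the hunks (the Map layout itself is illustrative):

import java.util.Map;

final class DayAndMonthNameTemplates
{
    // dialect -> { dayOfWeek template, monthName template }; %s is the column
    // placeholder, exactly as in the ToSql formats above.
    static final Map<String, String[]> BY_DIALECT = Map.of(
            "MemSQL", new String[] {"dayname(%s)", "monthname(%s)"},
            "Postgres", new String[] {"to_char(%s, 'FMDay')", "to_char(%s, 'FMMonth')"},
            "Presto", new String[] {"date_format(%s, '%W')", "date_format(%s, '%M')"},
            "Redshift", new String[] {"to_char(%s, 'FMDay')", "to_char(%s, 'FMMonth')"});
}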
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/SnowflakeAccountType.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/SnowflakeAccountType.java deleted file mode 100644 index ddc11f3bd40..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/SnowflakeAccountType.java +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package org.finos.legend.connection; - -public enum SnowflakeAccountType -{ - VPS, - MultiTenant -} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/HACKY__SnowflakeConnectionAdapter.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/HACKY__SnowflakeConnectionAdapter.java index 32c89b91622..dbbbe7a9297 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/HACKY__SnowflakeConnectionAdapter.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/HACKY__SnowflakeConnectionAdapter.java @@ -14,15 +14,16 @@ package org.finos.legend.connection.impl; +import org.finos.legend.connection.Connection; import org.finos.legend.connection.HACKY__RelationalDatabaseConnectionAdapter; import org.finos.legend.connection.LegendEnvironment; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.protocol.SnowflakeConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.connection.SnowflakeConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.authentication.vault.PropertiesFileSecret; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.DatabaseType; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection; import 
org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.authentication.SnowflakePublicAuthenticationStrategy; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.specification.SnowflakeDatasourceSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.EncryptedPrivateKeyPairAuthenticationConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; public class HACKY__SnowflakeConnectionAdapter @@ -56,10 +57,10 @@ public ConnectionFactoryMaterial adapt(RelationalDatabaseConnection relationalDa connectionSpecification.accountType = datasourceSpecification.accountType; connectionSpecification.role = datasourceSpecification.role; - StoreInstance storeInstance = new StoreInstance.Builder(environment) - .withIdentifier("adapted-store") - .withStoreSupportIdentifier("Snowflake") - .withConnectionSpecification(connectionSpecification) + Connection connection = Connection.builder() + .databaseSupport(environment.getDatabaseSupport(RelationalDatabaseType.SNOWFLAKE)) + .identifier("adapted-store") + .connectionSpecification(connectionSpecification) .build(); EncryptedPrivateKeyPairAuthenticationConfiguration authenticationConfiguration = new EncryptedPrivateKeyPairAuthenticationConfiguration(); @@ -67,7 +68,7 @@ public ConnectionFactoryMaterial adapt(RelationalDatabaseConnection relationalDa authenticationConfiguration.privateKey = new PropertiesFileSecret(authenticationStrategy.privateKeyVaultReference); authenticationConfiguration.passphrase = new PropertiesFileSecret(authenticationStrategy.passPhraseVaultReference); - return new ConnectionFactoryMaterial(storeInstance, authenticationConfiguration); + return new ConnectionFactoryMaterial(connection, authenticationConfiguration); } return null; } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeConnectionBuilder.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeConnectionBuilder.java index 7c6d11bcaec..9b01d03bd6d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeConnectionBuilder.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeConnectionBuilder.java @@ -15,31 +15,28 @@ package org.finos.legend.connection.impl; import org.finos.legend.connection.Authenticator; -import org.finos.legend.connection.DatabaseType; -import org.finos.legend.connection.JDBCConnectionBuilder; -import org.finos.legend.connection.RelationalDatabaseStoreSupport; -import org.finos.legend.connection.StoreInstance; -import org.finos.legend.connection.protocol.SnowflakeConnectionSpecification; +import org.finos.legend.connection.Connection; +import org.finos.legend.connection.DatabaseSupport; +import org.finos.legend.engine.protocol.pure.v1.connection.SnowflakeConnectionSpecification; 
import org.finos.legend.engine.shared.core.identity.Credential; import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.engine.shared.core.identity.credential.PrivateKeyCredential; -import java.sql.Connection; import java.util.Optional; import java.util.Properties; import java.util.function.Function; -import static org.finos.legend.connection.impl.SnowflakeDatabaseManager.*; +import static org.finos.legend.connection.impl.SnowflakeRelationalDatabaseManager.*; public class SnowflakeConnectionBuilder { public static class WithKeyPair extends JDBCConnectionBuilder { @Override - public Connection getConnection(SnowflakeConnectionSpecification connectionSpecification, Authenticator authenticator, Identity identity) throws Exception + public java.sql.Connection getConnection(SnowflakeConnectionSpecification connectionSpecification, Authenticator authenticator, Identity identity) throws Exception { - StoreInstance storeInstance = authenticator.getStoreInstance(); - RelationalDatabaseStoreSupport.cast(storeInstance.getStoreSupport(), DatabaseType.SNOWFLAKE); + Connection connection = authenticator.getConnection(); + DatabaseSupport.verifyDatabaseType(connection.getDatabaseSupport(), RelationalDatabaseType.SNOWFLAKE); Properties connectionProperties = generateJDBCConnectionProperties(connectionSpecification); Function authenticationPropertiesSupplier = cred -> @@ -51,7 +48,7 @@ public Connection getConnection(SnowflakeConnectionSpecification connectionSpeci return properties; }; - return this.getConnectionManager().getConnection(DatabaseType.SNOWFLAKE, null, 0, connectionSpecification.databaseName, connectionProperties, this.getConnectionPoolConfig(), authenticationPropertiesSupplier, authenticator, identity); + return this.getConnectionManager().getConnection(RelationalDatabaseType.SNOWFLAKE, null, 0, connectionSpecification.databaseName, connectionProperties, this.getConnectionPoolConfig(), authenticationPropertiesSupplier, authenticator, identity); } } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeDatabaseManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeRelationalDatabaseManager.java similarity index 93% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeDatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeRelationalDatabaseManager.java index 81f78f2a73e..dafc4d2470c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeDatabaseManager.java +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/impl/SnowflakeRelationalDatabaseManager.java @@ -15,15 +15,13 @@ package org.finos.legend.connection.impl; import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.connection.DatabaseType; -import org.finos.legend.connection.SnowflakeAccountType; -import org.finos.legend.connection.DatabaseManager; +import org.finos.legend.connection.RelationalDatabaseManager; import org.finos.legend.engine.shared.core.operational.Assert; import java.util.List; import java.util.Properties; -public class SnowflakeDatabaseManager implements DatabaseManager +public class SnowflakeRelationalDatabaseManager implements RelationalDatabaseManager { private static final String PRIVATELINK_SNOWFLAKECOMPUTING_COM = ".privatelink.snowflakecomputing.com"; private static final String SNOWFLAKECOMPUTING_COM = ".snowflakecomputing.com"; @@ -48,7 +46,7 @@ public class SnowflakeDatabaseManager implements DatabaseManager @Override public List getIds() { - return Lists.mutable.with(DatabaseType.SNOWFLAKE.getLabel()); + return Lists.mutable.with(RelationalDatabaseType.SNOWFLAKE.getIdentifier()); } @Override @@ -104,4 +102,10 @@ public void buildMultiTenantHostname(String accountName, String region, StringBu { url.append(accountName).append(".").append(region); } + + public static enum SnowflakeAccountType + { + VPS, + MultiTenant + } } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager deleted file mode 100644 index fa4a918a8d0..00000000000 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager +++ /dev/null @@ -1 +0,0 @@ -org.finos.legend.connection.impl.SnowflakeDatabaseManager diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager new file mode 100644 index 00000000000..aaffb6f32f8 --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager @@ -0,0 +1 @@ +org.finos.legend.connection.impl.SnowflakeRelationalDatabaseManager diff --git 
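Taken together, the Snowflake hunks above swap the StoreInstance-centric wiring for the new Connection abstraction: the builder is keyed on a DatabaseSupport rather than a store-support identifier, the type guard becomes DatabaseSupport.verifyDatabaseType, and java.sql.Connection now has to be fully qualified because it clashes with org.finos.legend.connection.Connection. Reassembled as one fragment (the builder and guard calls are copied from the hunks; the surrounding variables and the exact package of RelationalDatabaseType are assumptions):

// Assumed imports: org.finos.legend.connection.Connection and
// org.finos.legend.connection.DatabaseSupport; environment and
// connectionSpecification are supplied by the enclosing adapter.
Connection connection = Connection.builder()
        .databaseSupport(environment.getDatabaseSupport(RelationalDatabaseType.SNOWFLAKE))
        .identifier("adapted-store")
        .connectionSpecification(connectionSpecification)
        .build();

// Replaces the old RelationalDatabaseStoreSupport.cast(...) check.
DatabaseSupport.verifyDatabaseType(connection.getDatabaseSupport(), RelationalDatabaseType.SNOWFLAKE);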
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution-tests/pom.xml index b9dad684373..7b2aec87e68 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-snowflake - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/pom.xml index d5fd3a87cbe..c55c72deaa3 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-snowflake - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeExplodeSemiStructured.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeExplodeSemiStructured.java index 067d778e714..5d1a410d1e6 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeExplodeSemiStructured.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeExplodeSemiStructured.java @@ -33,7 +33,7 @@ public void testSimplePrimitivePropertiesProjectExplodeSource() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\"), (Orders/Identifier, String, VARCHAR(100), \"\"), (Orders/Price, Float, DOUBLE, \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"Orders/Id\", 
VARCHAR(100)), (\"Orders/Identifier\", VARCHAR(100)), (\"Orders/Price\", DOUBLE)]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".ACCOUNT as \"Account\", \"blocks_1\".ID as \"Orders/Id\", \"blocks_1\".IDENTIFIER as \"Orders/Identifier\", \"blocks_1\".PRICE as \"Orders/Price\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".\"ACCOUNT\" as \"Account\", \"blocks_1\".ID as \"Orders/Id\", \"blocks_1\".IDENTIFIER as \"Orders/Identifier\", \"blocks_1\".PRICE as \"Orders/Price\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\"), (Orders/Identifier, String, VARCHAR(100), \"\"), (Orders/Price, Float, DOUBLE, \"\")]\n"; @@ -51,7 +51,7 @@ public void testSimplePrimitivePropertiesProjectExplodeTarget() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Status\", VARCHAR(100)), (\"Block/Id\", VARCHAR(100)), (\"Block/Account\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\", \"trades_1\".ID as \"Block/Id\", \"trades_1\".ACCOUNT as \"Block/Account\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".ACCOUNT, \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".ACCOUNT, \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) 
as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\", \"trades_1\".ID as \"Block/Id\", \"trades_1\".\"ACCOUNT\" as \"Block/Account\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".\"ACCOUNT\", \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".\"ACCOUNT\", \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\")]\n"; @@ -87,7 +87,7 @@ public void testComplexProjectMultiplePropertiesToExplodeInProject() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\"), (Orders/Identifier, String, VARCHAR(100), \"\"), (Trades/Id, String, VARCHAR(100), \"\"), (Trades/Status, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"Orders/Id\", VARCHAR(100)), (\"Orders/Identifier\", VARCHAR(100)), (\"Trades/Id\", VARCHAR(100)), (\"Trades/Status\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".ACCOUNT as \"Account\", \"blocks_1\".ID as \"Orders/Id\", \"blocks_1\".IDENTIFIER as \"Orders/Identifier\", \"blocks_3\".ID as \"Trades/Id\", \"blocks_3\".STATUS as \"Trades/Status\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_3\" on (\"root\".ID = \"blocks_3\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".\"ACCOUNT\" 
as \"Account\", \"blocks_1\".ID as \"Orders/Id\", \"blocks_1\".IDENTIFIER as \"Orders/Identifier\", \"blocks_3\".ID as \"Trades/Id\", \"blocks_3\".STATUS as \"Trades/Status\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_3\" on (\"root\".ID = \"blocks_3\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\"), (Orders/Identifier, String, VARCHAR(100), \"\"), (Trades/Id, String, VARCHAR(100), \"\"), (Trades/Status, String, VARCHAR(100), \"\")]\n"; @@ -105,7 +105,7 @@ public void testSimplePrimitivePropertiesProjectWithFilterOnSource() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Trades/Id, String, VARCHAR(100), \"\"), (Trades/Status, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"Trades/Id\", VARCHAR(100)), (\"Trades/Status\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".ACCOUNT as \"Account\", \"blocks_1\".ID as \"Trades/Id\", \"blocks_1\".STATUS as \"Trades/Status\" from Semistructured.Blocks as \"root\" left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) where (\"root\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"root\".BLOCKDATA['status']::varchar is null)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".\"ACCOUNT\" as \"Account\", \"blocks_1\".ID as \"Trades/Id\", \"blocks_1\".STATUS as \"Trades/Status\" from 
Semistructured.Blocks as \"root\" left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) where (\"root\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"root\".BLOCKDATA['status']::varchar is null)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Trades/Id, String, VARCHAR(100), \"\"), (Trades/Status, String, VARCHAR(100), \"\")]\n"; @@ -123,7 +123,7 @@ public void testSimplePrimitivePropertiesProjectWithFilterOnTarget() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Status\", VARCHAR(100)), (\"Block/Id\", VARCHAR(100)), (\"Block/Account\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\", \"trades_1\".ID as \"Block/Id\", \"trades_1\".ACCOUNT as \"Block/Account\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".ACCOUNT, \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".ACCOUNT, \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0) where (\"trades_1\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"trades_1\".BLOCKDATA['status']::varchar is null)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\", \"trades_1\".ID as \"Block/Id\", \"trades_1\".\"ACCOUNT\" as \"Block/Account\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".\"ACCOUNT\", \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".\"ACCOUNT\", \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0) where (\"trades_1\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"trades_1\".BLOCKDATA['status']::varchar is null)\n" + " connection = 
RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\")]\n"; @@ -141,7 +141,7 @@ public void testProjectWithExplodedPropertyAccessOnlyInFilter() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Status\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".ACCOUNT, \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".ACCOUNT, \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0) where (\"trades_1\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"trades_1\".BLOCKDATA['status']::varchar is null)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".STATUS as \"Status\" from Semistructured.Trades as \"root\" left outer join (select \"trades_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".\"ACCOUNT\", \"blocks_0\".BLOCKDATA from Semistructured.Trades as \"trades_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".\"ACCOUNT\", \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_2\".ID)) as \"trades_1\" on (\"root\".ID = \"trades_1\".leftJoinKey_0) where (\"trades_1\".BLOCKDATA['status']::varchar <> 'cancelled' OR \"trades_1\".BLOCKDATA['status']::varchar is null)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Status, String, VARCHAR(100), \"\")]\n"; @@ -159,7 +159,7 @@ public void testFilterOnExplodedPropertyFilteringInsideProject() " (\n" + " type = TDS[(Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\"), (Big Buy Orders, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Block/Id\", VARCHAR(100)), (\"Block/Account\", VARCHAR(100)), (\"Big Buy Orders\", VARCHAR(100)), (\"Orders/Id\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Block/Id\", \"root\".ACCOUNT as \"Block/Account\", \"blocks_1\".ID as \"Big Buy Orders\", \"blocks_3\".ID as \"Orders/Id\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => 
\"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0 and (\"blocks_1\".QUANTITY >= 100 and \"blocks_1\".SIDE = 'BUY')) left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_3\" on (\"root\".ID = \"blocks_3\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Block/Id\", \"root\".\"ACCOUNT\" as \"Block/Account\", \"blocks_1\".ID as \"Big Buy Orders\", \"blocks_3\".ID as \"Orders/Id\" from Semistructured.Blocks as \"root\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0 and (\"blocks_1\".QUANTITY >= 100 and \"blocks_1\".SIDE = 'BUY')) left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_3\" on (\"root\".ID = \"blocks_3\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Block/Id, String, VARCHAR(100), \"\"), (Block/Account, String, VARCHAR(100), \"\"), (Big Buy Orders, String, VARCHAR(100), \"\"), (Orders/Id, String, VARCHAR(100), \"\")]\n"; @@ -177,7 +177,7 @@ public void testAggregationAggregateExplodedPropertyUsingGroupBy() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (quantity, Integer, INT, \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"quantity\", \"\")]\n" + - " sql = select \"root\".ID as \"Id\", 
\"root\".ACCOUNT as \"Account\", sum(\"blocks_1\".TRADESUMMARY['execQuantity']) as \"quantity\" from Semistructured.Blocks as \"root\" left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) group by \"Id\",\"Account\"\n" + + " sql = select \"root\".ID as \"Id\", \"root\".\"ACCOUNT\" as \"Account\", sum(\"blocks_1\".TRADESUMMARY['execQuantity']) as \"quantity\" from Semistructured.Blocks as \"root\" left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_2\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_2\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_2\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_2\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".leftJoinKey_0) group by \"Id\",\"Account\"\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (quantity, Integer, INT, \"\")]\n"; @@ -195,7 +195,7 @@ public void testAggregationAggregateExplodedPropertyInsideProject() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Buy Order, Integer, \"\", \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"Buy Order\", INT)]\n" + - " sql = select \"root\".ID as \"Id\", \"root\".ACCOUNT as \"Account\", \"blocks_1\".aggCol as \"Buy Order\" from Semistructured.Blocks as \"root\" left outer join (select \"blocks_2\".ID as ID, sum(\"blocks_3\".QUANTITY) as aggCol from Semistructured.Blocks as \"blocks_2\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_4\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_4\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_4\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_4\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_3\" on (\"blocks_2\".ID = \"blocks_3\".leftJoinKey_0) where \"blocks_3\".SIDE = 'BUY' group by \"blocks_2\".ID) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".ID)\n" + + " sql = select \"root\".ID as \"Id\", \"root\".\"ACCOUNT\" as \"Account\", \"blocks_1\".aggCol as \"Buy Order\" from Semistructured.Blocks as 
\"root\" left outer join (select \"blocks_2\".ID as ID, sum(\"blocks_3\".QUANTITY) as aggCol from Semistructured.Blocks as \"blocks_2\" left outer join (select \"orders_0\".ID, \"orders_0\".IDENTIFIER, \"orders_0\".QUANTITY, \"orders_0\".SIDE, \"orders_0\".PRICE, \"blocks_4\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_4\" inner join Semistructured.Orders as \"orders_0\" on (to_varchar(get_path(\"blocks_4\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_4\".flattened_prop, 'tagId')) = \"orders_0\".ID)) as \"blocks_3\" on (\"blocks_2\".ID = \"blocks_3\".leftJoinKey_0) where \"blocks_3\".SIDE = 'BUY' group by \"blocks_2\".ID) as \"blocks_1\" on (\"root\".ID = \"blocks_1\".ID)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Buy Order, Integer, \"\", \"\")]\n"; @@ -213,7 +213,7 @@ public void testSimpleJoinChainOneJoin() " (\n" + " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Account\", VARCHAR(100)), (\"Block/Id\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"orders_1\".ACCOUNT as \"Account\", \"orders_1\".ID as \"Block/Id\" from Semistructured.Orders as \"root\" left outer join (select \"orders_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".ACCOUNT, \"blocks_0\".BLOCKDATA from Semistructured.Orders as \"orders_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".ACCOUNT, \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"orders_2\".ID)) as \"orders_1\" on (\"root\".ID = \"orders_1\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Id\", \"orders_1\".\"ACCOUNT\" as \"Account\", \"orders_1\".ID as \"Block/Id\" from Semistructured.Orders as \"root\" left outer join (select \"orders_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".\"ACCOUNT\", \"blocks_0\".BLOCKDATA from Semistructured.Orders as \"orders_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".\"ACCOUNT\", \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"orders_2\".ID)) as \"orders_1\" on (\"root\".ID = \"orders_1\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Account, String, VARCHAR(100), \"\"), (Block/Id, String, VARCHAR(100), \"\")]\n"; @@ -249,7 +249,7 @@ public void testJoinChainMultipleJoinsMultipleExplode() " (\n" + " type = TDS[(Id, String, 
VARCHAR(100), \"\"), (Trade Id, String, VARCHAR(100), \"\")]\n" + " resultColumns = [(\"Id\", VARCHAR(100)), (\"Trade Id\", VARCHAR(100))]\n" + - " sql = select \"root\".ID as \"Id\", \"blocks_2\".ID as \"Trade Id\" from Semistructured.Orders as \"root\" left outer join (select \"orders_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".ACCOUNT, \"blocks_0\".BLOCKDATA from Semistructured.Orders as \"orders_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".ACCOUNT, \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"orders_2\".ID)) as \"orders_1\" on (\"root\".ID = \"orders_1\".leftJoinKey_0) left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_0\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_2\" on (\"orders_1\".ID = \"blocks_2\".leftJoinKey_0)\n" + + " sql = select \"root\".ID as \"Id\", \"blocks_2\".ID as \"Trade Id\" from Semistructured.Orders as \"root\" left outer join (select \"orders_2\".ID as leftJoinKey_0, \"blocks_0\".ID, \"blocks_0\".\"ACCOUNT\", \"blocks_0\".BLOCKDATA from Semistructured.Orders as \"orders_2\" inner join (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID, \"root\".\"ACCOUNT\", \"root\".BLOCKDATA from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'order' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"orders_2\".ID)) as \"orders_1\" on (\"root\".ID = \"orders_1\".leftJoinKey_0) left outer join (select \"trades_0\".ID, \"trades_0\".STATUS, \"trades_0\".TRADESUMMARY, \"blocks_0\".leftJoinKey_0 as leftJoinKey_0 from (select \"ss_flatten_0\".value as flattened_prop, \"root\".ID as leftJoinKey_0 from Semistructured.Blocks as \"root\" inner join lateral flatten(input => \"root\".BLOCKDATA['relatedEntities'], outer => true, recursive => false, mode => 'array') as \"ss_flatten_0\") as \"blocks_0\" inner join Semistructured.Trades as \"trades_0\" on (to_varchar(get_path(\"blocks_0\".flattened_prop, 'tag')) = 'trade' and to_varchar(get_path(\"blocks_0\".flattened_prop, 'tagId')) = \"trades_0\".ID)) as \"blocks_2\" on (\"orders_1\".ID = \"blocks_2\".leftJoinKey_0)\n" + " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" + " )\n"; String TDSType = " type = TDS[(Id, String, VARCHAR(100), \"\"), (Trade Id, String, VARCHAR(100), \"\")]\n"; diff --git 
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeSemiStructuredMatching.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeSemiStructuredMatching.java
index 93645d33e31..35e5cf33790 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeSemiStructuredMatching.java
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-execution/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/test/semiStructured/TestSnowflakeSemiStructuredMatching.java
@@ -115,7 +115,7 @@ public void testSemiStructuredMatchWithVariableAccess()
 " (\n" +
 " type = TDS[(Max Amount Flag, Boolean, \"\", \"\")]\n" +
 " resultColumns = [(\"Max Amount Flag\", \"\")]\n" +
- " sql = select case when \"root\".CUSTOMER['transactionDetails']['payment']['@type']::varchar in ('CashOnDeliveryPayment') then case when \"root\".CUSTOMER['transactionDetails']['payment']['amountToBePaid'] < ${maxAmount} then 'true' else 'false' end when \"root\".CUSTOMER['transactionDetails']['payment']['@type']::varchar in ('PrepaidPayment', 'WalletPrepaidPayment', 'CardPrepaidPayment') then case when \"root\".CUSTOMER['transactionDetails']['payment']['amountPaid'] < ${maxAmount} then 'true' else 'false' end else null end as \"Max Amount Flag\" from ORDER_SCHEMA.ORDER_TABLE as \"root\"\n" +
+ " sql = select case when \"root\".CUSTOMER['transactionDetails']['payment']['@type']::varchar in ('CashOnDeliveryPayment') then case when \"root\".CUSTOMER['transactionDetails']['payment']['amountToBePaid'] < ${maxAmount} then true else false end when \"root\".CUSTOMER['transactionDetails']['payment']['@type']::varchar in ('PrepaidPayment', 'WalletPrepaidPayment', 'CardPrepaidPayment') then case when \"root\".CUSTOMER['transactionDetails']['payment']['amountPaid'] < ${maxAmount} then true else false end else null end as \"Max Amount Flag\" from ORDER_SCHEMA.ORDER_TABLE as \"root\"\n" +
 " connection = RelationalDatabaseConnection(type = \"Snowflake\")\n" +
 " )\n" +
 " ) \n" +
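The expected SQL above changes because Snowflake accepts boolean literals directly, so the generated case expression can yield bare true/false rather than the quoted strings 'true'/'false' (the extension later in this patch sets isBooleanLiteralSupported = true and adds a Boolean LiteralProcessor). A rough sketch of that dialect switch, with hypothetical names; only the fallback shape for non-boolean dialects is assumed here:

    // Hypothetical sketch of dialect-sensitive boolean literal rendering.
    public final class BooleanLiteral
    {
        public static String render(boolean value, boolean dialectSupportsBooleanLiterals)
        {
            // Dialects with native booleans emit the bare keyword; others fall back to quoted strings.
            return dialectSupportsBooleanLiterals ? Boolean.toString(value) : "'" + value + "'";
        }

        public static void main(String[] args)
        {
            System.out.println(render(true, true));  // true   (Snowflake)
            System.out.println(render(true, false)); // 'true' (string fallback)
        }
    }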
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-grammar/pom.xml
index 17d450ecb30..e4f902b9b49 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-grammar/pom.xml
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-grammar/pom.xml
@@ -19,7 +19,7 @@
     <parent>
         <groupId>org.finos.legend.engine</groupId>
         <artifactId>legend-engine-xt-relationalStore-snowflake</artifactId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/pom.xml
index 0309ecd6134..c3232be1328 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/pom.xml
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/pom.xml
@@ -19,7 +19,7 @@
     <parent>
         <groupId>org.finos.legend.engine</groupId>
         <artifactId>legend-engine-xt-relationalStore-snowflake</artifactId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
@@ -37,6 +37,10 @@
         <dependency>
             <groupId>org.finos.legend.engine</groupId>
             <artifactId>legend-engine-xt-relationalStore-protocol</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.finos.legend.engine</groupId>
+            <artifactId>legend-engine-xt-connection-protocol</artifactId>
+        </dependency>
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/SnowflakeProtocolExtension.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/SnowflakeProtocolExtension.java
index f4dd2829f20..7d1b06fd077 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/SnowflakeProtocolExtension.java
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/SnowflakeProtocolExtension.java
@@ -16,6 +16,8 @@
 import org.eclipse.collections.api.block.function.Function0;
 import org.eclipse.collections.impl.factory.Lists;
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification;
+import org.finos.legend.engine.protocol.pure.v1.connection.SnowflakeConnectionSpecification;
 import org.finos.legend.engine.protocol.pure.v1.extension.ProtocolSubTypeInfo;
 import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension;
 import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.authentication.AuthenticationStrategy;
@@ -31,14 +33,18 @@ public class SnowflakeProtocolExtension implements PureProtocolExtension
     public List<Function0<List<ProtocolSubTypeInfo<?>>>> getExtraProtocolSubTypeInfoCollectors()
     {
         return Lists.fixedSize.with(() -> Lists.fixedSize.with(
-                //DatasourceSpecification
-                ProtocolSubTypeInfo.newBuilder(DatasourceSpecification.class)
-                        .withSubtype(SnowflakeDatasourceSpecification.class, "snowflake")
-                        .build(),
-                // AuthenticationStrategy
-                ProtocolSubTypeInfo.newBuilder(AuthenticationStrategy.class)
-                        .withSubtype(SnowflakePublicAuthenticationStrategy.class, "snowflakePublic")
-                        .build()
+                // DatasourceSpecification
+                ProtocolSubTypeInfo.newBuilder(DatasourceSpecification.class)
+                        .withSubtype(SnowflakeDatasourceSpecification.class, "snowflake")
+                        .build(),
+                // AuthenticationStrategy
+                ProtocolSubTypeInfo.newBuilder(AuthenticationStrategy.class)
+                        .withSubtype(SnowflakePublicAuthenticationStrategy.class, "snowflakePublic")
+                        .build(),
+                // ConnectionSpecification
+                ProtocolSubTypeInfo.newBuilder(ConnectionSpecification.class)
+                        .withSubtype(SnowflakeConnectionSpecification.class, "snowflake")
+                        .build()
         ));
     }
 }
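The hunk above registers SnowflakeConnectionSpecification against the ConnectionSpecification base type under the "snowflake" discriminator, so the protocol layer can deserialize the polymorphic connection JSON into the right subtype. The mechanism is comparable in spirit to Jackson named-subtype registration; a standalone sketch with stand-in classes (these are not the engine's actual types, and the "_type" property name is only assumed to match the protocol's discriminator convention):

    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.jsontype.NamedType;

    // Stand-in base class playing the role of ConnectionSpecification.
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type")
    abstract class Spec {}

    // Stand-in for SnowflakeConnectionSpecification.
    class SnowflakeSpec extends Spec
    {
        public String accountName;
    }

    public final class SubtypeRegistrationSketch
    {
        public static void main(String[] args) throws Exception
        {
            ObjectMapper mapper = new ObjectMapper();
            // Comparable to ProtocolSubTypeInfo.withSubtype(SnowflakeConnectionSpecification.class, "snowflake")
            mapper.registerSubtypes(new NamedType(SnowflakeSpec.class, "snowflake"));

            Spec spec = mapper.readValue("{\"_type\":\"snowflake\",\"accountName\":\"account\"}", Spec.class);
            System.out.println(spec.getClass().getSimpleName()); // SnowflakeSpec
        }
    }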
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/protocol/SnowflakeConnectionSpecification.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/connection/SnowflakeConnectionSpecification.java
similarity index 91%
rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/protocol/SnowflakeConnectionSpecification.java
rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/connection/SnowflakeConnectionSpecification.java
index 555f44c7979..0e2118f6551 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-connection/src/main/java/org/finos/legend/connection/protocol/SnowflakeConnectionSpecification.java
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/connection/SnowflakeConnectionSpecification.java
@@ -12,7 +12,9 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-package org.finos.legend.connection.protocol;
+package org.finos.legend.engine.protocol.pure.v1.connection;
+
+import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification;
 public class SnowflakeConnectionSpecification extends ConnectionSpecification
 {
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/pom.xml
index 9398b2bbf95..28c36759c8b 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/pom.xml
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/pom.xml
@@ -19,7 +19,7 @@
     <parent>
         <groupId>org.finos.legend.engine</groupId>
         <artifactId>legend-engine-xt-relationalStore-snowflake</artifactId>
-        <version>4.32.1-SNAPSHOT</version>
+        <version>4.35.4-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake.definition.json b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake.definition.json
index 26e2977f5f2..c0aa332c785 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake.definition.json
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake.definition.json
@@ -1,5 +1,5 @@
 {
   "name" : "core_relational_snowflake",
-  "pattern" : "(meta::relational::functions::sqlQueryToString::snowflake|meta::relational::tests::sqlQueryToString::snowflake|meta::relational::tests::sqlToString::snowflake|meta::pure::executionPlan::tests::snowflake|meta::relational::tests::projection::snowflake|meta::relational::tests::query::snowflake|meta::relational::tests::tds::snowflake|meta::relational::tests::mapping::function::snowflake|meta::relational::tests::postProcessor::snowflake|meta::pure::alloy::connections|meta::protocols::pure)(::.*)?",
+  "pattern" : "(meta::relational::functions::sqlQueryToString::snowflake|meta::relational::tests::connEquality|meta::relational::tests::sqlQueryToString::snowflake|meta::relational::tests::sqlToString::snowflake|meta::pure::executionPlan::tests::snowflake|meta::relational::tests::projection::snowflake|meta::relational::tests::query::snowflake|meta::relational::tests::tds::snowflake|meta::relational::tests::mapping::function::snowflake|meta::relational::tests::postProcessor::snowflake|meta::pure::alloy::connections|meta::protocols::pure)(::.*)?",
   "dependencies" : ["platform", "platform_functions", "platform_store_relational", "platform_dsl_mapping", "core_functions", "core", "core_relational"]
 }
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/connectionEqualityTest.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/connectionEqualityTest.pure
new file mode 100644
index 00000000000..d0600c577db
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/connectionEqualityTest.pure
@@ -0,0 +1,48 @@
+// Copyright 2021 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import meta::relational::metamodel::execute::tests::*;
+import meta::external::store::relational::runtime::*;
+import meta::pure::runtime::*;
+import meta::relational::translation::*;
+import meta::pure::extension::*;
+import meta::relational::extension::*;
+import meta::relational::runtime::*;
+import meta::relational::tests::csv::*;
+import meta::relational::metamodel::execute::*;
+import meta::relational::metamodel::*;
+import meta::pure::mapping::*;
+
+
+
+function <<test.Test>> meta::relational::tests::connEquality::testConnectionEqualityAllSameSnowflake() : Boolean[1]
+{
+  let c1 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Snowflake,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::SnowflakeDatasourceSpecification(accountName='account', region='region', warehouseName='wh', databaseName='db'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::SnowflakePublicAuthenticationStrategy(privateKeyVaultReference='pkvr', publicUserName ='public', passPhraseVaultReference='ppVR')
+  );
+
+  let c2 = ^RelationalDatabaseConnection(
+
+    type = DatabaseType.Snowflake,
+    datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::SnowflakeDatasourceSpecification(accountName='account', region='region', warehouseName='wh', databaseName='db'),
+    authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::SnowflakePublicAuthenticationStrategy(privateKeyVaultReference='pkvr', publicUserName ='public', passPhraseVaultReference='ppVR')
+  );
+
+  assert(runRelationalRouterExtensionConnectionEquality($c1, $c2));
+
+}
+
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/metamodel.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/metamodel.pure
index 5cd3bff5dbb..19fb053d8f0 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/metamodel.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/connection/metamodel.pure
@@ -19,28 +19,28 @@ Enum meta::pure::alloy::connections::alloy::specification::SnowflakeAccountType
 Class meta::pure::alloy::connections::alloy::specification::SnowflakeDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-   accountName:String[1];
-   region:String[1];
-   warehouseName:String[1];
-   databaseName:String[1];
-   role:String[0..1];
+   <<equality.Key>> accountName:String[1];
+   <<equality.Key>> region:String[1];
+   <<equality.Key>> warehouseName:String[1];
+   <<equality.Key>> databaseName:String[1];
+   <<equality.Key>> role:String[0..1];
    proxyHost:String[0..1];
    proxyPort:String[0..1];
    nonProxyHosts:String[0..1];
-   accountType: meta::pure::alloy::connections::alloy::specification::SnowflakeAccountType[0..1];
-   organization:String[0..1];
-   cloudType:String[0..1];
+   <<equality.Key>> accountType: meta::pure::alloy::connections::alloy::specification::SnowflakeAccountType[0..1];
+   <<equality.Key>> organization:String[0..1];
+   <<equality.Key>> cloudType:String[0..1];
-   quotedIdentifiersIgnoreCase:Boolean[0..1];
-   enableQueryTags: Boolean[0..1];
+   <<equality.Key>> quotedIdentifiersIgnoreCase:Boolean[0..1];
+   <<equality.Key>> enableQueryTags: Boolean[0..1];
 }
 Class meta::pure::alloy::connections::alloy::authentication::SnowflakePublicAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-   privateKeyVaultReference:String[1];
-   passPhraseVaultReference:String[1];
-   publicUserName:String[1];
+   <<equality.Key>> privateKeyVaultReference:String[1];
+   <<equality.Key>> passPhraseVaultReference:String[1];
+   <<equality.Key>> publicUserName:String[1];
 }
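The metamodel hunk above tags the datasource and authentication properties that participate in connection equality, which the new connEquality test earlier in this patch exercises via runRelationalRouterExtensionConnectionEquality: two connections compare equal when all key-tagged properties match, regardless of the untagged ones (proxy settings here). A toy illustration of key-based equality, using a hypothetical @Key annotation in place of the Pure equality.Key stereotype:

    import java.lang.annotation.*;
    import java.lang.reflect.Field;
    import java.util.Objects;

    // Hypothetical stand-in for the Pure <<equality.Key>> stereotype.
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    @interface Key {}

    class SnowflakeDatasource
    {
        @Key String accountName;
        @Key String region;
        String proxyHost; // not a key: ignored by the comparison

        SnowflakeDatasource(String accountName, String region, String proxyHost)
        {
            this.accountName = accountName;
            this.region = region;
            this.proxyHost = proxyHost;
        }
    }

    public final class KeyEqualitySketch
    {
        // Two instances are "equal" when every @Key field matches.
        static boolean keyEquals(Object a, Object b) throws IllegalAccessException
        {
            if (a.getClass() != b.getClass())
            {
                return false;
            }
            for (Field f : a.getClass().getDeclaredFields())
            {
                if (f.isAnnotationPresent(Key.class) && !Objects.equals(f.get(a), f.get(b)))
                {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) throws Exception
        {
            SnowflakeDatasource c1 = new SnowflakeDatasource("account", "region", "proxyA");
            SnowflakeDatasource c2 = new SnowflakeDatasource("account", "region", "proxyB");
            System.out.println(keyEquals(c1, c2)); // true: proxyHost is not an equality key
        }
    }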
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/executionPlan/tests/executionPlanTestSnowflake.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/executionPlan/tests/executionPlanTestSnowflake.pure
index f9a79437922..d6f3b67a498 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/executionPlan/tests/executionPlanTestSnowflake.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/executionPlan/tests/executionPlanTestSnowflake.pure
@@ -53,7 +53,7 @@ import meta::pure::functions::collection::*;
 function <<test.Test>> meta::pure::executionPlan::tests::snowflake::testFilterEqualsWithOptionalParameter_Snowflake():Boolean[1]
 {
    let generatedPlan = executionPlan({optionalID: String[0..1], optionalActive: Boolean[0..1]|Interaction.all()->filter(i|$i.id==$optionalID && $i.active==$optionalActive)->project(col(i|$i.time, 'Time'))}, simpleRelationalMapping, ^Runtime(connectionStores=^ConnectionStore(element = relationalDB,connection=meta::pure::executionPlan::tests::snowflake::relationalConnectionForSnowflake(true))), meta::relational::extension::relationalExtensions());
-   let expectedPlan = 'RelationalBlockExecutionNode(type=TDS[(Time,Integer,INT,"")](FunctionParametersValidationNode(functionParameters=[optionalID:String[0..1],optionalActive:Boolean[0..1]])SQL(type=VoidresultColumns=[]sql=ALTERSESSIONSETQUERY_TAG=\'{"executionTraceID":"${execID}","engineUser":"${userId}","referer":"${referer}"}\';connection=RelationalDatabaseConnection(type="Snowflake"))Relational(type=TDS[(Time,Integer,INT,"")]resultColumns=[("Time",INT)]sql=select"root".timeas"Time"frominteractionTableas"root"where((${optionalVarPlaceHolderOperationSelector(optionalID![],\'"root".ID=${varPlaceHolderToString(optionalID![]"\\\'""\\\'"{"\\\'":"\\\'\\\'"}"null")}\',\'"root".IDisnull\')})and(${optionalVarPlaceHolderOperationSelector(optionalActive![],\'casewhen"root".active=\\\'Y\\\'then\\\'true\\\'else\\\'false\\\'end=${varPlaceHolderToString(optionalActive![]"\\\'""\\\'"{}"null")}\',\'casewhen"root".active=\\\'Y\\\'then\\\'true\\\'else\\\'false\\\'endisnull\')}))connection=RelationalDatabaseConnection(type="Snowflake")))finallyExecutionNodes=(SQL(type=VoidresultColumns=[]sql=ALTERSESSIONUNSETQUERY_TAG;connection=RelationalDatabaseConnection(type="Snowflake"))))';
+   let expectedPlan = 'RelationalBlockExecutionNode(type=TDS[(Time,Integer,INT,"")](FunctionParametersValidationNode(functionParameters=[optionalID:String[0..1],optionalActive:Boolean[0..1]])SQL(type=VoidresultColumns=[]sql=ALTERSESSIONSETQUERY_TAG=\'{"executionTraceID":"${execID}","engineUser":"${userId}","referer":"${referer}"}\';connection=RelationalDatabaseConnection(type="Snowflake"))Relational(type=TDS[(Time,Integer,INT,"")]resultColumns=[("Time",INT)]sql=select"root".timeas"Time"frominteractionTableas"root"where((${optionalVarPlaceHolderOperationSelector(optionalID![],\'"root".ID=${varPlaceHolderToString(optionalID![]"\\\'""\\\'"{"\\\'":"\\\'\\\'"}"null")}\',\'"root".IDisnull\')})and(${optionalVarPlaceHolderOperationSelector(optionalActive![],\'casewhen"root".active=\\\'Y\\\'then\\\'true\\\'else\\\'false\\\'end=${varPlaceHolderToString(optionalActive![]""""{}"null")}\',\'casewhen"root".active=\\\'Y\\\'then\\\'true\\\'else\\\'false\\\'endisnull\')}))connection=RelationalDatabaseConnection(type="Snowflake")))finallyExecutionNodes=(SQL(type=VoidresultColumns=[]sql=ALTERSESSIONUNSETQUERY_TAG;connection=RelationalDatabaseConnection(type="Snowflake"))))';
    assertEquals($expectedPlan, $generatedPlan->planToStringWithoutFormatting(meta::relational::extension::relationalExtensions()));
    assertSameElements(templateFunctionsList(),$generatedPlan.processingTemplateFunctions);
 }
@@ -131,3 +131,21 @@ function <<test.Test>> meta::pure::executionPlan::tests::snowflake::testRelation
    let expectedPlan = 'Relational(type=TDS[(Name,String,VARCHAR(200),"")]resultColumns=[("Name",VARCHAR(200))]sql=select"root".NAMEas"Name"fromproductSchema.productTableas"root"connection=RelationalDatabaseConnection(type="Snowflake"))';
    assertEquals($expectedPlan,
$generatedPlan->planToStringWithoutFormatting(meta::relational::extension::relationalExtensions()));
 }
+
+function <<test.Test>> meta::pure::executionPlan::tests::snowflake::testInExecutionWithLiteralListSnowflake():Boolean[1]
+{
+  let intList = range(1,17000);
+  let generatedPlan = executionPlan({|Person.all()->filter(p|$p.age->in($intList))->project([x |$x.name], ['fullName'])}, simpleRelationalMapping, ^Runtime(connectionStores=^ConnectionStore(element = meta::relational::tests::db,connection=meta::pure::executionPlan::tests::snowflake::relationalConnectionForSnowflake(true))), meta::relational::extension::relationalExtensions());
+  let execNodesInRelBlockNode = $generatedPlan.rootExecutionNode.executionNodes;
+
+  let createAndPopulateTempTableNode = $execNodesInRelBlockNode->filter(e|$e->instanceOf(CreateAndPopulateTempTableExecutionNode));
+
+  let tempTableName = $createAndPopulateTempTableNode->at(0)->cast(@CreateAndPopulateTempTableExecutionNode).tempTableName;
+  assertEquals('LEGEND_TEMP_DB.LEGEND_TEMP_SCHEMA.tempTableForIn_8', $tempTableName);
+  let inputVarNames = $createAndPopulateTempTableNode->at(0)->cast(@CreateAndPopulateTempTableExecutionNode).inputVarNames;
+  assertEquals('tempVarForIn_8', $inputVarNames->at(0));
+
+  let allocationNode = $execNodesInRelBlockNode->filter(e|$e->instanceOf(AllocationExecutionNode));
+  let varName = $allocationNode->at(0)->cast(@AllocationExecutionNode).varName;
+  assertEquals('tempVarForIn_8', $varName);
+}
\ No newline at end of file
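The new test above pins the plan shape when an in() literal list exceeds the dialect's collection threshold (collectionThresholdLimit = 16348 in the extension below): rather than inlining 17,000 literals into the SQL text, the plan allocates a variable and a CreateAndPopulateTempTableExecutionNode, and the filter is evaluated against the temp table. A simplified sketch of that decision; the column name and subselect shape are illustrative assumptions, not the engine's exact output:

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.IntStream;

    // Hypothetical sketch of the inline-vs-temp-table decision for IN lists.
    public final class InListStrategy
    {
        static final int COLLECTION_THRESHOLD = 16348; // mirrors the Snowflake extension's limit

        static String renderInClause(String column, List<Integer> values, String tempTableName)
        {
            if (values.size() <= COLLECTION_THRESHOLD)
            {
                // Small lists are inlined directly into the SQL text.
                return column + " in (" + values.stream().map(String::valueOf).collect(Collectors.joining(",")) + ")";
            }
            // Large lists are routed through a temp table populated at execution time.
            return column + " in (select val from " + tempTableName + ")";
        }

        public static void main(String[] args)
        {
            List<Integer> intList = IntStream.rangeClosed(1, 17000).boxed().collect(Collectors.toList());
            System.out.println(renderInClause("\"root\".age", intList,
                    "LEGEND_TEMP_DB.LEGEND_TEMP_SCHEMA.tempTableForIn_8"));
        }
    }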
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/sqlQueryToString/snowflakeExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/sqlQueryToString/snowflakeExtension.pure
index f8080beafdc..31bece49761 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/sqlQueryToString/snowflakeExtension.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/sqlQueryToString/snowflakeExtension.pure
@@ -20,7 +20,7 @@ function <<access.private>> meta::relational::functions::sqlQueryToString::s
 function <<access.private>> meta::relational::functions::sqlQueryToString::snowflake::createDbExtensionForSnowflake():DbExtension[1]
 {
-   let reservedWords = defaultReservedWords();
+   let reservedWords = snowflakeReservedWords();
    let literalProcessors = getDefaultLiteralProcessors()->putAll(getLiteralProcessorsForSnowflake());
    let literalProcessor = {type:Type[1]| $literalProcessors->get(if($type->instanceOf(Enumeration), | Enum, | $type))->toOne()};
    let dynaFuncDispatch = getDynaFunctionToSqlDefault($literalProcessor)->groupBy(d| $d.funcName)->putAll(
@@ -30,7 +30,7 @@ function <<access.private>> meta::relational::functions::sqlQueryToString::snowf
    isBooleanLiteralSupported = true,
    collectionThresholdLimit = 16348,
    aliasLimit = 255,
-   isDbReservedIdentifier = {str:String[1]| $str->in($reservedWords)},
+   isDbReservedIdentifier = {str:String[1]| $str->toLower()->in($reservedWords)},
    literalProcessor = $literalProcessor,
    windowColumnProcessor = processWindowColumn_WindowColumn_1__SqlGenerationContext_1__String_1_,
    semiStructuredElementProcessor = processSemiStructuredElementForSnowflake_RelationalOperationElement_1__SqlGenerationContext_1__String_1_,
@@ -51,7 +51,8 @@ function <<access.private>> meta::relational::functions::sqlQueryToString::snowf
    newMap([
       pair(StrictDate, ^LiteralProcessor(format = '\'%s\'::date', transform = {d:StrictDate[1], dbTimeZone:String[0..1] | $d->convertDateToSqlString($dbTimeZone)})),
       pair(DateTime, ^LiteralProcessor(format = '\'%s\'::timestamp', transform = {d:DateTime[1], dbTimeZone:String[0..1] | $d->convertDateToSqlString($dbTimeZone)})),
-      pair(Date, ^LiteralProcessor(format = '\'%s\'::timestamp', transform = {d:Date[1], dbTimeZone:String[0..1] | $d->convertDateToSqlString($dbTimeZone)}))
+      pair(Date, ^LiteralProcessor(format = '\'%s\'::timestamp', transform = {d:Date[1], dbTimeZone:String[0..1] | $d->convertDateToSqlString($dbTimeZone)})),
+      pair(Boolean, ^LiteralProcessor(format = '%s', transform = toString_Any_1__String_1_->literalTransform()))
    ])
 }
@@ -110,6 +111,7 @@ function <<access.private>> meta::relational::functions::sqlQueryToString::snowf
    dynaFnToSql('dateDiff', $allStates, ^ToSql(format='datediff(%s,%s,%s)', transform={p:String[*]|[$p->at(2)->replace('\'', '')->processDateDiffDurationUnitForSnowflake(),$p->at(0),$p->at(1)]})),
    dynaFnToSql('datePart', $allStates, ^ToSql(format='Date(%s)')),
    dynaFnToSql('dayOfMonth', $allStates, ^ToSql(format='DAYOFMONTH(%s)')),
+   dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='to_char(%s, \'DYDY\')')),
    dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='DAYOFWEEKISO(%s)')),
    dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='DAYOFYEAR(%s)')),
    dynaFnToSql('extractFromSemiStructured', $allStates, ^ToSql(format='%s', transform={p:String[3]|$p->processExtractFromSemiStructuredParamsForSnowflake()})),
@@ -135,6 +137,7 @@ function <<access.private>> meta::relational::functions::sqlQueryToString::snowf
    dynaFnToSql('minute', $allStates, ^ToSql(format='minute(%s)')),
    dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')),
    dynaFnToSql('month', $allStates, ^ToSql(format='MONTH(%s)')),
+   dynaFnToSql('monthName', $allStates, ^ToSql(format='to_char(%s, \'MMMM\')')),
    dynaFnToSql('monthNumber', $allStates, ^ToSql(format='MONTH(%s)')),
    dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='DATE_TRUNC(\'WEEK\', CURRENT_DATE)%s', transform={p:String[*] | ''})),
    dynaFnToSql('now', $allStates, ^ToSql(format='current_timestamp')),
@@ -352,3 +355,102 @@ function meta::relational::functions::sqlQueryToString::snowflake::preAndFinally
    | []
    );
 }
+
+function <<access.private>> meta::relational::functions::sqlQueryToString::snowflake::snowflakeReservedWords():String[*]
+{
+   // Based on https://docs.snowflake.com/en/sql-reference/reserved-keywords
+   [
+      'account',
+      'alter',
+      'and',
+      'any',
+      'as',
+      'between',
+      'by',
+      'case',
+      'cast',
+      'check',
+      'column',
+      'connect',
+      'connection',
+      'constraint',
+      'create',
+      'cross',
+      'current',
+      'current_date',
+      'current_time',
+      'current_timestamp',
+      'current_user',
+      'database',
+      'delete',
+      'distinct',
+      'drop',
+      'else',
+      'exists',
+      'false',
+      'following',
+      'for',
+      'from',
+      'full',
+      'grant',
+      'group',
+      'gscluster',
+      'having',
+      'ilike',
+      'in',
+      'increment',
+      'inner',
+      'insert',
+      'intersect',
+      'into',
+      'is',
+      'issue',
+      'join',
+      'lateral',
+      'left',
+      'like',
+      'localtime',
+      'localtimestamp',
+      'minus',
+      'natural',
+      'not',
+      'null',
+      'of',
+      'on',
+      'or',
+      'order',
+      'organization',
+      'qualify',
+      'regexp',
+      'revoke',
+      'right',
+      'rlike',
+      'row',
+      'rows',
+      'sample',
+      'schema',
+      'select',
+      'set',
+      'some',
+      'start',
+      'table',
+      'tablesample',
+      'then',
+      'to',
+      'trigger',
+      'true',
+      'try_cast',
+      'union',
+      'unique',
+      'update',
+      'using',
+      'values',
+      'view',
+      'when',
+      'whenever',
+      'where',
+      'with'
+   ]
+}
+
+
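Two details in the extension hunks above are worth spelling out. First, isDbReservedIdentifier now lower-cases the candidate before membership testing, because snowflakeReservedWords stores every keyword in lower case; without the normalization, upper-case column names such as ACCOUNT would slip past the check. Second, the dayOfWeek and monthName dyna functions both delegate to Snowflake's to_char with a format element, and each dynaFnToSql entry is essentially a printf-style template. A minimal sketch of that template substitution (class and map names are illustrative):

    import java.util.Map;

    // Minimal sketch of format-template dyna function rendering.
    public final class DynaFnTemplates
    {
        // Mirrors the ToSql(format=...) pairs added in the Snowflake extension.
        static final Map<String, String> TEMPLATES = Map.of(
                "dayOfWeek", "to_char(%s, 'DYDY')",
                "monthName", "to_char(%s, 'MMMM')",
                "dayOfMonth", "DAYOFMONTH(%s)");

        static String render(String dynaFn, String sqlArg)
        {
            // Each dyna function expands by splicing the rendered argument into its template.
            return String.format(TEMPLATES.get(dynaFn), sqlArg);
        }

        public static void main(String[] args)
        {
            System.out.println(render("monthName", "\"root\".tradeDate")); // to_char("root".tradeDate, 'MMMM')
        }
    }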
'on', + 'or', + 'order', + 'organization', + 'qualify', + 'regexp', + 'revoke', + 'right', + 'rlike', + 'row', + 'rows', + 'sample', + 'schema', + 'select', + 'set', + 'some', + 'start', + 'table', + 'tablesample', + 'then', + 'to', + 'trigger', + 'true', + 'try_cast', + 'union', + 'unique', + 'update', + 'using', + 'values', + 'view', + 'when', + 'whenever', + 'where', + 'with' + ] +} + + diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/tests/testSnowflakeWithFunction.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/tests/testSnowflakeWithFunction.pure index e4d20b8279f..79fe5a4d855 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/tests/testSnowflakeWithFunction.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/main/resources/core_relational_snowflake/relational/tests/testSnowflakeWithFunction.pure @@ -38,3 +38,14 @@ function <> meta::relational::tests::query::snowflake::testFilterUsin let s = toSQLString($fn, simpleRelationalMapping, meta::relational::runtime::DatabaseType.Snowflake, meta::relational::extension::relationalExtensions()); assertEquals('select "root".ID as "pk_0", "root".FIRSTNAME as "firstName", "root".AGE as "age", "root".LASTNAME as "lastName" from personTable as "root" left outer join firmTable as "firmTable_d#2_dy0_d#3_d_m1" on ("firmTable_d#2_dy0_d#3_d_m1".ID = "root".FIRMID) where "firmTable_d#2_dy0_d#3_d_m1".LEGALNAME regexp \'^[A-Za-z0-9]*$\'',$s); } + +function <> meta::relational::tests::query::snowflake::testFilterBoolean():Boolean[1] +{ + let sql =toSQLString(|Person.all()->project(p|$p.firstName,'firstName')->filter(p | if( + $p.getString('firstName')->startsWith('d'), + |true, + |false + )), + simpleRelationalMapping, meta::relational::runtime::DatabaseType.Snowflake, meta::relational::extension::relationalExtensions()); + assertEquals('select "root".FIRSTNAME as "firstName" from personTable as "root" where case when "root".FIRSTNAME like \'d%\' then true else false end', $sql); +} \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java new file mode 100644 index 00000000000..c072278365f --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java @@ -0,0 
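Taken together, the Snowflake changes above swap the generic reserved-word list for Snowflake's own keyword list and lowercase the identifier before the membership test, since every entry in snowflakeReservedWords() is stored in lower case. A rough Java sketch of that quoting rule, assuming a hypothetical class and method (IdentifierQuoting, quoteIfReserved) and a trimmed keyword set:

    import java.util.Locale;
    import java.util.Set;

    // Illustrative sketch only: quote an identifier when it collides with a reserved
    // keyword, comparing case-insensitively against a lower-cased keyword list,
    // mirroring isDbReservedIdentifier = {str | $str->toLower()->in($reservedWords)}.
    public final class IdentifierQuoting
    {
        private static final Set<String> RESERVED = Set.of("select", "from", "where", "table", "qualify");

        public static String quoteIfReserved(String identifier)
        {
            // Lower-case before the lookup so SELECT, Select and select all match
            return RESERVED.contains(identifier.toLowerCase(Locale.ROOT))
                    ? '"' + identifier + '"'
                    : identifier;
        }
    }

Without the toLower() normalization, an upper-case identifier such as QUALIFY would miss the lower-cased list and be emitted unquoted, which is exactly the gap the new membership test closes.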
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
new file mode 100644
index 00000000000..c072278365f
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/legend-engine-xt-relationalStore-snowflake-pure/src/test/java/org/finos/legend/pure/code/core/Test_Pure_Relational_ConnectionEquality.java
@@ -0,0 +1,31 @@
+// Copyright 2022 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.finos.legend.pure.code.core;
+
+import junit.framework.TestSuite;
+import org.finos.legend.pure.m3.execution.test.PureTestBuilder;
+import org.finos.legend.pure.m3.execution.test.TestCollection;
+import org.finos.legend.pure.runtime.java.compiled.execution.CompiledExecutionSupport;
+import org.finos.legend.pure.runtime.java.compiled.testHelper.PureTestBuilderCompiled;
+
+public class Test_Pure_Relational_ConnectionEquality
+{
+    public static TestSuite suite()
+    {
+        String testPackage = "meta::relational::tests::connEquality";
+        CompiledExecutionSupport executionSupport = PureTestBuilderCompiled.getClassLoaderExecutionSupport();
+        return PureTestBuilderCompiled.buildSuite(TestCollection.collectTests(testPackage, executionSupport.getProcessorSupport(), ci -> PureTestBuilder.satisfiesConditions(ci, executionSupport.getProcessorSupport())), executionSupport);
+    }
+}
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/pom.xml index 65b54b5b365..0dc7294f9db 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-snowflake/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution-tests/pom.xml index c08de9ba64c..5b29090a4f1 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution-tests/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution/pom.xml index 164a7e43103..81698a8364f 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-execution/pom.xml @@ -19,7 +19,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-grammar/pom.xml index eab19ddc209..0ab9d32af7c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-grammar/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-jdbc-shaded/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-jdbc-shaded/pom.xml index 930b2638ae7..52d2390c744 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-jdbc-shaded/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-jdbc-shaded/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-protocol/pom.xml index 3e947a3ca19..52a348d6623 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-protocol/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/pom.xml 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/pom.xml index 5620a78b37f..9fa2f3ae9ed 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-spanner org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/src/main/resources/core_relational_spanner/relational/runtime/connection/spannerSpecification.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/src/main/resources/core_relational_spanner/relational/runtime/connection/spannerSpecification.pure index fd243423c9e..42aec568886 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/src/main/resources/core_relational_spanner/relational/runtime/connection/spannerSpecification.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/legend-engine-xt-relationalStore-spanner-pure/src/main/resources/core_relational_spanner/relational/runtime/connection/spannerSpecification.pure @@ -14,9 +14,9 @@ Class meta::pure::alloy::connections::alloy::specification::SpannerDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification { - projectId:String[1]; - instanceId:String[1]; - databaseId:String[1]; - proxyHost: String[0..1]; - proxyPort: Integer[0..1]; + <> projectId:String[1]; + <> instanceId:String[1]; + <> databaseId:String[1]; + <> proxyHost: String[0..1]; + <> proxyPort: Integer[0..1]; } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/pom.xml index 3214a07efac..be3a2eacc66 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-spanner/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/pom.xml index 1de244ccd5b..0356a4c5c77 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sparksql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/executionPlan/tests/executionPlanTestSparkSQL.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/executionPlan/tests/executionPlanTestSparkSQL.pure index 5608b16b1ae..59f36767184 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/executionPlan/tests/executionPlanTestSparkSQL.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/executionPlan/tests/executionPlanTestSparkSQL.pure @@ -63,7 +63,7 @@ function <> meta::pure::executionPlan::tests::sparkSQL::testFilterEqu ' (\n'+ ' type = TDS[(Time, Integer, INT, "")]\n'+ ' resultColumns = [("Time", INT)]\n'+ - ' sql = select "root"."time" as "Time" from interactionTable as "root" where ((${optionalVarPlaceHolderOperationSelector(optionalID![], \'"root".ID = ${varPlaceHolderToString(optionalID![] "\\\'" "\\\'" {"\\\'" : "\\\'\\\'"} "null")}\', \'"root".ID is null\')}) and (${optionalVarPlaceHolderOperationSelector(optionalActive![], \'case when "root"."active" = \\\'Y\\\' then \\\'true\\\' else \\\'false\\\' end = ${varPlaceHolderToString(optionalActive![] "\\\'" "\\\'" {} "null")}\', \'case when "root"."active" = \\\'Y\\\' then \\\'true\\\' else \\\'false\\\' end is null\')}))\n'+ + ' sql = select "root"."time" as "Time" from interactionTable as "root" where ((${optionalVarPlaceHolderOperationSelector(optionalID![], \'"root".ID = ${varPlaceHolderToString(optionalID![] "\\\'" "\\\'" {"\\\'" : "\\\'\\\'"} "null")}\', \'"root".ID is null\')}) and (${optionalVarPlaceHolderOperationSelector(optionalActive![], \'case when "root".active = \\\'Y\\\' then \\\'true\\\' else \\\'false\\\' end = ${varPlaceHolderToString(optionalActive![] "\\\'" "\\\'" {} "null")}\', \'case when "root".active = \\\'Y\\\' then \\\'true\\\' else \\\'false\\\' end is null\')}))\n'+ ' connection = DatabaseConnection(type = "SparkSQL")\n'+ ' )\n'+ ' )\n'+ diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/sqlQueryToString/sparkSQLExtension.pure 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/sqlQueryToString/sparkSQLExtension.pure index a14c2f8aad0..173a5efd426 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/sqlQueryToString/sparkSQLExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/legend-engine-xt-relationalStore-sparksql-pure/src/main/resources/core_relational_sparksql/relational/sqlQueryToString/sparkSQLExtension.pure @@ -15,7 +15,7 @@ function <> meta::relational::functions::sqlQueryToString::s function <> meta::relational::functions::sqlQueryToString::sparkSQL::createDbExtensionForSparkSQL():DbExtension[1] { - let reservedWords = sybaseReservedWords(); + let reservedWords = sparkReservedWords(); let literalProcessors = getDefaultLiteralProcessors()->putAll(getLiteralProcessorsForSparkSQL()); let literalProcessor = {type:Type[1]| $literalProcessors->get(if($type->instanceOf(Enumeration), | Enum, | $type))->toOne()}; let dynaFuncDispatch = getDynaFunctionToSqlDefault($literalProcessor)->groupBy(d| $d.funcName)->putAll( @@ -304,351 +304,121 @@ function meta::relational::functions::sqlQueryToString::sparkSQL::loadValuesToDb ,| $l->meta::relational::functions::sqlQueryToString::default::loadValuesToDbTableDefault($dbConfig)); } -function <> meta::relational::functions::sqlQueryToString::sparkSQL::sybaseReservedWords():String[*] -{ - //Based on - // http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.dc38151.1510/html/iqrefbb/Alhakeywords.htm - // and - // http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.dc38151.1601/doc/html/san1278452828146.html +function <> meta::relational::functions::sqlQueryToString::sparkSQL::sparkReservedWords():String[*] +{ + //Based on ANSI SQL standard + // https://learn.microsoft.com/en-us/azure/databricks/sql/language-manual/sql-ref-reserved-words + [ - 'active', - 'add', - 'algorithm', 'all', 'alter', 'and', 'any', - 'append', 'array', 'as', - 'asc', - 'attach', - 'auto', - 'backup', - 'begin', + 'at', + 'authorization', 'between', - 'bigint', - 'binary', - 'bit', - 'bottom', - 'break', + 'both', 'by', - 'calibrate', - 'calibration', - 'call', - 'cancel', - 'capability', - 'cascade', 'case', 'cast', - 'certificate', - 'char', - 'char_convert', - 'character', 'check', - 'checkpoint', - 'checksum', - 'clientport', - 'close', - 'columns', - 'comment', + 'collate', + 'column', 'commit', - 'committed', - 'comparisons', - 'compressed', - 'computes', - 'conflict', - 'connect', 'constraint', - 'contains', - 'continue', - 'convert', 'create', 'cross', 'cube', 'current', + 'current_date', + 'current_time', 'current_timestamp', 'current_user', - 'cursor', - 'date', - 'datetimeoffset', - 'dbspace', - 'dbspacename', - 'deallocate', - 'debug', - 'dec', - 'decimal', - 'declare', - 'decoupled', - 'decrypted', - 'default', - 'delay', 'delete', - 'deleting', - 'density', - 'desc', - 'detach', - 'deterministic', - 'disable', - 'distinct', - 'do', - 'double', + 'describe', + 'delete', 'drop', - 'dynamic', - 'elements', 'else', - 'elseif', - 'enable', - 'encapsulated', - 'encrypted', 'end', - 'endif', 'escape', 'except', - 
'exception', - 'exclude', - 'exec', - 'execute', - 'existing', - 'exists', - 'explicit', - 'express', - 'externlogin', - 'fastfirstrow', + 'exist', + 'external', + 'extract', + 'false', 'fetch', - 'first', - 'float', - 'following', + 'filter', 'for', - 'force', 'foreign', - 'forward', 'from', 'full', - 'gb', - 'goto', + 'function', + 'global', 'grant', 'group', 'grouping', 'having', - 'hidden', - 'history', - 'holdlock', - 'identified', - 'if', 'in', - 'inactive', - 'index', - 'index_lparen', 'inner', - 'inout', - 'input', - 'insensitive', 'insert', - 'inserting', - 'install', - 'instead', - 'int', - 'integer', - 'integrated', 'intersect', + 'interval', 'into', - 'iq', 'is', - 'isolation', - 'jdk', 'join', - 'json', - 'kb', - 'kerberos', - 'key', - 'lateral', + 'leading', 'left', 'like', - 'limit', - 'lock', - 'logging', - 'login', - 'long', - 'match', - 'mb', - 'membership', - 'merge', - 'message', - 'mode', - 'modify', - 'namespace', + 'local', 'natural', - 'nchar', - 'new', 'no', - 'noholdlock', - 'nolock', 'not', - 'notify', 'null', - 'numeric', - 'nvarchar', 'of', - 'off', 'on', - 'open', - 'openstring', - 'openxml', - 'optimization', - 'option', - 'options', + 'only', 'or', 'order', - 'others', 'out', 'outer', - 'over', - 'pages', - 'paglock', - 'partial', + 'overlaps', 'partition', - 'passthrough', - 'password', - 'plan', - 'preceding', - 'precision', - 'prepare', + 'position', 'primary', - 'print', - 'privileges', - 'proc', - 'procedure', - 'proxy', - 'publication', - 'raiserror', 'range', - 'raw', - 'readcommitted', - 'readonly', - 'readpast', - 'readtext', - 'readuncommitted', - 'readwrite', - 'real', - 'recursive', - 'reference', 'references', - 'refresh', - 'release', - 'relocate', - 'remote', - 'remove', - 'rename', - 'reorganize', - 'repeatable', - 'repeatableread', - 'reserve', - 'resizing', - 'resource', - 'restore', - 'restrict', - 'return', 'revoke', 'right', 'rollback', 'rollup', - 'root', 'row', - 'rowlock', 'rows', - 'rowtype', - 'save', - 'savepoint', - 'schedule', - 'scroll', - 'secure', 'select', - 'sensitive', - 'serializable', - 'service', - 'session', + 'session_user', 'set', - 'setuser', - 'share', - 'smallint', - 'soapaction', 'some', - 'space', - 'spatial', - 'sqlcode', - 'sqlstate', 'start', - 'stop', - 'subtrans', - 'subtransaction', - 'synchronize', - 'syntax_error', 'table', - 'tablock', - 'tablockx', - 'tb', - 'temporary', + 'tablesample', 'then', - 'ties', 'time', - 'timestamp', - 'tinyint', 'to', - 'top', - 'tran', - 'transaction', - 'transactional', - 'transfer', - 'treat', - 'tries', - 'trigger', + 'trailing', + 'true', 'truncate', - 'tsequal', - 'unbounded', - 'uncommitted', 'union', 'unique', - 'uniqueidentifier', 'unknown', - 'unnest', - 'unsigned', 'update', - 'updating', - 'updlock', - 'url', 'user', 'using', - 'utc', - 'validate', 'values', - 'varbinary', - 'varbit', - 'varchar', - 'variable', - 'varray', - 'varying', - 'view', - 'virtual', - 'wait', - 'waitfor', - 'web', - 'when', + 'when', 'where', - 'while', 'window', - 'with', - 'with_cube', - 'with_lparen', - 'with_rollup', - 'withauto', - 'within', - 'word', - 'work', - 'writeserver', - 'writetext', - 'xlock', - 'xml' - ] + 'with' +] } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/pom.xml index 00836ab26b1..bbaff1d3d04 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sparksql/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/pom.xml index a904b9b33f5..8e6aa76c247 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sqlserver - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerDatabaseManager.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerRelationalDatabaseManager.java similarity index 90% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerDatabaseManager.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerRelationalDatabaseManager.java index f22628f7a8b..df5c20172cc 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerDatabaseManager.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/java/org/finos/legend/connection/jdbc/driver/SQLServerRelationalDatabaseManager.java @@ -15,13 +15,13 @@ package org.finos.legend.connection.jdbc.driver; import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.connection.DatabaseManager; +import org.finos.legend.connection.RelationalDatabaseManager; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.DatabaseType; import java.util.List; import java.util.Properties; -public class SQLServerDatabaseManager implements DatabaseManager +public class SQLServerRelationalDatabaseManager implements 
RelationalDatabaseManager
 {
     @Override
     public List getIds()
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager
deleted file mode 100644
index c4d426cd4b6..00000000000
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.DatabaseManager
+++ /dev/null
@@ -1 +0,0 @@
-org.finos.legend.connection.jdbc.driver.SQLServerDatabaseManager
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
new file mode 100644
index 00000000000..120a1866f63
--- /dev/null
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-connection/src/main/resources/META-INF/services/org.finos.legend.connection.RelationalDatabaseManager
@@ -0,0 +1 @@
+org.finos.legend.connection.jdbc.driver.SQLServerRelationalDatabaseManager
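The provider-file rename above is the part of this refactor that is easiest to lose: java.util.ServiceLoader resolves implementations through a file under META-INF/services named after the exact fully-qualified interface, so the file must be renamed from ...DatabaseManager to ...RelationalDatabaseManager together with its contents. A minimal sketch of the discovery side, assuming only the RelationalDatabaseManager interface and its getIds() method from the diff (the ListRelationalDatabaseManagers class is illustrative, not part of the change):

    import java.util.ServiceLoader;
    import org.finos.legend.connection.RelationalDatabaseManager;

    // Illustrative sketch only: enumerate every RelationalDatabaseManager registered
    // under META-INF/services/org.finos.legend.connection.RelationalDatabaseManager.
    public class ListRelationalDatabaseManagers
    {
        public static void main(String[] args)
        {
            // A stale provider file still named after the old DatabaseManager
            // interface would be silently ignored by this lookup.
            for (RelationalDatabaseManager manager : ServiceLoader.load(RelationalDatabaseManager.class))
            {
                System.out.println(manager.getClass().getName() + " -> " + manager.getIds());
            }
        }
    }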
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution-tests/pom.xml index 972c410cb17..36deab7f39d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sqlserver - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-relationalStore-sqlserver-execution-tests diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution/pom.xml index a8139e8cdfd..ff6603d7e07 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sqlserver - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-pure/pom.xml index 1efb477f179..e1f9d56926b 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/legend-engine-xt-relationalStore-sqlserver-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sqlserver - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/pom.xml index 38140532577..e7a9643d7a1 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sqlserver/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/pom.xml index 863cdc1c7bc..8a3f1e8d0d2 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sybase - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/src/main/resources/core_relational_sybase/relational/sqlQueryToString/sybaseASEExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/src/main/resources/core_relational_sybase/relational/sqlQueryToString/sybaseASEExtension.pure index
8909ba33e73..1cb86366689 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/src/main/resources/core_relational_sybase/relational/sqlQueryToString/sybaseASEExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/legend-engine-xt-relationalStore-sybase-pure/src/main/resources/core_relational_sybase/relational/sqlQueryToString/sybaseASEExtension.pure @@ -124,6 +124,7 @@ function meta::relational::functions::sqlQueryToString::sybaseASE::getDynaFuncti dynaFnToSql('minute', $allStates, ^ToSql(format='minute(%s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='datename(MONTH, %s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='dateadd(Day, case when %s - dow(%s) > 0 then %s - dow(%s) - 7 else %s - dow(%s) end, %s)', transform={p:String[1..2] | $p->formatMostRecentSybase('today()')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='now(%s)', transform={p:String[*] | ''})), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/pom.xml index 04ba83b1069..608eb197d28 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybase/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/pom.xml index 617c7b119cd..346802ecadb 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-sybaseiq - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/sybaseIQExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/sybaseIQExtension.pure index 213fc63e865..a167de9a353 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/sybaseIQExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/sybaseIQExtension.pure @@ -121,6 +121,7 @@ function meta::relational::functions::sqlQueryToString::sybaseIQ::getDynaFunctio dynaFnToSql('minute', $allStates, ^ToSql(format='minute(%s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='monthname(%s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='dateadd(Day, case when %s - dow(%s) > 0 then %s - dow(%s) - 7 else %s - dow(%s) end, %s)', transform={p:String[1..2] | $p->formatMostRecentSybase('today()')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='now(%s)', transform={p:String[*] | ''})), @@ -249,7 +250,7 @@ function <> meta::relational::functions::sqlQueryToString::sybas let s = if($isSubSelect && ($sq.fromRow->isNotEmpty() || $sq.toRow->isNotEmpty()), |$sq->rewriteSliceAsWindowFunction(), |$sq); let opStr = if($s.filteringOperation->isEmpty(), |'', |$s.filteringOperation->map(s|$s->wrapAsBooleanOperation($extensions)->processOperation($dbConfig, $format->indent(), ^$config(callingFromFilter = true), $extensions))->filter(s|$s != '')->joinStrings(' <||> ')); - let havingStr = if($s.havingOperation->isEmpty(), |'', |$s.havingOperation->map(s|$s->processOperation($dbConfig, $format->indent(), $config, $extensions))->filter(s|$s != '')->joinStrings(' <||> ')); + let havingStr = if($s.havingOperation->isEmpty(), |'', |$s.havingOperation->map(s|$s->wrapAsBooleanOperation($extensions)->processOperation($dbConfig, $format->indent(), $config, $extensions))->filter(s|$s != '')->joinStrings(' <||> ')); $format.separator + 'select ' + if($s.distinct == true,|'distinct ',|'') + processTop($s, $format, $dbConfig, $extensions) + processSelectColumns($s.columns, $dbConfig, $format->indent(), false, $extensions) + diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQIsEmpty.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQIsEmpty.pure index 08a412e59e2..0a48a0a3765 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQIsEmpty.pure +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQIsEmpty.pure @@ -45,12 +45,12 @@ function <> meta::relational::tests::query::function::sybaseIQ::testD { let result = execute(|TestClass.all()->groupBy([],[agg(x|$x.isValued(), y | $y->count())],['count']), TestMapping, meta::external::store::relational::tests::testRuntime(), meta::relational::extension::relationalExtensions()); assertEquals('select count("root".value is null) as "count" from testTable as "root"', $result->sqlRemoveFormatting()); - assertEquals('select count(case when ("root".value is null) then \'true\' else \'false\' end) as "count" from testTable as "root"', meta::relational::functions::sqlstring::toSQLString(|TestClass.all()->groupBy([],[agg(x|$x.isValued(), y | $y->count())],['count']), TestMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions())); + assertEquals('select count(case when "root".value is null then \'true\' else \'false\' end) as "count" from testTable as "root"', meta::relational::functions::sqlstring::toSQLString(|TestClass.all()->groupBy([],[agg(x|$x.isValued(), y | $y->count())],['count']), TestMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions())); } function <> meta::relational::tests::query::function::sybaseIQ::testDerivedCountWithIsEmptyNestedInIf():Boolean[1] { let result = execute(|TestClass.all()->groupBy([],[agg(x|$x.isValuedNested(), y | $y->count())],['count']), TestMapping, meta::external::store::relational::tests::testRuntime(), meta::relational::extension::relationalExtensions()); assertEquals('select count(case when "root".value is null then true else false end) as "count" from testTable as "root"', $result->sqlRemoveFormatting()); - assertEquals('select count(case when ("root".value is null) then \'true\' else \'false\' end) as "count" from testTable as "root"', meta::relational::functions::sqlstring::toSQLString(|TestClass.all()->groupBy([],[agg(x|$x.isValued(), y | $y->count())],['count']), TestMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions())); + assertEquals('select count(case when "root".value is null then \'true\' else \'false\' end) as "count" from testTable as "root"', meta::relational::functions::sqlstring::toSQLString(|TestClass.all()->groupBy([],[agg(x|$x.isValued(), y | $y->count())],['count']), TestMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions())); } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQTDSFilter.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQTDSFilter.pure index eaf73862e89..a4465e9cd2f 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQTDSFilter.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/legend-engine-xt-relationalStore-sybaseiq-pure/src/main/resources/core_relational_sybaseiq/relational/sqlQueryToString/tests/testSybaseIQTDSFilter.pure @@ -30,3 +30,17 @@ function <> meta::relational::tests::tds::sybaseIQ::testFilterOnDates simpleRelationalMapping, DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions()); assertEquals('select "root".settlementDateTime as "settlementDateTime" from tradeTable as "root" where "root".settlementDateTime < convert(DATETIME, \'2015-01-01 00:00:00.000\', 121)', $sql); } + +function <> meta::relational::tests::tds::sybaseIQ::testFirstNotNullFunction():Boolean[1] +{ + let sql =toSQLString(|Person.all()->project(p|$p.firstName,'firstName')->filter(p | meta::pure::tds::extensions::firstNotNull([$p.getString('firstName')->in(['John','Peter','Anthony']), false]) != false), + simpleRelationalMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions()); + assertEquals('select "root".FIRSTNAME as "firstName" from personTable as "root" where (coalesce(case when "root".FIRSTNAME in (\'John\', \'Peter\', \'Anthony\') then \'true\' else \'false\' end, \'false\') <> \'false\' OR coalesce(case when "root".FIRSTNAME in (\'John\', \'Peter\', \'Anthony\') then \'true\' else \'false\' end, \'false\') is null)', $sql); +} + +function <> meta::relational::tests::tds::sybaseIQ::testFirstNotNullFunctionWithinWhenClause():Boolean[1] +{ + let sql =toSQLString(|Person.all()->project(p|$p.firstName,'firstName')->filter(p | + if(meta::pure::tds::extensions::firstNotNull([$p.getString('firstName')->in(['John','Peter','Anthony']),false ])->toOne(), | true,| false)), simpleRelationalMapping, meta::relational::runtime::DatabaseType.SybaseIQ, meta::relational::extension::relationalExtensions()); + assertEquals('select "root".FIRSTNAME as "firstName" from personTable as "root" where case when coalesce(case when "root".FIRSTNAME in (\'John\', \'Peter\', \'Anthony\') then \'true\' else \'false\' end, \'false\') = \'true\' then \'true\' else \'false\' end = \'true\'', $sql); +} \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/pom.xml index fd350fd8ed5..993d0ec71f5 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-sybaseiq/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-reports/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-reports/pom.xml index 861d1ac7aa1..be403d4acc7 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-reports/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-reports/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-relationalStore-test-reports diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-server/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-server/pom.xml index 4e5172262ce..7d47f47317a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-server/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-test-server/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-relationalStore-test-server diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution-tests/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution-tests/pom.xml index 01192b31507..d4d26be7efe 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution-tests/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-trino - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution/pom.xml index a209baae6cc..5cf930a1f0e 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-trino - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-grammar/pom.xml index da17a73dc1e..20f848d15ba 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-trino - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-protocol/pom.xml index a8595f21513..52ade6a5c2a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-trino - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-pure/pom.xml index df813c99f7c..cf49ce419ae 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/legend-engine-xt-relationalStore-trino-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-trino - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/pom.xml index a27208fa210..ff1486040d6 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/legend-engine-xt-relationalStore-trino/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-dbExtension - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/pom.xml index f8df50ce175..6ffb777053c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-dbExtension/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-relationalStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-authorizer/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-authorizer/pom.xml index 6cb2cf613b4..a76445fc61a 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-authorizer/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-authorizer/pom.xml @@ -3,7 +3,7 @@ legend-engine-xt-relationalStore-execution org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-api/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-api/pom.xml index df6199ea357..d55faf9dc4b 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-api/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-api/pom.xml @@ -19,7 +19,7 @@ legend-engine-xt-relationalStore-execution org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/pom.xml index cd3a70b279b..30241fe8a54 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication-default/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-relationalStore-executionPlan-connection-authentication-default diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/pom.xml index 8d6660293c6..5fe6207fca9 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-authentication/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-xt-relationalStore-executionPlan-connection-authentication diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-tests/pom.xml 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-tests/pom.xml index 6be48f969bc..ce8d2c1d2f9 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-tests/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection-tests/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection/pom.xml index 4209282b241..2d976c8870c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan-connection/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/pom.xml index ab6fe96e9b0..292995db609 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -129,7 +129,7 @@ org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-connection-factory org.bouncycastle diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/manager/ConnectionManagerSelector.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/manager/ConnectionManagerSelector.java index c8a4be55180..5155617da16 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/manager/ConnectionManagerSelector.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/connection/manager/ConnectionManagerSelector.java @@ -155,7 +155,7 @@ public Connection getDatabaseConnectionImpl(Identity identity, DatabaseConnectio { try { - return this.connectionFactory.getConnection(identity, connectionFactoryMaterial.storeInstance, 
connectionFactoryMaterial.authenticationConfiguration); + return this.connectionFactory.getConnection(identity, connectionFactoryMaterial.connection, connectionFactoryMaterial.authenticationConfiguration); } catch (Exception exception) { diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/RelationalResult.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/RelationalResult.java index f26517bfeff..086e87eac60 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/RelationalResult.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/RelationalResult.java @@ -25,6 +25,7 @@ import java.sql.Statement; import java.sql.Timestamp; import java.sql.Types; +import java.util.Calendar; import java.util.GregorianCalendar; import java.util.List; import java.util.Map; @@ -113,6 +114,7 @@ public class RelationalResult extends StreamingResult implements IRelationalResu public MutableList setTransformers = Lists.mutable.empty(); public Builder builder; + private Calendar calendar; public RelationalResult(MutableList activities, RelationalExecutionNode node, List sqlResultColumns, String databaseType, String databaseTimeZone, Connection connection, MutableList profiles, List temporaryTables, Span topSpan) { @@ -444,16 +446,7 @@ public Object getValue(int columnIndex) throws SQLException if (resultDBColumnsMetaData.isTimestampColumn(columnIndex)) { Timestamp ts; - if (getRelationalDatabaseTimeZone() != null) - { - ts = resultSet.getTimestamp(columnIndex, new GregorianCalendar(TimeZone.getTimeZone(getRelationalDatabaseTimeZone()))); - } - else - { - //TODO, throw exception, TZ should always be specified - //Till then, default to PURE default which is "GMT" - ts = resultSet.getTimestamp(columnIndex, new GregorianCalendar(TimeZone.getTimeZone("GMT"))); - } + ts = resultSet.getTimestamp(columnIndex, getCalendar()); result = ts; } else if (resultDBColumnsMetaData.isDateColumn(columnIndex)) @@ -651,6 +644,25 @@ public boolean tryAdvance(Consumer action) return StreamSupport.stream(spliterator, false).onClose(this::close); } + + private Calendar getCalendar() + { + String timeZoneId = getRelationalDatabaseTimeZone(); + TimeZone timeZone = (timeZoneId != null) ? 
TimeZone.getTimeZone(timeZoneId) : TimeZone.getTimeZone("GMT"); + if (calendar == null) + { + //TODO, throw exception, TZ should always be specified + //Till then, default to PURE default which is "GMT" + calendar = new GregorianCalendar(timeZone); + } + else + { + calendar.clear(); + calendar.setTimeZone(timeZone); + } + return calendar; + } + @Override public void cancel() { diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/SQLResultDBColumnsMetaData.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/SQLResultDBColumnsMetaData.java index f5dc26a96a7..54177ee6e21 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/SQLResultDBColumnsMetaData.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/main/java/org/finos/legend/engine/plan/execution/stores/relational/result/SQLResultDBColumnsMetaData.java @@ -26,25 +26,45 @@ public class SQLResultDBColumnsMetaData { private final List sqlResultColumns; private final List dbMetaDataType; + private final boolean[] timeStampColumns; + private final boolean[] dateColumns; SQLResultDBColumnsMetaData(List resultColumns, ResultSetMetaData rsMetaData) throws SQLException { + int size = resultColumns.size(); this.sqlResultColumns = resultColumns; - this.dbMetaDataType = Lists.multiReader.ofInitialCapacity(resultColumns.size()); - for (int i = 1; i <= resultColumns.size(); i++) + this.dbMetaDataType = Lists.multiReader.ofInitialCapacity(size); + this.timeStampColumns = new boolean[size]; + this.dateColumns = new boolean[size]; + + + for (int i = 1; i <= size; i++) { + this.dbMetaDataType.add(rsMetaData.getColumnType(i)); + if (columnIsOfType(i, Types.TIMESTAMP, "TIMESTAMP")) + { + timeStampColumns[i - 1] = true; + + } + else if (columnIsOfType(i, Types.DATE, "DATE")) + { + dateColumns[i - 1] = true; + + } + } } boolean isTimestampColumn(int index) { - return columnIsOfType(index, Types.TIMESTAMP, "TIMESTAMP"); + return timeStampColumns[index - 1]; } + boolean isDateColumn(int index) { - return columnIsOfType(index, Types.DATE, "DATE"); + return dateColumns[index - 1]; } private boolean columnIsOfType(int index, int dbColumnType, String alloyColumnType) diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/result/TestSqlResultDBColumnMetaData.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/result/TestSqlResultDBColumnMetaData.java new file mode 100644 index 00000000000..bd9e267200d --- /dev/null +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-executionPlan/src/test/java/org/finos/legend/engine/plan/execution/stores/relational/result/TestSqlResultDBColumnMetaData.java @@ -0,0 +1,83 @@ +// Copyright 2023 Goldman Sachs +// +// 
Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.plan.execution.stores.relational.result; + +import org.eclipse.collections.api.factory.Lists; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.model.result.SQLResultColumn; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.List; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.when; + +public class TestSqlResultDBColumnMetaData +{ + + private SQLResultDBColumnsMetaData metaData; + private ResultSetMetaData resultSetMetaData; + + + @Before + public void setUp() throws SQLException + { + resultSetMetaData = Mockito.mock(ResultSetMetaData.class); + try + { + when(resultSetMetaData.getColumnType(1)).thenReturn(Types.TIMESTAMP); + when(resultSetMetaData.getColumnType(2)).thenReturn(Types.DATE); + when(resultSetMetaData.getColumnType(3)).thenReturn(Types.VARCHAR); + + } + catch (SQLException e) + { + e.printStackTrace(); + } + + List resultColumns = Lists.mutable.of( + new SQLResultColumn("Column1", "TIMESTAMP"), + new SQLResultColumn("Column2", "DATE"), + new SQLResultColumn("Column3", "STRING") + + ); + + metaData = new SQLResultDBColumnsMetaData(resultColumns, resultSetMetaData); + } + + @Test + public void testIsTimestampColumn() + { + assertTrue(metaData.isTimestampColumn(1)); + assertFalse(metaData.isTimestampColumn(2)); + assertFalse(metaData.isTimestampColumn(3)); + + } + + + @Test + public void testIsDateColumn() + { + assertFalse(metaData.isDateColumn(1)); + assertTrue(metaData.isDateColumn(2)); + assertFalse(metaData.isDateColumn(3)); + + } + +} diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-h2-1.4.200-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-h2-1.4.200-execution/pom.xml index d2c2d61ae7f..3bf6bcd6b60 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-h2-1.4.200-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-h2-1.4.200-execution/pom.xml @@ -20,7 +20,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-mutation-executionPlan-test/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-mutation-executionPlan-test/pom.xml index b4508ff6709..164be8d4d15 100644 --- 
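The RelationalResult and SQLResultDBColumnsMetaData changes above (pinned down by the Mockito test) share one idea: hoist per-row work out of the row loop. Column-type checks become a one-time boolean[] scan of the ResultSetMetaData, and the GregorianCalendar handed to ResultSet.getTimestamp is allocated once and reset rather than rebuilt for every timestamp cell. A minimal self-contained sketch of the pattern; the class and method names are illustrative, not the engine's:

```java
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;

class TimestampColumnReader
{
    private final boolean[] timestampColumns; // computed once per result set, not per row
    private Calendar calendar;                // reused across rows instead of re-allocated

    TimestampColumnReader(ResultSetMetaData metaData) throws SQLException
    {
        this.timestampColumns = new boolean[metaData.getColumnCount()];
        for (int i = 1; i <= this.timestampColumns.length; i++)
        {
            this.timestampColumns[i - 1] = (metaData.getColumnType(i) == Types.TIMESTAMP);
        }
    }

    Object value(ResultSet resultSet, int columnIndex, String dbTimeZoneId) throws SQLException
    {
        if (this.timestampColumns[columnIndex - 1])
        {
            // the calendar tells the driver which time zone to interpret the raw value in
            return resultSet.getTimestamp(columnIndex, calendarFor(dbTimeZoneId));
        }
        return resultSet.getObject(columnIndex);
    }

    private Calendar calendarFor(String timeZoneId)
    {
        TimeZone timeZone = TimeZone.getTimeZone(timeZoneId != null ? timeZoneId : "GMT");
        if (this.calendar == null)
        {
            this.calendar = new GregorianCalendar(timeZone);
        }
        else
        {
            this.calendar.clear();            // drop any state a driver left behind
            this.calendar.setTimeZone(timeZone);
        }
        return this.calendar;
    }
}
```

The clear() before reuse matters because a JDBC driver may mutate the calendar it is given, so stale state would otherwise leak between rows.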
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-mutation-executionPlan-test/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/legend-engine-xt-relationalStore-mutation-executionPlan-test/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-execution - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/pom.xml index b4d2ccddf9f..060c9ed1e51 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-execution/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-relationalStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-api/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-api/pom.xml index 3cfed4a48ea..dc6ca86ab7d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-api/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/pom.xml index 75fbe5a77b7..d990cb6e05c 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRelationalDatabaseConnectionBuilder.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRelationalDatabaseConnectionBuilder.java index c1476ad4b91..059003133ec 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRelationalDatabaseConnectionBuilder.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/HelperRelationalDatabaseConnectionBuilder.java @@ -51,7 +51,7 @@ public static void addDatabaseConnectionProperties(Root_meta_external_store_rela } catch (RuntimeException e) { - new 
Root_meta_relational_metamodel_Database_Impl(element)._name(element); + context.pureModel.storesIndex.putIfAbsent(element, new Root_meta_relational_metamodel_Database_Impl(element)._name(element)); } } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/RelationalCompilerExtension.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/RelationalCompilerExtension.java index 51eadfd2a4d..1dec37f6acd 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/RelationalCompilerExtension.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/main/java/org/finos/legend/engine/language/pure/compiler/toPureGraph/RelationalCompilerExtension.java @@ -525,6 +525,12 @@ public List>> handlers.m(handlers.m(handlers.h("meta::pure::tds::extensions::rowValueDifference_TabularDataSet_1__TabularDataSet_1__String_$1_MANY$__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", false, ps -> handlers.res("meta::pure::tds::TabularDataSet", "one"), ps -> ps.size() == 5)), handlers.m(handlers.h("meta::pure::tds::extensions::rowValueDifference_TabularDataSet_1__TabularDataSet_1__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", false, ps -> handlers.res("meta::pure::tds::TabularDataSet", "one"), ps -> ps.size() == 4))) ), + new FunctionExpressionBuilderRegistrationInfo(null, + handlers.m(handlers.h("meta::pure::tds::extensions::zScore_TabularDataSet_1__String_MANY__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", false, ps -> handlers.res(ps.get(0)._genericType(), "one"), ps -> ps.size() == 4)) + ), + new FunctionExpressionBuilderRegistrationInfo(null, + handlers.m(handlers.h("meta::pure::tds::extensions::iqrClassify_TabularDataSet_1__String_MANY__String_$1_MANY$__String_$1_MANY$__TabularDataSet_1_", false, ps -> handlers.res(ps.get(0)._genericType(), "one"), ps -> ps.size() == 4)) + ), new FunctionExpressionBuilderRegistrationInfo(Lists.mutable.with(3), handlers.m(handlers.h("meta::pure::functions::asserts::assertEq_Any_1__Any_1__Function_1__Boolean_1_", false, ps -> handlers.res("Boolean", "one"), ps -> ps.size() == 3 && !handlers.typeOne(ps.get(2), "String"))) ) diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/test/java/org/finos/legend/engine/language/pure/compiler/test/TestRelationalFunctionHandler.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/test/java/org/finos/legend/engine/language/pure/compiler/test/TestRelationalFunctionHandler.java index bddc3c64d81..43d26492fac 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/test/java/org/finos/legend/engine/language/pure/compiler/test/TestRelationalFunctionHandler.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-grammar/src/test/java/org/finos/legend/engine/language/pure/compiler/test/TestRelationalFunctionHandler.java @@ 
-94,4 +94,79 @@ public void testRowValueDifferenceCompile() " }:meta::pure::tds::TabularDataSet[1];" + "}"); } + + @Test + public void testCompile_zScore() + { + // zScore over a projected TDS should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 'student1', second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])->meta::pure::tds::extensions::zScore([], ['score'], ['score zScore']);\n" + + " true;\n" + + "}\n"); + } + + @Test + public void testCompile_iqrClassify() + { + // iqrClassify over a projected TDS should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 'student1', second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])->meta::pure::tds::extensions::iqrClassify([], 'score', 'irq_classification');\n" + + " true;\n" + + "}\n"); + } + + @Test + public void testCompile_columnValueDifference1() + { + // columnValueDifference without explicit result column names should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 1, second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])\n" + + " ->meta::pure::tds::extensions::columnValueDifference(\n" + + " ^Pair(first = 1, second = 6)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')]),\n" + + " ['student'], ['score']);\n" + + " true;\n" + + "}\n"); + } + + @Test + public void testCompile_columnValueDifference2() + { + // columnValueDifference with explicit result column names should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 1, second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])\n" + + " ->meta::pure::tds::extensions::columnValueDifference(\n" + + " ^Pair(first = 1, second = 6)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')]),\n" + + " ['student'], ['score'], ['score diff']);\n" + + " true;\n" + + "}\n"); + } + + @Test + public void testCompile_extendWithDigestOnColumns1() + { + // extendWithDigestOnColumns defaulting over all columns should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 1, second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])\n" + + " ->meta::pure::tds::extensions::extendWithDigestOnColumns('digest');\n" + + " true;\n" + + "}\n"); + } + + @Test + public void testCompile_extendWithDigestOnColumns2() + { + // extendWithDigestOnColumns with explicit columns and hash type should compile + TestCompilationFromGrammar.TestCompilationFromGrammarTestSuite.test("###Pure\n" + + "function xx::myFunc() : Boolean[1] {\n" + + " ^Pair(first = 1, second = 5)->project([col(x|$x.first, 'student'), col(x|$x.second, 'score')])\n" + + " ->meta::pure::tds::extensions::extendWithDigestOnColumns(['student'], meta::pure::functions::hash::HashType.MD5, 'digest');\n" + + " true;\n" + + "}\n"); + } + } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-javaPlatformBinding-pure/pom.xml index dff071bd52d..40bcf4b8a99 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-javaPlatformBinding-pure/pom.xml +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-javaPlatformBinding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/pom.xml index a53578648a4..67221f7ee43 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -83,6 +83,10 @@ junit test + + org.finos.legend.engine + legend-engine-xt-connection-protocol + \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/RelationalProtocolExtension.java b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/RelationalProtocolExtension.java index 13228724c59..2a9067ca3b6 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/RelationalProtocolExtension.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/RelationalProtocolExtension.java @@ -17,8 +17,10 @@ import org.eclipse.collections.api.block.function.Function0; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Maps; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.extension.ProtocolSubTypeInfo; import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension; +import org.finos.legend.engine.protocol.pure.v1.model.connection.StaticJDBCConnectionSpecification; import org.finos.legend.engine.protocol.pure.v1.model.data.EmbeddedData; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.RelationResultType; import org.finos.legend.engine.protocol.pure.v1.model.executionPlan.nodes.CreateAndPopulateTempTableExecutionNode; @@ -184,6 +186,11 @@ public List>>> getExtraProtocolSubTypeInfo .withSubtype(BusinessMilestoning.class, "businessMilestoning") .withSubtype(BusinessSnapshotMilestoning.class, "businessSnapshotMilestoning") .withSubtype(ProcessingMilestoning.class, "processingMilestoning") + .build(), + + // Connection Specification + ProtocolSubTypeInfo.newBuilder(ConnectionSpecification.class) + .withSubtype(StaticJDBCConnectionSpecification.class, "staticJDBC") .build() )); } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/protocol/StaticJDBCConnectionSpecification.java 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/connection/StaticJDBCConnectionSpecification.java similarity index 82% rename from legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/protocol/StaticJDBCConnectionSpecification.java rename to legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/connection/StaticJDBCConnectionSpecification.java index 373e76c7c42..1082bacc174 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-connection/src/main/java/org/finos/legend/connection/protocol/StaticJDBCConnectionSpecification.java +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-protocol/src/main/java/org/finos/legend/engine/protocol/pure/v1/model/connection/StaticJDBCConnectionSpecification.java @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -package org.finos.legend.connection.protocol; +package org.finos.legend.engine.protocol.pure.v1.model.connection; -import org.finos.legend.connection.protocol.ConnectionSpecification; +import org.finos.legend.engine.protocol.pure.v1.packageableElement.connection.ConnectionSpecification; public class StaticJDBCConnectionSpecification extends ConnectionSpecification { @@ -22,6 +22,11 @@ public class StaticJDBCConnectionSpecification extends ConnectionSpecification public String host; public int port; public String databaseName; + public StaticJDBCConnectionSpecification() + { + // no-arg constructor required by Jackson for deserialization + } + public StaticJDBCConnectionSpecification(String host, int port, String databaseName) { this.host = host; diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/pom.xml index fa6e862a6fb..6fcd4ec012f 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xt-relationalStore-generation - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0
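The rename above moves StaticJDBCConnectionSpecification into the protocol module, and RelationalProtocolExtension registers it as a ConnectionSpecification subtype under the name "staticJDBC"; the new empty constructor exists so Jackson can instantiate the class during deserialization. A self-contained sketch of the equivalent wiring in plain jackson-databind; the simplified classes and the "_type" discriminator are assumptions for illustration, not the engine's actual protocol plumbing:

```java
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.NamedType;

// base protocol type; subtypes are resolved via the "_type" field (assumed here)
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "_type")
abstract class ConnectionSpecification
{
}

class StaticJDBCConnectionSpecification extends ConnectionSpecification
{
    public String host;
    public int port;
    public String databaseName;

    public StaticJDBCConnectionSpecification()
    {
        // Jackson instantiates via the no-arg constructor, then populates the public fields
    }
}

class ProtocolDemo
{
    public static void main(String[] args) throws Exception
    {
        ObjectMapper mapper = new ObjectMapper();
        // analogous to ProtocolSubTypeInfo's withSubtype(StaticJDBCConnectionSpecification.class, "staticJDBC")
        mapper.registerSubtypes(new NamedType(StaticJDBCConnectionSpecification.class, "staticJDBC"));

        ConnectionSpecification spec = mapper.readValue(
                "{\"_type\":\"staticJDBC\",\"host\":\"localhost\",\"port\":5432,\"databaseName\":\"db\"}",
                ConnectionSpecification.class);
        System.out.println(spec.getClass().getSimpleName()); // StaticJDBCConnectionSpecification
    }
}
```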
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/contract/storeContract.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/contract/storeContract.pure index 34a5a19a1cd..7ad7bcba2a4 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/contract/storeContract.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/contract/storeContract.pure @@ -12,6 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +import meta::relational::contract::*; import meta::pure::router::metamodel::*; import meta::pure::router::systemMapping::tests::*; import meta::relational::mapping::*; @@ -43,6 +44,20 @@ function meta::relational::contract::relationalStoreContract():StoreContract[1] planGraphFetchExecution = meta::relational::contract::planGraphFetchExecution_StoreMappingLocalGraphFetchExecutionNodeGenerationInput_1__LocalGraphFetchExecutionNode_1_, planCrossGraphFetchExecution = meta::relational::contract::planCrossGraphFetchExecution_StoreMappingCrossLocalGraphFetchExecutionNodeGenerationInput_1__LocalGraphFetchExecutionNode_1_, + connectionEquality = { b : Connection [1] | + [ + d: RelationalDatabaseConnection[1]| + let bAsRDB = $b->cast(@RelationalDatabaseConnection); + // the connection element is the store name, and we don't compare those + let comparison = $d.type == $bAsRDB.type && + $d.timeZone == $bAsRDB.timeZone && + $d.quoteIdentifiers == $bAsRDB.quoteIdentifiers && + $d.datasourceSpecification == $bAsRDB.datasourceSpecification && + compareObjectsWithPossiblyNoProperties($d.authenticationStrategy, $bAsRDB.authenticationStrategy) && + postProcessorsMatch($d.postProcessors, $bAsRDB.postProcessors); + ] + }, + supports = meta::relational::contract::supports_FunctionExpression_1__Boolean_1_, supportsStreamFunction = meta::relational::contract::supportsStream_FunctionExpression_1__Boolean_1_, shouldStopRouting = [ @@ -225,6 +240,29 @@ function meta::relational::contract::planExecution(sq:meta::pure::mapping::Store ); } +function meta::relational::contract::postProcessorsMatch(postProcessors1: meta::pure::alloy::connections::PostProcessor[*], postProcessors2: meta::pure::alloy::connections::PostProcessor[*]): Boolean[1] +{ + (($postProcessors1->isEmpty() && $postProcessors2->isEmpty()) + || ($postProcessors1->size() == $postProcessors2->size() + // for now we do a simple type-and-sequence comparison of the post processors; these must match for equality + && $postProcessors1->zip($postProcessors2)->map( postProcessorPair| + $postProcessorPair.first->type() == $postProcessorPair.second->type() + // && - TODO: implement an equality interface on the post processor, to be called here and implemented + // by post processors that want to compare equality on attributes besides type + )->distinct()->remove(true)->isEmpty())) +} + +function meta::relational::contract::compareObjectsWithPossiblyNoProperties(obj1: Any[1], obj2: Any[1]): Boolean[1] +{ + let propertyCountForObj1 = $obj1->type()->cast(@Class)->hierarchicalProperties()->size(); + let propertyCountForObj2 = $obj2->type()->cast(@Class)->hierarchicalProperties()->size(); + + if($propertyCountForObj1 == 0 && $propertyCountForObj2 == 0 + ,| true + ,| $obj1 == $obj2 + ); +} + function meta::relational::contract::planGraphFetchExecution(input: StoreMappingLocalGraphFetchExecutionNodeGenerationInput[1]): LocalGraphFetchExecutionNode[1] { meta::relational::graphFetch::executionPlan::planRootGraphFetchExecutionRelational($input.storeQuery, $input.ext, $input.clusteredTree, $input.orderedPaths, $input.mapping, $input.runtime, $input.exeCtx, $input.enableConstraints, $input.checked, $input.extensions, $input.debug)
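The connectionEquality entry added to the store contract above decides when two RelationalDatabaseConnections are interchangeable: equal type, timeZone, quoteIdentifiers, datasourceSpecification and authenticationStrategy, with the post-processor lists compared pairwise by type only (attribute-level comparison is deferred, as the TODO notes). Roughly the same rule as a Java sketch; the connection class is a hypothetical stand-in, and compareObjectsWithPossiblyNoProperties is simplified to a plain equals:

```java
import java.util.List;
import java.util.Objects;

// hypothetical stand-in for the Pure metamodel class, for illustration only
class RelationalDatabaseConnection
{
    String type;
    String timeZone;
    Boolean quoteIdentifiers;
    Object datasourceSpecification;
    Object authenticationStrategy;
    List<Object> postProcessors;
}

final class ConnectionEquality
{
    static boolean connectionsMatch(RelationalDatabaseConnection a, RelationalDatabaseConnection b)
    {
        // the connection's store element (its name) is deliberately not compared
        return Objects.equals(a.type, b.type)
                && Objects.equals(a.timeZone, b.timeZone)
                && Objects.equals(a.quoteIdentifiers, b.quoteIdentifiers)
                && Objects.equals(a.datasourceSpecification, b.datasourceSpecification)
                && Objects.equals(a.authenticationStrategy, b.authenticationStrategy)
                && postProcessorsMatch(a.postProcessors, b.postProcessors);
    }

    // equal when both lists have the same length and elements match pairwise by concrete type
    static boolean postProcessorsMatch(List<Object> first, List<Object> second)
    {
        if (first.size() != second.size())
        {
            return false;
        }
        for (int i = 0; i < first.size(); i++)
        {
            if (!first.get(i).getClass().equals(second.get(i).getClass()))
            {
                return false;
            }
        }
        return true;
    }
}
```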
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/functions/tests/projection/testFilters.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/functions/tests/projection/testFilters.pure index 05e294b03b3..d317a10f9e4 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/functions/tests/projection/testFilters.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/functions/tests/projection/testFilters.pure @@ -90,3 +90,31 @@ function <> meta::relational::tests::projection::filter::isolation::t assertEquals(['Firm X, UK', 'Firm X, Europe', 'Firm X, Europe', 'Firm X, Europe', 'Firm A, Europe', 'Firm B, Europe', 'Firm C, Europe'],$tds.rows->map(r|$r.values->makeString(', '))); assertEquals('select "root".LEGALNAME as "legalName", case when ("firmpersonbridgetable_0".ADDRESSID = 1 or "addresstable_0".ID = 1) then \'UK\' else \'Europe\' end as "addressName" from firmTable as "root" left outer join (select "firmpersonbridgetable_1".FIRM_ID as FIRM_ID, "persontable_0".ADDRESSID as ADDRESSID from firmPersonBridgeTable as "firmpersonbridgetable_1" inner join personTable as "persontable_0" on ("persontable_0".ID = "firmpersonbridgetable_1".PERSON_ID)) as "firmpersonbridgetable_0" on ("root".ID = "firmpersonbridgetable_0".FIRM_ID) left outer join addressTable as "addresstable_0" on ("addresstable_0".ID = "root".ADDRESSID)', $result->sqlRemoveFormatting()); } + +// Filters with no table alias, e.g. 'Smith' = 'Smith' +function <<test.Test>> meta::relational::tests::projection::filter::isolation::testIsolationOfFiltersWithoutAlias():Boolean[1] +{ + let result1 = execute(|Firm.all()->project([f|$f.employeeByLastName('Smith').address.name, f | $f.employeeByLastName('Smith').firstName],['address', 'employeeFirstName']), meta::relational::tests::mapping::join::model::mapping::MappingWithLiteral, testRuntime(), meta::relational::extension::relationalExtensions()); + let result2 = execute(|Firm.all()->project([f|$f.employeeByLastName('Roberts').address.name, f | $f.employeeByLastName('Roberts').firstName],['address', 'employeeFirstName']), meta::relational::tests::mapping::join::model::mapping::MappingWithLiteral, testRuntime(), meta::relational::extension::relationalExtensions()); + let result3 = execute(|Firm.all()->project([f|$f.employeeByLastName('Roberts').address.name, f | $f.employeeByLastName('Smith').firstName],['address', 'employeeFirstName']), meta::relational::tests::mapping::join::model::mapping::MappingWithLiteral, testRuntime(), meta::relational::extension::relationalExtensions()); + + let tds1 = $result1.values->at(0); + assertEquals(['Hoboken, Peter', 'New York, John', 'New York, John', 'New York, Anthony', 'San Fransisco, Fabrice', 'Hong Kong, Oliver', 'New York, David'], $tds1.rows->map(r|$r.values->makeString(', '))); + assertEquals('select "addresstable_0".NAME as "address", "persontable_0".FIRSTNAME as "employeeFirstName" from firmTable as "root" left outer join personTable as "persontable_0" on ("root".ID = "persontable_0".FIRMID and \'Smith\' = \'Smith\') left outer join addressTable as "addresstable_0" on ("addresstable_0".ID = "persontable_0".ADDRESSID)', $result1->sqlRemoveFormatting()); + + let tds2 = $result2.values->at(0); + assertEquals(['TDSNull, TDSNull', 'TDSNull, TDSNull', 'TDSNull, TDSNull', 'TDSNull, TDSNull'], 
$tds2.rows->map(r|$r.values->makeString(', '))); + assertEquals('select "addresstable_0".NAME as "address", "persontable_0".FIRSTNAME as "employeeFirstName" from firmTable as "root" left outer join personTable as "persontable_0" on ("root".ID = "persontable_0".FIRMID and \'Smith\' = \'Roberts\') left outer join addressTable as "addresstable_0" on ("addresstable_0".ID = "persontable_0".ADDRESSID)', $result2->sqlRemoveFormatting()); + + let tds3 = $result3.values->at(0); + assertEquals(['TDSNull, Peter', 'TDSNull, John', 'TDSNull, John', 'TDSNull, Anthony', 'TDSNull, Fabrice', 'TDSNull, Oliver', 'TDSNull, David'], $tds3.rows->map(r|$r.values->makeString(', '))); + assertEquals('select "addresstable_0".NAME as "address", "persontable_1".FIRSTNAME as "employeeFirstName" from firmTable as "root" left outer join personTable as "persontable_0" on ("root".ID = "persontable_0".FIRMID and \'Smith\' = \'Roberts\') left outer join addressTable as "addresstable_0" on ("addresstable_0".ID = "persontable_0".ADDRESSID) left outer join personTable as "persontable_1" on ("root".ID = "persontable_1".FIRMID and \'Smith\' = \'Smith\')', $result3->sqlRemoveFormatting()); +} + +function <<test.Test>> meta::relational::tests::projection::filter::lessThanEqual::testIsolationOfFiltersWithoutAliasWithChainedJoins():Boolean[1] +{ + let result = execute(|Trade.all()->project([t | $t.id, t| $t.product.synonymByType(ProductSynonymType.CUSIP).name, t| $t.product.synonymByType(ProductSynonymType.ISIN).name], ['tradeId','SynonyName1', 'SynonymName2']), meta::relational::tests::mapping::join::model::mapping::MappingWithLiteral, testRuntime(), meta::relational::extension::relationalExtensions()); + let tds = $result.values->at(0); + assertEquals(['1, CUSIP1, TDSNull', '1, ISIN1, TDSNull', '2, CUSIP1, TDSNull', '2, ISIN1, TDSNull', '3, CUSIP2, TDSNull', '3, ISIN2, TDSNull', '4, CUSIP2, TDSNull', '4, ISIN2, TDSNull', '5, CUSIP2, TDSNull', '5, ISIN2, TDSNull', '6, CUSIP3, TDSNull', '6, ISIN3, TDSNull', '7, CUSIP3, TDSNull', '7, ISIN3, TDSNull', '8, CUSIP3, TDSNull', '8, ISIN3, TDSNull', '9, CUSIP3, TDSNull', '9, ISIN3, TDSNull', '10, CUSIP3, TDSNull', '10, ISIN3, TDSNull', '11, TDSNull, TDSNull'], $tds.rows->map(r|$r.values->makeString(', '))); + assertEquals('select "root".ID as "tradeId", "synonymtable_0".NAME as "SynonyName1", "synonymtable_2".NAME as "SynonymName2" from tradeTable as "root" left outer join productSchema.productTable as "producttable_0" on ("root".prodId = "producttable_0".ID) left outer join (select "synonymtable_1".PRODID as PRODID, "synonymtable_1".NAME as NAME from productSchema.synonymTable as "synonymtable_1" where \'CUSIP\' = \'CUSIP\') as "synonymtable_0" on ("synonymtable_0".PRODID = "producttable_0".ID) left outer join (select "synonymtable_1".PRODID as PRODID, "synonymtable_1".NAME as NAME from productSchema.synonymTable as "synonymtable_1" where \'CUSIP\' = \'ISIN\') as "synonymtable_2" on ("synonymtable_2".PRODID = "producttable_0".ID)', $result->sqlRemoveFormatting()); +} \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/postprocessor/defaultPostProcessor/processInOperation.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/postprocessor/defaultPostProcessor/processInOperation.pure index 48938985b61..7f01ed701e0 100644 --- 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/postprocessor/defaultPostProcessor/processInOperation.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/postprocessor/defaultPostProcessor/processInOperation.pure @@ -52,6 +52,11 @@ function <> meta::relational::postProcessor::prefixForWrapperAll 'inFilterClause_'; } +function <<access.private>> meta::relational::postProcessor::processedTempTableNameForIn(dbType: DatabaseType[1]):String[1] +{ + $dbType->createDbConfig([]).procesTempTableName('tempTableForIn_'); +} + function meta::relational::postProcessor::processInOperation(query:SQLQuery[1], runtime:Runtime[1], store:Database[0..1], exeCtx:ExecutionContext[1], extensions:Extension[*]):PostProcessorResult[1] { let connection = $runtime->connectionByElement($store->toOne())->meta::relational::mapping::updateConnection($extensions)->cast(@DatabaseConnection); @@ -68,12 +73,12 @@ function meta::relational::postProcessor::processInOperation(query:SQLQuery[1], d : DynaFunction[1] | if($d.name == 'in' && $d.parameters->at(1)->instanceOf(LiteralList) && ($d.parameters->at(1)->cast(@LiteralList).values->size() > $dbThreshold->toOne()), | let dbType = $connection.type; - let tempTableName = 'tempTableForIn_' + $uniqueId->toString(); + let tempTableName = $dbType->processedTempTableNameForIn() + $uniqueId->toString(); let tempTableColumnName = 'ColumnForStoringInCollection'; let firstLiteralValue = $d.parameters->at(1)->cast(@LiteralList).values->map(l | $l.value)->at(0); let collectionValueType = if($firstLiteralValue->instanceOf(VarPlaceHolder), | $firstLiteralValue->cast(@VarPlaceHolder).type, | $firstLiteralValue->type()); - let selectSQLQuery = generateTempTableSelectSQLQuery('default', $dbType->createDbConfig([]).procesTempTableName($tempTableName), $tempTableColumnName, meta::relational::transform::fromPure::pureTypeToDataTypeMap()->get($collectionValueType)->translateCoreTypeToDbSpecificType(^TranslationContext(dbType=$dbType))->toOne()); + let selectSQLQuery = generateTempTableSelectSQLQuery('default', $tempTableName, $tempTableColumnName, meta::relational::transform::fromPure::pureTypeToDataTypeMap()->get($collectionValueType)->translateCoreTypeToDbSpecificType(^TranslationContext(dbType=$dbType))->toOne()); ^$d(parameters = [$d.parameters->at(0), $selectSQLQuery]);, |$d);, @@ -112,8 +117,8 @@ function meta::relational::postProcessor::generatePostProcessorResult(changedFun let outerAllocationNodeName = if($newInFunction.parameters->at(1)->instanceOf(VarPlaceHolder), |$newInFunction.parameters->at(1)->cast(@VarPlaceHolder).name, - |$newInFunction.parameters->at(1)->cast(@SelectSQLQuery).data.alias.name->toOne()->replace('tempTableForIn_', prefixForWrapperAllocationNodeName())); - let tempTableName = $dbType->createDbConfig([]).procesTempTableName($outerAllocationNodeName->replace(prefixForWrapperAllocationNodeName(), 'tempTableForIn_')); + |$newInFunction.parameters->at(1)->cast(@SelectSQLQuery).data.alias.name->toOne()->replace($dbType->processedTempTableNameForIn(), prefixForWrapperAllocationNodeName())); + let tempTableName = $outerAllocationNodeName->replace(prefixForWrapperAllocationNodeName(), $dbType->processedTempTableNameForIn()); let tempTableColumnName = 'ColumnForStoringInCollection'; let allocationNodeName = 
$outerAllocationNodeName->replace(prefixForWrapperAllocationNodeName(), 'tempVarForIn_'); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/pureToSQLQuery/pureToSQLQuery.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/pureToSQLQuery/pureToSQLQuery.pure index 1c9b69f20cf..9bb692ea631 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/pureToSQLQuery/pureToSQLQuery.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/pureToSQLQuery/pureToSQLQuery.pure @@ -857,7 +857,7 @@ function meta::relational::functions::pureToSqlQuery::processQualifiedPropertyFu function meta::relational::functions::pureToSqlQuery::tdsQualifier(fe:FunctionExpression[1], operation:SelectWithCursor[1], vars:Map[1], state:State[1], extensions:Extension[*]):OperationWithParentPropertyMapping[1] { - let tdsProperties = ['getNumber', 'getInteger', 'getString', 'getFloat', 'getDate', 'getBoolean', 'getEnum', 'getDateTime', 'getStrictDate', 'isNull', 'isNotNull']; + let tdsProperties = ['getNumber', 'getInteger', 'getString', 'getFloat', 'getDate', 'getDecimal', 'getBoolean', 'getEnum', 'getDateTime', 'getStrictDate', 'isNull', 'isNotNull']; let funcName = $fe.func.functionName->toOne(); let valid = $tdsProperties->contains($funcName); @@ -2368,7 +2368,7 @@ function <> meta::relational::functions::pureToSqlQuery::process let functionCount = $functions->size(); let allFunctionsSubtypes = $functions->size()->range()->map(ind | let functionType = $functions->at($ind)->meta::pure::functions::meta::functionType().parameters->at(0).genericType.rawType->toOne(); assertFalse($functionType->instanceOf(PrimitiveType), 'Match does not support checking on primitive data type..! 
Currently checking on : ' + $functionType->toString()); - if($functionType->isAnyClass(), + if($functionType == Any, | assert($ind == $functionCount-1, 'Any should be used as a default type at the end of match'); [];, | @@ -2413,11 +2413,14 @@ function meta::relational::functions::pureToSqlQuery::processPlus(f:FunctionExpr function meta::relational::functions::pureToSqlQuery::processParseDate(f:FunctionExpression[1], currentPropertyMapping:PropertyMapping[*], operation:SelectWithCursor[1], vars:Map[1], state:State[1], joinType:JoinType[1], nodeId:String[1], aggFromMap:List[1], context:DebugContext[1], extensions:Extension[*]):RelationalOperationElement[1] { let formatInstance = ^InstanceValue(multiplicity = PureOne, genericType = ^GenericType(rawType=String), values = 'YYYY-MM-DD HH24:MI:SS'); - let dynaFuncName = 'toTimestamp'; - let oldFunc = $f.func; - let newFunc = ^$oldFunc(functionName=$dynaFuncName); - let functionExpression = ^$f(func = $newFunc, parametersValues=$f.parametersValues->concatenate($formatInstance)); - $functionExpression->processDynaFunction($currentPropertyMapping, $operation, $vars, $state, $joinType, $nodeId, $aggFromMap, $context, $extensions); + + processDynafuncWithRename('toTimestamp', ^$f(parametersValues=$f.parametersValues->concatenate($formatInstance)), $currentPropertyMapping, $operation, $vars, $state, $joinType, $nodeId, $aggFromMap, $context, $extensions); +} + +// month is translated to the monthName dyna function because "month" is already handled as monthNumber in the dialects +function meta::relational::functions::pureToSqlQuery::processMonth(f:FunctionExpression[1], currentPropertyMapping:PropertyMapping[*], operation:SelectWithCursor[1], vars:Map[1], state:State[1], joinType:JoinType[1], nodeId:String[1], aggFromMap:List[1], context:DebugContext[1], extensions:Extension[*]):RelationalOperationElement[1] +{ + processDynafuncWithRename('monthName', $f, $currentPropertyMapping, $operation, $vars, $state, $joinType, $nodeId, $aggFromMap, $context, $extensions) } function meta::relational::functions::pureToSqlQuery::processFirstNotNull(f:FunctionExpression[1], currentPropertyMapping:PropertyMapping[*], operation:SelectWithCursor[1], vars:Map[1], state:State[1], joinType:JoinType[1], nodeId:String[1], aggFromMap:List[1], context:DebugContext[1], extensions:Extension[*]):RelationalOperationElement[1] @@ -2434,8 +2437,13 @@ function meta::relational::functions::pureToSqlQuery::processFirstNotNull(f:Func a:Any[*] | fail($error) ]); + processDynafuncWithRename('coalesce', $f, $currentPropertyMapping, $operation, $vars, $state, $joinType, $nodeId, $aggFromMap, $context, $extensions); +} + +function meta::relational::functions::pureToSqlQuery::processDynafuncWithRename(newName:String[1], f:FunctionExpression[1], currentPropertyMapping:PropertyMapping[*], operation:SelectWithCursor[1], vars:Map[1], state:State[1], joinType:JoinType[1], nodeId:String[1], aggFromMap:List[1], context:DebugContext[1], extensions:Extension[*]):RelationalOperationElement[1] +{ let oldFunc = $f.func; - let newFunc = ^$oldFunc(functionName = 'coalesce'); + let newFunc = ^$oldFunc(functionName = $newName); processDynaFunction(^$f(func = $newFunc), $currentPropertyMapping, $operation, $vars, $state, $joinType, $nodeId, $aggFromMap, $context, $extensions); } @@ -4491,7 +4499,13 @@ function <> meta::relational::functions::pureToSqlQuery::process let select = $mainQuery.select->cast(@TdsSelectSqlQuery); let funcParams = $f->genericType().typeArguments.rawType->cast(@FunctionType).parameters->tail(); let 
updatedState = if(!$vars->keys()->isEmpty(),|updateFunctionParamScope($state, $funcParams->evaluateAndDeactivate(),$operation),|$state); - let cols = $f->instanceValuesAtParameter(1, $vars, $updatedState.inScopeVars)->cast(@BasicColumnSpecification); + let cols = $f->instanceValuesAtParameter(1, $vars, $updatedState.inScopeVars)->map(col | + $col->match([ + b:BasicColumnSpecification[1] | $b, + s:SimpleFunctionExpression[1] | $s->reactivate()->cast(@BasicColumnSpecification) + ]) + ); + + let newCols = $cols->map(cs| let newElement = processTdsLambda($cs.func->cast(@FunctionDefinition).expressionSequence->at(0), $mainQuery.select.columns->cast(@Alias), false, $vars, $updatedState, $currentPropertyMapping, $context)->at(0); let alias = ^Alias(name='"'+$cs.name+'"', relationalElement=$newElement); @@ -5144,6 +5158,9 @@ function meta::relational::functions::pureToSqlQuery::processTdsLambda(mapFn:Val ^PureFunctionTDSToRelationalFunctionPair(first = meta::pure::tds::extensions::firstNotNull_T_MANY__T_$0_1$_, second = {| newDynaFunction('coalesce', $f.parametersValues->at(0)->processTdsLambda($a, $returnColumnName, $vars, $state, $currentPropertyMapping, $context)) }), + ^PureFunctionTDSToRelationalFunctionPair(first = meta::pure::functions::date::month_Date_1__Month_1_, second = {| + newDynaFunction('monthName', $f.parametersValues->at(0)->processTdsLambda($a, $returnColumnName, $vars, $state, $currentPropertyMapping, $context)) + }), ^PureFunctionTDSToRelationalFunctionPair(first = splitPart_String_$0_1$__String_1__Integer_1__String_$0_1$_, second = {| newDynaFunction('splitPart', [ $f.parametersValues->at(0)->processTdsLambda($a, $returnColumnName, $vars, $state, $currentPropertyMapping, $context)->toOne(), @@ -5912,7 +5929,10 @@ function meta::relational::functions::pureToSqlQuery::findBestNodeToIsolate(sele { let joinThreads = ^List(values=$select.data)->buildThreads(); - let aliasesWithConstraints = $select.savedFilteringOperation.second->extractTableAliasColumns().alias->removeDuplicates(); + // For filters with aliases (e.g. a.x = 'y'), fetch aliases from the relationalOperationElement; for filters without aliases (e.g. 'x' = 'x'), fetch them from the treeNode. 
+ let aliasesWithConstraints = $select.savedFilteringOperation->map(x | let aliasFromFilters = $x.second->extractTableAliasColumns().alias; + if($aliasFromFilters->isNotEmpty(), |$aliasFromFilters, | $x.first.alias); + )->removeDuplicates(); let filterThreadsWithAllConditions = $joinThreads->filter(thread| $thread->matchAllAliases($aliasesWithConstraints)); @@ -7675,6 +7695,7 @@ function meta::relational::functions::pureToSqlQuery::getSupportedFunctions():Ma ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::minute_Date_1__Integer_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::second_Date_1__Integer_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::monthNumber_Date_1__Integer_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), + ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::month_Date_1__Month_1_,second=meta::relational::functions::pureToSqlQuery::processMonth_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::quarterNumber_Date_1__Integer_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::dateDiff_Date_1__Date_1__DurationUnit_1__Integer_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::date::adjust_Date_1__Integer_1__DurationUnit_1__Date_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), @@ -7714,6 +7735,10 @@ function meta::relational::functions::pureToSqlQuery::getSupportedFunctions():Ma ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::trim_String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), 
^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::ltrim_String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::rtrim_String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), + ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::lpad_String_1__Integer_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), + ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::lpad_String_1__Integer_1__String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), + ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::rpad_String_1__Integer_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), + ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::rpad_String_1__Integer_1__String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::toString_Any_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::replace_String_1__String_1__String_1__String_1_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), ^PureFunctionToRelationalFunctionPair(first=meta::pure::functions::string::repeatString_String_$0_1$__Integer_1__String_$0_1$_,second=meta::relational::functions::pureToSqlQuery::processDynaFunction_FunctionExpression_1__PropertyMapping_MANY__SelectWithCursor_1__Map_1__State_1__JoinType_1__String_1__List_1__DebugContext_1__Extension_MANY__RelationalOperationElement_1_), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/relationalExtension.pure 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/relationalExtension.pure
index 595ad0f131d..96e257fa526 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/relationalExtension.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/relationalExtension.pure
@@ -471,6 +471,16 @@ function <> meta::relational::functions::typeInference::getDynaF
         ])
     ),
+    pair(
+        'dayOfWeek',
+        list([
+            pair(
+                {params: RelationalOperationElement[*] | true},
+                {params: RelationalOperationElement[*] | ^meta::relational::metamodel::datatype::Varchar(size = 9)}
+            )
+        ])
+    ),
+
     pair(
         'dayOfWeekNumber',
         list([
@@ -889,6 +899,26 @@ function <> meta::relational::functions::typeInference::getDynaF
         ])
     ),
+    pair(
+        'lpad',
+        list([
+            pair(
+                {params: RelationalOperationElement[*] | true},
+                {params: RelationalOperationElement[*] | ^meta::relational::metamodel::datatype::Varchar(size = $params->at(1)->match([l:Literal[1] | $l.value->match([i:Integer[1] | $i])]))}
+            )
+        ])
+    ),
+
+    pair(
+        'rpad',
+        list([
+            pair(
+                {params: RelationalOperationElement[*] | true},
+                {params: RelationalOperationElement[*] | ^meta::relational::metamodel::datatype::Varchar(size = $params->at(1)->match([l:Literal[1] | $l.value->match([i:Integer[1] | $i])]))}
+            )
+        ])
+    ),
+
     pair(
         'max',
         list([
@@ -967,6 +997,16 @@ function <> meta::relational::functions::typeInference::getDynaF
         ])
     ),
+    pair(
+        'monthName',
+        list([
+            pair(
+                {params: RelationalOperationElement[*] | true},
+                {params: RelationalOperationElement[*] | ^meta::relational::metamodel::datatype::Varchar(size = 9)}
+            )
+        ])
+    ),
+
     pair(
         'monthNumber',
         list([
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/authenticationStrategy.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/authenticationStrategy.pure
index cea7442a70b..46f7b8f2500 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/authenticationStrategy.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/authenticationStrategy.pure
@@ -23,23 +23,22 @@ Class <> meta::pure::alloy::connections::alloy::authent
 Class meta::pure::alloy::connections::alloy::authentication::DelegatedKerberosAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-    serverPrincipal: String[0..1];
+    <<equality.Key>> serverPrincipal: String[0..1];
 }

 Class {doc.doc = 'Authentication using a middle tier user/password'} meta::pure::alloy::connections::alloy::authentication::MiddleTierUserNamePasswordAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-    {doc.doc = 'Username/pasword vault reference'}
-    vaultReference: String[1];
+    <<equality.Key>> {doc.doc = 'Username/password vault reference'} vaultReference: String[1];
 }

 Class meta::pure::alloy::connections::alloy::authentication::UserNamePasswordAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-    baseVaultReference: String[0..1];
-    userNameVaultReference: String[1];
-    passwordVaultReference: String[1];
+    <<equality.Key>> baseVaultReference: String[0..1];
+    <<equality.Key>> userNameVaultReference: String[1];
+    <<equality.Key>> passwordVaultReference: String[1];
 }

 Class meta::pure::alloy::connections::alloy::authentication::GCPApplicationDefaultCredentialsAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
@@ -52,7 +51,7 @@ Class meta::pure::alloy::connections::alloy::authentication::DefaultH2Authentica
 Class meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-    apiToken:String[1];
+    <<equality.Key>> apiToken:String[1];
 }

 Class meta::pure::alloy::connections::alloy::authentication::TestDatabaseAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy
@@ -61,6 +60,6 @@ Class meta::pure::alloy::connections::alloy::authentication::TestDatabaseAuthent
 Class meta::pure::alloy::connections::alloy::authentication::GCPWorkloadIdentityFederationAuthenticationStrategy extends meta::pure::alloy::connections::alloy::authentication::AuthenticationStrategy
 {
-    serviceAccountEmail : String[1];
-    additionalGcpScopes: String[*];
+    <<equality.Key>> serviceAccountEmail : String[1];
+    <<equality.Key>> additionalGcpScopes: String[*];
 }
\ No newline at end of file
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/datasourceSpecification.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/datasourceSpecification.pure
index cc96e0c03b0..0eec4ed5d67 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/datasourceSpecification.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/runtime/connection/datasourceSpecification.pure
@@ -18,20 +18,21 @@ Class <> meta::pure::alloy::connections::alloy::specific
 Class meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-    host: String[1];
-    port: Integer[1];
-    databaseName: String[1];
+    <<equality.Key>> host: String[1];
+    <<equality.Key>> port: Integer[1];
+    <<equality.Key>> databaseName: String[1];
 }

 Class meta::pure::alloy::connections::alloy::specification::EmbeddedH2DatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-    databaseName:String[1];
-    directory:String[1];
-    autoServerMode:Boolean[1];
+    <<equality.Key>> databaseName:String[1];
+    <<equality.Key>> directory:String[1];
+    <<equality.Key>> autoServerMode:Boolean[1];
+
 }

 Class meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification extends meta::pure::alloy::connections::alloy::specification::DatasourceSpecification
 {
-    testDataSetupCsv:String[0..1];
-    testDataSetupSqls:String[*];
+    <<equality.Key>> testDataSetupCsv:String[0..1];
+    <<equality.Key>> testDataSetupSqls:String[*];
 }
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbExtension.pure
index b41a4d9740f..deb07bafd16 100644
--- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbExtension.pure
+++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbExtension.pure
@@ -609,6 +609,19 @@ function meta::relational::functions::sqlQueryToString::wrapAsBooleanOperation(e
     if ($e->isBooleanOperation($extensions), | $e, | ^DynaFunction(name ='equal', parameters=[$e, ^Literal(value=true)]););
 }

+function meta::relational::functions::sqlQueryToString::maybeWrapAsBooleanCaseOperation(e:RelationalOperationElement[1], sgc:SqlGenerationContext[1]):RelationalOperationElement[1]
+{
+    if ($sgc.dbConfig.dbExtension.isBooleanLiteralSupported,
+        |$e,
+        |$e->wrapAsBooleanCaseOperation($sgc.extensions)
+    );
+}
+
+function meta::relational::functions::sqlQueryToString::wrapAsBooleanCaseOperation(e:RelationalOperationElement[1], extensions:Extension[*]):RelationalOperationElement[1]
+{
+    if ($e->isBooleanOperation($extensions), | ^DynaFunction(name='case', parameters = [$e, ^Literal(value=true), ^Literal(value=false)]), | $e);
+}
+
 function meta::relational::functions::sqlQueryToString::isBooleanOperation(relationalElement:RelationalOperationElement[1], extensions:Extension[*]):Boolean[1]
 {
     $relationalElement->match($extensions.moduleExtension('relational')->cast(@RelationalExtension).sqlQueryToString_isBooleanOperation->concatenate([
@@ -690,7 +703,9 @@ function <> meta::relational::functions::sqlQueryToString::proce
             | if($func.name == 'if',
                 | $func.parameters->head()->map(p | $p->maybeWrapAsBooleanOperation($sgc))->concatenate($func.parameters->tail()),
-                | $func.parameters
+                | if (!$func->isBooleanOperation($sgc.extensions),
+                    | $func.parameters->map(p | $p->maybeWrapAsBooleanCaseOperation($sgc)),
+                    | $func.parameters)
             ));
     let config = $sgc.config;
     let generationState = $sgc.generationState;
@@ -958,6 +973,7 @@ Enum meta::relational::functions::sqlQueryToString::DynaFunctionRegistry
     lessThanEqual,
     log,
     log10,
+    lpad,
     ltrim,
     matches,
     max,
@@ -967,6 +983,7 @@ Enum meta::relational::functions::sqlQueryToString::DynaFunctionRegistry
     minute,
     mod,
     month,
+    monthName,
     monthNumber,
     mostRecentDayOfWeek,
     not,
@@ -995,6 +1012,7 @@ Enum meta::relational::functions::sqlQueryToString::DynaFunctionRegistry
     right,
     round,
     rowNumber,
+    rpad,
     rtrim,
     second,
     sha1,
diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/db2/db2Extension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/db2/db2Extension.pure
index 34468b81f17..c059e176ac7 100644
---
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/db2/db2Extension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/db2/db2Extension.pure @@ -87,6 +87,7 @@ function <> meta::relational::functions::sqlQueryToString::db2:: dynaFnToSql('convertDateTime', $allStates, ^ToSql(format='%s' , transform={p:String[*] | $p->convertToDateTimeDB2()})), dynaFnToSql('convertVarchar128', $allStates, ^ToSql(format='cast(%s as VARCHAR(128))')), dynaFnToSql('datePart', $allStates, ^ToSql(format='date(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='dayofweek_iso(%s)')), dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='dayofyear(%s)')), dynaFnToSql('firstDayOfMonth', $allStates, ^ToSql(format='date(1) + (year(%s)-1) YEARS + (month(%s)-1) MONTHS', transform={p:String[1] | $p->repeat(2)})), @@ -102,6 +103,7 @@ function <> meta::relational::functions::sqlQueryToString::db2:: dynaFnToSql('md5', $allStates, ^ToSql(format='hash_md5(%s)')), dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='monthname(%s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='%s + case when %s - dayofweek(%s) > 0 then %s - dayofweek(%s) - 7 else %s - dayofweek(%s) end DAY', transform={p:String[1..2] | $p->formatMostRecentDb2('current date')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='current timestamp')), diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension1_4_200.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension1_4_200.pure index 14e0ce2d648..03f6056e828 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension1_4_200.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension1_4_200.pure @@ -80,6 +80,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('dateDiff', $allStates, ^ToSql(format='datediff(%s,%s,%s)', transform={p:String[*]|[$p->at(2)->replace('\'', '')->processDateDiffDurationUnitForH2(),$p->at(0),$p->at(1)]})), dynaFnToSql('datePart', $allStates, ^ToSql(format='cast(truncate(%s) as date)')), dynaFnToSql('dayOfMonth', $allStates, ^ToSql(format='DAY_OF_MONTH(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='%s',transform={p:String[1..2]| if($p->size()==1,| 'DAY_OF_WEEK('+$p->at(0)+')',|$p->dayOfWeekNumberH2());})), dynaFnToSql('dayOfYear', $allStates, 
^ToSql(format='DAY_OF_YEAR(%s)')), dynaFnToSql('decodeBase64', $allStates, ^ToSql(format='legend_h2_extension_base64_decode(%s)')), @@ -105,6 +106,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='monthname(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='dateadd(DAY, case when %s - DAY_OF_WEEK(%s) > 0 then %s - DAY_OF_WEEK(%s) - 7 else %s - DAY_OF_WEEK(%s) end, %s)', transform={p:String[1..2] | $p->formatMostRecentH2('current_date()')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='current_timestamp()')), dynaFnToSql('parseDate', $allStates, ^ToSql(format='parsedatetime(%s,%s)')), @@ -133,8 +135,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('toString', $allStates, ^ToSql(format='cast(%s as varchar)')), dynaFnToSql('toTimestamp', $allStates, ^ToSql(format='%s', transform={p:String[2] | $p->transformToTimestampH2()})), dynaFnToSql('weekOfYear', $allStates, ^ToSql(format='week(%s)')), - dynaFnToSql('year', $allStates, ^ToSql(format='year(%s)')), - dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')) + dynaFnToSql('year', $allStates, ^ToSql(format='year(%s)')) ]; } diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension2_1_214.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension2_1_214.pure index 599e8306737..0e9124cb439 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension2_1_214.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/dbSpecific/h2/h2Extension2_1_214.pure @@ -121,8 +121,6 @@ function <> meta::relational::functions::sqlQueryToString::h2::v let falsesList = ['n', '0', 'false']; let bitStr = $s->trim()->toLower(); - println('hello'); - let bitVal = if($bitStr->in($truesList), | 'true', | if($bitStr->in($falsesList), @@ -191,6 +189,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('dateDiff', $allStates, ^ToSql(format='datediff(%s,%s,%s)', transform={p:String[*]|[$p->at(2)->replace('\'', '')->processDateDiffDurationUnitForH2(),$p->at(0),$p->at(1)]})), dynaFnToSql('datePart', $allStates, ^ToSql(format='cast(truncate(%s) as date)')), dynaFnToSql('dayOfMonth', $allStates, ^ToSql(format='DAY_OF_MONTH(%s)')), + dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')), dynaFnToSql('dayOfWeekNumber', $allStates, ^ToSql(format='%s',transform={p:String[1..2]| if($p->size()==1,| 'DAY_OF_WEEK('+$p->at(0)+')',|$p->dayOfWeekNumberH2());})), dynaFnToSql('dayOfYear', $allStates, ^ToSql(format='DAY_OF_YEAR(%s)')), dynaFnToSql('decodeBase64', $allStates, ^ToSql(format='legend_h2_extension_base64_decode(%s)')), @@ -220,6 +219,7 @@ function <> 
meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('mod', $allStates, ^ToSql(format='mod(%s,%s)')), dynaFnToSql('month', $allStates, ^ToSql(format='month(%s)')), dynaFnToSql('monthNumber', $allStates, ^ToSql(format='month(%s)')), + dynaFnToSql('monthName', $allStates, ^ToSql(format='monthname(%s)')), dynaFnToSql('mostRecentDayOfWeek', $allStates, ^ToSql(format='dateadd(DAY, case when %s - DAY_OF_WEEK(%s) > 0 then %s - DAY_OF_WEEK(%s) - 7 else %s - DAY_OF_WEEK(%s) end, %s)', transform={p:String[1..2] | $p->formatMostRecentH2('current_date()')}, parametersWithinWhenClause = [false, false])), dynaFnToSql('now', $allStates, ^ToSql(format='current_timestamp()')), dynaFnToSql('parseDate', $allStates, ^ToSql(format='cast(parsedatetime(%s,%s) as timestamp)')), @@ -248,8 +248,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v dynaFnToSql('toFloat', $allStates, ^ToSql(format='cast(%s as double precision)')), dynaFnToSql('toTimestamp', $allStates, ^ToSql(format='%s', transform={p:String[2] | $p->transformToTimestampH2()})), dynaFnToSql('weekOfYear', $allStates, ^ToSql(format='week(%s)')), - dynaFnToSql('year', $allStates, ^ToSql(format='year(%s)')), - dynaFnToSql('dayOfWeek', $allStates, ^ToSql(format='dayname(%s)')) + dynaFnToSql('year', $allStates, ^ToSql(format='year(%s)')) ]; } @@ -459,25 +458,32 @@ function <> meta::relational::functions::sqlQueryToString::h2::v extensions:Extension[*] ): String[1] { - let opStr = - if($s.filteringOperation->isEmpty(), - | '', - | $s.filteringOperation->map(s | $s->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), ^$config(callingFromFilter = true), $extensions))->filter(s|$s != '')->joinStrings(' <||> ') - ); - let havingStr = - if($s.havingOperation->isEmpty(), - | '', - | $s.havingOperation->map(s|$s->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), $config, $extensions))->filter(s|$s != '')->joinStrings(' <||> ') - ); + if($s.data.childrenData->match([jtn:JoinTreeNode[1]|$jtn.joinType == JoinType.FULL_OUTER, a:Any[*]|false]) , + | + // H2 doesn't support FULL_OUTER join, so we need to convert/emulate it so people can write unit tests... 
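+ // As an illustrative sketch only (hypothetical tables t1/t2 joined on key k; the actual rewrite is
+ // performed by convertFullOuterJoinToLeftRightOuter, defined in extensionDefaults.pure below):
+ //     select ... from t1 full outer join t2 on t1.k = t2.k
+ // becomes
+ //     select ... from t1 left outer join t2 on t1.k = t2.k
+ //     union all
+ //     select ... from (select t1.*, 1 as "_lhsJoinValid_t1" from t1) as t1x
+ //       right outer join t2 on t1x.k = t2.k
+ //     where not (t1x."_lhsJoinValid_t1" = 1) -- rendered null-safely, so only rows with no LHS match survive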
+ $s->meta::relational::functions::sqlQueryToString::default::convertFullOuterJoinToLeftRightOuter()->processOperation($dbConfig, $format->indent(), $extensions); + , + | + let opStr = + if($s.filteringOperation->isEmpty(), + | '', + | $s.filteringOperation->map(s | $s->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), ^$config(callingFromFilter = true), $extensions))->filter(s|$s != '')->joinStrings(' <||> ') + ); + let havingStr = + if($s.havingOperation->isEmpty(), + | '', + | $s.havingOperation->map(s|$s->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), $config, $extensions))->filter(s|$s != '')->joinStrings(' <||> ') + ); - $format.separator + 'select ' + processTop($s, $format, $dbConfig, $extensions) + if($s.distinct == true,|'distinct ',|'') + - processSelectColumnsH2($s.columns, $dbConfig, $format->indent(), true, $extensions) + - if($s.data == [],|'',| ' ' + $format.separator + 'from ' + $s.data->toOne()->processJoinTreeNodeH2([], $dbConfig, $format->indent(), [], $extensions)) + - if (eq($opStr, ''), |'', | ' ' + $format.separator + 'where ' + $opStr) + - if ($s.groupBy->isEmpty(),|'',| ' ' + $format.separator + 'group by '+$s.groupBy->processGroupByColumns($dbConfig, $format->indent(), true, $extensions)->makeString(','))+ - if (eq($havingStr, ''), |'', | ' ' + $format.separator + 'having ' + $havingStr) + - if ($s.orderBy->isEmpty(),|'',| ' ' + $format.separator + 'order by '+ $s.orderBy->processOrderBy($dbConfig, $format->indent(), $config, $extensions)->makeString(','))+ - + processLimit($s, $dbConfig, $format, $extensions, [], processSliceOrDropForH2_SelectSQLQuery_1__Format_1__DbConfig_1__Extension_MANY__Any_1__String_1_); + $format.separator + 'select ' + processTop($s, $format, $dbConfig, $extensions) + if($s.distinct == true,|'distinct ',|'') + + processSelectColumnsH2($s.columns, $dbConfig, $format->indent(), true, $extensions) + + if($s.data == [],|'',| ' ' + $format.separator + 'from ' + $s.data->toOne()->processJoinTreeNodeH2([], $dbConfig, $format->indent(), [], $extensions)) + + if (eq($opStr, ''), |'', | ' ' + $format.separator + 'where ' + $opStr) + + if ($s.groupBy->isEmpty(),|'',| ' ' + $format.separator + 'group by '+$s.groupBy->processGroupByColumns($dbConfig, $format->indent(), true, $extensions)->makeString(','))+ + if (eq($havingStr, ''), |'', | ' ' + $format.separator + 'having ' + $havingStr) + + if ($s.orderBy->isEmpty(),|'',| ' ' + $format.separator + 'order by '+ $s.orderBy->processOrderBy($dbConfig, $format->indent(), $config, $extensions)->makeString(','))+ + + processLimit($s, $dbConfig, $format, $extensions, [], processSliceOrDropForH2_SelectSQLQuery_1__Format_1__DbConfig_1__Extension_MANY__Any_1__String_1_); + ); } /* @@ -599,6 +605,7 @@ function <> meta::relational::functions::sqlQueryToString::h2::v j:JoinTreeNode[1] | $j.join->otherTableFromAlias($parent->toOne()); ] ); + let val = $joinTreeNode->match( [ r:RootJoinTreeNode[1] | @@ -606,12 +613,19 @@ function <> meta::relational::functions::sqlQueryToString::h2::v ->map(a|^$a(name = '"' + $a.name + '"')) //Not sure why this is necessary, but it's retained to keep the generated SQL the same as previously (and does no real harm) ->processOperation($dbConfig, $format->indent(), $extensions), j:JoinTreeNode[1] | - if($j.joinType == JoinType.INNER,| ' ' + $format.separator() + 'inner join ',|if($j.joinType == JoinType.LEFT_OUTER,| ' ' + $format.separator() + 'left outer join ',| ' ' + $format.separator() + 'right outer join ')) + if($j.joinType == 
JoinType.FULL_OUTER, + | + // This should have been converted earlier to avoid a FULL_OUTER reaching this point + fail($j.joinType->toOne()->toString() + ' join not supported in H2'); ''; + , + | + $j.joinType->map(jt|$jt->meta::relational::functions::sqlQueryToString::default::processJoinType($dbConfig, $format, $extensions))->orElse('') + if ($j.lateral == true, | 'lateral ', | '') + $j.alias - ->map(a|^$a(name = '"' + $a.name + '"')) //Not sure why this is necessary, but it's retained to keep the generated SQL the same as previously (and does no real harm) - ->toOne()->processOperation($dbConfig, $format->indent(), $extensions) + $format.separator() - + ' ' + 'on (' + $j.join.operation->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), ^Config(), $extensions) + ')';, + ->map(a|^$a(name = '"' + $a.name + '"')) //Not sure why this is necessary, but it's retained to keep the generated SQL the same as previously (and does no real harm) + ->toOne()->processOperation($dbConfig, $format->indent(), $extensions) + $format.separator() + + ' ' + 'on (' + $j.join.operation->wrapH2Boolean($extensions)->processOperation($dbConfig, $format->indent(), ^Config(), $extensions) + ')'; + ), a:Any[1] | '' ] ); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/extensionDefaults.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/extensionDefaults.pure index df3fb18604e..7a1fabd8994 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/extensionDefaults.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/extensionDefaults.pure @@ -200,6 +200,7 @@ function meta::relational::functions::sqlQueryToString::default::getDynaFunction dynaFnToSql('lessThanEqual', $allStates, ^ToSql(format='%s <= %s')), dynaFnToSql('log', $allStates, ^ToSql(format='ln(%s)')), dynaFnToSql('log10', $allStates, ^ToSql(format='log10(%s)')), + dynaFnToSql('lpad', $allStates, ^ToSql(format='lpad(%s)', transform={p:String[*] | $p->joinStrings(', ')})), dynaFnToSql('ltrim', $allStates, ^ToSql(format='ltrim(%s)')), dynaFnToSql('max', $allStates, ^ToSql(format='max(%s)')), dynaFnToSql('md5', $allStates, ^ToSql(format='md5(%s)')), @@ -215,6 +216,7 @@ function meta::relational::functions::sqlQueryToString::default::getDynaFunction dynaFnToSql('repeatString', $allStates, ^ToSql(format='repeat(%s, %s)')), dynaFnToSql('replace', $allStates, ^ToSql(format='replace(%s, %s, %s)')), dynaFnToSql('reverseString', $allStates, ^ToSql(format='reverse(%s)')), + dynaFnToSql('rpad', $allStates, ^ToSql(format='rpad(%s)', transform={p:String[*] | $p->joinStrings(', ')})), dynaFnToSql('rtrim', $allStates, ^ToSql(format='rtrim(%s)')), dynaFnToSql('rowNumber', $allStates, ^ToSql(format='row_number()')), dynaFnToSql('sha1', $allStates, ^ToSql(format='sha1(%s)')), @@ -297,9 +299,56 @@ function meta::relational::functions::sqlQueryToString::default::processSelectCo ); } +function meta::relational::functions::sqlQueryToString::default::convertFullOuterJoinToLeftRightOuter(s:SelectSQLQuery[1]) : 
RelationalOperationElement[1] +{ + // Emulate full outer join using a union of both a LEFT_OUTER and a RIGHT_OUTER join + // https://en.wikipedia.org/wiki/Join_(SQL)#Full_outer_join + // + // for the RIGHT_OUTER part of the query, we need to filter out rows successfully joined from the LHS (to avoid duplicates + // from the LEFT_OUTER query). Given we don't easily know the join keys / PK, we do this by adding a synthetic constant column to + // the LHS query and then filtering on where this synthetic key is null + + ^UnionAll( + queries = [ + ^$s(data = $s.data->map(d|^$d(childrenData = $d.childrenData->cast(@JoinTreeNode)->map(cd|^$cd(joinType = JoinType.LEFT_OUTER))))), + ^$s( + data = $s.data->map(d| + ^$d( + alias = $d.alias->map(a|^$a(relationalElement = $a.relationalElement->cast(@TdsSelectSqlQuery)->map(x|^$x(columns = $x.columns->concatenate(^Alias(name='"_lhsJoinValid_' + + $a.name, relationalElement=^Literal(value=1))))))), + childrenData = $d.childrenData->cast(@JoinTreeNode)->map(cd|^$cd(joinType = JoinType.RIGHT_OUTER)) + ) + ) + ) + ->map(newS| + ^$newS( + filteringOperation = ^DynaFunction(name = 'not', parameters = ^DynaFunction(name='equal', parameters=[^TableAliasColumn(alias=$newS.data.alias->toOne(),column=^Column(type=^meta::relational::metamodel::datatype::Integer(),name='"_lhsJoinValid_' + + $newS.data.alias.name)),^Literal(value=1)])) + ) + + ) + ] + ) +} + +function meta::relational::functions::sqlQueryToString::default::processJoinType(joinType:JoinType[1], dbConfig : DbConfig[1], format:Format[1], extensions:Extension[*]):String[1] +{ + if($joinType == JoinType.INNER, + | ' ' + $format.separator() + 'inner join ', + | if($joinType == JoinType.LEFT_OUTER, + | ' ' + $format.separator() + 'left outer join ', + | if($joinType == JoinType.RIGHT_OUTER, + | ' ' + $format.separator() + 'right outer join ', + | if($joinType == JoinType.FULL_OUTER, + | ' ' + $format.separator() + 'full outer join ', + | fail('Unsupported join type: ' + $joinType->toString()); ''; + ) + ) + ) + ) +} + function meta::relational::functions::sqlQueryToString::default::processJoinTreeNodeWithLateralJoinDefault(j:JoinTreeNode[1], dbConfig : DbConfig[1], format:Format[1], extensions:Extension[*]):String[1] { - if($j.joinType == JoinType.INNER,| ' ' + $format.separator() + 'inner join ',|if($j.joinType == JoinType.LEFT_OUTER,| ' ' + $format.separator() + 'left outer join ',| ' ' + $format.separator() + 'right outer join ')) + 'lateral ' + $j.joinType->map(jt|$jt->processJoinType($dbConfig, $format, $extensions))->orElse('') + 'lateral ' + $j.alias ->map(a|^$a(name = '"' + $a.name + '"')) //Not sure why this is necessary, but it's retained to keep the generated SQL the same as previously (and does no real harm) ->toOne()->processOperation($dbConfig, $format->indent(), $extensions) + $format.separator() @@ -323,7 +372,7 @@ function meta::relational::functions::sqlQueryToString::default::processJoinTree j:JoinTreeNode[1] | if($j.lateral == true, | $dbConfig.lateralJoinProcessor($j, $dbConfig, $format, $extensions), - | if($j.joinType == JoinType.INNER,| ' ' + $format.separator() + 'inner join ',|if($j.joinType == JoinType.LEFT_OUTER,| ' ' + $format.separator() + 'left outer join ',| ' ' + $format.separator() + 'right outer join ')) + | $j.joinType->map(jt|$jt->processJoinType($dbConfig, $format, $extensions))->orElse('') + $j.alias ->map(a|^$a(name = '"' + $a.name + '"')) //Not sure why this is necessary, but it's retained to keep the generated SQL the same as previously (and does no real harm) 
->toOne()->processOperation($dbConfig, $format->indent(), $extensions) + $format.separator() diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/date.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/date.pure index 130c3fa9196..be12499b2c2 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/date.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/date.pure @@ -102,6 +102,20 @@ function <> meta::relational::tests::dbSpecificTests::sqlQueryTe runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); } +function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::monthName::testMonthName(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='monthName', parameters=[^Literal(value=%2014-12-04T15:22:23)]); + let expected = ^Literal(value='December'); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + +function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::dayOfWeek::testDayOfWeek(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='dayOfWeek', parameters=[^Literal(value=%2014-12-04T15:22:23)]); + let expected = ^Literal(value='Thursday'); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::quarter::testQuarterAsNumber(config:DbTestConfig[1]):Boolean[1] { let dynaFunc = ^DynaFunction(name='quarter', parameters=[^Literal(value=%2014-12-04T15:22:23)]); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/string.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/string.pure index a4b108a63c2..de59320059d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/string.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuite/dynaFunctions/string.pure @@ -166,6 +166,34 @@ function <> meta::relational::tests::dbSpecificTests::sqlQueryTe runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); } +function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::lpad::testLpad(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='lpad', parameters=[^Literal(value='Smith'), ^Literal(value=7)]); + let expected = ^Literal(value=' Smith'); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + 
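+// For reference, the padding semantics exercised by the lpad/rpad tests in this file: pad with spaces
+// (or with the given pad string) up to the requested total length (values mirror the assertions):
+//     lpad('Smith', 7)      -> '  Smith'        lpad('Smith', 7, '0') -> '00Smith'
+//     rpad('Smith', 7)      -> 'Smith  '        rpad('Smith', 7, '0') -> 'Smith00'
+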
+function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::lpad::testLpadWithChar(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='lpad', parameters=[^Literal(value='Smith'), ^Literal(value=7), ^Literal(value='0')]); + let expected = ^Literal(value='00Smith'); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + +function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::rpad::testRpad(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='rpad', parameters=[^Literal(value='Smith'), ^Literal(value=7)]); + let expected = ^Literal(value='Smith '); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + +function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::rpad::testRpadWithChar(config:DbTestConfig[1]):Boolean[1] +{ + let dynaFunc = ^DynaFunction(name='rpad', parameters=[^Literal(value='Smith'), ^Literal(value=7), ^Literal(value='0')]); + let expected = ^Literal(value='Smith00'); + runDynaFunctionDatabaseTest($dynaFunc, $expected, $config); +} + function <> meta::relational::tests::dbSpecificTests::sqlQueryTests::dynaFunctions::toUpper::test1(config:DbTestConfig[1]):Boolean[1] { let dynaFunc = ^DynaFunction(name='toUpper', parameters=[^Literal(value='Smith')]); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuiteCompletenessTests.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuiteCompletenessTests.pure index 4af083c0423..96ab9a7eb0d 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuiteCompletenessTests.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/sqlQueryToString/testSuiteCompletenessTests.pure @@ -7,7 +7,7 @@ function <> meta::relational::tests::testSuite::ensureEveryDynaFnIsTe meta::relational::tests::dbSpecificTests::sqlQueryTests::selectSubClauses::aggregationDynaFns ]; let testedDynaFnNames = $packagesContainingDynaFnTests.children->filter(c| $c->instanceOf(Package)).name; - let testingIgnoredDynaFnNames = ['add', 'averageRank', 'cbrt', 'case', 'convertDate', 'convertDateTime', 'convertVarchar128', 'dayOfWeek', 'decodeBase64', 'denseRank', 'distinct', 'divide', 'encodeBase64', 'exists', 'group', 'if', 'isAlphaNumeric', 'isDistinct', 'isNumeric', 'minus', 'not', 'objectReferenceIn', 'parseDate', 'parseDecimal', 'parseFloat', 'parseInteger', 'parseJson', 'percentile', 'plus', 'rank', 'rowNumber', 'sub', 'times', 'toOne', 'toString', 'extractFromSemiStructured', 'explodeSemiStructured']; + let testingIgnoredDynaFnNames = ['add', 'averageRank', 'cbrt', 'case', 'convertDate', 'convertDateTime', 'convertVarchar128', 'decodeBase64', 'denseRank', 'distinct', 'divide', 'encodeBase64', 'exists', 'group', 'if', 'isAlphaNumeric', 'isDistinct', 'isNumeric', 'minus', 'not', 'objectReferenceIn', 'parseDate', 'parseDecimal', 'parseFloat', 'parseInteger', 'parseJson', 'percentile', 'plus', 'rank', 'rowNumber', 'sub', 'times', 'toOne', 'toString', 'extractFromSemiStructured', 
'explodeSemiStructured']; let incorrectlyMarkedDynaFnNames = $testingIgnoredDynaFnNames->filter(n| $n->in($testedDynaFnNames)); assert($incorrectlyMarkedDynaFnNames->isEmpty(), |'dyna fns ' + $incorrectlyMarkedDynaFnNames->makeString('[', ', ', ']') + ' are incorrectly marked as ignored even though they are tested in sqlQueryToString/testSuite'); let untestedDynaFnNames = $dynaFnNames->filter(d| !$d->in($testedDynaFnNames) && !$d->in($testingIgnoredDynaFnNames)); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tds.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tds.pure index 2a26e2edd82..cd6f0d18110 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tds.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tds.pure @@ -69,11 +69,11 @@ function let rows = if($type == JoinType.INNER || ($leftTdsRows->isNotEmpty() && $rightTdsRows->isNotEmpty()), | $leftTdsRows->map(r1|$rightTdsRows->map(r2|^TDSRow(values=concatenate($r1.values, $r2.values), parent=$res)));, | - if($type == JoinType.LEFT_OUTER, + if(($type == JoinType.LEFT_OUTER) || ($type == JoinType.FULL_OUTER && $rightTdsRows->isEmpty()), | $leftTdsRows->map(r1|^TDSRow(values=concatenate($r1.values, $requiredRightCols->map(c|^TDSNull())), parent=$res));, - | if($type == JoinType.RIGHT_OUTER, + | if(($type == JoinType.RIGHT_OUTER) || ($type == JoinType.FULL_OUTER && $leftTdsRows->isEmpty()), | $rightTdsRows->map(r2|^TDSRow(values=concatenate($requiredLeftCols->map(c|^TDSNull()), $r2.values), parent=$res)), - | fail('Unsupported join type ' + $type->makeString()); []; + | fail('Unsupported join type ' + $type->makeString()); []; ) ) ); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSFilter.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSFilter.pure index 6931d67c444..93aabe30d04 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSFilter.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSFilter.pure @@ -47,6 +47,13 @@ function <> meta::relational::tests::tds::tdsFilter::testFilterOnEnum assertEquals('select "root".NAME as "name", "root".TYPE as "type" from addressTable as "root" where "root".TYPE = 1', $result->sqlRemoveFormatting()); } +function <> meta::relational::tests::tds::tdsFilter::testFilterOnDecimal():Boolean[1] +{ + let result = execute(|Person.all()->project(col(p | 1.0d, 'decimal'))->filter({l|$l.getDecimal('decimal') == 2.0d}), simpleRelationalMapping, testRuntime(), meta::relational::extension::relationalExtensions()); + assertSize($result.values.rows, 0); + assertEquals('select 
1.0 as "decimal" from personTable as "root" where 1.0 = 2.0', $result->sqlRemoveFormatting()); +} + function <> meta::relational::tests::tds::tdsFilter::testFilterTwoExpressions():Boolean[1] { let result = execute(|Person.all()->project([p | $p.firstName, p | $p.lastName], ['first', 'last'])->filter({r | $r.getString('first') == 'John' && $r.getString('last') == 'Johnson'}), simpleRelationalMapping, testRuntime(), meta::relational::extension::relationalExtensions()); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSJoin.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSJoin.pure index e2ee749a5bb..f45e18b7c07 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSJoin.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tds/tests/testTDSJoin.pure @@ -254,6 +254,23 @@ function <> meta::relational::tests::tds::tdsJoin::testRightOuterJoin '5,Roberts,2,Firm C,4', 'TDSNull,TDSNull,TDSNull,Firm A,2', 'TDSNull,TDSNull,TDSNull,Firm X,1'], $result.values.rows->map(r|$r.values->makeString(','))); } +function <> meta::relational::tests::tds::tdsJoin::testFullOuterJoinSimple():Boolean[1] +{ + let query = {| + testJoinTDS_Person.all()->project([#/testJoinTDS_Person/personID!personID#, #/testJoinTDS_Person/lastName!personName#, #/testJoinTDS_Person/employerID!eID#]) + ->join(testJoinTDS_Firm.all()->project([#/testJoinTDS_Firm/legalName!firmName#, #/testJoinTDS_Firm/firmID!fID#]), JoinType.FULL_OUTER, {a,b|($a.getInteger('eID') + 2) == $b.getInteger('fID');}) + ->sort(asc('personID')) + }; + + let result = execute($query, testJoinTDSMapping, testRuntime(), meta::relational::extension::relationalExtensions()); + + assertEquals([Integer , String, Integer, String, Integer], $result.values.columns.type); + assertSize($result.values.rows, 9); + assertSameElements(['1,Smith,1,Firm B,3', '2,Johnson,1,Firm B,3', '3,Hill,1,Firm B,3', '4,Allen,1,Firm B,3', + '5,Roberts,2,Firm C,4', '6,Hill,3,TDSNull,TDSNull', '7,Harris,4,TDSNull,TDSNull', + 'TDSNull,TDSNull,TDSNull,Firm A,2', 'TDSNull,TDSNull,TDSNull,Firm X,1'], $result.values.rows->map(r|$r.values->makeString(','))); +} + function <> meta::relational::tests::tds::tdsJoin::testInnerJoinConditionTrueUsingCol():Boolean[1] { let result = execute(|testJoinTDS_Person.all()->project([col(t|$t.personID,'personID'), col(t|$t.lastName,'personName'), col(t|$t.employerID,'eID')]) diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolation.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolation.pure index 42d7b405c64..eedf74d4616 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolation.pure +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolation.pure @@ -21,13 +21,21 @@ function <> meta::relational::tests::mapping::innerjoin::iso } function <> meta::relational::tests::mapping::innerjoin::isolation::testIsolationOfInnerJoins():Any[*] { - let func = {| Car.all()->filter(c|$c.org.publicAncestor.name != 'GSorg2' - && $c.org.publicAncestor.name->contains('GSorg') - && $c.org.publicAncestor.name->contains('toReturn') + let func = {| Car.all()->filter(c|$c.org.privateAncestor.name != 'GSorg2' + && $c.org.privateAncestor.name->contains('GSorg') + && $c.org.privateAncestor.name->contains('toReturn') )->project([col(c|$c.id, 'Id'), col(c|$c.type, 'type')])}; let result = execute($func,AutoMapping,autoMobileRuntime(), meta::relational::extension::relationalExtensions()); assertEquals([ 3, 'Mercedes3'],$result.values.rows->map(r|$r.values)); assertSize($result.values.rows, 1); assertEquals('select "root".vehicleId as "Id", "root".type as "type" from AutomobileTable as "root" left outer join AutomobileTable as "automobiletable_1" on ("root".vehicleId = "automobiletable_1".vehicleId and "root".orgId < 100) left outer join (select "autoancestor_1".childId as childId, "autoancestor_2".orgName as orgName from AutoAncestor as "autoancestor_1" left outer join (select "autoancestor_2".parentId as parentId, "automobiletable_2".orgName as orgName from AutoAncestor as "autoancestor_2" inner join AutomobileTable as "automobiletable_2" on ("autoancestor_2".parentId = "automobiletable_2".orgId and case when "automobiletable_2".orgtype = \'public\' then \'N\' else \'Y\' end = \'Y\')) as "autoancestor_2" on ("autoancestor_1".parentId = "autoancestor_2".parentId)) as "autoancestor_0" on ("automobiletable_1".orgId = "autoancestor_0".childId) left outer join (select "autoancestor_4".childId as childId, "automobiletable_3".orgName as orgName from AutoAncestor as "autoancestor_4" inner join AutomobileTable as "automobiletable_3" on ("autoancestor_4".parentId = "automobiletable_3".orgId) where case when "automobiletable_3".orgtype = \'public\' then \'N\' else \'Y\' end = \'Y\') as "autoancestor_3" on ("automobiletable_1".orgId = "autoancestor_3".childId) where ((("autoancestor_3".orgName <> \'GSorg2\' OR "autoancestor_3".orgName is null) and ("autoancestor_0".orgName is not null and "autoancestor_3".orgName like \'%GSorg%\')) and ("autoancestor_0".orgName is not null and "autoancestor_3".orgName like \'%toReturn%\'))',$result->sqlRemoveFormatting()); +} +function <> meta::relational::tests::mapping::innerjoin::isolation::testIsolationForFiltersWithoutAliasAndInnerJoins():Any[*] +{ + let func = {| Car.all()->project([col(c|$c.org.privateAncestor.name, 'privateAncestorName'), col(c|$c.org.publicAncestor.name, 'publicAncestorName')])}; + let result = execute($func,MappingWithConstant,autoMobileRuntime(), meta::relational::extension::relationalExtensions()); + let tds = $result.values->at(0); + assertEquals(['GSorg4, TDSNull', 'GSorg3, TDSNull', 'GSorgtoReturn, TDSNull', 'TDSNull, TDSNull'], $tds.rows->map(r|$r.values->makeString(', '))); + assertEquals('select "autoancestor_0".orgName as "privateAncestorName", "autoancestor_2".orgName as "publicAncestorName" from AutomobileTable as "root" left outer join AutomobileTable as "automobiletable_1" on ("root".vehicleId = "automobiletable_1".vehicleId and "root".orgId < 100) left outer join (select "autoancestor_1".childId as 
childId, "automobiletable_2".orgName as orgName from AutoAncestor as "autoancestor_1" inner join AutomobileTable as "automobiletable_2" on ("autoancestor_1".parentId = "automobiletable_2".orgId) where \'Y\' = \'Y\') as "autoancestor_0" on ("automobiletable_1".orgId = "autoancestor_0".childId) left outer join (select "autoancestor_1".childId as childId, "automobiletable_2".orgName as orgName from AutoAncestor as "autoancestor_1" inner join AutomobileTable as "automobiletable_2" on ("autoancestor_1".parentId = "automobiletable_2".orgId) where \'Y\' = \'N\') as "autoancestor_2" on ("automobiletable_1".orgId = "autoancestor_2".childId)',$result->sqlRemoveFormatting()); } \ No newline at end of file diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolationSetUp.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolationSetUp.pure index cb1c8d6ba7e..55a270578c8 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolationSetUp.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/innerJoin/testIsolationSetUp.pure @@ -40,12 +40,17 @@ Class meta::relational::tests::mapping::innerjoin::isolation::AutomobileUnion isPrivate: String[0..1]; ancestors: AutomobileUnion[0..*]; children: AutomobileUnion[0..*]; - publicAncestor() + privateAncestor() { $this.ancestors->filter(a|$a.isPrivate =='Y')->toOne() }:AutomobileUnion[1]; - publicAncestor2() + publicAncestor() + { + $this.ancestors->filter(a|$a.isPrivate =='N')->toOne() + }:AutomobileUnion[1]; + + privateAncestor2() { $this.ancestors->filter(a|$a.isPrivate =='Y').name=='haha' || ($this.ancestors->filter(a|$a.isPrivate=='Y').name=='ahah') }: Boolean[1]; @@ -63,8 +68,8 @@ function meta::relational::tests::mapping::innerjoin::isolation::initAutomobileD executeInDb('Create Table AutomobileTable (vehicleId INT, type VARCHAR(20),orgId INT, orgName VARCHAR(40),orgtype VARCHAR(40));', $connection); executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (1, \'Mercedes1\',17,\'GSorgtoReturn\' ,\'private\');', $connection); executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (2, \'Mercedes2\',18,\'GSorgtoReturn2\' ,\'private\');', $connection); - executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (3, \'Mercedes3\',19,\'GSorg3\' ,\'public3\');', $connection); - executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (4, \'Mercedes4\',20,\'GSorg4\' ,\'public4\');', $connection); + executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (3, \'Mercedes3\',19,\'GSorg3\' ,\'public\');', $connection); + executeInDb('insert into AutomobileTable (vehicleId, type, orgId,orgName,orgType) values (4, \'Mercedes4\',20,\'GSorg4\' ,\'public\');', $connection); executeInDb('Drop table if exists AutoAncestor;', $connection); executeInDb('Create Table AutoAncestor (childId INT ,parentId INT);',$connection); @@ -118,6 +123,36 @@ Mapping 
meta::relational::tests::mapping::innerjoin::isolation::AutoMapping ) } +) + +###Mapping +import meta::relational::tests::mapping::innerjoin::isolation::*; +Mapping meta::relational::tests::mapping::innerjoin::isolation::MappingWithConstant +( + Car :Relational + { + scope([AutomobileDb]) + ( + type: AutomobileTable.type, + id: AutomobileTable.vehicleId, + org( + name: AutomobileTable.orgName, + isPrivate: 'Y', + ancestors: @ Auto_Auto > @Auto_Ancestor > (INNER)@AncestorAuto + ) + ) + } + + AutomobileUnion:Relational + { + scope([AutomobileDb]) + ( + name: AutomobileTable.orgName, + isPrivate: 'Y', + ancestors: @Auto_Ancestor > (INNER)@AncestorAuto + ) + } + ) ###Relational Database meta::relational::tests::mapping::innerjoin::isolation::AutomobileDb diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/join/advancedRelationalSetUp.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/join/advancedRelationalSetUp.pure index 11dd534456a..67c193dc1b0 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/join/advancedRelationalSetUp.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/mapping/join/advancedRelationalSetUp.pure @@ -385,6 +385,68 @@ Mapping meta::relational::tests::mapping::join::model::mapping::JoinSchemaBWithS } ) +###Mapping +import meta::relational::tests::model::simple::*; +import meta::relational::tests::*; +Mapping meta::relational::tests::mapping::join::model::mapping::MappingWithLiteral +( + Person : Relational + { + scope([dbInc]) + ( + firstName : personTable.FIRSTNAME, + age : personTable.AGE + ), + scope([dbInc]default.personTable) + ( + lastName : 'Smith' + ), + firm : [dbInc]@Firm_Person, + address : [dbInc]@Address_Person, + locations : [dbInc]@Person_Location, + manager : [dbInc]@Person_Manager + } + + Firm : Relational + { + ~mainTable [dbInc] firmTable + legalName : [dbInc]firmTable.LEGALNAME, + employees : [dbInc]@Firm_Person, + address : [dbInc]@Address_Firm + } + + Address : Relational + { + name : [dbInc]addressTable.NAME, + street : [dbInc]addressTable.STREET, + comments : [dbInc]addressTable.COMMENTS + } + + Product : Relational + { + name : [db]productSchema.productTable.NAME, + synonyms : [db]@Product_Synonym + } + + ProductSynonymType: EnumerationMapping SynonymEnum + { + CUSIP: 'CUSIP', + ISIN: 'ISIN' + } + + Synonym : Relational + { + name : [db]productSchema.synonymTable.NAME, + type : EnumerationMapping SynonymEnum: 'CUSIP' + } + + Trade : Relational + { + id : [db]tradeTable.ID, + product : [db]@Trade_Product + } +) + ###Pure import meta::relational::tests::mapping::join::model::store::*; import meta::pure::profiles::*; @@ -494,4 +556,4 @@ function meta::relational::tests::mapping::join::model::store::createTablesAndFi executeInDb('insert into schemaB.PersonTable (id, firstName, LastName) values (4, \'Anthonye\', \'Anthony B\');', $connection); executeInDb('insert into schemaB.PersonTable (id, firstName, LastName) values (5, \'Oliver\', \'Oliver B\');', $connection); true; -} +} \ No newline at end of file diff --git 
a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/testRelationalExtension.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/testRelationalExtension.pure index e3c3e81bbab..b8cdc73fc60 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/testRelationalExtension.pure +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/tests/testRelationalExtension.pure @@ -12,10 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -import meta::core::runtime::*; import meta::relational::metamodel::execute::tests::*; import meta::pure::alloy::connections::*; -import meta::external::store::relational::runtime::*; import meta::pure::runtime::*; import meta::core::runtime::*; import meta::relational::translation::*; @@ -28,6 +26,109 @@ import meta::relational::metamodel::execute::*; import meta::relational::metamodel::*; import meta::pure::mapping::*; +function meta::relational::metamodel::execute::tests::runRelationalRouterExtensionConnectionEquality(c1: RelationalDatabaseConnection[1], + c2: RelationalDatabaseConnection[1]) : Boolean[1] +{ + let extensions = meta::relational::extension::relationalExtensions().routerExtensions(); + $c1->match( + $extensions.connectionEquality->map(e | $e->eval($c2))->concatenate([ + a:Connection[1] | true + ])->toOneMany()); +} + +function <> meta::relational::metamodel::execute::tests::testConnectionEqualityAllSameStatic() : Boolean[1] +{ + let c1 = ^RelationalDatabaseConnection( + type = DatabaseType.SybaseIQ, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification(host='host', port=8080, databaseName='db'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + let c2 = ^RelationalDatabaseConnection( + type = DatabaseType.SybaseIQ, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification(host='host', port=8080, databaseName='db'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + assert(runRelationalRouterExtensionConnectionEquality($c1, $c2)); + +} + +function <> meta::relational::metamodel::execute::tests::testConnectionEqualityAllButOnePropertySame() : Boolean[1] +{ + let c1 = ^RelationalDatabaseConnection( + type = DatabaseType.Snowflake, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification(host='host', port=8090, databaseName='db'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + let c2 = ^RelationalDatabaseConnection( + type = DatabaseType.Snowflake, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification(host='host', port=8080, databaseName='db'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token') + ); 
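+    // Note: c2 differs from c1 in both the port (8080 vs 8090) and the authentication strategy
+    // (ApiToken vs DefaultH2), so the equality check below is expected to report not-equal.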
+ + assert(!runRelationalRouterExtensionConnectionEquality($c1, $c2)); + +} + +function <> meta::relational::metamodel::execute::tests::testConnectionEqualityTypeDiff() : Boolean[1] +{ + let c1 = ^RelationalDatabaseConnection( + type = DatabaseType.H2, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification(), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + let c2 = ^RelationalDatabaseConnection( + type = DatabaseType.Snowflake, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::StaticDatasourceSpecification(host='host', port=8080, databaseName='db'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token') + ); + + + assert(!runRelationalRouterExtensionConnectionEquality($c1, $c2)); + +} + + +function <> meta::relational::metamodel::execute::tests::testConnectionEqualityTypeSameSpecDiff() : Boolean[1] +{ + let c1 = ^RelationalDatabaseConnection( + type = DatabaseType.H2, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification(), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + let c2 = ^RelationalDatabaseConnection( + type = DatabaseType.H2, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification(testDataSetupCsv='something'), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + + assert(!runRelationalRouterExtensionConnectionEquality($c1, $c2)); + +} + +function <> meta::relational::metamodel::execute::tests::testConnectionEqualityTypeSpecSameAuthDiff() : Boolean[1] +{ + let c1 = ^RelationalDatabaseConnection( + type = DatabaseType.H2, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification(), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::DefaultH2AuthenticationStrategy() + ); + + let c2 = ^RelationalDatabaseConnection( + type = DatabaseType.H2, + datasourceSpecification = ^meta::pure::alloy::connections::alloy::specification::LocalH2DatasourceSpecification(), + authenticationStrategy = ^meta::pure::alloy::connections::alloy::authentication::ApiTokenAuthenticationStrategy(apiToken='token') + ); + + + assert(!runRelationalRouterExtensionConnectionEquality($c1, $c2)); + +} function <> meta::relational::metamodel::execute::tests::testExecuteInDbToTDS() : Boolean[1] { diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/transform/fromPure/tests/testToSQLString.pure b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/transform/fromPure/tests/testToSQLString.pure index 206c5d6db81..ce101f8b7fb 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/transform/fromPure/tests/testToSQLString.pure +++ 
b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/legend-engine-xt-relationalStore-pure/src/main/resources/core_relational/relational/transform/fromPure/tests/testToSQLString.pure @@ -402,6 +402,35 @@ function <> meta::relational::tests::functions::sqlstring::testTrim() )->distinct() == [true]; } +function <> meta::relational::tests::functions::sqlstring::testPad():Boolean[1] +{ + let common = 'select lpad("root".FIRSTNAME, 1) as "lpad", lpad("root".FIRSTNAME, 1, \'0\') as "lpad2", rpad("root".FIRSTNAME, 1) as "rpad", rpad("root".FIRSTNAME, 1, \'0\') as "rpad2" from personTable as "root"'; + + let expected = [ + pair(DatabaseType.DB2, $common), + pair(DatabaseType.H2, $common), + pair(DatabaseType.Composite, $common) + ]; + + $expected->map(p| + let driver = $p.first; + let expectedSql = $p.second; + + let result = toSQLString( + |Person.all()->project([ + a | $a.firstName->lpad(1), + a | $a.firstName->lpad(1, '0'), + a | $a.firstName->rpad(1), + a | $a.firstName->rpad(1, '0') + ], + ['lpad', 'lpad2', 'rpad', 'rpad2']), + simpleRelationalMapping, + $driver, meta::relational::extension::relationalExtensions()); + + assertEquals($expectedSql, $result, '\nSQL not as expected for %s\n\nexpected: %s\nactual: %s', [$driver, $expectedSql, $result]); + )->distinct() == [true]; +} + function <> meta::relational::tests::functions::sqlstring::testCbrt():Boolean[1] { let common = 'select cbrt("root".quantity) as "cbrt" from tradeTable as "root"'; @@ -572,6 +601,26 @@ function <> meta::relational::tests::functions::sqlstring::testToSqlG )->distinct() == [true]; } +function <> meta::relational::tests::functions::sqlstring::testToSqlGenerationMonth():Boolean[1] +{ + let expected = [ + pair(DatabaseType.H2, 'select monthname("root".tradeDate) as "month" from tradeTable as "root"') + ]; + + $expected->map(p| + let driver = $p.first; + let expectedSql = $p.second; + + let result = toSQLString( + |Trade.all() + ->project(col(t|$t.date->month(), 'month')), + simpleRelationalMapping, + $driver, meta::relational::extension::relationalExtensions()); + + assertEquals($expectedSql, $result, '\nSQL not as expected for %s\n\nexpected: %s\nactual: %s', [$driver, $expectedSql, $result]); + )->distinct() == [true]; +} + function <> meta::relational::tests::functions::sqlstring::testToSQLStringWithRepeatString():Boolean[1] { let sql = toSQLString(|Person.all()->project(p| repeatString('a', 2), 'repeat'), simpleRelationalMapping, DatabaseType.H2, meta::relational::extension::relationalExtensions()); diff --git a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/pom.xml b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/pom.xml index afb2ae401d0..8c00fb62853 100644 --- a/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/pom.xml +++ b/legend-engine-xts-relationalStore/legend-engine-xt-relationalStore-generation/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-relationalStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalStore/pom.xml b/legend-engine-xts-relationalStore/pom.xml index 717b5eb81b1..896de283376 100644 --- a/legend-engine-xts-relationalStore/pom.xml +++ b/legend-engine-xts-relationalStore/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalai/legend-engine-xt-relationalai-pure/pom.xml 
b/legend-engine-xts-relationalai/legend-engine-xt-relationalai-pure/pom.xml index 5810fc8444d..e9b198a0fc7 100644 --- a/legend-engine-xts-relationalai/legend-engine-xt-relationalai-pure/pom.xml +++ b/legend-engine-xts-relationalai/legend-engine-xt-relationalai-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-relationalai - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-relationalai/pom.xml b/legend-engine-xts-relationalai/pom.xml index 6fb2ce4665d..16fbccff784 100644 --- a/legend-engine-xts-relationalai/pom.xml +++ b/legend-engine-xts-relationalai/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-rosetta/legend-engine-xt-rosetta-pure/pom.xml b/legend-engine-xts-rosetta/legend-engine-xt-rosetta-pure/pom.xml index 6ddbd23f1ce..f6eee505bfc 100644 --- a/legend-engine-xts-rosetta/legend-engine-xt-rosetta-pure/pom.xml +++ b/legend-engine-xts-rosetta/legend-engine-xt-rosetta-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-rosetta - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-rosetta/legend-engine-xt-rosetta/pom.xml b/legend-engine-xts-rosetta/legend-engine-xt-rosetta/pom.xml index f1c2e39d2d5..bd7c3622b67 100644 --- a/legend-engine-xts-rosetta/legend-engine-xt-rosetta/pom.xml +++ b/legend-engine-xts-rosetta/legend-engine-xt-rosetta/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-rosetta - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-rosetta/pom.xml b/legend-engine-xts-rosetta/pom.xml index 04301fa5f9d..1ff6b2f15e9 100644 --- a/legend-engine-xts-rosetta/pom.xml +++ b/legend-engine-xts-rosetta/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-execution/pom.xml b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-execution/pom.xml index f241ee59173..e1cc8832154 100644 --- a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-execution/pom.xml +++ b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-execution/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-dsl-service-execution diff --git a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-generation/pom.xml b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-generation/pom.xml index c96cdee527e..2e9b8e31414 100644 --- a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-generation/pom.xml +++ b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-generation/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-pure/pom.xml b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-pure/pom.xml index 8ccff22469a..8dc9f3233d4 100644 --- a/legend-engine-xts-service/legend-engine-language-pure-dsl-service-pure/pom.xml +++ b/legend-engine-xts-service/legend-engine-language-pure-dsl-service-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-service/legend-engine-language-pure-dsl-service/pom.xml 
b/legend-engine-xts-service/legend-engine-language-pure-dsl-service/pom.xml index e1d12871634..169d289d2bc 100644 --- a/legend-engine-xts-service/legend-engine-language-pure-dsl-service/pom.xml +++ b/legend-engine-xts-service/legend-engine-language-pure-dsl-service/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-language-pure-dsl-service diff --git a/legend-engine-xts-service/legend-engine-language-pure-dsl-service/src/main/java/org/finos/legend/engine/language/pure/dsl/service/compiler/toPureGraph/HelperServiceBuilder.java b/legend-engine-xts-service/legend-engine-language-pure-dsl-service/src/main/java/org/finos/legend/engine/language/pure/dsl/service/compiler/toPureGraph/HelperServiceBuilder.java index 260d2416306..8ab30fa39f8 100644 --- a/legend-engine-xts-service/legend-engine-language-pure-dsl-service/src/main/java/org/finos/legend/engine/language/pure/dsl/service/compiler/toPureGraph/HelperServiceBuilder.java +++ b/legend-engine-xts-service/legend-engine-language-pure-dsl-service/src/main/java/org/finos/legend/engine/language/pure/dsl/service/compiler/toPureGraph/HelperServiceBuilder.java @@ -97,7 +97,7 @@ public static Root_meta_legend_service_metamodel_Execution processServiceExecuti { lambda = HelperValueSpecificationBuilder.buildLambda(pureSingleExecution.func, context); } - return new Root_meta_legend_service_metamodel_PureSingleExecution_Impl("", null, context.pureModel.getClass("meta::legend::service::metamodel::PureMultiExecution")) + return new Root_meta_legend_service_metamodel_PureSingleExecution_Impl("", null, context.pureModel.getClass("meta::legend::service::metamodel::PureSingleExecution")) ._func(lambda) ._mapping(mapping) ._runtime(runtime); diff --git a/legend-engine-xts-service/legend-engine-service-post-validation-runner/pom.xml b/legend-engine-xts-service/legend-engine-service-post-validation-runner/pom.xml index a3ca2dd87f4..69e4f2b93bd 100644 --- a/legend-engine-xts-service/legend-engine-service-post-validation-runner/pom.xml +++ b/legend-engine-xts-service/legend-engine-service-post-validation-runner/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-service/legend-engine-services-model-api/pom.xml b/legend-engine-xts-service/legend-engine-services-model-api/pom.xml index 9e1e310814f..cb08c300f61 100644 --- a/legend-engine-xts-service/legend-engine-services-model-api/pom.xml +++ b/legend-engine-xts-service/legend-engine-services-model-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-services-model-api diff --git a/legend-engine-xts-service/legend-engine-services-model/pom.xml b/legend-engine-xts-service/legend-engine-services-model/pom.xml index be3873faca2..69d18da1973 100644 --- a/legend-engine-xts-service/legend-engine-services-model/pom.xml +++ b/legend-engine-xts-service/legend-engine-services-model/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 legend-engine-services-model diff --git a/legend-engine-xts-service/legend-engine-test-runner-service/pom.xml b/legend-engine-xts-service/legend-engine-test-runner-service/pom.xml index a9dbf9de247..c13292482f5 100644 --- a/legend-engine-xts-service/legend-engine-test-runner-service/pom.xml +++ b/legend-engine-xts-service/legend-engine-test-runner-service/pom.xml @@ -19,7 +19,7 
@@ org.finos.legend.engine legend-engine-xts-service - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-service/pom.xml b/legend-engine-xts-service/pom.xml index 1f3dbdf3bc2..534a101228e 100644 --- a/legend-engine-xts-service/pom.xml +++ b/legend-engine-xts-service/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-executionPlan/pom.xml b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-executionPlan/pom.xml index ceda5491fff..16028670fa1 100644 --- a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-executionPlan/pom.xml +++ b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-executionPlan/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-serviceStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-grammar/pom.xml b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-grammar/pom.xml index c85b9726cd5..280bfdcd434 100644 --- a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-grammar/pom.xml +++ b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-serviceStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-javaPlatformBinding-pure/pom.xml index d679c93eeee..3ee5a8ed416 100644 --- a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-javaPlatformBinding-pure/pom.xml +++ b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-javaPlatformBinding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-serviceStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-protocol/pom.xml b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-protocol/pom.xml index 67b7c08f389..7b335d67af0 100644 --- a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-protocol/pom.xml +++ b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-serviceStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-pure/pom.xml b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-pure/pom.xml index a3cb3d91f2c..4a8eb240701 100644 --- a/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-pure/pom.xml +++ b/legend-engine-xts-serviceStore/legend-engine-xt-serviceStore-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-serviceStore - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-serviceStore/pom.xml b/legend-engine-xts-serviceStore/pom.xml index 0e3742c8240..61889d9cf01 100644 --- a/legend-engine-xts-serviceStore/pom.xml +++ b/legend-engine-xts-serviceStore/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/pom.xml index afd677ee962..0194f80d44f 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/pom.xml +++ 
b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-snowflakeApp - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -29,10 +29,6 @@ - - org.finos.legend.pure - legend-pure-m3-core - @@ -62,27 +58,27 @@ org.finos.legend.engine - legend-engine-executionPlan-generation + legend-engine-shared-core org.finos.legend.engine - legend-engine-shared-core + legend-engine-xt-functionActivator-pure org.finos.legend.engine - legend-engine-xt-functionActivator-pure + legend-engine-xt-functionActivator-deployment org.finos.legend.engine - legend-engine-protocol-pure + legend-engine-xt-functionActivator-protocol org.finos.legend.engine - legend-engine-xt-functionActivator-api + legend-engine-protocol-pure org.finos.legend.engine - legend-engine-xt-relationalStore-pure + legend-engine-xt-functionActivator-api org.finos.legend.engine @@ -92,7 +88,6 @@ org.finos.legend.engine legend-engine-executionPlan-execution - org.finos.legend.engine legend-engine-xt-snowflakeApp-pure @@ -103,13 +98,7 @@ org.finos.legend.engine - legend-engine-xt-snowflakeApp-compiler - runtime - - - org.finos.legend.engine - legend-engine-xt-snowflakeApp-grammar - runtime + legend-engine-xt-snowflakeApp-generator org.finos.legend.engine @@ -129,10 +118,6 @@ com.fasterxml.jackson.core jackson-databind - - org.pac4j - pac4j-core - org.eclipse.collections @@ -188,10 +173,6 @@ jersey-common test - - org.finos.legend.engine - legend-engine-language-pure-dsl-generation - diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/api/SnowflakeAppService.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/api/SnowflakeAppService.java index e061f149faf..eb47cf58e50 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/api/SnowflakeAppService.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/api/SnowflakeAppService.java @@ -14,64 +14,57 @@ package org.finos.legend.engine.language.snowflakeApp.api; +import com.fasterxml.jackson.databind.jsontype.NamedType; import org.eclipse.collections.api.RichIterable; import org.eclipse.collections.api.block.function.Function; import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.impl.factory.Lists; import org.eclipse.collections.impl.list.mutable.FastList; import org.finos.legend.engine.functionActivator.api.output.FunctionActivatorInfo; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; import org.finos.legend.engine.functionActivator.service.FunctionActivatorError; import org.finos.legend.engine.functionActivator.service.FunctionActivatorService; import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; -import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppArtifact; -import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppContent; -import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration; -import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppGenerator; -import 
org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; -import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentStage; -import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeDeploymentConfiguration; -import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeDeploymentManager; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact; +import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppDeploymentManager; import org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeDeploymentResult; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppContent; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration; +import org.finos.legend.engine.language.snowflakeApp.generator.SnowflakeAppGenerator; +import org.finos.legend.engine.plan.execution.PlanExecutor; import org.finos.legend.engine.plan.execution.stores.relational.config.TemporaryTestDbConfiguration; import org.finos.legend.engine.plan.execution.stores.relational.connection.manager.ConnectionManagerSelector; -import org.finos.legend.engine.plan.generation.PlanGenerator; -import org.finos.legend.engine.plan.platform.PlanPlatform; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppProtocolExtension; +import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_external_function_activator_FunctionActivator; import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration; -import org.finos.legend.pure.generated.Root_meta_external_store_relational_runtime_RelationalDatabaseConnection; -import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy; -import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification; -import org.finos.legend.pure.generated.Root_meta_pure_executionPlan_ExecutionNode; -import org.finos.legend.pure.generated.Root_meta_pure_executionPlan_ExecutionPlan; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; -import org.finos.legend.pure.generated.Root_meta_relational_mapping_SQLExecutionNode; -import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.FunctionDefinition; -import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction; -import org.pac4j.core.profile.CommonProfile; import java.util.List; public class SnowflakeAppService implements FunctionActivatorService { private ConnectionManagerSelector connectionManager; - private SnowflakeDeploymentManager snowflakeDeploymentManager; + private SnowflakeAppDeploymentManager snowflakeDeploymentManager; public SnowflakeAppService() { TemporaryTestDbConfiguration conf = new TemporaryTestDbConfiguration(); conf.port = Integer.parseInt(System.getProperty("h2ServerPort", "1234")); this.connectionManager = new ConnectionManagerSelector(conf, FastList.newList()); - this.snowflakeDeploymentManager = new SnowflakeDeploymentManager(new SnowflakeAppDeploymentTool(connectionManager)); + this.snowflakeDeploymentManager = new SnowflakeAppDeploymentManager(new 
SnowflakeAppDeploymentTool(connectionManager)); } public SnowflakeAppService(ConnectionManagerSelector connectionManager) { this.connectionManager = connectionManager; - this.snowflakeDeploymentManager = new SnowflakeDeploymentManager(new SnowflakeAppDeploymentTool(connectionManager)); + this.snowflakeDeploymentManager = new SnowflakeAppDeploymentManager(new SnowflakeAppDeploymentTool(connectionManager)); + } + + public SnowflakeAppService(PlanExecutor executor) + { + this.snowflakeDeploymentManager = new SnowflakeAppDeploymentManager(executor); } @Override @@ -92,27 +85,37 @@ public boolean supports(Root_meta_external_function_activator_FunctionActivator } @Override - public MutableList validate(MutableList profiles, PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, Function> routerExtensions) + public MutableList validate(Identity identity, PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, Function> routerExtensions) + { + SnowflakeAppArtifact artifact = SnowflakeAppGenerator.generateArtifact(pureModel, activator, inputModel, routerExtensions); + return validate(artifact); + } + + public MutableList validate(SnowflakeAppArtifact artifact) { - SnowflakeAppArtifact artifact = SnowflakeAppGenerator.generateArtifact(pureModel, activator, routerExtensions); int size = ((SnowflakeAppContent)artifact.content).sqlExpressions.size(); return size != 1 ? Lists.mutable.with(new SnowflakeAppError("SnowflakeApp can't be used with a plan containing '" + size + "' SQL expressions", ((SnowflakeAppContent)artifact.content).sqlExpressions)) : Lists.mutable.empty(); - } @Override - public SnowflakeDeploymentResult publishToSandbox(MutableList profiles, PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, List runtimeConfigurations, Function> routerExtensions) + public SnowflakeDeploymentResult publishToSandbox(Identity identity, PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, List runtimeConfigurations, Function> routerExtensions) { - SnowflakeAppArtifact artifact = SnowflakeAppGenerator.generateArtifact(pureModel, activator, routerExtensions); - return this.snowflakeDeploymentManager.deploy(profiles, artifact, runtimeConfigurations); + SnowflakeAppArtifact artifact = SnowflakeAppGenerator.generateArtifact(pureModel, activator, inputModel, routerExtensions); + MutableList validationError = validate(artifact); + if (validationError.isEmpty()) + { + return this.snowflakeDeploymentManager.deploy(identity, artifact, runtimeConfigurations); + } + return new SnowflakeDeploymentResult(validationError.collect(v -> v.message)); + } @Override public SnowflakeAppArtifact renderArtifact(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, String clientVersion, Function> routerExtensions) { - return SnowflakeAppGenerator.generateArtifact(pureModel, activator, routerExtensions); + return SnowflakeAppGenerator.generateArtifact(pureModel, activator, inputModel, routerExtensions); } @Override diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentManager.java 
b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentManager.java similarity index 59% rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentManager.java rename to legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentManager.java index 5cd7369a2f4..b003814bd66 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentManager.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentManager.java @@ -19,19 +19,20 @@ import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.impl.factory.Lists; import org.finos.legend.engine.functionActivator.deployment.DeploymentManager; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; import org.finos.legend.engine.language.snowflakeApp.api.SnowflakeAppDeploymentTool; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppContent; import org.finos.legend.engine.plan.execution.PlanExecutor; -import org.finos.legend.engine.plan.execution.stores.relational.RelationalExecutor; import org.finos.legend.engine.plan.execution.stores.relational.connection.manager.ConnectionManagerSelector; +import org.finos.legend.engine.plan.execution.stores.relational.plugin.RelationalStoreExecutor; import org.finos.legend.engine.plan.execution.stores.relational.plugin.RelationalStoreState; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.DatabaseConnection; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection; -import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeDeploymentConfiguration; import org.finos.legend.engine.shared.core.identity.Identity; import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy; import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification; -import org.pac4j.core.profile.CommonProfile; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,23 +44,25 @@ import java.util.Map; -public class SnowflakeDeploymentManager implements DeploymentManager +public class SnowflakeAppDeploymentManager implements DeploymentManager { - private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDeploymentManager.class); + private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeAppDeploymentManager.class); private SnowflakeAppDeploymentTool snowflakeAppDeploymentTool; private PlanExecutor planExecutor; private ConnectionManagerSelector connectionManager; + private static final String deploymentSchema = "LEGEND_NATIVE_APPS"; + private static final String 
deploymentTable = "APP_METADATA"; - public SnowflakeDeploymentManager(SnowflakeAppDeploymentTool deploymentTool) + public SnowflakeAppDeploymentManager(SnowflakeAppDeploymentTool deploymentTool) { this.snowflakeAppDeploymentTool = deploymentTool; } - public SnowflakeDeploymentManager(PlanExecutor planExecutor) + public SnowflakeAppDeploymentManager(PlanExecutor planExecutor) { this.planExecutor = planExecutor; - connectionManager = ((RelationalStoreState)planExecutor.getExtraExecutors().select(c -> c instanceof RelationalExecutor).getFirst().getStoreState()).getRelationalExecutor().getConnectionManager(); + connectionManager = ((RelationalStoreState)planExecutor.getExtraExecutors().select(c -> c instanceof RelationalStoreExecutor).getFirst().getStoreState()).getRelationalExecutor().getConnectionManager(); } @Override @@ -69,21 +72,22 @@ public boolean canDeploy(FunctionActivatorArtifact artifact) } @Override - public SnowflakeDeploymentResult deploy(MutableList profiles, SnowflakeAppArtifact artifact) + public SnowflakeDeploymentResult deploy(Identity identity, SnowflakeAppArtifact artifact) { - return new SnowflakeDeploymentResult("",true); + return deploy(identity, artifact, Lists.mutable.empty()); } @Override - public SnowflakeDeploymentResult deploy(MutableList profiles, SnowflakeAppArtifact artifact, List availableRuntimeConfigurations) + public SnowflakeDeploymentResult deploy(Identity identity, SnowflakeAppArtifact artifact, List availableRuntimeConfigurations) { LOGGER.info("Starting deployment"); SnowflakeDeploymentResult result; - try (Connection jdbcConnection = this.getDeploymentConnection(profiles, artifact)) + //use the system connection if available (as would be the case in sandbox flow) , else use artifact connection (production flow) + try (Connection jdbcConnection = availableRuntimeConfigurations.isEmpty() ? 
this.getDeploymentConnection(identity, artifact) : this.getDeploymentConnection(identity, availableRuntimeConfigurations.get(0).connection)) { String appName = ((SnowflakeAppContent)artifact.content).applicationName; jdbcConnection.setAutoCommit(false); - this.deployImpl(jdbcConnection, appName); + this.deployImpl(jdbcConnection, (SnowflakeAppContent)artifact.content); jdbcConnection.commit(); LOGGER.info("Completed deployment successfully"); result = new SnowflakeDeploymentResult(appName, true); @@ -116,32 +120,50 @@ public SnowflakeDeploymentResult fakeDeploy(Root_meta_pure_alloy_connections_all } } - public java.sql.Connection getDeploymentConnection(MutableList profiles, RelationalDatabaseConnection connection) + public Connection getDeploymentConnection(Identity identity, RelationalDatabaseConnection connection) { - return this.connectionManager.getDatabaseConnection(profiles, (DatabaseConnection) connection); + return this.connectionManager.getDatabaseConnection(identity, (DatabaseConnection) connection); } - public void deployImpl(Connection jdbcConnection, String context) throws Exception + public void deployImpl(Connection jdbcConnection, SnowflakeAppContent context) throws SQLException { - Statement statement = jdbcConnection.createStatement(); - String deploymentTableName = this.getDeploymentTableName(jdbcConnection); - String createTableSQL = String.format("create table %s (id INTEGER, message VARCHAR(1000)) if not exists", deploymentTableName); - boolean createTableStatus = statement.execute(createTableSQL); - String insertSQL = String.format("insert into %s(id, message) values(%d, '%s')", deploymentTableName, System.currentTimeMillis(), context); - boolean insertStatus = statement.execute(insertSQL); + String catalogName = jdbcConnection.getCatalog(); + MutableList statements = generateStatements(catalogName, context); + for (String s: statements) + { + Statement statement = jdbcConnection.createStatement(); + statement.execute(s); + } + } + + public MutableList generateStatements(String catalogName, SnowflakeAppContent content) + { + MutableList statements = Lists.mutable.empty(); + if (content.type.equals("STAGE")) + { + String deploymentTableName = String.format("%s.%s." + deploymentTable, catalogName, deploymentSchema); + statements.add(String.format("insert into %s(CREATE_DATETIME, APP_NAME, SQL_FRAGMENT, VERSION_NUMBER, OWNER, DESCRIPTION) values('%s', '%s', '%s', '%s', '%s', '%s');", + deploymentTableName, content.creationTime, content.applicationName, content.sqlExpressions.getFirst(), content.getVersionInfo(), Lists.mutable.withAll(content.owners).makeString(","), content.description)); + + } + else + { + statements.add(String.format("CREATE OR REPLACE FUNCTION %S.%S.%s() RETURNS TABLE (%s) as $$ %s $$;", catalogName, deploymentSchema, content.applicationName, content.functionArguments, content.sqlExpressions.getFirst(), content.description)); + statements.add(String.format("CREATE OR REPLACE SECURE FUNCTION %S.%S.%s() RETURNS TABLE (%s) as $$ %s $$;", catalogName, deploymentSchema, content.applicationName, content.functionArguments, content.sqlExpressions.getFirst(), content.description)); + } + return statements; } public String getDeploymentTableName(Connection jdbcConnection) throws SQLException { String catalogName = jdbcConnection.getCatalog(); - String schema = "NATIVE_APP"; - return String.format("%s.%s.LEGEND_SNOWFLAKE_APP_DEPLOYMENT", catalogName, schema); + return String.format("%s.%s." 
+ deploymentTable, catalogName, deploymentSchema); } - public java.sql.Connection getDeploymentConnection(MutableList profiles, SnowflakeAppArtifact artifact) + public Connection getDeploymentConnection(Identity identity, SnowflakeAppArtifact artifact) { RelationalDatabaseConnection connection = extractConnectionFromArtifact(artifact); - return this.connectionManager.getDatabaseConnection(profiles, connection); + return this.connectionManager.getDatabaseConnection(identity, connection); } public RelationalDatabaseConnection extractConnectionFromArtifact(SnowflakeAppArtifact artifact) @@ -149,12 +171,12 @@ public RelationalDatabaseConnection extractConnectionFromArtifact(SnowflakeAppAr return ((SnowflakeAppDeploymentConfiguration)artifact.deploymentConfiguration).connection; } - public ImmutableList getDeployed(MutableList profiles, RelationalDatabaseConnection connection) throws Exception + public ImmutableList getDeployed(Identity identity, RelationalDatabaseConnection connection) throws Exception { ImmutableList deployments = null; LOGGER.info("Querying deployment"); - try (Connection jdbcConnection = this.getDeploymentConnection(profiles, connection)) + try (Connection jdbcConnection = this.getDeploymentConnection(identity, connection)) { deployments = this.getDeployedImpl(jdbcConnection); LOGGER.info("Completed querying deployments successfully"); diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppGenerator.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppGenerator.java deleted file mode 100644 index ee9d38e94a7..00000000000 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppGenerator.java +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2023 Goldman Sachs -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
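A condensed sketch of the branching that `generateStatements` above performs, assuming the same `LEGEND_NATIVE_APPS`/`APP_METADATA` constants; the metadata column list is trimmed here for brevity:

```java
import java.util.ArrayList;
import java.util.List;

public final class SnowflakeStatementSketch
{
    private static final String DEPLOYMENT_SCHEMA = "LEGEND_NATIVE_APPS";
    private static final String DEPLOYMENT_TABLE = "APP_METADATA";

    // STAGE deployments only record the app's SQL in the metadata table;
    // full deployments (re)create the secure UDF that exposes the query.
    public static List<String> generateStatements(String catalog, String appName,
                                                  String returnColumns, String sql,
                                                  boolean stageOnly)
    {
        List<String> statements = new ArrayList<>();
        if (stageOnly)
        {
            statements.add(String.format(
                    "insert into %s.%s.%s(APP_NAME, SQL_FRAGMENT) values('%s', '%s');",
                    catalog, DEPLOYMENT_SCHEMA, DEPLOYMENT_TABLE, appName, sql));
        }
        else
        {
            statements.add(String.format(
                    "CREATE OR REPLACE SECURE FUNCTION %s.%s.%s() RETURNS TABLE (%s) as $$ %s $$;",
                    catalog, DEPLOYMENT_SCHEMA, appName, returnColumns, sql));
        }
        return statements;
    }
}
```

Note that `deploy` runs every generated statement on one JDBC connection with auto-commit disabled and commits only after `deployImpl` finishes, so a failed deployment rolls back as a unit.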
- - -package org.finos.legend.engine.language.snowflakeApp.deployment; - -import org.eclipse.collections.api.RichIterable; -import org.eclipse.collections.api.block.function.Function; -import org.eclipse.collections.impl.factory.Lists; -import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; -import org.finos.legend.engine.plan.generation.PlanGenerator; -import org.finos.legend.engine.plan.platform.PlanPlatform; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; -import org.finos.legend.pure.generated.Root_meta_external_store_relational_runtime_RelationalDatabaseConnection; -import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy; -import org.finos.legend.pure.generated.Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification; -import org.finos.legend.pure.generated.Root_meta_pure_executionPlan_ExecutionNode; -import org.finos.legend.pure.generated.Root_meta_pure_executionPlan_ExecutionPlan; -import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; -import org.finos.legend.pure.generated.Root_meta_relational_mapping_SQLExecutionNode; -import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.FunctionDefinition; -import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction; - -public class SnowflakeAppGenerator -{ - - public static SnowflakeAppArtifact generateArtifact(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, Function> routerExtensions) - { - RichIterable sqlExpressions = extractSQLExpressions(pureModel, activator, routerExtensions); - return new SnowflakeAppArtifact(activator._applicationName(), Lists.mutable.withAll(sqlExpressions)); - } - - private static RichIterable extractSQLExpressions(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, Function> routerExtensions) - { - PackageableFunction function = activator._function(); - Root_meta_pure_executionPlan_ExecutionPlan executionPlan = PlanGenerator.generateExecutionPlanAsPure((FunctionDefinition) function, null, null, null, pureModel, PlanPlatform.JAVA, null, routerExtensions.apply(pureModel)); - Root_meta_pure_executionPlan_ExecutionNode node = executionPlan._rootExecutionNode(); - return collectAllNodes(node) - .selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) - .collect(Root_meta_relational_mapping_SQLExecutionNode::_sqlQuery) - .select(x -> !x.toLowerCase().startsWith("alter")); - } - - private static Object[] extractSQLExpressionsAndConnectionMetadata(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, Function> routerExtensions) - { - PackageableFunction function = activator._function(); - Root_meta_pure_executionPlan_ExecutionPlan executionPlan = PlanGenerator.generateExecutionPlanAsPure((FunctionDefinition) function, null, null, null, pureModel, PlanPlatform.JAVA, null, routerExtensions.apply(pureModel)); - Root_meta_pure_executionPlan_ExecutionNode node = executionPlan._rootExecutionNode(); - - RichIterable expressions = collectAllNodes(node) - .selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) - .collect(Root_meta_relational_mapping_SQLExecutionNode::_sqlQuery) - .select(x -> !x.toLowerCase().startsWith("alter")); - - Root_meta_external_store_relational_runtime_RelationalDatabaseConnection relCOnn = 
(Root_meta_external_store_relational_runtime_RelationalDatabaseConnection)collectAllNodes(node).selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) - .getAny() - ._connection(); - Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification ds = (Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification) relCOnn._datasourceSpecification(); - Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy as = (Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy) relCOnn._authenticationStrategy(); - - return new Object[]{expressions, ds, as}; - } - - private static RichIterable collectAllNodes(Root_meta_pure_executionPlan_ExecutionNode node) - { - return Lists.mutable.with(node).withAll(node._executionNodes().flatCollect(SnowflakeAppGenerator::collectAllNodes)); - } -} diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentResult.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentResult.java index cfc0a154fee..8bfc1a49de6 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentResult.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeDeploymentResult.java @@ -15,7 +15,7 @@ package org.finos.legend.engine.language.snowflakeApp.deployment; import org.eclipse.collections.api.list.MutableList; -import org.finos.legend.engine.functionActivator.deployment.DeploymentResult; +import org.finos.legend.engine.protocol.functionActivator.deployment.DeploymentResult; public class SnowflakeDeploymentResult extends DeploymentResult { diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension deleted file mode 100644 index cc58504ac24..00000000000 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension +++ /dev/null @@ -1 +0,0 @@ -org.finos.legend.engine.language.snowflakeApp.deployment.SnowflakeAppArtifactGenerationExtension diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/pom.xml index e4338792cb7..8559c0b3b4a 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/pom.xml +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-snowflakeApp org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/src/main/java/org/finos/legend/engine/language/snowflakeApp/compiler/toPureGraph/SnowflakeAppCompilerExtension.java 
b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/src/main/java/org/finos/legend/engine/language/snowflakeApp/compiler/toPureGraph/SnowflakeAppCompilerExtension.java index 2faa56790c4..9b29beeedd9 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/src/main/java/org/finos/legend/engine/language/snowflakeApp/compiler/toPureGraph/SnowflakeAppCompilerExtension.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-compiler/src/main/java/org/finos/legend/engine/language/snowflakeApp/compiler/toPureGraph/SnowflakeAppCompilerExtension.java @@ -19,18 +19,13 @@ import org.finos.legend.engine.language.pure.compiler.toPureGraph.CompileContext; import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.CompilerExtension; import org.finos.legend.engine.language.pure.compiler.toPureGraph.extension.Processor; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.PackageableConnection; import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeApp; -import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeDeploymentConfiguration; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp_Impl; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration; -import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration_Impl; -import org.finos.legend.pure.generated.Root_meta_external_store_relational_runtime_RelationalDatabaseConnection; +import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppDeploymentConfiguration; +import org.finos.legend.pure.generated.*; import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction; import org.finos.legend.pure.m3.navigation.function.FunctionDescriptor; -import java.util.Collections; - public class SnowflakeAppCompilerExtension implements CompilerExtension { // Here only for dependency check error ... @@ -48,12 +43,8 @@ public Iterable> getExtraProcessors() return Lists.fixedSize.of( Processor.newProcessor( SnowflakeApp.class, - org.eclipse.collections.impl.factory.Lists.fixedSize.with(SnowflakeDeploymentConfiguration.class), + org.eclipse.collections.impl.factory.Lists.fixedSize.with(PackageableConnection.class), this::buildSnowflakeApp - ), - Processor.newProcessor( - SnowflakeDeploymentConfiguration.class, - this::buildDeploymentConfig ) ); } @@ -72,7 +63,8 @@ public Root_meta_external_function_activator_snowflakeApp_SnowflakeApp buildSnow ._function(func) ._description(app.description) ._owner(app.owner) - ._activationConfiguration(app.activationConfiguration != null ? buildDeploymentConfig((SnowflakeDeploymentConfiguration) app.activationConfiguration, context) : null); + ._type(app.type != null ? context.pureModel.getEnumValue("meta::external::function::activator::snowflakeApp::SnowflakeDeploymentType", app.type.toString()) : context.pureModel.getEnumValue("meta::external::function::activator::snowflakeApp::SnowflakeDeploymentType", "FULL")) + ._activationConfiguration(app.activationConfiguration != null ? 
buildDeploymentConfig((SnowflakeAppDeploymentConfiguration) app.activationConfiguration, context) : null); } catch (Exception e) { @@ -80,10 +72,10 @@ public Root_meta_external_function_activator_snowflakeApp_SnowflakeApp buildSnow } } - public Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration buildDeploymentConfig(SnowflakeDeploymentConfiguration configuration, CompileContext context) + public Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration buildDeploymentConfig(SnowflakeAppDeploymentConfiguration configuration, CompileContext context) { return new Root_meta_external_function_activator_snowflakeApp_SnowflakeDeploymentConfiguration_Impl("") ._target((Root_meta_external_store_relational_runtime_RelationalDatabaseConnection) context.resolveConnection(configuration.activationConnection.connection, configuration.sourceInformation)); - // ._stage(context.pureModel.getEnumValue("meta::external::function::activator::DeploymentStage", configuration.stage.name())); + // ._stage(context.pureModel.getEnumValue("meta::external::function::activator::DeploymentStage", configuration.stage.name())); } } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/pom.xml new file mode 100644 index 00000000000..66ae6d76403 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/pom.xml @@ -0,0 +1,201 @@ + + + + + + org.finos.legend.engine + legend-engine-xts-snowflakeApp + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-snowflakeApp-generator + jar + Legend Engine - XT - Snowflake App - Generator + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + dependency-analyze + + + + org.finos.legend.engine:legend-engine-xt-functionActivator-pure + org.finos.legend.engine:legend-engine-executionPlan-execution + + + + + + + + + + + org.finos.legend.pure + legend-pure-m3-core + + + + + + + + + + org.finos.legend.engine + legend-engine-language-pure-compiler + + + + + + + org.finos.legend.engine + legend-engine-pure-code-compiled-core + + + org.finos.legend.engine + legend-engine-pure-code-core-extension + + + + org.finos.legend.engine + legend-engine-executionPlan-generation + + + org.finos.legend.engine + legend-engine-shared-core + + + org.finos.legend.engine + legend-engine-xt-functionActivator-pure + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-relationalStore-pure + + + org.finos.legend.engine + legend-engine-xt-relationalStore-protocol + + + org.finos.legend.engine + legend-engine-executionPlan-execution + + + org.finos.legend.engine + legend-engine-pure-platform-java + + + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-pure + + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-protocol + + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-compiler + runtime + + + org.finos.legend.engine + legend-engine-xt-snowflakeApp-grammar + runtime + + + org.finos.legend.engine + legend-engine-xt-relationalStore-snowflake-protocol + + + org.finos.legend.engine + legend-engine-xt-relationalStore-snowflake-pure + + + + + + + + + com.fasterxml.jackson.core + jackson-databind + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.slf4j + slf4j-api + + + + + + + + junit + junit + test + + + org.finos.legend.engine + 
legend-engine-xt-relationalStore-javaPlatformBinding-pure + test + + + org.finos.legend.engine + legend-engine-xt-relationalStore-grammar + test + + + org.finos.legend.engine + legend-engine-configuration + test + + + org.glassfish.jersey.core + jersey-common + test + + + org.finos.legend.engine + legend-engine-language-pure-dsl-generation + + + + diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppArtifactGenerationExtension.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppArtifactGenerationExtension.java new file mode 100644 index 00000000000..82a89baa5e8 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppArtifactGenerationExtension.java @@ -0,0 +1,78 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +package org.finos.legend.engine.language.snowflakeApp.generator; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.eclipse.collections.api.RichIterable; +import org.eclipse.collections.api.block.function.Function; +import org.eclipse.collections.impl.factory.Lists; +import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; +import org.finos.legend.engine.language.pure.dsl.generation.extension.Artifact; +import org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact; +import org.finos.legend.engine.pure.code.core.PureCoreExtensionLoader; +import org.finos.legend.engine.shared.core.ObjectMapperFactory; +import org.finos.legend.pure.generated.Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; +import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; +import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.PackageableElement; +import org.slf4j.Logger; + +import java.util.List; + +public class SnowflakeAppArtifactGenerationExtension implements ArtifactGenerationExtension +{ + private static final ObjectMapper mapper = ObjectMapperFactory.getNewStandardObjectMapperWithPureProtocolExtensionSupports(); + private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(SnowflakeAppArtifactGenerationExtension.class); + private static final String ROOT_PATH = "snowflakeApp"; + private static final String FILE_NAME = "snowflakeAppArtifact.json"; + + @Override + public String getKey() + { + return ROOT_PATH; + } + + @Override + public boolean canGenerate(PackageableElement element) + { + return element instanceof Root_meta_external_function_activator_snowflakeApp_SnowflakeApp; + } + + + @Override + public List 
<Artifact> generate(PackageableElement element, PureModel pureModel, PureModelContextData data, String clientVersion) + { + List<Artifact> result = Lists.mutable.empty(); + Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions = (PureModel p) -> PureCoreExtensionLoader.extensions().flatCollect(e -> e.extraPureCoreExtensions(p.getExecutionSupport())); + SnowflakeAppArtifact artifact = SnowflakeAppGenerator.generateArtifact(pureModel, (Root_meta_external_function_activator_snowflakeApp_SnowflakeApp) element, data, routerExtensions); + try + { + LOGGER.info("Generating snowflake artifact for " + element.getName()); + String content = mapper.writeValueAsString(artifact); + result.add(new Artifact(content, FILE_NAME, "json")); + LOGGER.info("Generated snowflake artifact for " + element.getName()); + + } + catch (Exception e) + { + LOGGER.error("Error generating snowflake artifact for " + element.getName() + " reason: " + e.getMessage()); + } + return result; + + } + +} diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppGenerator.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppGenerator.java new file mode 100644 index 00000000000..aa7e4335e97 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/java/org/finos/legend/engine/language/snowflakeApp/generator/SnowflakeAppGenerator.java @@ -0,0 +1,140 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
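Reviewer note: SnowflakeAppArtifactGenerationExtension (closed above) is picked up through the java.util.ServiceLoader registration added further down in this diff (META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension). A minimal discovery sketch; the demo class itself is hypothetical and not part of this PR:

```java
import java.util.ServiceLoader;
import org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension;

// Hypothetical demo class (not in this PR): lists every ArtifactGenerationExtension on the classpath.
public class ArtifactExtensionDiscoveryDemo
{
    public static void main(String[] args)
    {
        for (ArtifactGenerationExtension extension : ServiceLoader.load(ArtifactGenerationExtension.class))
        {
            // With legend-engine-xt-snowflakeApp-generator on the classpath this prints "snowflakeApp"
            System.out.println(extension.getKey());
        }
    }
}
```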
+ + +package org.finos.legend.engine.language.snowflakeApp.generator; + +import org.eclipse.collections.api.RichIterable; +import org.eclipse.collections.api.block.function.Function; +import org.eclipse.collections.impl.factory.Lists; +import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; +import org.finos.legend.engine.plan.generation.PlanGenerator; +import org.finos.legend.engine.plan.platform.PlanPlatform; +import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.model.context.SDLC; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.PackageableConnection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.DatabaseType; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.authentication.SnowflakePublicAuthenticationStrategy; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.specification.SnowflakeDatasourceSpecification; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppContent; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration; +import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeApp; +import org.finos.legend.pure.generated.*; +import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.FunctionDefinition; +import org.finos.legend.pure.m3.coreinstance.meta.pure.metamodel.function.PackageableFunction; + +public class SnowflakeAppGenerator +{ + + public static SnowflakeAppArtifact generateArtifact(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, PureModelContext inputModel, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions) + { + PackageableFunction<?> function = activator._function(); + Root_meta_pure_executionPlan_ExecutionPlan executionPlan = PlanGenerator.generateExecutionPlanAsPure((FunctionDefinition<?>) function, null, null, null, pureModel, PlanPlatform.JAVA, null, routerExtensions.apply(pureModel)); + RichIterable<String> sqlExpressions = extractSQLExpressions(executionPlan); + String functionColumns = executionPlan._rootExecutionNode()._resultType() instanceof Root_meta_pure_executionPlan_TDSResultType ?
generateFunctionReturnColumns((Root_meta_pure_executionPlan_TDSResultType)executionPlan._rootExecutionNode()._resultType()) : ""; + + RelationalDatabaseConnection connection; + AlloySDLC sdlc = null; + if (((PureModelContextData)inputModel).getOrigin() != null) + { + SDLC sdlcInfo = ((PureModelContextData)inputModel).origin.sdlcInfo; + if (sdlcInfo instanceof AlloySDLC) + { + sdlc = (AlloySDLC) sdlcInfo; + } + } + SnowflakeAppContent content = new SnowflakeAppContent(activator._applicationName(), Lists.mutable.withAll(sqlExpressions), activator._description(), functionColumns, activator._type()._name(), Lists.mutable.with(activator._owner()), sdlc); + if (activator._activationConfiguration() != null) + { + //identify connection + SnowflakeApp protocolActivator = Lists.mutable.withAll(((PureModelContextData) inputModel).getElementsOfType(SnowflakeApp.class)) + .select(c -> c.getPath().equals(platform_pure_basics_meta_elementToPath.Root_meta_pure_functions_meta_elementToPath_PackageableElement_1__String_1_(activator, pureModel.getExecutionSupport()))) + .getFirst(); + connection = (RelationalDatabaseConnection) Lists.mutable.withAll(((PureModelContextData) inputModel).getElementsOfType(PackageableConnection.class)) + .select(c -> c.getPath().equals(((org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppDeploymentConfiguration)protocolActivator.activationConfiguration).activationConnection.connection)).getFirst().connectionValue; + return new SnowflakeAppArtifact(content, new SnowflakeAppDeploymentConfiguration(connection)); + } + + return new SnowflakeAppArtifact(content); + } + + private static RichIterable<String> extractSQLExpressions(Root_meta_pure_executionPlan_ExecutionPlan executionPlan) + { + + Root_meta_pure_executionPlan_ExecutionNode node = executionPlan._rootExecutionNode(); + return collectAllNodes(node) + .selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) + .collect(Root_meta_relational_mapping_SQLExecutionNode::_sqlQuery) + .select(x -> !x.toLowerCase().startsWith("alter")); + } + + private static String generateFunctionReturnColumns(Root_meta_pure_executionPlan_TDSResultType planResult) + { + return Lists.mutable.withAll(planResult._tdsColumns()).collect(c -> + c._name().replace(" ","_").replace("/","_") + " " + "VARCHAR(16777216)").makeString(" , "); + } + + private static Object[] extractSQLExpressionsAndConnectionMetadata(PureModel pureModel, Root_meta_external_function_activator_snowflakeApp_SnowflakeApp activator, Function<PureModel, RichIterable<? extends Root_meta_pure_extension_Extension>> routerExtensions) + { + PackageableFunction<?> function = activator._function(); + Root_meta_pure_executionPlan_ExecutionPlan executionPlan = PlanGenerator.generateExecutionPlanAsPure((FunctionDefinition<?>) function, null, null, null, pureModel, PlanPlatform.JAVA, null, routerExtensions.apply(pureModel)); + Root_meta_pure_executionPlan_ExecutionNode node = executionPlan._rootExecutionNode(); + + RichIterable<String> expressions = collectAllNodes(node) + .selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) + .collect(Root_meta_relational_mapping_SQLExecutionNode::_sqlQuery) + .select(x -> !x.toLowerCase().startsWith("alter")); + + Root_meta_external_store_relational_runtime_RelationalDatabaseConnection relConn = (Root_meta_external_store_relational_runtime_RelationalDatabaseConnection)collectAllNodes(node).selectInstancesOf(Root_meta_relational_mapping_SQLExecutionNode.class) + .getAny() + ._connection(); + Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification ds =
(Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification) relConn._datasourceSpecification(); + Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy as = (Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy) relConn._authenticationStrategy(); + + return new Object[]{expressions, ds, as}; + } + + private RelationalDatabaseConnection adaptConnection(Root_meta_pure_alloy_connections_alloy_specification_SnowflakeDatasourceSpecification datasourceSpecification, Root_meta_pure_alloy_connections_alloy_authentication_SnowflakePublicAuthenticationStrategy authenticationStrategy) + { + RelationalDatabaseConnection connection = new RelationalDatabaseConnection(); + + SnowflakeDatasourceSpecification snowflakeDatasourceSpecification = new SnowflakeDatasourceSpecification(); + snowflakeDatasourceSpecification.accountName = datasourceSpecification._accountName(); + snowflakeDatasourceSpecification.databaseName = datasourceSpecification._databaseName(); + snowflakeDatasourceSpecification.role = datasourceSpecification._role(); + snowflakeDatasourceSpecification.warehouseName = datasourceSpecification._warehouseName(); + snowflakeDatasourceSpecification.region = datasourceSpecification._region(); + snowflakeDatasourceSpecification.cloudType = datasourceSpecification._cloudType(); + + SnowflakePublicAuthenticationStrategy snowflakeAuthenticationStrategy = new SnowflakePublicAuthenticationStrategy(); + snowflakeAuthenticationStrategy.privateKeyVaultReference = authenticationStrategy._privateKeyVaultReference(); + snowflakeAuthenticationStrategy.passPhraseVaultReference = authenticationStrategy._passPhraseVaultReference(); + snowflakeAuthenticationStrategy.publicUserName = authenticationStrategy._publicUserName(); + + connection.authenticationStrategy = snowflakeAuthenticationStrategy; + connection.datasourceSpecification = snowflakeDatasourceSpecification; + connection.type = DatabaseType.Snowflake; + + return connection; + } + + private static RichIterable<Root_meta_pure_executionPlan_ExecutionNode> collectAllNodes(Root_meta_pure_executionPlan_ExecutionNode node) + { + return Lists.mutable.with(node).withAll(node._executionNodes().flatCollect(SnowflakeAppGenerator::collectAllNodes)); + } +} diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension new file mode 100644 index 00000000000..550ecc294b9 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-generator/src/main/resources/META-INF/services/org.finos.legend.engine.language.pure.dsl.generation.extension.ArtifactGenerationExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.snowflakeApp.generator.SnowflakeAppArtifactGenerationExtension diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/pom.xml index ee6d48b026f..17aeda460d3 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/pom.xml +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-snowflakeApp - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT
4.0.0 @@ -120,6 +120,10 @@ org.finos.legend.engine legend-engine-protocol-pure + + org.finos.legend.engine + legend-engine-shared-core + diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppLexerGrammar.g4 b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppLexerGrammar.g4 index 8a1eb1451c6..180679616ee 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppLexerGrammar.g4 +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppLexerGrammar.g4 @@ -7,6 +7,7 @@ SNOWFLAKE_APP__APPLICATION_NAME: 'applicationName'; SNOWFLAKE_APP__DESCRIPTION: 'description'; SNOWFLAKE_APP__FUNCTION: 'function'; SNOWFLAKE_APP__OWNER: 'owner'; +SNOWFLAKE_APP__TYPE: 'type'; SNOWFLAKE_APP__ACTIVATION: 'activationConfiguration'; // ------------------------------------- CONFIGURATION ------------------------------- diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppParserGrammar.g4 b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppParserGrammar.g4 index be29564551c..666e316e235 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppParserGrammar.g4 +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/antlr4/org/finos/legend/engine/language/pure/grammar/from/antlr4/SnowflakeAppParserGrammar.g4 @@ -13,6 +13,7 @@ identifier: VALID_STRING | STRING | SNOWFLAKE_APP__DESCRIPTION | SNOWFLAKE_APP__FUNCTION | SNOWFLAKE_APP__OWNER | + SNOWFLAKE_APP__TYPE | SNOWFLAKE_APP__ACTIVATION| CONFIGURATION| DEPLOYMENT_STAGE | ACTIVATION_CONNECTION | @@ -34,6 +35,7 @@ snowflakeApp: SNOWFLAKE_APP stereotypes? taggedValues? 
qualifi | description | function | owner + | type | activation )* BRACE_CLOSE; @@ -51,6 +53,8 @@ function: SNOWFLAKE_APP__FUNCTION COLON functionIdentifier owner : SNOWFLAKE_APP__OWNER COLON STRING SEMI_COLON; +type : SNOWFLAKE_APP__TYPE COLON identifier SEMI_COLON; + activation: SNOWFLAKE_APP__ACTIVATION COLON qualifiedName SEMI_COLON ; // ----------------------------------- Deployment ------------------------------------------------------ diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/from/SnowflakeAppTreeWalker.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/from/SnowflakeAppTreeWalker.java index 31444f92ce3..61b72fae5fb 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/from/SnowflakeAppTreeWalker.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/from/SnowflakeAppTreeWalker.java @@ -21,6 +21,7 @@ import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParserUtility; import org.finos.legend.engine.language.pure.grammar.from.antlr4.SnowflakeAppParserGrammar; import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentStage; +import org.finos.legend.engine.protocol.pure.v1.model.context.EngineErrorType; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.StereotypePtr; @@ -28,7 +29,8 @@ import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.TaggedValue; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.section.DefaultCodeSection; import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeApp; -import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeDeploymentConfiguration; +import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppDeploymentConfiguration; +import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppType; import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; import java.util.Collections; @@ -56,21 +58,6 @@ public void visit(SnowflakeAppParserGrammar.DefinitionContext ctx) { ctx.snowflakeApp().stream().map(this::visitSnowflakeApp).peek(e -> this.section.elements.add(e.getPath())).forEach(this.elementConsumer); } - if (ctx.deploymentConfig() != null && !ctx.deploymentConfig().isEmpty()) - { - ctx.deploymentConfig().stream().map(this::visitDeploymentConfig).peek(e -> this.section.elements.add(e.getPath())).forEach(this.elementConsumer); - } - } - - private SnowflakeDeploymentConfiguration visitDeploymentConfig(SnowflakeAppParserGrammar.DeploymentConfigContext ctx) - { - SnowflakeDeploymentConfiguration config = new SnowflakeDeploymentConfiguration(); - ConnectionPointer pointer = new ConnectionPointer(); - pointer.connection = PureGrammarParserUtility.fromQualifiedName(ctx.activationConnection().qualifiedName().packagePath() == null - ? 
Collections.emptyList() : ctx.activationConnection().qualifiedName().packagePath().identifier(), ctx.activationConnection().qualifiedName().identifier()); - pointer.sourceInformation = walkerSourceInformation.getSourceInformation(ctx.activationConnection().qualifiedName()); - config.activationConnection = pointer; - return config; } private SnowflakeApp visitSnowflakeApp(SnowflakeAppParserGrammar.SnowflakeAppContext ctx) @@ -91,11 +78,33 @@ private SnowflakeApp visitSnowflakeApp(SnowflakeAppParserGrammar.SnowflakeAppCon { snowflakeApp.owner = PureGrammarParserUtility.fromGrammarString(ownerContext.STRING().getText(), true); } + SnowflakeAppParserGrammar.TypeContext typeContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.type(), "type", snowflakeApp.sourceInformation); + if (typeContext != null) + { + try + { + snowflakeApp.type = SnowflakeAppType.valueOf(PureGrammarParserUtility.fromIdentifier(typeContext.identifier())); + } + catch (Exception e) + { + throw new EngineException("Unknown type '" + PureGrammarParserUtility.fromIdentifier(typeContext.identifier()) + "'", this.walkerSourceInformation.getSourceInformation(typeContext), EngineErrorType.PARSER); + } + } + SnowflakeAppParserGrammar.DescriptionContext descriptionContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.description(), "description", snowflakeApp.sourceInformation); if (descriptionContext != null) { snowflakeApp.description = PureGrammarParserUtility.fromGrammarString(descriptionContext.STRING().getText(), true); } + SnowflakeAppParserGrammar.ActivationContext activationContext = PureGrammarParserUtility.validateAndExtractOptionalField(ctx.activation(), "activation", snowflakeApp.sourceInformation); + if (activationContext != null) + { + ConnectionPointer p = new ConnectionPointer(); + p.connection = PureGrammarParserUtility.fromQualifiedName(activationContext.qualifiedName().packagePath() == null + ? 
Collections.emptyList() : activationContext.qualifiedName().packagePath().identifier(), activationContext.qualifiedName().identifier()); + p.sourceInformation = walkerSourceInformation.getSourceInformation(activationContext.qualifiedName()); + snowflakeApp.activationConfiguration = new SnowflakeAppDeploymentConfiguration(p); + } return snowflakeApp; } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/to/SnowflakeAppGrammarComposer.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/to/SnowflakeAppGrammarComposer.java index 329693d0887..d8c4ede601d 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/to/SnowflakeAppGrammarComposer.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/main/java/org/finos/legend/engine/language/snowflakeApp/grammar/to/SnowflakeAppGrammarComposer.java @@ -26,6 +26,7 @@ import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.externalFormat.Binding; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.externalFormat.ExternalFormatSchemaSet; import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeApp; +import org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeAppDeploymentConfiguration; import java.util.Collections; import java.util.List; @@ -53,6 +54,8 @@ private static String renderSnowflakeApp(SnowflakeApp app) " function : " + app.function + ";\n" + (app.owner == null ? "" : " owner : '" + app.owner + "';\n") + (app.description == null ? "" : " description : '" + app.description + "';\n") + + (app.type == null ? "" : " type : " + app.type.name() + ";\n") + + (app.activationConfiguration == null ? 
"" : " activationConfiguration : " + ((SnowflakeAppDeploymentConfiguration)app.activationConfiguration).activationConnection.connection + ";\n") + "}"; } @@ -77,7 +80,7 @@ public List, PureGrammarComposerContext, Stri } @Override - public List, PureGrammarComposerContext, List, PureGrammarComposerExtension.PureFreeSectionGrammarComposerResult>> getExtraFreeSectionComposers() + public List, PureGrammarComposerContext, List, PureFreeSectionGrammarComposerResult>> getExtraFreeSectionComposers() { return Collections.singletonList((elements, context, composedSections) -> { diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeAppRoundtrip.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeAppRoundtrip.java index 54b331fcf73..a733a083c52 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeAppRoundtrip.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeAppRoundtrip.java @@ -29,6 +29,8 @@ public void testSnowflakeApp() " function : zxx(Integer[1]):String[1];\n" + " owner : 'pierre';\n" + " description : 'A super nice app!';\n" + + " type : STAGE;\n" + + " activationConfiguration : a::b::connection;\n" + "}\n"); } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeParsing.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeParsing.java index 1edd7bbe4f7..2beafe81dc7 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeParsing.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-grammar/src/test/java/org/finos/legend/engine/language/snowflakeApp/grammar/test/TestSnowflakeParsing.java @@ -48,7 +48,7 @@ public void testGetParserErrorWrongProperty() "SnowflakeApp x::A\n" + "{\n" + " applicatioName : 'sass';\n" + - "}\n", "PARSER error at [4:4-17]: Unexpected token 'applicatioName'. Valid alternatives: ['applicationName', 'description', 'function', 'owner', 'activationConfiguration']"); + "}\n", "PARSER error at [4:4-17]: Unexpected token 'applicatioName'. 
Valid alternatives: ['applicationName', 'description', 'function', 'owner', 'type', 'activationConfiguration']"); } @Test diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/pom.xml index 44ba7e492a3..8f5cf8bed64 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/pom.xml +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-snowflakeApp - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -50,6 +50,10 @@ junit test + + org.finos.legend.engine + legend-engine-xt-relationalStore-protocol + diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifact.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppArtifact.java similarity index 55% rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifact.java rename to legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppArtifact.java index 9ab608335d6..3115f9b154c 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppArtifact.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppArtifact.java @@ -12,12 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
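Reviewer note: taken together, the lexer, parser, and tree-walker changes above let a SnowflakeApp element carry an optional type (STAGE or FULL) and an inline activationConfiguration connection pointer. Assembled from the two tests above (element path and values are the tests' placeholders), a complete element now reads:

```
SnowflakeApp x::A
{
   applicationName : 'sass';
   function : zxx(Integer[1]):String[1];
   owner : 'pierre';
   description : 'A super nice app!';
   type : STAGE;
   activationConfiguration : a::b::connection;
}
```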
-package org.finos.legend.engine.language.snowflakeApp.deployment; +package org.finos.legend.engine.protocol.snowflakeApp.deployment; -import org.eclipse.collections.api.RichIterable; -import org.eclipse.collections.api.factory.Lists; -import org.eclipse.collections.api.list.MutableList; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; public class SnowflakeAppArtifact extends FunctionActivatorArtifact { @@ -27,14 +24,15 @@ public SnowflakeAppArtifact() //empty artifact } - public SnowflakeAppArtifact(String name, MutableList<String> sqlExpressions) + public SnowflakeAppArtifact(SnowflakeAppContent content) { - this.content = new SnowflakeAppContent(name, sqlExpressions); + this.content = content; } - public SnowflakeAppArtifact(String name, MutableList<String> sqlExpressions, SnowflakeAppDeploymentConfiguration config) + public SnowflakeAppArtifact(SnowflakeAppContent content, SnowflakeAppDeploymentConfiguration config) { - this.content = new SnowflakeAppContent(name, sqlExpressions); + this(content); this.deploymentConfiguration = config; } + } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppContent.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppContent.java new file mode 100644 index 00000000000..6bbe1c1b9db --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppContent.java @@ -0,0 +1,82 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package org.finos.legend.engine.protocol.snowflakeApp.deployment; + +import org.eclipse.collections.api.factory.Lists; +import org.eclipse.collections.api.list.MutableList; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent; +import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC; + +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; + +public class SnowflakeAppContent extends FunctionActivatorDeploymentContent +{ + public MutableList<String> sqlExpressions = Lists.mutable.empty(); + public String functionArguments; + public String type; + public String applicationName; + public String description; + public List<String> owners; + public String groupId; + public String artifactId; + public String version; + public String creationTime; + + public SnowflakeAppContent() + { + //Empty constructor for Jackson + } + + public SnowflakeAppContent(String applicationName, MutableList<String> sqlExpressions, String functionArguments, String type, AlloySDLC sdlc) + { + this.applicationName = applicationName; + this.sqlExpressions = sqlExpressions; + this.creationTime = convertToValidDate(new Date()); + this.functionArguments = functionArguments; + this.type = type; + if (sdlc != null) + { + this.groupId = sdlc.groupId; + this.artifactId = sdlc.artifactId; + this.version = sdlc.version; + } + } + + public SnowflakeAppContent(String applicationName, MutableList<String> sqlExpressions, String description, String functionArguments, String type, List<String> owners, AlloySDLC sdlc) + { + this(applicationName, sqlExpressions, functionArguments, type, sdlc); + this.description = description; + this.owners = owners; + + } + + public static String convertToValidDate(Date date) + { + SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + return format.format(date); + } + + public String getVersionInfo() + { + if (this.version != null) + { + return groupId + ":" + this.artifactId + ":" + this.version; + } + return ""; + } +} + diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java similarity index 65% rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java rename to legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java index 6308e215f8b..01e07879f29 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-api/src/main/java/org/finos/legend/engine/language/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/deployment/SnowflakeAppDeploymentConfiguration.java @@ -12,12 +12,22 @@ // See the License for the specific language governing permissions and // limitations under the License.
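Reviewer note: SnowflakeAppContent (above) now carries the whole deployment payload, and SnowflakeAppArtifact is built content-first. A hypothetical construction sketch; all literal values are made up, and the seven-argument constructor's order (applicationName, sqlExpressions, description, functionArguments, type, owners, sdlc) is easy to misread at call sites:

```java
import org.eclipse.collections.api.factory.Lists;
import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact;
import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppContent;

// Hypothetical usage sketch (not in this PR); every literal value below is illustrative.
public class SnowflakeAppContentDemo
{
    public static void main(String[] args)
    {
        SnowflakeAppContent content = new SnowflakeAppContent(
                "MyApp",                                        // applicationName
                Lists.mutable.with("SELECT * FROM demo_table"), // sqlExpressions
                "A super nice app!",                            // description
                "col VARCHAR(16777216)",                        // functionArguments
                "FULL",                                         // type
                Lists.mutable.with("pierre"),                   // owners
                null);                                          // no AlloySDLC, so getVersionInfo() returns ""
        SnowflakeAppArtifact artifact = new SnowflakeAppArtifact(content); // no deployment configuration
        System.out.println(content.getVersionInfo().isEmpty());            // prints "true"
    }
}
```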
-package org.finos.legend.engine.language.snowflakeApp.deployment; +package org.finos.legend.engine.protocol.snowflakeApp.deployment; -import org.finos.legend.engine.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.connection.RelationalDatabaseConnection; public class SnowflakeAppDeploymentConfiguration extends FunctionActivatorDeploymentConfiguration { - RelationalDatabaseConnection connection; + public RelationalDatabaseConnection connection; + + public SnowflakeAppDeploymentConfiguration() + { + //jackson + } + + public SnowflakeAppDeploymentConfiguration(RelationalDatabaseConnection connection) + { + this.connection = connection; + } } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeApp.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeApp.java index de25183590f..7c0aedc93a4 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeApp.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeApp.java @@ -30,4 +30,5 @@ public class SnowflakeApp extends FunctionActivator public String applicationName; public String description; public String owner; + public SnowflakeAppType type; } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeDeploymentConfiguration.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppDeploymentConfiguration.java similarity index 79% rename from legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeDeploymentConfiguration.java rename to legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppDeploymentConfiguration.java index 0aff2c51431..7e4032b33fa 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeDeploymentConfiguration.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppDeploymentConfiguration.java @@ -17,14 +17,18 @@ import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer; -public class SnowflakeDeploymentConfiguration extends DeploymentConfiguration +public class SnowflakeAppDeploymentConfiguration extends DeploymentConfiguration { public ConnectionPointer activationConnection; // public String applicationName; - public SnowflakeDeploymentConfiguration() + public SnowflakeAppDeploymentConfiguration() { + } + public 
SnowflakeAppDeploymentConfiguration(ConnectionPointer cp) + { + activationConnection = cp; } } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppProtocolExtension.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppProtocolExtension.java index 88d78239a52..4e0f3fd3e83 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppProtocolExtension.java +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppProtocolExtension.java @@ -17,9 +17,15 @@ import org.eclipse.collections.api.block.function.Function0; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Maps; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorArtifact; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentConfiguration; +import org.finos.legend.engine.protocol.functionActivator.deployment.FunctionActivatorDeploymentContent; +import org.finos.legend.engine.protocol.functionActivator.metamodel.DeploymentConfiguration; import org.finos.legend.engine.protocol.pure.v1.extension.ProtocolSubTypeInfo; import org.finos.legend.engine.protocol.pure.v1.extension.PureProtocolExtension; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppArtifact; +import org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppContent; import java.util.List; import java.util.Map; @@ -34,6 +40,18 @@ public List>>> getExtraProtocolSubTypeInfo return Lists.fixedSize.with(() -> Lists.mutable.with( ProtocolSubTypeInfo.newBuilder(PackageableElement.class) .withSubtype(SnowflakeApp.class, packageJSONType) + .build(), + ProtocolSubTypeInfo.newBuilder(DeploymentConfiguration.class) + .withSubtype(SnowflakeAppDeploymentConfiguration.class, "snowflakeDeploymentConfiguration") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentConfiguration.class) + .withSubtype(org.finos.legend.engine.protocol.snowflakeApp.deployment.SnowflakeAppDeploymentConfiguration.class, "snowflakeDeploymentConfig") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorArtifact.class) + .withSubtype(SnowflakeAppArtifact.class, "snowflakeArtifact") + .build(), + ProtocolSubTypeInfo.newBuilder(FunctionActivatorDeploymentContent.class) + .withSubtype(SnowflakeAppContent.class, "snowflakeDeploymentContent") .build() )); } diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppType.java b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppType.java new file mode 100644 index 00000000000..f362f5f1674 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/java/org/finos/legend/engine/protocol/snowflakeApp/metamodel/SnowflakeAppType.java @@ -0,0 +1,20 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the 
"License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package org.finos.legend.engine.protocol.snowflakeApp.metamodel; + +public enum SnowflakeAppType +{ + STAGE, FULL +} diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator new file mode 100644 index 00000000000..a6668cefc25 --- /dev/null +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-protocol/src/main/resources/META-INF/services/org.finos.legend.engine.protocol.functionActivator.metamodel.FunctionActivator @@ -0,0 +1 @@ +org.finos.legend.engine.protocol.snowflakeApp.metamodel.SnowflakeApp \ No newline at end of file diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/pom.xml b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/pom.xml index 180449a04c2..7a8da1dcb5f 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/pom.xml +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-snowflakeApp - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/src/main/resources/core_snowflakeapp/metamodel/metamodel.pure b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/src/main/resources/core_snowflakeapp/metamodel/metamodel.pure index 40816df1ddb..eec22e2b59d 100644 --- a/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/src/main/resources/core_snowflakeapp/metamodel/metamodel.pure +++ b/legend-engine-xts-snowflakeApp/legend-engine-xt-snowflakeApp-pure/src/main/resources/core_snowflakeapp/metamodel/metamodel.pure @@ -1,10 +1,12 @@ import meta::external::function::activator::*; +import meta::external::function::activator::snowflakeApp::*; Class meta::external::function::activator::snowflakeApp::SnowflakeApp extends FunctionActivator { applicationName : String[1]; description : String[0..1]; owner : String[0..1]; + type: SnowflakeDeploymentType[0..1] ;//default to full } Class meta::external::function::activator::snowflakeApp::SnowflakeDeploymentConfiguration extends DeploymentConfiguration @@ -17,6 +19,11 @@ Class meta::external::function::activator::snowflakeApp::SnowflakeApp extends Fu } +Enum meta::external::function::activator::snowflakeApp::SnowflakeDeploymentType +{ + STAGE, FULL +} + // This section needs to be code generated from the section above Class meta::protocols::pure::vX_X_X::metamodel::function::activator::snowflakeApp::SnowflakeApp extends meta::protocols::pure::vX_X_X::metamodel::function::activator::FunctionActivator { diff --git a/legend-engine-xts-snowflakeApp/pom.xml b/legend-engine-xts-snowflakeApp/pom.xml index 92a05d2beb7..a26c28dd940 100644 --- a/legend-engine-xts-snowflakeApp/pom.xml 
+++ b/legend-engine-xts-snowflakeApp/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -34,5 +34,6 @@ legend-engine-xt-snowflakeApp-grammar legend-engine-xt-snowflakeApp-protocol legend-engine-xt-snowflakeApp-pure + legend-engine-xt-snowflakeApp-generator \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-compiler/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-compiler/pom.xml index 2b47b85f498..991a55bca69 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-compiler/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-compiler/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-grammar-integration/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-grammar-integration/pom.xml index 77abaf653c1..9ec6c5f63fe 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-grammar-integration/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-grammar-integration/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/pom.xml index fd4200aa96f..6b66c473a1e 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/main/java/org/finos/legend/engine/language/sql/grammar/from/SQLGrammarParser.java b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/main/java/org/finos/legend/engine/language/sql/grammar/from/SQLGrammarParser.java index 408f72c14c9..83be78cddba 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/main/java/org/finos/legend/engine/language/sql/grammar/from/SQLGrammarParser.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/main/java/org/finos/legend/engine/language/sql/grammar/from/SQLGrammarParser.java @@ -28,6 +28,7 @@ import org.finos.legend.engine.language.sql.grammar.from.antlr4.SqlBaseLexer; import org.finos.legend.engine.language.sql.grammar.from.antlr4.SqlBaseParser; import org.finos.legend.engine.protocol.pure.v1.model.SourceInformation; +import org.finos.legend.engine.protocol.sql.metamodel.Expression; import org.finos.legend.engine.protocol.sql.metamodel.Statement; import java.util.BitSet; @@ -51,6 +52,12 @@ public Statement parseStatement(String query) return this.parse(query, "statement"); } + public Expression parseExpression(String expression) + { + SqlBaseParser parser = getSqlBaseParser(expression, "expression"); + return (Expression) sqlVisitor.visitSingleExpression(parser.singleExpression()); + } + private Statement parse(String query, String name) { SqlBaseParser parser = getSqlBaseParser(query, name); diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/test/java/org/finos/legend/engine/language/sql/grammar/test/roundtrip/TestSQLRoundTrip.java b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/test/java/org/finos/legend/engine/language/sql/grammar/test/roundtrip/TestSQLRoundTrip.java index befce651ac1..2643bb292bc 100644 --- 
a/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/test/java/org/finos/legend/engine/language/sql/grammar/test/roundtrip/TestSQLRoundTrip.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-grammar/src/test/java/org/finos/legend/engine/language/sql/grammar/test/roundtrip/TestSQLRoundTrip.java @@ -128,18 +128,36 @@ public void testWhere() check("SELECT * FROM myTable WHERE col1 = 1"); } + @Test + public void testWhereExpression() + { + checkExpression("col1 = 1"); + } + @Test public void testCompositeWhere() { check("SELECT * FROM myTable WHERE col1 = 1 AND col2 = 1"); } + @Test + public void testCompositeWhereExpression() + { + checkExpression("col1 = 1 AND col2 = 1"); + } + @Test public void testWhereQualified() { check("SELECT * FROM myTable WHERE myTable.col1 = 1"); } + @Test + public void testWhereQualifiedExpression() + { + checkExpression("myTable.col1 = 1"); + } + @Test public void testCompositeWhereQualifiedWithAlias() { @@ -155,6 +173,15 @@ public void testCompositeWhereOperators() "col BETWEEN 0 AND 1"); } + @Test + public void testCompositeWhereOperatorsExpression() + { + checkExpression("col = 1 AND col > 1 AND col < 1 " + + "AND col >= 1 AND col <= 1 AND col IN (1, 2, 3) AND col IS NULL AND " + + "col IS NOT NULL AND col IS DISTINCT FROM 1 AND col IS NOT DISTINCT FROM 1 AND " + + "col BETWEEN 0 AND 1"); + } + @Test public void testGroupBy() { @@ -324,4 +351,19 @@ private void check(String sql, String expected) String result = composer.renderNode(node); MatcherAssert.assertThat(result.trim(), IsEqualIgnoringCase.equalToIgnoringCase(expected)); } + + private void checkExpression(String expression) + { + checkExpression(expression, expression); + checkExpression(expression.toLowerCase(), expression); + } + + private void checkExpression(String expression, String expected) + { + SQLGrammarParser parser = SQLGrammarParser.newInstance(); + Node node = parser.parseExpression(expression); + SQLGrammarComposer composer = SQLGrammarComposer.newInstance(); + String result = composer.renderNode(node); + MatcherAssert.assertThat(result.trim(), IsEqualIgnoringCase.equalToIgnoringCase(expected)); + } } diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-postgres-server/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-postgres-server/pom.xml index 755b3aef202..0369db0ff79 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-postgres-server/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-postgres-server/pom.xml @@ -3,7 +3,7 @@ legend-engine-xts-sql org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-protocol/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-protocol/pom.xml index 6b4295071a4..01b9b50acca 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-protocol/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-protocol/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/pom.xml new file mode 100644 index 00000000000..d8ffdc39ae4 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/pom.xml @@ -0,0 +1,74 @@ + + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-sql-providers-core + jar + Legend Engine - XT - 
SQL - Providers - Core + + + + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-sql-protocol + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.pac4j + pac4j-core + + + + + org.apache.commons + commons-lang3 + + + + + junit + junit + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLContext.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLContext.java similarity index 72% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLContext.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLContext.java index b7588f5b78c..e75c93dee21 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLContext.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLContext.java @@ -13,23 +13,21 @@ // limitations under the License. // -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.finos.legend.engine.protocol.sql.metamodel.Node; -import java.util.Map; - public class SQLContext { private final Node query; - private final Map> arguments; - public SQLContext( - Node query, - Map> arguments - ) + public SQLContext(Node query) { this.query = query; - this.arguments = arguments; + } + + public Node getQuery() + { + return query; } } diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSource.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSource.java similarity index 88% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSource.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSource.java index 44a9d8e6e56..3fc08a71e1c 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSource.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSource.java @@ -13,7 +13,7 @@ // limitations under the License. 
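Reviewer note: SQLGrammarParser.parseExpression (added earlier in this diff) parses a bare expression without a SELECT wrapper, and TestSQLRoundTrip.checkExpression round-trips it through SQLGrammarComposer. A condensed sketch of that round trip, with import paths assumed from the module layout:

```java
import org.finos.legend.engine.language.sql.grammar.from.SQLGrammarParser;
import org.finos.legend.engine.language.sql.grammar.to.SQLGrammarComposer;
import org.finos.legend.engine.protocol.sql.metamodel.Node;

// Sketch of the parse/compose round trip exercised by TestSQLRoundTrip.checkExpression.
public class ExpressionRoundTripDemo
{
    public static void main(String[] args)
    {
        SQLGrammarParser parser = SQLGrammarParser.newInstance();
        Node expression = parser.parseExpression("col1 = 1 AND col2 = 1");
        SQLGrammarComposer composer = SQLGrammarComposer.newInstance();
        System.out.println(composer.renderNode(expression)); // renders the expression back, case-insensitively equal
    }
}
```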
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.finos.legend.engine.protocol.pure.v1.model.executionOption.ExecutionOption; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.Runtime; @@ -33,12 +33,6 @@ public class SQLSource private final ExecutionContext executionContext; private final List<SQLSourceArgument> key; - @Deprecated - public SQLSource(String type, Lambda func, String mapping, Runtime runtime, List<ExecutionOption> executionOptions, List<SQLSourceArgument> key) - { - this(type, func, mapping, runtime, executionOptions, null, key); - } - public SQLSource(String type, Lambda func, String mapping, Runtime runtime, List<ExecutionOption> executionOptions, ExecutionContext executionContext, List<SQLSourceArgument> key) { this.type = type; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceArgument.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceArgument.java similarity index 94% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceArgument.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceArgument.java index 41a952f2e83..60f94fc0db1 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceArgument.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceArgument.java @@ -13,7 +13,7 @@ // limitations under the License. // -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceProvider.java similarity index 93% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceProvider.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceProvider.java index 325a88a3caf..4e835b357b7 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceProvider.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceProvider.java @@ -13,7 +13,7 @@ // limitations under the License.
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.eclipse.collections.api.list.MutableList; import org.pac4j.core.profile.CommonProfile; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceResolvedContext.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceResolvedContext.java similarity index 96% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceResolvedContext.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceResolvedContext.java index 959bd70ce1f..f1852c7e2b1 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceResolvedContext.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/SQLSourceResolvedContext.java @@ -13,7 +13,7 @@ // limitations under the License. // -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.eclipse.collections.impl.list.mutable.FastList; import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSource.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSource.java similarity index 70% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSource.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSource.java index 80dcc2ff5ea..880982335dc 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSource.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSource.java @@ -13,7 +13,7 @@ // limitations under the License. 
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; @@ -40,8 +40,18 @@ public String getType() return type; } + public List getArguments() + { + return this.arguments; + } + //get named argument, or default to index public TableSourceArgument getArgument(String name, int index) + { + return getArgument(name, index, true); + } + + public TableSourceArgument getArgument(String name, int index, boolean required) { Optional found = getNamedArgument(name); return found.orElseGet(() -> @@ -50,7 +60,14 @@ public TableSourceArgument getArgument(String name, int index) { return this.arguments.get(index); } - throw new IllegalArgumentException("Argument of name " + name + " or " + index + " not found"); + if (required) + { + throw new IllegalArgumentException("'" + name + "' parameter is required"); + } + else + { + return null; + } }); } @@ -59,9 +76,22 @@ public Optional getNamedArgument(String name) return ListIterate.select(this.arguments, a -> name.equals(a.getName())).getFirstOptional(); } - public List getArguments() + public T getArgumentValueAs(String name, int index, Class type, boolean required) { - return this.arguments; + TableSourceArgument argument = getArgument(name, index, required); + + if (!required && argument == null) + { + return null; + } + Object value = argument.getValue(); + + if (type.isInstance(value)) + { + return (T) value; + } + + throw new IllegalArgumentException("Argument of name '" + name + "' or index '" + index + "' is not of type " + type.getName()); } @Override diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceArgument.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSourceArgument.java similarity index 96% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceArgument.java rename to legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSourceArgument.java index 58bbe1bd904..28f4e3b86c2 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceArgument.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-core/src/main/java/org/finos/legend/engine/query/sql/providers/core/TableSourceArgument.java @@ -13,7 +13,7 @@ // limitations under the License. 
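[The new TableSource accessors above are easiest to see with a concrete call. A minimal sketch, with argument values borrowed from the tests later in this patch:

    TableSource source = new TableSource("relationalStore", FastList.newListWith(
            new TableSourceArgument("store", null, "simple::store::DBForSQL"),
            new TableSourceArgument("schema", null, "DBSchema")));

    // named lookup with positional fallback at index 0; throws
    // "'store' parameter is required" only if neither resolves
    String store = source.getArgumentValueAs("store", 0, String.class, true);

    // optional lookup (required = false): returns null rather than throwing
    // when the argument is absent
    String table = source.getArgumentValueAs("table", -1, String.class, false);
]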
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.providers.core; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/pom.xml new file mode 100644 index 00000000000..bbc76de9cab --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/pom.xml @@ -0,0 +1,84 @@ + + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-sql-providers-relationalStore + jar + Legend Engine - XT - SQL - Providers - Relational Store + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + + + org.finos.legend.engine + legend-engine-xt-relationalStore-protocol + + + org.finos.legend.engine + legend-engine-protocol-pure + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + junit + junit + test + + + org.mockito + mockito-core + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + test-jar + test + + + org.finos.legend.engine + legend-engine-xt-relationalStore-grammar + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/main/java/org/finos/legend/engine/query/sql/providers/RelationalStoreSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/main/java/org/finos/legend/engine/query/sql/providers/RelationalStoreSQLSourceProvider.java new file mode 100644 index 00000000000..c6334569552 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/main/java/org/finos/legend/engine/query/sql/providers/RelationalStoreSQLSourceProvider.java @@ -0,0 +1,83 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package org.finos.legend.engine.query.sql.providers; + +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.PackageableConnection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.EngineRuntime; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.model.Database; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.model.Schema; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.relational.model.Table; +import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.Lambda; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.shared.AbstractLegendStoreSQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.utils.SQLProviderUtils; + +import java.util.Collections; +import java.util.List; + +/** + * This class serves for handling the **relationalStore** source type + *
+ * Sample Select statement + * select * from relationalStore(connection => 'my::Connection', store => 'my::Store', schema => 'schema1', table => 'table1', coordinates => 'com.gs:proj1:1.0.0') + * select * from relationalStore(connection => 'my::Connection', store => 'my::Store', schema => 'schema1', table => 'table1', project => 'PROD-12345', workspace => 'myworkspace') + * select * from relationalStore(connection => 'my::Connection', store => 'my::Store', schema => 'schema1', table => 'table1', project => 'PROD-12345', groupWorkspace => 'myworkspace') + */ +public class RelationalStoreSQLSourceProvider extends AbstractLegendStoreSQLSourceProvider +{ + + private static final String TYPE = "relationalStore"; + private static final String ARG_SCHEMA = "schema"; + private static final String ARG_TABLE = "table"; + + public RelationalStoreSQLSourceProvider(ProjectCoordinateLoader projectCoordinateLoader) + { + super(Database.class, projectCoordinateLoader); + } + + @Override + public String getType() + { + return TYPE; + } + + @Override + protected SQLSource createSource(TableSource source, Database store, PackageableConnection connection, List keys, PureModelContextData pmcd) + { + String schemaName = source.getArgumentValueAs(ARG_SCHEMA, -1, String.class, true); + String tableName = source.getArgumentValueAs(ARG_TABLE, -1, String.class, true); + + Lambda lambda = tableToTDS(store, schemaName, tableName); + EngineRuntime runtime = SQLProviderUtils.createRuntime(connection.getPath(), store.getPath()); + + Collections.addAll(keys, new SQLSourceArgument(ARG_SCHEMA, null, schemaName), new SQLSourceArgument(ARG_TABLE, null, tableName)); + + return new SQLSource(TYPE, lambda, null, runtime, null, null, keys); + } + + + protected static Lambda tableToTDS(Database database, String schemaName, String tableName) + { + Schema schema = SQLProviderUtils.extractElement("schema", database.schemas, s -> SQLProviderUtils.equalsEscaped(s.name, schemaName)); + Table table = SQLProviderUtils.extractElement("table", schema.tables, t -> SQLProviderUtils.equalsEscaped(t.name, tableName)); + + return SQLProviderUtils.tableToTDS(database.getPath(), schema.name, table.name); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/java/org/finos/legend/engine/query/sql/providers/TestRelationalStoreSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/java/org/finos/legend/engine/query/sql/providers/TestRelationalStoreSQLSourceProvider.java new file mode 100644 index 00000000000..b220a030028 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/java/org/finos/legend/engine/query/sql/providers/TestRelationalStoreSQLSourceProvider.java @@ -0,0 +1,221 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
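[Pulling the pieces of RelationalStoreSQLSourceProvider together: a hypothetical resolution call mirroring the coordinates-based sample statement above. The projectCoordinateLoader instance is assumed to be configured elsewhere:

    RelationalStoreSQLSourceProvider provider =
            new RelationalStoreSQLSourceProvider(projectCoordinateLoader);

    TableSource table = new TableSource("relationalStore", FastList.newListWith(
            new TableSourceArgument("connection", null, "my::Connection"),
            new TableSourceArgument("store", null, "my::Store"),
            new TableSourceArgument("schema", null, "schema1"),
            new TableSourceArgument("table", null, "table1"),
            new TableSourceArgument("coordinates", null, "com.gs:proj1:1.0.0")));

    // null SQLContext and empty profiles, as in the tests in this patch
    SQLSourceResolvedContext resolved =
            provider.resolve(FastList.newListWith(table), null, FastList.newList());
]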
+// + +package org.finos.legend.engine.query.sql.providers; + +import org.eclipse.collections.impl.list.mutable.FastList; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.EngineRuntime; +import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.Lambda; +import org.finos.legend.engine.query.sql.providers.core.*; +import org.finos.legend.engine.query.sql.providers.shared.AbstractTestLegendStoreSQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.shared.SQLSourceProviderTestUtils; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateWrapper; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext; +import org.finos.legend.engine.query.sql.providers.shared.utils.SQLProviderUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import java.util.List; + +import static org.finos.legend.engine.query.sql.providers.shared.SQLSourceProviderTestUtils.loadPureModelContextFromResource; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class TestRelationalStoreSQLSourceProvider extends AbstractTestLegendStoreSQLSourceProvider +{ + private static final String CONNECTION_NAME = "simple::store::DB::H2Connection"; + + @Mock + private ProjectCoordinateLoader projectCoordinateLoader; + + private RelationalStoreSQLSourceProvider provider; + + @Before + public void setup() + { + provider = new RelationalStoreSQLSourceProvider(projectCoordinateLoader); + } + + @Override + protected SQLSourceProvider getProvider() + { + return provider; + } + + @Override + protected ProjectCoordinateLoader getProjectCoordinateLoader() + { + return projectCoordinateLoader; + } + + @Test + public void testType() + { + Assert.assertEquals("relationalStore", provider.getType()); + } + + @Test + public void testMissingSchema() + { + TableSource tableSource = new TableSource("relationalStore", FastList.newListWith( + new TableSourceArgument("store", null, "simple::store::DBForSQL"), + new TableSourceArgument("coordinates", null, "group:artifact:version"), + new TableSourceArgument("connection", null, CONNECTION_NAME))); + + testError(tableSource, ProjectCoordinateWrapper.coordinates("group:artifact:version"), "'schema' parameter is required"); + } + + @Test + public void testMissingTable() + { + TableSource tableSource = new TableSource("relationalStore", FastList.newListWith( + new TableSourceArgument("store", null, "simple::store::DBForSQL"), + new TableSourceArgument("schema", null, "nonexistent"), + new TableSourceArgument("connection", null, CONNECTION_NAME), + new TableSourceArgument("coordinates", null, "group:artifact:version"))); + + testError(tableSource, ProjectCoordinateWrapper.coordinates("group:artifact:version"), "'table' parameter is required"); + } + + @Test + public void 
testDatabaseFoundSchemaNotFound() + { + testNotFound("nonexistent", "nonexistent", "No element found for 'schema'"); + } + + @Test + public void testDatabaseFoundSchemaFoundTableNotFound() + { + testNotFound("DBSchema", "nonexistent", "No element found for 'table'"); + } + + @Test + public void testSingleFromCoordinates() + { + testSuccess( + ProjectCoordinateWrapper.coordinates("group:artifact:version"), + new PureModelContextPointer(), + FastList.newListWith(new TableSourceArgument("coordinates", null, "group:artifact:version")), + FastList.newListWith(new SQLSourceArgument("coordinates", null, "group:artifact:version"))); + + } + + @Test + public void testSingleFromProjectWorkspace() + { + testSuccess( + ProjectCoordinateWrapper.workspace("proj1", "ws1"), + new PureModelContextPointer(), + FastList.newListWith( + new TableSourceArgument("project", null, "proj1"), + new TableSourceArgument("workspace", null, "ws1")), + FastList.newListWith( + new SQLSourceArgument("project", null, "proj1"), + new SQLSourceArgument("workspace", null, "ws1") + ) + ); + } + + @Test + public void testSingleFromProjectGroupWorkspace() + { + testSuccess( + ProjectCoordinateWrapper.groupWorkspace("proj1", "ws1"), + new PureModelContextPointer(), + FastList.newListWith( + new TableSourceArgument("project", null, "proj1"), + new TableSourceArgument("groupWorkspace", null, "ws1")), + FastList.newListWith( + new SQLSourceArgument("project", null, "proj1"), + new SQLSourceArgument("groupWorkspace", null, "ws1") + ) + ); + } + + private void testNotFound(String schema, String table, String error) + { + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.coordinates("group:artifact:version")), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd)); + + TableSource tableSource = new TableSource("relationalStore", FastList.newListWith( + new TableSourceArgument("store", null, "simple::store::DBForSQL"), + new TableSourceArgument("connection", null, CONNECTION_NAME), + new TableSourceArgument("schema", null, schema), + new TableSourceArgument("table", null, table), + new TableSourceArgument("coordinates", null, "group:artifact:version"))); + + testError(tableSource, error); + } + + private void testError(TableSource tableSource, ProjectCoordinateWrapper projectCoordinateWrapper, String error) + { + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + when(projectCoordinateLoader.resolve(eq(projectCoordinateWrapper), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd)); + + testError(tableSource, error); + } + + private void testSuccess(ProjectCoordinateWrapper projectCoordinateWrapper, PureModelContext expectedContext, List tableSourceKeys, List sourceKeys) + { + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + when(projectCoordinateLoader.resolve(eq(projectCoordinateWrapper), any())).thenReturn(new ProjectResolvedContext(expectedContext, pmcd)); + + String databaseName = "simple::store::DBForSQL"; + String schemaName = "DBSchema"; + String tableName = "FIRM_TABLE"; + + TableSource tablesource = new TableSource("relationalStore", FastList.newListWith( + new TableSourceArgument("store", null, databaseName), + new TableSourceArgument("schema", null, schemaName), + new TableSourceArgument("table", null, tableName), + new TableSourceArgument("connection", null, CONNECTION_NAME)).withAll(tableSourceKeys) + ); + + List keys = 
FastList.newListWith( + new SQLSourceArgument("store", null, databaseName), + new SQLSourceArgument("connection", null, CONNECTION_NAME)) + .withAll(sourceKeys) + .with(new SQLSourceArgument("schema", null, schemaName)) + .with(new SQLSourceArgument("table", null, tableName)); + + SQLSourceResolvedContext result = provider.resolve(FastList.newListWith(tablesource), null, FastList.newList()); + + Lambda lambda = SQLProviderUtils.tableToTDS(databaseName, schemaName, tableName); + + ConnectionPointer connectionPtr = new ConnectionPointer(); + connectionPtr.connection = CONNECTION_NAME; + + EngineRuntime runtime = SQLProviderUtils.createRuntime(CONNECTION_NAME, databaseName); + + SQLSource expected = new SQLSource("relationalStore", lambda, null, runtime, null, null, keys); + + //ASSERT + Assert.assertEquals(FastList.newListWith(expectedContext), result.getPureModelContexts()); + Assert.assertEquals(1, result.getSources().size()); + + SQLSourceProviderTestUtils.assertLogicalEquality(expected, result.getSources().get(0)); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension new file mode 100644 index 00000000000..14ee6d447b1 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.pure.grammar.from.RelationalGrammarParserExtension \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/pmcd.pure b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/pmcd.pure new file mode 100644 index 00000000000..948018fa779 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-relationalStore/src/test/resources/pmcd.pure @@ -0,0 +1,45 @@ +###Relational +Database simple::store::DBForSQL +( + Schema DBSchema + ( + Table FIRM_TABLE + ( + ID INTEGER PRIMARY KEY, + LEGAL_NAME VARCHAR(100) + ) + + Table PERSON_TABLE + ( + ID INTEGER PRIMARY KEY, + FIRST_NAME VARCHAR(100), + LAST_NAME VARCHAR(100), + FIRM_ID INTEGER + ) + ) +) + +###Connection +RelationalDatabaseConnection simple::store::DB::H2Connection{ + store: simple::store::DB; + type: H2; + specification: LocalH2{ + testDataSetupSqls: [ + 'DROP TABLE IF EXISTS PERSON_TABLE;', + 'CREATE TABLE PERSON_TABLE(ID INT PRIMARY KEY, FIRST_NAME VARCHAR(100), LAST_NAME VARCHAR(100), FIRM_ID INT);', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (1,\'Peter\',\'Smith\',1);', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (2,\'John\',\'Johnson\',1);', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (3,\'John\',\'Hill\',1);', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (4,\'Anthony\',\'Allen\',1)', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (5,\'Fabrice\',\'Roberts\',2)', + 
'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (6,\'Oliver\',\'Hill\',3)', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (7,\'David\',\'Harris\',3)', + 'DROP TABLE IF EXISTS FIRM_TABLE;', + 'CREATE TABLE FIRM_TABLE(ID INT PRIMARY KEY, LEGAL_NAME VARCHAR(100));', + 'INSERT INTO FIRM_TABLE(ID,LEGAL_NAME) VALUES (1,\'Firm X\');', + 'INSERT INTO FIRM_TABLE(ID,LEGAL_NAME) VALUES (2,\'Firm A\');', + 'INSERT INTO FIRM_TABLE(ID,LEGAL_NAME) VALUES (3,\'Firm B\');' + ]; + }; + auth: DefaultH2; +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/pom.xml new file mode 100644 index 00000000000..6a347f9fa45 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/pom.xml @@ -0,0 +1,97 @@ + + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-sql-providers-service + jar + Legend Engine - XT - SQL - Providers - Service + + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + + + org.finos.legend.engine + legend-engine-language-pure-dsl-service + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-shared-core + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.pac4j + pac4j-core + + + + + + junit + junit + test + + + org.mockito + mockito-core + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + test-jar + test + + + org.finos.legend.engine + legend-engine-xt-relationalStore-grammar + test + + + + + \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/main/java/org/finos/legend/engine/query/sql/providers/LegendServiceSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/main/java/org/finos/legend/engine/query/sql/providers/LegendServiceSQLSourceProvider.java new file mode 100644 index 00000000000..ada0258cda0 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/main/java/org/finos/legend/engine/query/sql/providers/LegendServiceSQLSourceProvider.java @@ -0,0 +1,120 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package org.finos.legend.engine.query.sql.providers; + +import org.eclipse.collections.api.list.MutableList; +import org.eclipse.collections.api.tuple.Pair; +import org.eclipse.collections.impl.list.mutable.FastList; +import org.eclipse.collections.impl.tuple.Tuples; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.KeyedExecutionParameter; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.PureMultiExecution; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.PureSingleExecution; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.Service; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateWrapper; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext; +import org.finos.legend.engine.query.sql.providers.shared.utils.SQLProviderUtils; +import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; +import org.pac4j.core.profile.CommonProfile; + +import java.util.List; +import java.util.Optional; + +/** + * This class serves for handling the **service** source type + *
+ * Sample Select statement + * select * from service('/my/service', coordinates => 'com.gs:proj1:1.0.0') + * select * from service('/my/service', project => 'PROD-12345', workspace => 'myWorkspace') + * select * from service('/my/service', project => 'PROD-12345', groupWorkspace => 'myGroupWorkspace') + */ +public class LegendServiceSQLSourceProvider implements SQLSourceProvider +{ + private static final String PATTERN = "pattern"; + private static final String SERVICE = "service"; + + private final ProjectCoordinateLoader projectCoordinateLoader; + + public LegendServiceSQLSourceProvider(ProjectCoordinateLoader projectCoordinateLoader) + { + this.projectCoordinateLoader = projectCoordinateLoader; + } + + @Override + public String getType() + { + return SERVICE; + } + + @Override + public SQLSourceResolvedContext resolve(List sources, SQLContext context, MutableList profiles) + { + MutableList> resolved = ListIterate.collect(sources, source -> + { + String pattern = source.getArgumentValueAs(PATTERN, 0, String.class, true); + ProjectCoordinateWrapper projectCoordinateWrapper = ProjectCoordinateWrapper.extractFromTableSource(source); + + ProjectResolvedContext resolvedProject = projectCoordinateLoader.resolve(projectCoordinateWrapper, profiles); + + Service service = SQLProviderUtils.extractElement("service", Service.class, resolvedProject.getData(), s -> pattern.equals(s.pattern)); + FastList keys = FastList.newListWith(new SQLSourceArgument(PATTERN, 0, pattern)); + projectCoordinateWrapper.addProjectCoordinatesAsSQLSourceArguments(keys); + SQLSource resolvedSource; + + if (service.execution instanceof PureSingleExecution) + { + resolvedSource = from((PureSingleExecution) service.execution, keys); + } + else if (service.execution instanceof PureMultiExecution) + { + resolvedSource = from((PureMultiExecution) service.execution, source, keys); + } + else + { + throw new EngineException("Execution Type Unsupported"); + } + + return Tuples.pair(resolvedSource, resolvedProject.getContext()); + }); + + return new SQLSourceResolvedContext(resolved.collect(Pair::getTwo), resolved.collect(Pair::getOne)); + } + + private SQLSource from(PureSingleExecution pse, List keys) + { + return new SQLSource(SERVICE, pse.func, pse.mapping, pse.runtime, pse.executionOptions, null, keys); + } + + private SQLSource from(PureMultiExecution pme, TableSource source, List keys) + { + String key = (String) source.getArgument(pme.executionKey, -1).getValue(); + Optional optional = ListIterate.select(pme.executionParameters, e -> e.key.equals(key)).getFirstOptional(); + + KeyedExecutionParameter execution = optional.orElseThrow(() -> new IllegalArgumentException("No execution found for key " + key)); + + keys.add(new SQLSourceArgument(pme.executionKey, null, key)); + + return new SQLSource(SERVICE, pme.func, execution.mapping, execution.runtime, execution.executionOptions, null, keys); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/java/org/finos/legend/engine/query/sql/providers/TestLegendServiceSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/java/org/finos/legend/engine/query/sql/providers/TestLegendServiceSQLSourceProvider.java new file mode 100644 index 00000000000..3a6a8759df3 --- /dev/null +++ 
b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/java/org/finos/legend/engine/query/sql/providers/TestLegendServiceSQLSourceProvider.java @@ -0,0 +1,194 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package org.finos.legend.engine.query.sql.providers; + +import org.eclipse.collections.api.block.function.Function; +import org.eclipse.collections.api.block.procedure.Procedure; +import org.eclipse.collections.impl.list.mutable.FastList; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.KeyedExecutionParameter; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.PureMultiExecution; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.PureSingleExecution; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.Service; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.core.TableSourceArgument; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateWrapper; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import static org.finos.legend.engine.query.sql.providers.shared.SQLSourceProviderTestUtils.assertLogicalEquality; +import static org.finos.legend.engine.query.sql.providers.shared.SQLSourceProviderTestUtils.loadPureModelContextFromResource; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class TestLegendServiceSQLSourceProvider +{ + @Mock + private ProjectCoordinateLoader projectCoordinateLoader; + + private LegendServiceSQLSourceProvider provider; + + @Before + public void setup() + { + provider = new LegendServiceSQLSourceProvider(projectCoordinateLoader); + } + + @Test + public void testType() + { + Assert.assertEquals("service", provider.getType()); + } + + public void testSingleService(String pattern, TableSource table, Procedure pmcdSupplier, Function expectedFunc) + { + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + + Service service = 
ListIterate.select(pmcd.getElementsOfType(Service.class), s -> s.pattern.equals(pattern)).getOnly(); + PureSingleExecution execution = (PureSingleExecution) service.execution; + + pmcdSupplier.accept(pmcd); + + SQLSource expected = expectedFunc.apply(execution); + + SQLSourceResolvedContext resolved = provider.resolve(FastList.newListWith(table), null, FastList.newList()); + Assert.assertNotNull(resolved.getPureModelContext()); + Assert.assertEquals(1, resolved.getSources().size()); + + assertLogicalEquality(expected, resolved.getSources().get(0)); + } + + @Test + public void testSingleServicePatternAndCoordinates() + { + String pattern = "/people"; + TableSource table = new TableSource("service", FastList.newListWith( + new TableSourceArgument("pattern", 0, pattern), + new TableSourceArgument("coordinates", null, "group:artifact:version") + )); + + testSingleService(pattern, table, pmcd -> + { + PureModelContextPointer pointer = new PureModelContextPointer(); + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.coordinates("group:artifact:version")), any())).thenReturn(new ProjectResolvedContext(pointer, pmcd)); + }, execution -> new SQLSource("service", execution.func, execution.mapping, execution.runtime, execution.executionOptions, null, FastList.newListWith( + new SQLSourceArgument("pattern", 0, pattern), + new SQLSourceArgument("coordinates", null, "group:artifact:version") + ))); + } + + @Test + public void testMultiServicePatternAndCoordinates() + { + String pattern = "/people/{key}"; + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + + + PureModelContextPointer pointer = new PureModelContextPointer(); + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.coordinates("group:artifact:version")), any())).thenReturn(new ProjectResolvedContext(pointer, pmcd)); + + Service service = ListIterate.select(pmcd.getElementsOfType(Service.class), s -> s.pattern.equals(pattern)).getOnly(); + PureMultiExecution multi = (PureMultiExecution) service.execution; + KeyedExecutionParameter execution = multi.executionParameters.get(1); + + + TableSource table = new TableSource("service", FastList.newListWith( + new TableSourceArgument("pattern", 0, pattern), + new TableSourceArgument("key", null, "k2"), + new TableSourceArgument("coordinates", null, "group:artifact:version") + )); + + SQLSource expected = new SQLSource("service", multi.func, execution.mapping, execution.runtime, execution.executionOptions, null, FastList.newListWith( + new SQLSourceArgument("pattern", 0, pattern), + new SQLSourceArgument("coordinates", null, "group:artifact:version"), + new SQLSourceArgument("key", null, "k2") + )); + + SQLSourceResolvedContext resolved = provider.resolve(FastList.newListWith(table), null, FastList.newList()); + Assert.assertEquals(FastList.newListWith(pointer), resolved.getPureModelContexts()); + Assert.assertEquals(1, resolved.getSources().size()); + + assertLogicalEquality(expected, resolved.getSources().get(0)); + } + + @Test + public void testSingleServicePatternPatternAndWorkspace() + { + String pattern = "/people"; + TableSource table = new TableSource("service", FastList.newListWith( + new TableSourceArgument("pattern", 0, pattern), + new TableSourceArgument("project", null, "p1"), + new TableSourceArgument("workspace", null, "ws") + )); + + testSingleService(pattern, table, pmcd -> + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.workspace("p1", "ws")), any())).thenReturn(new ProjectResolvedContext(pmcd, 
pmcd)), + execution -> new SQLSource("service", execution.func, execution.mapping, execution.runtime, execution.executionOptions, null, FastList.newListWith( + new SQLSourceArgument("pattern", 0, pattern), + new SQLSourceArgument("project", null, "p1"), + new SQLSourceArgument("workspace", null, "ws") + ))); + + } + + @Test + public void testSingleServicePatternPatternAndGroupWorkspace() + { + String pattern = "/people"; + TableSource table = new TableSource("service", FastList.newListWith( + new TableSourceArgument("pattern", 0, pattern), + new TableSourceArgument("project", null, "p1"), + new TableSourceArgument("groupWorkspace", null, "gws") + )); + + testSingleService(pattern, table, pmcd -> + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.groupWorkspace("p1", "gws")), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd)), + execution -> new SQLSource("service", execution.func, execution.mapping, execution.runtime, execution.executionOptions, null, FastList.newListWith( + new SQLSourceArgument("pattern", 0, pattern), + new SQLSourceArgument("project", null, "p1"), + new SQLSourceArgument("groupWorkspace", null, "gws") + ))); + + } + + @Test + public void testNoServiceFound() + { + PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass()); + when(projectCoordinateLoader.resolve(eq(ProjectCoordinateWrapper.workspace("p1", "ws")), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd)); + + TableSource table = new TableSource("service", FastList.newListWith( + new TableSourceArgument("pattern", 0, "notfound"), + new TableSourceArgument("project", null, "p1"), + new TableSourceArgument("workspace", null, "ws") + )); + IllegalArgumentException exception = Assert.assertThrows("Should throw given no service found", IllegalArgumentException.class, () -> provider.resolve(FastList.newListWith(table), null, FastList.newList())); + Assert.assertEquals("No element found for 'service'", exception.getMessage()); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension new file mode 100644 index 00000000000..14ee6d447b1 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/META-INF/services/org.finos.legend.engine.language.pure.grammar.from.extension.PureGrammarParserExtension @@ -0,0 +1 @@ +org.finos.legend.engine.language.pure.grammar.from.RelationalGrammarParserExtension \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/pmcd.pure b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/pmcd.pure new file mode 100644 index 00000000000..fa96a3b66fb --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-service/src/test/resources/pmcd.pure @@ -0,0 +1,125 @@ +###Pure +import simple::model::*; + +Class simple::model::Person +{ + firstName: String[1]; + lastName: String[1]; +} + +###Relational +Database simple::store::DB +( + Table PERSON_TABLE + ( 
+ FIRST_NAME VARCHAR(100), + LAST_NAME VARCHAR(100) + ) +) + +###Mapping +import simple::model::*; +import simple::store::*; + +Mapping simple::mapping::Mapping +( + Person : Relational + { + firstName: [DB]PERSON_TABLE.FIRST_NAME, + lastName: [DB]PERSON_TABLE.LAST_NAME + } +) + +Mapping simple::mapping::Mapping2 +( + Person : Relational + { + firstName: [DB]PERSON_TABLE.FIRST_NAME, + lastName: [DB]PERSON_TABLE.LAST_NAME + } +) + +###Runtime +Runtime simple::runtime::Runtime +{ + mappings : + [ + simple::mapping::Mapping + ]; + connections : + [ + simple::store::DB : + [ + connection_1 : #{ + RelationalDatabaseConnection { + store: simple::store::DB; + type: H2; + specification: LocalH2{ + testDataSetupSqls: [ + 'DROP TABLE IF EXISTS PERSON_TABLE;', + 'CREATE TABLE PERSON_TABLE(ID INT PRIMARY KEY, FIRST_NAME VARCHAR(100), LAST_NAME VARCHAR(100), FIRM_ID INT);', + 'INSERT INTO PERSON_TABLE(ID,FIRST_NAME,LAST_NAME,FIRM_ID) VALUES (1,\'Peter\',\'Smith\',1);' + ]; + }; + auth: DefaultH2; + } + }# + ] + ]; +} + +###Service +Service simple::service::PeopleService +{ + pattern: '/people'; + owners: + [ + 'person1', + 'person2' + ]; + documentation: ''; + autoActivateUpdates: true; + execution: Single + { + query: {| + simple::model::Person.all()->project([ + col(x | $x.firstName, 'first name'), + col(x | $x.lastName, 'last name') + ]) + }; + mapping: simple::mapping::Mapping; + runtime: simple::runtime::Runtime; + } +} + +Service simple::service::MultiExecutionService +{ + pattern: '/people/{key}'; + owners: + [ + 'person1', + 'person2' + ]; + documentation: ''; + autoActivateUpdates: true; + execution: Multi + { + query: {| + simple::model::Person.all()->project([ + col(x | $x.firstName, 'first name'), + col(x | $x.lastName, 'last name') + ]) + }; + key: 'key'; + executions['k1']: + { + mapping: simple::mapping::Mapping; + runtime: simple::runtime::Runtime; + } + executions['k2']: + { + mapping: simple::mapping::Mapping2; + runtime: simple::runtime::Runtime; + } + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/pom.xml new file mode 100644 index 00000000000..455b9176d2e --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/pom.xml @@ -0,0 +1,146 @@ + + + + + + org.finos.legend.engine + legend-engine-xt-sql-providers + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-sql-providers-shared + jar + Legend Engine - XT - SQL - Providers - Shared + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + test-jar + + + + + + + + + + + + org.finos.legend.engine + legend-engine-protocol-pure + + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + + + org.finos.legend.engine + legend-engine-language-pure-modelManager-sdlc + + + org.finos.legend.engine + legend-engine-language-pure-grammar + + + org.finos.legend.engine + legend-engine-language-pure-modelManager + + + org.finos.legend.engine + legend-engine-shared-core + + + + + + + org.eclipse.collections + eclipse-collections-api + + + org.eclipse.collections + eclipse-collections + + + + + + org.apache.httpcomponents + httpclient + + + + + + org.pac4j + pac4j-core + + + + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + + + + io.opentracing + opentracing-util + + + io.opentracing + opentracing-api + + + + + org.apache.commons + commons-lang3 + 
+ + + + junit + junit + test + + + org.mockito + mockito-core + test + + + commons-io + commons-io + test + + + + \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/AbstractLegendStoreSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/AbstractLegendStoreSQLSourceProvider.java new file mode 100644 index 00000000000..c55a8bfad34 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/AbstractLegendStoreSQLSourceProvider.java @@ -0,0 +1,83 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package org.finos.legend.engine.query.sql.providers.shared; + +import org.eclipse.collections.api.list.MutableList; +import org.eclipse.collections.impl.list.mutable.FastList; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.PackageableConnection; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.store.Store; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateWrapper; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext; +import org.finos.legend.engine.query.sql.providers.shared.utils.SQLProviderUtils; +import org.pac4j.core.profile.CommonProfile; + +import java.util.List; + +public abstract class AbstractLegendStoreSQLSourceProvider implements SQLSourceProvider +{ + + private static final String ARG_CONNECTION = "connection"; + private static final String ARG_STORE = "store"; + + private final Class storeType; + private final ProjectCoordinateLoader projectCoordinateLoader; + + public AbstractLegendStoreSQLSourceProvider(Class storeType, ProjectCoordinateLoader projectCoordinateLoader) + { + this.storeType = storeType; + this.projectCoordinateLoader = projectCoordinateLoader; + } + + protected abstract SQLSource createSource(TableSource source, T store, PackageableConnection connection, List keys, 
PureModelContextData pmcd); + + @Override + public SQLSourceResolvedContext resolve(List sources, SQLContext context, MutableList profiles) + { + List contexts = FastList.newList(); + List sqlSources = FastList.newList(); + + ListIterate.forEach(sources, source -> + { + ProjectCoordinateWrapper projectCoordinateWrapper = ProjectCoordinateWrapper.extractFromTableSource(source); + ProjectResolvedContext resolved = projectCoordinateLoader.resolve(projectCoordinateWrapper, profiles); + + String storeName = source.getArgumentValueAs(ARG_STORE, -1, String.class, true); + String connectionName = source.getArgumentValueAs(ARG_CONNECTION, -1, String.class, true); + + T store = SQLProviderUtils.extractElement(ARG_STORE, this.storeType, resolved.getData(), s -> storeName.equals(s.getPath())); + PackageableConnection connection = SQLProviderUtils.extractElement(ARG_CONNECTION, PackageableConnection.class, resolved.getData(), c -> connectionName.equals(c.getPath())); + + List keys = FastList.newListWith(new SQLSourceArgument(ARG_STORE, null, storeName), new SQLSourceArgument(ARG_CONNECTION, null, connectionName)); + projectCoordinateWrapper.addProjectCoordinatesAsSQLSourceArguments(keys); + + sqlSources.add(createSource(source, store, connection, keys, resolved.getData())); + + contexts.add(resolved.getContext()); + }); + + return new SQLSourceResolvedContext(contexts, sqlSources); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/FunctionSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/FunctionSQLSourceProvider.java new file mode 100644 index 00000000000..7c2a3312601 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/FunctionSQLSourceProvider.java @@ -0,0 +1,104 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
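[AbstractLegendStoreSQLSourceProvider above centralises project resolution and the store/connection lookup, so a new store type only supplies getType() and createSource(...). A hypothetical minimal subclass to illustrate the template; the type keyword and the null lambda are placeholders, not part of this patch:

    public class MyStoreSQLSourceProvider extends AbstractLegendStoreSQLSourceProvider<Database>
    {
        public MyStoreSQLSourceProvider(ProjectCoordinateLoader loader)
        {
            super(Database.class, loader);
        }

        @Override
        public String getType()
        {
            return "myStore"; // hypothetical source type keyword
        }

        @Override
        protected SQLSource createSource(TableSource source, Database store, PackageableConnection connection, List<SQLSourceArgument> keys, PureModelContextData pmcd)
        {
            // store-specific translation goes here; the relationalStore
            // provider earlier in this patch, for example, derives a
            // table-to-TDS lambda and a runtime from the table arguments
            Lambda lambda = null; // placeholder for a store-specific lambda
            return new SQLSource(getType(), lambda, null, null, null, null, keys);
        }
    }
]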
+// + +package org.finos.legend.engine.query.sql.providers.shared; + +import org.eclipse.collections.api.factory.Sets; +import org.eclipse.collections.api.list.MutableList; +import org.eclipse.collections.api.set.ImmutableSet; +import org.eclipse.collections.api.tuple.Pair; +import org.eclipse.collections.impl.list.mutable.FastList; +import org.eclipse.collections.impl.tuple.Tuples; +import org.eclipse.collections.impl.utility.ListIterate; +import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext; +import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.domain.Function; +import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.Lambda; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateWrapper; +import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext; +import org.finos.legend.engine.query.sql.providers.shared.utils.SQLProviderUtils; +import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; +import org.pac4j.core.profile.CommonProfile; + +import java.util.List; + +/** + * This class serves for handling the **function** source type + *
+ * Sample Select statement + * select * from func('my::func__TabularDataSet_1_', coordinates => 'com.gs:proj1:1.0.0') + * select * from func('my::func__TabularDataSet_1_', project => 'PROD-12345', workspace => 'myWorkspace') + * select * from func('my::func__TabularDataSet_1_', project => 'PROD-12345', groupWorkspace => 'myGroupWorkspace') + * select * from func('my::func_String_1__TabularDataSet_1_', project => 'PROD-12345', groupWorkspace => 'myGroupWorkspace', myParam => 'abc') + */ +public class FunctionSQLSourceProvider implements SQLSourceProvider +{ + + private static final String FUNCTION = "func"; + private static final String PATH = "path"; + + private static final ImmutableSet TABULAR_TYPES = Sets.immutable.of( + "meta::pure::tds::TabularDataSet" + ); + + private final ProjectCoordinateLoader projectCoordinateLoader; + + public FunctionSQLSourceProvider(ProjectCoordinateLoader projectCoordinateLoader) + { + this.projectCoordinateLoader = projectCoordinateLoader; + } + + @Override + public String getType() + { + return FUNCTION; + } + + @Override + public SQLSourceResolvedContext resolve(List sources, SQLContext context, MutableList profiles) + { + MutableList> resolved = ListIterate.collect(sources, source -> + { + String path = source.getArgumentValueAs(PATH, 0, String.class, true); + ProjectCoordinateWrapper projectCoordinateWrapper = ProjectCoordinateWrapper.extractFromTableSource(source); + + ProjectResolvedContext resolvedProject = projectCoordinateLoader.resolve(projectCoordinateWrapper, profiles); + + Function function = SQLProviderUtils.extractElement("function", Function.class, resolvedProject.getData(), f -> path.equals(f.getPath())); + + if (!TABULAR_TYPES.contains(function.returnType)) + { + throw new EngineException("Function " + path + " does not return Tabular data type"); + } + + Lambda lambda = new Lambda(); + lambda.parameters = function.parameters; + lambda.body = function.body; + + List keys = FastList.newListWith(new SQLSourceArgument(PATH, 0, path)); + projectCoordinateWrapper.addProjectCoordinatesAsSQLSourceArguments(keys); + + return Tuples.pair(new SQLSource(getType(), lambda, null, null, FastList.newList(), null, keys), resolvedProject.getContext()); + }); + + return new SQLSourceResolvedContext(resolved.collect(Pair::getTwo), resolved.collect(Pair::getOne)); + } +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java new file mode 100644 index 00000000000..6e85e4667cd --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java @@ -0,0 +1,120 @@ +// Copyright 2023 Goldman Sachs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
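[The func provider performs two steps per source: guard the return type, then inline the function body as a Lambda. A sketch of those steps in isolation, as a hypothetical helper that is not part of this patch; Function is the protocol element used above:

    static Lambda toLambda(Function function)
    {
        // only functions returning meta::pure::tds::TabularDataSet are
        // exposed through the func(...) source, mirroring TABULAR_TYPES above
        if (!"meta::pure::tds::TabularDataSet".equals(function.returnType))
        {
            throw new EngineException("Function " + function.getPath() + " does not return Tabular data type");
        }
        Lambda lambda = new Lambda();
        lambda.parameters = function.parameters;
        lambda.body = function.body;
        return lambda;
    }
]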
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java
new file mode 100644
index 00000000000..6e85e4667cd
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateLoader.java
@@ -0,0 +1,120 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared.project;
+
+import java.util.Optional;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.eclipse.collections.api.block.function.Function;
+import org.eclipse.collections.api.list.MutableList;
+import org.finos.legend.engine.language.pure.modelManager.ModelManager;
+import org.finos.legend.engine.language.pure.modelManager.sdlc.configuration.ServerConnectionConfiguration;
+import org.finos.legend.engine.protocol.pure.PureClientVersions;
+import org.finos.legend.engine.protocol.pure.v1.model.context.AlloySDLC;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextPointer;
+import org.finos.legend.engine.protocol.pure.v1.model.context.WorkspaceSDLC;
+import org.finos.legend.engine.shared.core.kerberos.HttpClientBuilder;
+import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException;
+import org.pac4j.core.profile.CommonProfile;
+
+public class ProjectCoordinateLoader
+{
+    private final ModelManager modelManager;
+
+    public ProjectCoordinateLoader(ModelManager modelManager, ServerConnectionConfiguration sdlcServerConfig)
+    {
+        this(modelManager, sdlcServerConfig, profiles -> (CloseableHttpClient) HttpClientBuilder.getHttpClient(new BasicCookieStore()));
+    }
+
+    public ProjectCoordinateLoader(ModelManager modelManager, ServerConnectionConfiguration sdlcServerConfig, Function<MutableList<CommonProfile>, CloseableHttpClient> httpClientProvider)
+    {
+        this.modelManager = modelManager;
+    }
+
+    public ProjectResolvedContext resolve(ProjectCoordinateWrapper projectCoordinateWrapper, MutableList<CommonProfile> profiles)
+    {
+        return resolve(projectCoordinateWrapper, true, profiles);
+    }
+
+    public ProjectResolvedContext resolve(ProjectCoordinateWrapper projectCoordinateWrapper, boolean required, MutableList<CommonProfile> profiles)
+    {
+        Optional<String> coordinates = projectCoordinateWrapper.getCoordinates();
+        if (coordinates.isPresent())
+        {
+            PureModelContextPointer pointer = pointerFromCoordinates(coordinates.get());
+
+            PureModelContextData pmcd = modelManager.loadData(pointer, PureClientVersions.production, profiles);
+
+            return new ProjectResolvedContext(pointer, pmcd);
+        }
+        Optional<String> project = projectCoordinateWrapper.getProject();
+        if (project.isPresent())
+        {
+            Optional<String> workspace = projectCoordinateWrapper.getWorkspace();
+            Optional<String> groupWorkspace = projectCoordinateWrapper.getGroupWorkspace();
+            String workspaceId = workspace.orElseGet(groupWorkspace::get);
+            boolean isGroup = groupWorkspace.isPresent();
+            String projectId = project.get();
+
+            PureModelContextData pmcd = loadProjectPureModelContextData(projectId, workspaceId, isGroup, profiles);
+
+            return new ProjectResolvedContext(pmcd, pmcd);
+        }
+
+        if (required)
+        {
+            throw new EngineException("project/workspace or coordinates must be supplied");
+        }
+
+        return null;
+    }
+
+    private PureModelContextPointer pointerFromCoordinates(String coordinates)
+    {
+        AlloySDLC sdlc = new AlloySDLC();
+        enrichCoordinates(sdlc, coordinates);
+        PureModelContextPointer pointer = new PureModelContextPointer();
+        pointer.sdlcInfo = sdlc;
+        return pointer;
+    }
+
+    private void enrichCoordinates(AlloySDLC alloySDLC, String coordinates)
+    {
+        String[] parts = coordinates.split(":");
+        if (parts.length != 3)
+        {
+            throw new IllegalArgumentException("Invalid coordinates on service " + coordinates);
+        }
+
+        alloySDLC.groupId = parts[0];
+        alloySDLC.artifactId = parts[1];
+        alloySDLC.version = parts[2];
+    }
+
+    private PureModelContextData loadProjectPureModelContextData(String project, String workspace, boolean isGroup, MutableList<CommonProfile> profiles)
+    {
+        WorkspaceSDLC sdlcInfo = new WorkspaceSDLC();
+        sdlcInfo.project = project;
+        sdlcInfo.version = workspace;
+        sdlcInfo.isGroupWorkspace = isGroup;
+
+        PureModelContextPointer pointer = new PureModelContextPointer();
+        pointer.sdlcInfo = sdlcInfo;
+
+        return this.modelManager.loadData(pointer, PureClientVersions.production, profiles);
+    }
+}
\ No newline at end of file
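The two resolution paths differ in what they keep as the context: coordinates keep the lightweight PureModelContextPointer (AlloySDLC), while project/workspace resolution returns the loaded PureModelContextData in both roles. A hedged sketch, reusing the illustrative `loader` from the earlier example:

    // Coordinates: the context stays a pointer (AlloySDLC groupId/artifactId/version).
    ProjectResolvedContext byGav = loader.resolve(
            ProjectCoordinateWrapper.coordinates("com.gs:proj1:1.0.0"), FastList.newList());

    // Project + workspace: the WorkspaceSDLC pointer is only used for loading; the
    // resulting PureModelContextData serves as both context and data.
    ProjectResolvedContext byWorkspace = loader.resolve(
            ProjectCoordinateWrapper.workspace("PROD-12345", "myWorkspace"), FastList.newList());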
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateWrapper.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateWrapper.java
new file mode 100644
index 00000000000..76e453291c0
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectCoordinateWrapper.java
@@ -0,0 +1,137 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared.project;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument;
+import org.finos.legend.engine.query.sql.providers.core.TableSource;
+
+import java.util.List;
+import java.util.Optional;
+
+public class ProjectCoordinateWrapper
+{
+
+    private static final String ARG_COORDINATES = "coordinates";
+    private static final String ARG_PROJECT = "project";
+    private static final String ARG_WORKSPACE = "workspace";
+    private static final String ARG_GROUP_WORKSPACE = "groupWorkspace";
+
+    private final Optional<String> coordinates;
+    private final Optional<String> project;
+    private final Optional<String> workspace;
+    private final Optional<String> groupWorkspace;
+
+    private ProjectCoordinateWrapper(Optional<String> coordinates, Optional<String> project, Optional<String> workspace, Optional<String> groupWorkspace)
+    {
+        this.coordinates = coordinates;
+        this.project = project;
+        this.workspace = workspace;
+        this.groupWorkspace = groupWorkspace;
+    }
+
+    public static ProjectCoordinateWrapper coordinates(String coordinates)
+    {
+        return new ProjectCoordinateWrapper(Optional.of(coordinates), Optional.empty(), Optional.empty(), Optional.empty());
+    }
+
+    public static ProjectCoordinateWrapper workspace(String project, String workspace)
+    {
+        return new ProjectCoordinateWrapper(Optional.empty(), Optional.of(project), Optional.of(workspace), Optional.empty());
+    }
+
+    public static ProjectCoordinateWrapper groupWorkspace(String project, String groupWorkspace)
+    {
+        return new ProjectCoordinateWrapper(Optional.empty(), Optional.of(project), Optional.empty(), Optional.of(groupWorkspace));
+    }
+
+    public static ProjectCoordinateWrapper extractFromTableSource(TableSource source)
+    {
+        return extractFromTableSource(source, true);
+    }
+
+    public static ProjectCoordinateWrapper extractFromTableSource(TableSource source, boolean required)
+    {
+        Optional<String> coordinates = Optional.ofNullable(source.getArgumentValueAs(ARG_COORDINATES, -1, String.class, false));
+        Optional<String> project = Optional.ofNullable(source.getArgumentValueAs(ARG_PROJECT, -1, String.class, false));
+        Optional<String> workspace = Optional.ofNullable(source.getArgumentValueAs(ARG_WORKSPACE, -1, String.class, false));
+        Optional<String> groupWorkspace = Optional.ofNullable(source.getArgumentValueAs(ARG_GROUP_WORKSPACE, -1, String.class, false));
+
+        validateArguments(coordinates, project, workspace, groupWorkspace, required);
+
+        return new ProjectCoordinateWrapper(coordinates, project, workspace, groupWorkspace);
+    }
+
+    public void addProjectCoordinatesAsSQLSourceArguments(List<SQLSourceArgument> keys)
+    {
+        coordinates.ifPresent(value -> keys.add(new SQLSourceArgument(ARG_COORDINATES, null, value)));
+        project.ifPresent(value -> keys.add(new SQLSourceArgument(ARG_PROJECT, null, value)));
+        workspace.ifPresent(value -> keys.add(new SQLSourceArgument(ARG_WORKSPACE, null, value)));
+        groupWorkspace.ifPresent(value -> keys.add(new SQLSourceArgument(ARG_GROUP_WORKSPACE, null, value)));
+    }
+
+    private static void validateArguments(Optional<String> coordinates, Optional<String> project, Optional<String> workspace, Optional<String> groupWorkspace, boolean required)
+    {
+        if (coordinates.isPresent() && (project.isPresent() || workspace.isPresent() || groupWorkspace.isPresent()))
+        {
+            throw new IllegalArgumentException("cannot mix coordinates with project/workspace");
+        }
+        if (project.isPresent() && !(workspace.isPresent() || groupWorkspace.isPresent()))
+        {
+            throw new IllegalArgumentException("workspace/group workspace must be supplied if loading from project");
+        }
+
+        if (required && !(coordinates.isPresent() || project.isPresent()))
+        {
+            throw new IllegalArgumentException("coordinates or project/workspace must be supplied");
+        }
+    }
+
+
+    public Optional<String> getCoordinates()
+    {
+        return coordinates;
+    }
+
+    public Optional<String> getProject()
+    {
+        return project;
+    }
+
+    public Optional<String> getWorkspace()
+    {
+        return workspace;
+    }
+
+    public Optional<String> getGroupWorkspace()
+    {
+        return groupWorkspace;
+    }
+
+    @Override
+    public boolean equals(Object o)
+    {
+        return EqualsBuilder.reflectionEquals(this, o);
+    }
+
+    @Override
+    public int hashCode()
+    {
+        return HashCodeBuilder.reflectionHashCode(this);
+    }
+
+}
\ No newline at end of file
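The validation above is strict about mixing addressing styles; a small sketch of what passes and what throws (the argument values are made up):

    // Passes: exactly one addressing style.
    ProjectCoordinateWrapper ok = ProjectCoordinateWrapper.extractFromTableSource(
            new TableSource("func", FastList.newListWith(
                    new TableSourceArgument("coordinates", null, "group:artifact:1.0.0"))));

    // Throws "cannot mix coordinates with project/workspace".
    ProjectCoordinateWrapper.extractFromTableSource(
            new TableSource("func", FastList.newListWith(
                    new TableSourceArgument("coordinates", null, "group:artifact:1.0.0"),
                    new TableSourceArgument("project", null, "PROD-12345"))));

    // Throws "workspace/group workspace must be supplied if loading from project".
    ProjectCoordinateWrapper.extractFromTableSource(
            new TableSource("func", FastList.newListWith(
                    new TableSourceArgument("project", null, "PROD-12345"))));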
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectResolvedContext.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectResolvedContext.java
new file mode 100644
index 00000000000..8546465c896
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/project/ProjectResolvedContext.java
@@ -0,0 +1,45 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared.project;
+
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContext;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+
+/**
+ * This class acts as a holder for the data resolved from a ProjectCoordinateWrapper
+ */
+public class ProjectResolvedContext
+{
+    /** this will be the smallest unit possible, e.g. a pointer instead of the full pmcd if available */
+    private final PureModelContext context;
+    private final PureModelContextData data;
+
+    public ProjectResolvedContext(PureModelContext context, PureModelContextData data)
+    {
+        this.context = context;
+        this.data = data;
+    }
+
+    public PureModelContext getContext()
+    {
+        return context;
+    }
+
+    public PureModelContextData getData()
+    {
+        return data;
+    }
+}
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/SQLProviderUtils.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/SQLProviderUtils.java
new file mode 100644
index 00000000000..7c95523c763
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/SQLProviderUtils.java
@@ -0,0 +1,112 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared.utils;
+
+import org.eclipse.collections.api.list.MutableList;
+import org.eclipse.collections.impl.list.mutable.FastList;
+import org.eclipse.collections.impl.utility.ListIterate;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PackageableElementPointer;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PackageableElementType;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.PackageableElement;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.connection.ConnectionPointer;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.EngineRuntime;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.IdentifiedConnection;
+import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.runtime.StoreConnections;
+import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.application.AppliedFunction;
+import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.CString;
+import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.Lambda;
+import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.PackageableElementPtr;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Predicate;
+
+public class SQLProviderUtils
+{
+
+    public static <T extends PackageableElement> T extractElement(String argumentName, Class<T> type, PureModelContextData pmcd, Predicate<T> predicate)
+    {
+        return extractElement(argumentName, pmcd.getElementsOfType(type), predicate);
+    }
+
+    public static <T extends PackageableElement> T extractElement(String argumentName, List<T> list, Predicate<T> predicate)
+    {
+        MutableList<T> elements = ListIterate.select(list, element ->
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/TraceUtils.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/TraceUtils.java
new file mode 100644
index 00000000000..baed8276557
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/main/java/org/finos/legend/engine/query/sql/providers/shared/utils/TraceUtils.java
@@ -0,0 +1,72 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared.utils;
+
+import io.opentracing.Scope;
+import io.opentracing.Span;
+import io.opentracing.util.GlobalTracer;
+import org.eclipse.collections.api.block.procedure.Procedure;
+
+import java.util.function.Function;
+import java.util.function.Supplier;
+
+public class TraceUtils
+{
+    private static final String PREFIX = "Legend SQL: ";
+
+    public static void trace(String name, Procedure<Span> procedure)
+    {
+        Span span = GlobalTracer.get().buildSpan(PREFIX + name).start();
+
+        try (Scope ignored = GlobalTracer.get().activateSpan(span))
+        {
+            procedure.accept(span);
+        }
+        finally
+        {
+            span.finish();
+        }
+    }
+
+    public static <T> T trace(String name, Supplier<T> supplier)
+    {
+        Span span = GlobalTracer.get().buildSpan(PREFIX + name).start();
+
+        try (Scope ignored = GlobalTracer.get().activateSpan(span))
+        {
+            return supplier.get();
+        }
+        finally
+        {
+            span.finish();
+        }
+    }
+
+    public static <T> T trace(String name, Function<Span, T> function)
+    {
+        Span span = GlobalTracer.get().buildSpan(PREFIX + name).start();
+
+        try (Scope ignored = GlobalTracer.get().activateSpan(span))
+        {
+            return function.apply(span);
+        }
+        finally
+        {
+            span.finish();
+        }
+    }
+}
\ No newline at end of file
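Typical call sites look like the following; a sketch only, with illustrative span names and an assumed `provider`/`sources`/`profiles` in scope:

    // Supplier overload: time a computation and return its result.
    SQLSourceResolvedContext resolved = TraceUtils.trace("resolve sources",
            () -> provider.resolve(sources, null, profiles));

    // Procedure overload: the active span is handed in, e.g. for tagging.
    TraceUtils.trace("annotate", span ->
    {
        span.setTag("sourceCount", sources.size());
    });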
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/AbstractTestLegendStoreSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/AbstractTestLegendStoreSQLSourceProvider.java
new file mode 100644
index 00000000000..80d8c9e2104
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/AbstractTestLegendStoreSQLSourceProvider.java
@@ -0,0 +1,109 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared;
+
+import org.eclipse.collections.impl.list.mutable.FastList;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider;
+import org.finos.legend.engine.query.sql.providers.core.TableSource;
+import org.finos.legend.engine.query.sql.providers.core.TableSourceArgument;
+import org.finos.legend.engine.query.sql.providers.shared.project.ProjectCoordinateLoader;
+import org.finos.legend.engine.query.sql.providers.shared.project.ProjectResolvedContext;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.finos.legend.engine.query.sql.providers.shared.SQLSourceProviderTestUtils.loadPureModelContextFromResource;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public abstract class AbstractTestLegendStoreSQLSourceProvider
+{
+    @Test
+    public void testNoProjectOrCoordinates()
+    {
+        TableSource tableSource = new TableSource("store", FastList.newListWith(
+                new TableSourceArgument("store", null, "notfound"))
+        );
+
+        testError(tableSource, "coordinates or project/workspace must be supplied");
+    }
+
+    @Test
+    public void testMissingWorkspace()
+    {
+        TableSource tableSource = new TableSource("store", FastList.newListWith(
+                new TableSourceArgument("store", null, "notfound"),
+                new TableSourceArgument("project", null, "proj1"))
+        );
+
+        testError(tableSource, "workspace/group workspace must be supplied if loading from project");
+    }
+
+    @Test
+    public void testMixedCoordinatesWorkspace()
+    {
+        TableSource tableSource = new TableSource("store", FastList.newListWith(
+                new TableSourceArgument("store", null, "notfound"),
+                new TableSourceArgument("project", null, "proj1"),
+                new TableSourceArgument("coordinates", null, "group:artifact:version"))
+        );
+
+        testError(tableSource, "cannot mix coordinates with project/workspace");
+    }
+
+    @Test
+    public void testMissingStoreParams()
+    {
+        String connectionName = "simple::store::DB::H2Connection";
+
+        PureModelContextData pmcd = loadPureModelContextFromResource("pmcd.pure", this.getClass());
+        when(getProjectCoordinateLoader().resolve(any(), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd));
+
+        TableSource table = new TableSource("relationalStore", FastList.newListWith(
+                new TableSourceArgument("coordinates", null, "group:artifact:version"),
+                new TableSourceArgument("connection", null, connectionName)));
+
+        IllegalArgumentException exception = Assert.assertThrows("Should throw given no store found", IllegalArgumentException.class, () -> getProvider().resolve(FastList.newListWith(table), null, FastList.newList()));
+        Assert.assertEquals("'store' parameter is required", exception.getMessage());
+    }
+
+    @Test
+    public void testStoreNotFound()
+    {
+        when(getProjectCoordinateLoader().resolve(any(), any())).thenReturn(new ProjectResolvedContext(mock(PureModelContextData.class), mock(PureModelContextData.class)));
+        String connectionName = "simple::store::DB::H2Connection";
+
+        TableSource table = new TableSource("store", FastList.newListWith(
+                new TableSourceArgument("store", null, "simple::store::DBForSQL"),
+                new TableSourceArgument("connection", null, connectionName),
+                new TableSourceArgument("coordinates", null, "group:artifact:version")));
+
+        IllegalArgumentException exception = Assert.assertThrows("Should throw given no store found", IllegalArgumentException.class, () -> getProvider().resolve(FastList.newListWith(table), null, FastList.newList()));
+        Assert.assertEquals("No element found for 'store'", exception.getMessage());
+    }
+
+    protected void testError(TableSource tableSource, String error)
+    {
+        IllegalArgumentException exception = Assert.assertThrows("Should throw error", IllegalArgumentException.class, () -> getProvider().resolve(FastList.newListWith(tableSource), null, FastList.newList()));
+        Assert.assertEquals(error, exception.getMessage());
+    }
+
+    protected abstract SQLSourceProvider getProvider();
+
+    protected abstract ProjectCoordinateLoader getProjectCoordinateLoader();
+
+}
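A concrete suite then only supplies the two hooks; a sketch assuming the same Mockito runner used elsewhere in this diff, with a hypothetical MyStoreSQLSourceProvider standing in for the provider under test:

    @RunWith(MockitoJUnitRunner.class)
    public class TestMyStoreSQLSourceProvider extends AbstractTestLegendStoreSQLSourceProvider
    {
        @Mock
        private ProjectCoordinateLoader projectCoordinateLoader;

        @Override
        protected SQLSourceProvider getProvider()
        {
            // Hypothetical provider under test; any SQLSourceProvider built on ProjectCoordinateLoader fits.
            return new MyStoreSQLSourceProvider(projectCoordinateLoader);
        }

        @Override
        protected ProjectCoordinateLoader getProjectCoordinateLoader()
        {
            return projectCoordinateLoader;
        }
    }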
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/SQLSourceProviderTestUtils.java b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/SQLSourceProviderTestUtils.java
new file mode 100644
index 00000000000..ad318655867
--- /dev/null
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/java/org/finos/legend/engine/query/sql/providers/shared/SQLSourceProviderTestUtils.java
@@ -0,0 +1,79 @@
+// Copyright 2023 Goldman Sachs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package org.finos.legend.engine.query.sql.providers.shared;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.io.IOUtils;
+import org.finos.legend.engine.language.pure.grammar.from.PureGrammarParser;
+import org.finos.legend.engine.protocol.pure.v1.PureProtocolObjectMapperFactory;
+import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.util.Objects;
+
+public class SQLSourceProviderTestUtils
+{
+
+    //TODO replace once equals method added in legend
+    public static void assertLogicalEquality(Object expected, Object actual)
+    {
+        try
+        {
+            ObjectMapper mapper = PureProtocolObjectMapperFactory.getNewObjectMapper();
+            Assert.assertEquals(
+                    mapper.writeValueAsString(expected),
+                    mapper.writeValueAsString(actual));
+        }
+        catch (JsonProcessingException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static PureModelContextData loadPureModelContextFromResource(String resource, Class<?> clazz)
+    {
+        String model = getResource(resource, clazz);
+        return PureModelContextData.newBuilder().withPureModelContextData(PureGrammarParser.newInstance().parseModel(model)).build();
+    }
+
+    public static String getResource(String resource, Class<?> clazz)
+    {
+        try
+        {
+            return IOUtils.toString(Objects.requireNonNull(clazz.getClassLoader().getResourceAsStream(resource)));
+        }
+        catch (IOException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static <T> T loadFromResources(String resource, TypeReference<T> typeReference, Class<?> clazz)
+    {
+        String sources = getResource(resource, clazz);
+        try
+        {
+            return PureProtocolObjectMapperFactory.getNewObjectMapper().readValue(sources, typeReference);
+        }
+        catch (JsonProcessingException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+}
\ No newline at end of file
"simple::func::simpleFunction_String_MANY__TabularDataSet_1_"; + + ProjectCoordinateWrapper coordinates = ProjectCoordinateWrapper.coordinates("proj1:art:1.0.0"); + + PureModelContextData pmcd = loadPureModelContextFromResource("function-pmcd.pure", this.getClass()); + Function function = SQLProviderUtils.extractElement("function", pmcd.getElementsOfType(Function.class), f -> f.getPath().equals(functionName)); + PureModelContextPointer pointer = new PureModelContextPointer(); + + when(projectCoordinateLoader.resolve(eq(coordinates), any())).thenReturn(new ProjectResolvedContext(pointer, pmcd)); + + TableSource tableSource = createTableSource(functionName, + new TableSourceArgument("coordinates", null, "proj1:art:1.0.0") + ); + + Lambda lambda = new Lambda(); + lambda.body = function.body; + lambda.parameters = function.parameters; + + SQLSource expected = new SQLSource("func", lambda, null, null, FastList.newList(), null, FastList.newListWith( + new SQLSourceArgument("path", 0, functionName), + new SQLSourceArgument("coordinates", null, "proj1:art:1.0.0") + )); + + testSuccess(tableSource, pointer, expected); + } + + @Test + public void testNoProjectOrCoordinates() + { + TableSource tableSource = createTableSource("simple::func__TabularDataSet_1_"); + testException(tableSource, IllegalArgumentException.class, "coordinates or project/workspace must be supplied"); + } + + @Test + public void testNoWorkspaceWithProject() + { + TableSource tableSource = createTableSource("simple::func__TabularDataSet_1_", new TableSourceArgument("project", null, "proj1")); + testException(tableSource, IllegalArgumentException.class, "workspace/group workspace must be supplied if loading from project"); + } + + @Test + public void testNotTDSFunc() + { + String functionName = "simple::func::nonTdsFunction__String_1_"; + + ProjectCoordinateWrapper coordinates = ProjectCoordinateWrapper.coordinates("proj1:art:1.0.0"); + + PureModelContextData pmcd = loadPureModelContextFromResource("function-pmcd.pure", this.getClass()); + + when(projectCoordinateLoader.resolve(eq(coordinates), any())).thenReturn(new ProjectResolvedContext(pmcd, pmcd)); + + TableSource tableSource = createTableSource(functionName, + new TableSourceArgument("coordinates", null, "proj1:art:1.0.0") + ); + + testException(tableSource, EngineException.class, "Function " + functionName + " does not return Tabular data type"); + } + + private void testException(TableSource tableSource, Class throwable, String expected) + { + T exception = Assert.assertThrows("Should throw given no service found", throwable, () -> provider.resolve(FastList.newListWith(tableSource), null, FastList.newList())); + Assert.assertEquals(expected, exception.getMessage()); + } + + private void testSuccess(TableSource tableSource, PureModelContext expectedContext, SQLSource expected) + { + SQLSourceResolvedContext result = provider.resolve(FastList.newListWith(tableSource), null, FastList.newList()); + + //ASSERT + Assert.assertEquals(FastList.newListWith(expectedContext), result.getPureModelContexts()); + Assert.assertEquals(1, result.getSources().size()); + + SQLSourceProviderTestUtils.assertLogicalEquality(expected, result.getSources().get(0)); + } + + private final TableSource createTableSource(String func, TableSourceArgument... 
extraArguments) + { + return new TableSource("func", FastList.newListWith( + new TableSourceArgument(null, 0, func)).with(extraArguments) + ); + } +} + diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/resources/function-pmcd.pure b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/resources/function-pmcd.pure new file mode 100644 index 00000000000..fbeb18c1a2d --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/legend-engine-xt-sql-providers-shared/src/test/resources/function-pmcd.pure @@ -0,0 +1,22 @@ +###Pure +import simple::model::*; + +Class simple::model::Person +{ + firstName: String[1]; + lastName: String[1]; +} +function simple::func::simpleFunction(lastNames:String[*]):meta::pure::tds::TabularDataSet[1] +{ + simple::model::Person.all() + ->filter(p | $p.lastName->in($lastNames)) + ->project([ + col(x | $x.firstName, 'first name'), + col(x | $x.lastName, 'last name') + ]) +} + +function simple::func::nonTdsFunction():String[1] +{ + '' +} \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-providers/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-providers/pom.xml new file mode 100644 index 00000000000..733eeb61c96 --- /dev/null +++ b/legend-engine-xts-sql/legend-engine-xt-sql-providers/pom.xml @@ -0,0 +1,35 @@ + + + + + org.finos.legend.engine + legend-engine-xts-sql + 4.35.4-SNAPSHOT + + 4.0.0 + + legend-engine-xt-sql-providers + pom + Legend Engine - XTS - SQL - Providers + + + legend-engine-xt-sql-providers-core + legend-engine-xt-sql-providers-shared + legend-engine-xt-sql-providers-relationalStore + legend-engine-xt-sql-providers-service + + \ No newline at end of file diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-pure-metamodel/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-pure-metamodel/pom.xml index c75d9c9073a..6be69c00b32 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-pure-metamodel/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-pure-metamodel/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-pure/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-pure/pom.xml index 8e344b7b3c5..6434c6e2585 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-pure/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-pure/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -65,6 +65,7 @@ legend-pure-m2-dsl-diagram-grammar ${legend.pure.version} + org.finos.legend.pure legend-pure-m2-dsl-graph-grammar @@ -75,6 +76,11 @@ legend-engine-pure-code-compiled-core ${project.version} + + org.finos.legend.engine + legend-engine-pure-platform-store-relational-java + ${project.version} + org.finos.legend.engine legend-engine-language-pure-dsl-service-pure @@ -148,6 +154,11 @@ legend-engine-pure-code-compiled-core ${project.version} + + org.finos.legend.engine + legend-engine-pure-platform-store-relational-java + ${project.version} + org.finos.legend.engine legend-engine-language-pure-dsl-service-pure @@ -201,10 +212,16 @@ legend-pure-runtime-java-engine-compiled + org.finos.legend.engine legend-engine-pure-code-compiled-core + + org.finos.legend.engine + legend-engine-pure-platform-store-relational-java + ${project.version} + org.finos.legend.engine legend-engine-language-pure-dsl-service-pure diff --git 
a/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/fromPure.pure b/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/fromPure.pure index 965871119a0..c1c38849f61 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/fromPure.pure +++ b/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/fromPure.pure @@ -228,7 +228,9 @@ function <> meta::external::query::sql::transformation::queryToP let newExp = appendTdsFunc($left.expression->toOne(), concatenate_TabularDataSet_1__TabularDataSet_1__TabularDataSet_1_, [list($right.expression->toOne())]); - ^$context(expression = $newExp, contexts = [$left, $right]); + let aliases = $left.aliases->concatenate($right.aliases)->removeDuplicates(); + + ^$context(expression = $newExp, contexts = [$left, $right], aliases = $aliases); } function <> meta::external::query::sql::transformation::queryToPure::processQuerySpec(querySpec: QuerySpecification[1], context: SqlTransformContext[1]): SqlTransformContext[1] @@ -236,10 +238,16 @@ function <> meta::external::query::sql::transformation::queryToP debug('processQuerySpec', $context.debug); let from = $querySpec.from->processFrom($context); + //if the from context comes back and is still the same (ie no subquery, realias etc) then we can use that directly, else we + //need to subnest the context + let fromContext = if ($from.root || $from.name == $context.name, | $from, | ^$context(contexts = ^$from(contexts = []), expression = $from.expression)); + + let projection = $querySpec.select->processProjection($querySpec.groupBy, $querySpec.having, $querySpec.orderBy.sortKey, $querySpec.where->processWhere($fromContext)); + let query = $querySpec.limit->processLimitOffset($querySpec.offset, - $querySpec.orderBy->processOrderBy($querySpec.select.selectItems, - $querySpec.having->processHaving($querySpec.select, - $querySpec.select->processProjection($querySpec.groupBy, $querySpec.having, $querySpec.where->processWhere($from)) + $querySpec.orderBy->processOrderBy($projection.second.selectItems, + $querySpec.having->processHaving($projection.second, + $projection.first ) ) ); @@ -253,7 +261,10 @@ function <> meta::external::query::sql::transformation::queryToP let alias = $query.alias($nonAliasedName, $s.alias, true)->toOne(); let finalAlias = $query.alias($alias.name, $s.alias, false); - ^$alias(alias = if ($finalAlias->isEmpty(), | $alias.alias, | $finalAlias.alias));, + + let realias = if ($alias.realias->isNotEmpty(), | $alias.realias, | $finalAlias.realias); + + ^$alias(alias = if ($finalAlias->isEmpty(), | $alias.alias, | $finalAlias.alias), realias = $realias);, a:AllColumns[1] | if ($a.prefix->isEmpty(), | $query.contexts->map(c | $c.aliases)->concatenate($query.aliases), @@ -268,15 +279,17 @@ function <> meta::external::query::sql::transformation::queryToP expression = ^QualifiedNameReference(name = ^QualifiedName(parts = if ($clash, | $p.expected, | $p.actual)))); ); - //if this is the root context we want to align the schema (order and name) with what has been specified in the sql let currentSchema = $query.columns.name; - let toRename = $expected->filter(sc | $sc.expression->extractNameFromExpression([])->in($currentSchema)); - let renamed = if ($query.root && $expected.alias != $currentSchema, | ^$query(expression = processRename($toRename, $query)), | $query); + let renamed 
= if ($expected.alias != $currentSchema, | ^$query(expression = processRename($expected->filter(e | !$e.alias->in($currentSchema)), $query)), | $query); - if ($renamed.columns.name != $expected.alias->removeDuplicates() && $expected.alias->isNotEmpty(), + let final = if ($renamed.columns.name != $expected.alias->removeDuplicates() && $expected.alias->isNotEmpty(), | ^$query(expression = processRestrict($expected.alias, $renamed)), | $renamed); + + let aliases = $final.columns.name->map(c | ^SQLColumnAlias(name = $c)); + + ^$final(aliases = $aliases); } function <> meta::external::query::sql::transformation::queryToPure::extractAggregatesFromExpression(expression:meta::external::query::sql::metamodel::Expression[0..1]):meta::external::query::sql::metamodel::Expression[*] @@ -284,6 +297,7 @@ function <> meta::external::query::sql::transformation::queryToP $expression->match([ a:ArithmeticExpression[1] | $a.left->extractAggregatesFromExpression()->concatenate($a.right->extractAggregatesFromExpression()), b:BetweenPredicate[1] | $b.min->extractAggregatesFromExpression()->concatenate($b.value->extractAggregatesFromExpression())->concatenate($b.max->extractAggregatesFromExpression()), + c:Cast[1] | $c.expression->extractAggregatesFromExpression(), c:ComparisonExpression[1] | $c.left->extractAggregatesFromExpression()->concatenate($c.right->extractAggregatesFromExpression()), e:Extract[1] | $e.expression->extractAggregatesFromExpression(), f:FunctionCall[1] | if (isExpressionAggregate($f, false, false), | $f, | $f.arguments->map(a | $a->extractAggregatesFromExpression())), @@ -300,22 +314,50 @@ function <> meta::external::query::sql::transformation::queryToP } -function <> meta::external::query::sql::transformation::queryToPure::processProjection(select: Select[1], groupBy: meta::external::query::sql::metamodel::Expression[*], having:meta::external::query::sql::metamodel::Expression[0..1], context: SqlTransformContext[1]): SqlTransformContext[1] +function <> meta::external::query::sql::transformation::queryToPure::processProjection(originalSelect: Select[1], groupBy: meta::external::query::sql::metamodel::Expression[*], having:meta::external::query::sql::metamodel::Expression[0..1], orderBy:meta::external::query::sql::metamodel::Expression[*], context: SqlTransformContext[1]): Pair[1] { debug('processProjection', $context.debug); - let aggregates = $select.selectItems->filter(si | $si->isSelectItemAggregate()); - let windows = $select.selectItems->filter(si | $si->isSelectItemWindow())->cast(@SingleColumn); - let standard = $select.selectItems->removeAll($aggregates)->removeAll($windows); + let aggregates = $originalSelect.selectItems->filter(si | $si->isSelectItemAggregate()); + let windows = $originalSelect.selectItems->filter(si | $si->isSelectItemWindow())->cast(@SingleColumn); + let standard = $originalSelect.selectItems->removeAll($aggregates)->removeAll($windows); let havingExtensions = extractAggregatesFromExpression($having)->map(e | ^SingleColumn(expression = $e)); - let standardExtensions = $standard->filter(si | !$si->isSelectItemColumnReference() && !$si->isSelectItemAggregate()); + let standardExtensions = $standard->filter(si | !$si->isNonAliasedSelectItemColumnReference() && !$si->isSelectItemAggregate()); let windowExtensions = extractWindowExtensionExpressions($windows); - let extensions = $standardExtensions->concatenate($windowExtensions); + let columns = $context.columns.name; + + let extensionPairs = 
$standardExtensions->concatenate($windowExtensions)->cast(@SingleColumn)->map(sc | + let name = extractNameFromSingleColumn($sc->toOne(), $context); + pair($sc, if ($columns->contains($name), | ^$sc(alias = $name + '_1'), | $sc)); + ); + + let selectItems = $originalSelect.selectItems->map(si | + let extension = $extensionPairs->filter(e | $e.first == $si)->first(); + if ($extension.first == $extension.second, | $si, | $extension->toOne().second); + ); + + let extensions = $extensionPairs.second; + + let aliases = range(0, $originalSelect.selectItems->size())->map(i | + let si = $originalSelect.selectItems->at($i); + let eis = $selectItems->at($i); + $si->match([ + s:SingleColumn[1] | + let alias = extractAliasFromColumn($s); + let ealias = extractAliasFromColumn($eis->cast(@SingleColumn)); + + if ($si != $eis, | ^$alias(realias = $ealias.actual), | $alias);, + a:AllColumns[1] | $context.aliases->map(a | ^SQLColumnAlias(name = $a.actual)) + ]); + ); + + let select = ^$originalSelect(selectItems = $selectItems); let isAggregate = $groupBy->isNotEmpty() || anyColumnAggregate($select); + let isWindow = $windows->isNotEmpty(); let project = if ($standard->isNotEmpty() && !($isAggregate || $isWindow || $havingExtensions->isNotEmpty()), @@ -338,14 +380,7 @@ function <> meta::external::query::sql::transformation::queryToP | appendTdsFunc($olapGroupBy, distinct_TabularDataSet_1__TabularDataSet_1_, []), | $olapGroupBy); - let aliases = $select.selectItems->map(si | - $si->match([ - s:SingleColumn[1] | extractAliasFromColumn($s), - a:AllColumns[1] | $context.aliases->map(a | ^SQLColumnAlias(name = $a.name)); - ]) - ); - - ^$context(expression = $distinctExp, aliases = $aliases); + pair(^$context(expression = $distinctExp, aliases = $aliases), $select); } function <> meta::external::query::sql::transformation::queryToPure::extractColumnNameFromExpression(expression:meta::external::query::sql::metamodel::Expression[1], selectItems: SelectItem[*], context: SqlTransformContext[1]):String[1] @@ -374,23 +409,10 @@ function <> meta::external::query::sql::transformation::queryToP let groupByColumns = $groupBy->map(g | $g->extractColumnNameFromExpression($select.selectItems, $context)); - - let aggregates = $select.selectItems->filter(s | $s->isSelectItemAggregate() && !$s->isSelectItemWindow()); + let aggregates = $select.selectItems->filter(s | $s->isSelectItemAggregate() && !$s->isSelectItemWindow())->cast(@SingleColumn); let rename = processSelect(^$select(selectItems = $select.selectItems->removeAll($aggregates)->removeAll($windows)->removeAll($extensions)->removeAll($havingExtensions)->filter(c | $c->instanceOf(SingleColumn))->cast(@SingleColumn)), false, $context); - //TODO should use equals on the expression instead of name checking in groupBy, cannot at moment due to no equality key. 
- let aggregatePairs = $aggregates->cast(@SingleColumn) - ->map(column | pair(extractNameFromSingleColumn($column, $context), $column)) - ->filter(pair | !$groupByColumns->contains($pair.first)); - - let aggregateExpressionNames = $aggregates->cast(@SingleColumn)->map(column | extractNameFromExpression($column.expression, $context)); - - let havingAggregatePairs = $havingExtensions->cast(@SingleColumn) - ->map(column | pair(extractNameFromSingleColumn($column, $context), $column)) - ->filter(pair | !$aggregateExpressionNames->contains($pair.first)) - ->removeDuplicatesBy(x | $x.first); - let additionalGroupColumns = $select.selectItems->removeAll($aggregates)->map(s | $s->match([ s:SingleColumn[1] | extractNameFromSingleColumn($s, $context), a:AllColumns[1] | @@ -403,11 +425,13 @@ function <> meta::external::query::sql::transformation::queryToP let allGroupByColumns = $groupByColumns->concatenate($additionalGroupColumns)->distinct(); + let aggregateExpressionNames = $aggregates->cast(@SingleColumn)->map(column | extractNameFromExpression($column.expression, $context)); + let allAggregateColumns = $aggregates->concatenate($havingExtensions->cast(@SingleColumn)->filter(h | + !extractNameFromExpression($h.expression, $context)->in($aggregateExpressionNames); + ))->removeDuplicatesBy(c | $c->extractNameFromSingleColumn($context)); - let aggregations = $aggregatePairs->concatenate($havingAggregatePairs) - ->map(pair | - let name = $pair.first; - let column = $pair.second; + let aggregations = $allAggregateColumns->map(column | + let name = extractNameFromSingleColumn($column, $context); let aggregateExpression = extractAggregatesFromExpression($column.expression); @@ -544,27 +568,35 @@ function <> meta::external::query::sql::transformation::queryToP function <> meta::external::query::sql::transformation::queryToPure::processExtend(select: Select[1], context: SqlTransformContext[1]):FunctionExpression[1] { debug('processExtend', $context.debug); - let typeArguments = ^GenericType(rawType = TDSRow); - let genericType = ^GenericType(rawType = BasicColumnSpecification, typeArguments = $typeArguments); + let selectItems = $select.selectItems->processSelectItems($context, false); - let args = $select.selectItems->processSelectItems($context, false)->map(item | - sfe(col_Function_1__String_1__BasicColumnSpecification_1_, $genericType, $typeArguments, [$item.first->iv(), $item.second->iv()]); + let columns = $context.columns.name; + + let args = $selectItems->map(item | + let rename = $columns->contains($item.second); + let name = if ($rename, | $item.second + '_1', | $item.second); + createCol($item.first, $name); ); - let iv = iv($args); + appendTdsFunc($context.expression->toOne(), extend_TabularDataSet_1__BasicColumnSpecification_MANY__TabularDataSet_1_, list(iv($args))); +} + +function meta::external::query::sql::transformation::queryToPure::createCol(lambda:LambdaFunction[1], name:String[1]):SimpleFunctionExpression[1] +{ + let typeArguments = ^GenericType(rawType = TDSRow); + let genericType = ^GenericType(rawType = BasicColumnSpecification, typeArguments = $typeArguments); - appendTdsFunc($context.expression->toOne(), extend_TabularDataSet_1__BasicColumnSpecification_MANY__TabularDataSet_1_, list($iv)); + sfe(col_Function_1__String_1__BasicColumnSpecification_1_, $genericType, $typeArguments, [iv($lambda), iv($name)]); } -function <> meta::external::query::sql::transformation::queryToPure::processSelect(select: Select[1], restrict:Boolean[1],context: 
SqlTransformContext[1]):FunctionExpression[1] +function <> meta::external::query::sql::transformation::queryToPure::processSelect(select: Select[1], restrict:Boolean[1], context: SqlTransformContext[1]):FunctionExpression[1] { debug('processSelect', $context.debug); - if (isSelectStar($select), + if (isSelectStar($select, $context), | $context.expression->toOne(), | if (allColumnsSimpleSelect($select), | processSelectToRestrictAndRename($select, $restrict, $context), | processSelectToProject($select, $context))); - } @@ -583,6 +615,7 @@ function <> meta::external::query::sql::transformation::queryToP $e->match([ a:ArithmeticExpression[1] | $a.left->isExpressionAggregate($includeParameters, $includeWindow) || $a.right->isExpressionAggregate($includeParameters, $includeWindow), b:BetweenPredicate[1] | $b.min->isExpressionAggregate($includeParameters, $includeWindow) || $b.value->isExpressionAggregate($includeParameters, $includeWindow) || $b.max->isExpressionAggregate($includeParameters, $includeWindow), + c:Cast[1] | $c.expression->isExpressionAggregate($includeParameters, $includeWindow), c:ComparisonExpression[1] | $c.left->isExpressionAggregate($includeParameters, $includeWindow) || $c.right->isExpressionAggregate($includeParameters, $includeWindow), e:Extract[1] | $e.expression->isExpressionAggregate($includeParameters, $includeWindow), f:FunctionCall[1] | @@ -608,9 +641,12 @@ function <> meta::external::query::sql::transformation::queryToP && $si->cast(@SingleColumn).expression->cast(@FunctionCall).window->isNotEmpty() } -function <> meta::external::query::sql::transformation::queryToPure::isSelectItemColumnReference(si:SelectItem[1]):Boolean[1] +function <> meta::external::query::sql::transformation::queryToPure::isNonAliasedSelectItemColumnReference(si:SelectItem[1]):Boolean[1] { - $si->instanceOf(AllColumns) || ($si->instanceOf(SingleColumn) && $si->cast(@SingleColumn).expression->instanceOf(QualifiedNameReference)) + $si->match([ + a:AllColumns[1] | true, + s:SingleColumn[1] | $s.expression->instanceOf(QualifiedNameReference) && $s.alias->isEmpty() + ]) } function <> meta::external::query::sql::transformation::queryToPure::processSelectToProject(select: Select[1], context: SqlTransformContext[1]):FunctionExpression[1] @@ -620,7 +656,7 @@ function <> meta::external::query::sql::transformation::queryToP let genericType = ^GenericType(rawType = BasicColumnSpecification, typeArguments = $typeArguments); let args = $select.selectItems->processSelectItems($context, false)->map(item | - sfe(col_Function_1__String_1__BasicColumnSpecification_1_, $genericType, $typeArguments, [$item.first->iv(), $item.second->iv()]); + createCol($item.first, $item.second); ); let iv = iv($args); @@ -655,8 +691,6 @@ function <> meta::external::query::sql::transformation::queryToP let renames = $selectItems->map(si | let defaultName = extractNameFromExpression($si.expression, $context); - let existing = $context.columns.name; - if ($si.alias->isNotEmpty() && $si.alias != $defaultName, | @@ -669,9 +703,12 @@ function <> meta::external::query::sql::transformation::queryToP | $context.expression->toOne()); } -function <> meta::external::query::sql::transformation::queryToPure::isSelectStar(select: Select[1]):Boolean[1] +function <> meta::external::query::sql::transformation::queryToPure::isSelectStar(select: Select[1], context: SqlTransformContext[1]):Boolean[1] { - $select.selectItems->forAll(si | $si->instanceOf(AllColumns)); + $select.selectItems->forAll(si | $si->match([ + a:AllColumns[1] | 
+            assert($a.prefix->isEmpty() || $a.prefix == $context.name || $a.prefix->in($context.contexts.name), 'invalid select * - alias not in scope'),
+        s:SelectItem[1] | false
+    ]));
 }
 
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::allColumnsSimpleSelect(select: Select[1]):Boolean[1]
@@ -749,14 +786,13 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::processDuplicateRenames(names:String[*], suffix:String[1], context:SqlTransformContext[1]): SqlTransformContext[1]
 {
-    if ($names->isNotEmpty(), |
     let expression = processRename($names->map(n | ^SingleColumn(alias = $n + '_' + $suffix, expression = ^QualifiedNameReference(name = ^QualifiedName(parts = $n)))), $context);
 
     let aliases = $context.aliases->map(a |
-        if ($a.name->in($names) || $a.alias->in($names),
-        | ^$a(realias = $a.expected + '_' + $suffix),
+        if ($a.name->in($names) || $a.alias->in($names) || $a.realias->in($names),
+        | ^$a(realias = $a.actual + '_' + $suffix),
         | $a);
     );
 
@@ -771,8 +807,8 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
     let leftContext = processRelation($join.left, ^$context(root = false, id = $context.id + 2));
     let rightContext = processRelation($join.right, ^$context(root = false, id = $context.id + 3));
 
-    let leftColumns = $leftContext.aliases.expected();
-    let rightColumns = $rightContext.aliases.expected();
+    let leftColumns = $leftContext.aliases.actual();
+    let rightColumns = $rightContext.aliases.actual();
 
     let leftName = $join.left->relationName();
     let rightName = $join.right->relationName();
@@ -787,7 +823,9 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
     let right = wrapWithFrom($rightRenamed);
 
     //all of the child contexts are in scope for joining on so the new context needs to know about them
-    let newContext = ^$context(contexts = $left->concatenate($left.contexts)->concatenate($right)->concatenate($right.contexts));
+    let leftContexts = if ($join.left->instanceOf(Join), | $left.contexts, | $left);
+    let rightContexts = if ($join.right->instanceOf(Join), | $right.contexts, | $right);
+    let newContext = ^$context(contexts = $leftContexts->concatenate($rightContexts));
 
     let row1 = ^VariableExpression(multiplicity = PureOne, name = 'row1', genericType = ^GenericType(rawType = TDSRow));
     let row2 = ^VariableExpression(multiplicity = PureOne, name = 'row2', genericType = ^GenericType(rawType = TDSRow));
@@ -838,7 +876,8 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
         pair(JoinType.LEFT, meta::relational::metamodel::join::JoinType.LEFT_OUTER),
         pair(JoinType.RIGHT, meta::relational::metamodel::join::JoinType.RIGHT_OUTER),
         pair(JoinType.INNER, meta::relational::metamodel::join::JoinType.INNER),
-        pair(JoinType.CROSS, meta::relational::metamodel::join::JoinType.INNER)
+        pair(JoinType.CROSS, meta::relational::metamodel::join::JoinType.INNER),
+        pair(JoinType.FULL, meta::relational::metamodel::join::JoinType.FULL_OUTER)
     ]->getValue($joinType);
 }
@@ -860,6 +899,7 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::processOrderBy(sortItems: meta::external::query::sql::metamodel::SortItem[*], selectItems:SelectItem[*], context: SqlTransformContext[1]): SqlTransformContext[1]
 {
     debug('processOrderBy', $context.debug);
+
     let sortInformation = $sortItems->map(si| createSortItemFunction($si, $selectItems, $context));
 
     let newExp = if ($sortInformation->isEmpty(),
@@ -876,7 +916,10 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::createSortItemFunction(si:SortItem[1], selectItems:SelectItem[*], context: SqlTransformContext[1]):FunctionExpression[1]
 {
+    assert($si.nullOrdering == SortItemNullOrdering.UNDEFINED, 'null ordering type not yet supported');
+
     let column = extractColumnNameFromExpression($si.sortKey, $selectItems, $context);
+
     let sortFunc = [
         pair(SortItemOrdering.ASCENDING, asc_String_1__SortInformation_1_),
         pair(SortItemOrdering.DESCENDING, desc_String_1__SortInformation_1_)
@@ -939,7 +982,7 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::processTableSubquery(tsq: TableSubquery[1], context: SqlTransformContext[1]): SqlTransformContext[1]
 {
     debug('processTableSubquery', $context.debug);
-    processQuery($tsq.query, $context);
+    processQuery($tsq.query, ^$context(root = false));
 }
 
 function meta::external::query::sql::transformation::queryToPure::extractSourceArguments(expressions:meta::external::query::sql::metamodel::Expression[*]):SQLSourceArgument[*]
@@ -1518,7 +1561,8 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
         pair(String, | processCastAsParse(^$c(type = ^ColumnType(name = 'DOUBLE PRECISION')), $v, $expContext, $context)),
         pair(Float, | $v),
         pair(Decimal, | sfe(toFloat_Number_1__Float_1_, $v)),
-        pair(Integer, | sfe(toFloat_Number_1__Float_1_, $v))
+        pair(Integer, | sfe(toFloat_Number_1__Float_1_, $v)),
+        pair(Number, | sfe(toFloat_Number_1__Float_1_, $v))
     ]->getValue($type)->eval();
 }
@@ -1536,7 +1580,8 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
         pair(String, | processCastAsParse(^$c(type = ^ColumnType(name = 'NUMERIC')), $v, $expContext, $context)),
         pair(Decimal, | $v),
         pair(Float, | sfe(toDecimal_Number_1__Decimal_1_, $v)),
-        pair(Integer, | sfe(toDecimal_Number_1__Decimal_1_, $v))
+        pair(Integer, | sfe(toDecimal_Number_1__Decimal_1_, $v)),
+        pair(Number, | sfe(toDecimal_Number_1__Decimal_1_, $v))
     ]->getValue($type)->eval();
 
     let scale = if ($c.type.parameters->size() == 2, | $c.type.parameters->at(1), | []);
@@ -1696,7 +1741,6 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::createTdsColumn(qualifiedName:QualifiedName[1], var:VariableExpression[1], expContext:SqlTransformExpressionContext[1], context: SqlTransformContext[1]):ValueSpecification[1]
 {
-    if ($qualifiedName.parts->isEmpty(), | $var, |
     let column = $context.columnByNameParts($qualifiedName.parts, true);
@@ -1947,12 +1991,14 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
         }),
         processor('length', length_String_1__Integer_1_),
         processor('lower', toLower_String_1__String_1_),
+        processor('lpad', String, {args, fc, ctx | processPad($args, true)}),
         processor('ltrim', String, {args, fc, ctx | processTrim(ltrim_String_1__String_1_, $args)}),
         processor('md5', String, {args, fc, ctx | processHash($args, meta::pure::functions::hash::HashType.MD5)}),
         processor('regexp_like', matches_String_1__String_1__Boolean_1_),
         processor('repeat', repeatString_String_$0_1$__Integer_1__String_$0_1$_),
         processor('replace', replace_String_1__String_1__String_1__String_1_),
         processor('reverse', reverseString_String_1__String_1_),
+        processor('rpad', String, {args, fc, ctx | processPad($args, false)}),
         processor('rtrim', String, {args, fc, ctx | processTrim(rtrim_String_1__String_1_, $args)}),
         processor('sha256', String, {args, fc, ctx | processHash($args, meta::pure::functions::hash::HashType.SHA256)}),
         processor('split_part', String, {args, fc, ctx |
@@ -1960,10 +2006,10 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
             let position = $args->at(2)->match([
                 i:InstanceValue[1] | $i.values->match([
-                    i:Integer[1] | iv($i + 1),
+                    i:Integer[1] | iv($i - 1),
                     a:Any[*] | fail('invalid split part position'); iv(1);
                 ]),
-                v:ValueSpecification[1] | sfe(plus_Integer_MANY__Integer_1_, iv([$args->at(2), iv(1)]))
+                v:ValueSpecification[1] | sfe(minus_Integer_MANY__Integer_1_, iv([$args->at(2), iv(1)]))
             ]);
 
             let arguments = [$args->at(0), $args->at(1), $position];
@@ -1989,9 +2035,8 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
             possiblyProcessParseDate($args->at(0))
         }),
         processor('date_part', Integer, {args, fc, ctx |
-            assertEquals(2, $args->size(), 'incorrect number of args');
-            let part = $args->at(0);
-            let value = $part->reactivate()->toOne()->cast(@String);
+            assertEquals(2, $args->size(), 'incorrect number of args for date_part');
+            let part = $args->at(0)->reactivate()->toOne()->cast(@String);
 
             let func = [
                 pair('year', year_Date_1__Integer_1_),
@@ -2005,12 +2050,12 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
                 pair('minute', minute_Date_1__Integer_1_),
                 pair('second', second_Date_1__Integer_1_),
                 pair('epoch', toEpochValue_Date_1__Integer_1_)
-            ]->getValue($value->toLower());
+            ]->getValue($part->toLower());
 
             nullOrSfe($func, $args->at(1));
         }),
         processor('date_trunc', Date, {args, fc, ctx |
-            assertEquals(2, $args->size(), 'incorrect number of args');
+            assertEquals(2, $args->size(), 'incorrect number of args for date_trunc');
             let part = $args->at(0);
             let value = $part->reactivate()->toOne()->cast(@String);
@@ -2040,6 +2085,19 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
             sfe(meta::pure::tds::extensions::firstNotNull_T_MANY__T_$0_1$_, ^GenericType(rawType = $type), ^GenericType(rawType = $type), $filteredArgs->iv());
         }),
+        //FORMAT
+        processor('to_char', String, {args, fc, ctx |
+            assertEquals(2, $args->size(), 'incorrect number of args for to_char');
+
+            let arg = $args->at(0);
+            let type = $arg.genericType.rawType;
+            let format = $args->at(1)->reactivate()->toOne()->cast(@String);
+
+            assert($type->isNotEmpty() && $type->toOne()->normalizeType() == Date, 'to_char currently only supported for known date inputs');
+
+            toChar($format, [], $arg->evaluateAndDeactivate());
+        }),
+
         //WINDOW
         processor('row_number', false, true, [], {args, fc, ctx |
             let values = $ctx.defaultVar->toOne()->concatenate($args);
@@ -2056,6 +2114,187 @@ function meta::external::query::sql::transformation::queryToPure::functionProces
     ]
 }
 
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toCharFormats():Pair<String, Function<{String[0..1], String[0..1], ValueSpecification[1]->ValueSpecification[1]}>>[*]
+{
+    [
+        pair('HH', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('HH')}),
+        pair('HH12', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('HH12')}),
+        pair('HH24', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(hour_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('MI', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(minute_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('SS', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(second_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('MS', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('MS')}),
+        pair('US', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('US')}),
+        pair('FF1', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF1')}),
+        pair('FF2', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF2')}),
+        pair('FF3', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF3')}),
+        pair('FF4', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF4')}),
+        pair('FF5', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF5')}),
+        pair('FF6', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('FF6')}),
+        pair('SSSS', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('SSSS')}),
+        pair('SSSSS', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('SSSSS')}),
+        pair('AM', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('AM')}),
+        pair('am', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('am')}),
+        pair('PM', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('PM')}),
+        pair('pm', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('pm')}),
+        pair('A.M', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('A.M')}),
+        pair('a.m', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('a.m')}),
+        pair('P.M', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('P.M')}),
+        pair('p.m', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('p.m')}),
+        pair('Y,YYY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('Y,YYY')}),
+        pair('YYYY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(year_Date_1__Integer_1_, $a, $p, $s)}),
+        pair('YYY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | processSubstring([toChar(year_Date_1__Integer_1_, $a, $p, $s), iv(2), iv(3)])}),
+        pair('YY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | processSubstring([toChar(year_Date_1__Integer_1_, $a, $p, $s), iv(3), iv(2)])}),
+        pair('Y', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | processSubstring([toChar(year_Date_1__Integer_1_, $a, $p, $s), iv(4), iv(1)])}),
+        pair('IYYY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('IYYY')}),
+        pair('IYY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('IYY')}),
+        pair('IY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('IY')}),
+        pair('I', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('I')}),
+        pair('BC', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('BC')}),
+        pair('bc', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('bc')}),
+        pair('AD', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('AD')}),
+        pair('ad', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('ad')}),
+        pair('B.C.', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('B.C.')}),
+        pair('b.c.', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('b.c.')}),
+        pair('A.D.', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('A.D.')}),
+        pair('a.d.', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('a.d.')}),
+        pair('MONTH', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, true, false, [], [], 9, ' ', $p, $s)}),
+        pair('Month', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, false, false, [], [], 9, ' ', $p, $s)}),
+        pair('month', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, false, true, [], [], 9, ' ', $p, $s)}),
+        pair('MON', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, true, false, [], [], 3, ' ', $p, $s)}),
+        pair('Mon', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, false, false, [], [], 3, ' ', $p, $s)}),
+        pair('mon', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(month_Date_1__Month_1_, $a, false, true, [], [], 3, ' ', $p, $s)}),
+        pair('MM', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(monthNumber_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('DAY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, true, false, [], [], 9, ' ', $p, $s)}),
+        pair('Day', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, false, false, [], [], 9, ' ', $p, $s)}),
+        pair('day', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, false, true, [], [], 9, ' ', $p, $s)}),
+        pair('DY', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, true, false, [], [], 3, ' ', $p, $s)}),
+        pair('Dy', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, false, false, [], [], 3, ' ', $p, $s)}),
+        pair('dy', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeek_Date_1__DayOfWeek_1_, $a, false, true, [], [], 3, ' ', $p, $s)}),
+        pair('DDD', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfYear_Date_1__Integer_1_, $a, false, false, 3, '0', [], [], $p, $s)}),
+        pair('IDDD', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('IDDD')}),
+        pair('DD', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfMonth_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('D', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(dayOfWeekNumber_Date_1__Integer_1_, $a, $p, $s)}),
+        pair('ID', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('ID')}),
+        pair('W', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('W')}),
+        pair('WW', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(weekOfYear_Date_1__Integer_1_, $a, false, false, 2, '0', [], [], $p, $s)}),
+        pair('IW', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('IW')}),
+        pair('CC', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('CC')}),
+        pair('J', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('J')}),
+        pair('Q', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toChar(quarterNumber_Date_1__Integer_1_, $a, $p, $s)}),
+        pair('RM', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('RM')}),
+        pair('rm', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('rm')}),
+        pair('TZ', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('TZ')}),
+        pair('tz', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('tz')}),
+        pair('TZH', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('TZH')}),
+        pair('TZM', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('TZM')}),
+        pair('OF', {p:String[0..1], s:String[0..1], a:ValueSpecification[1] | toCharUnsupported('OF')})
+    ]->sortBy(p | $p.first->length())->reverse();
+}
+
+Class meta::external::query::sql::transformation::queryToPure::ToCharContext
+{
+    format: String[1];
+    result: ValueSpecification[0..1];
+    matched: Boolean[1];
+    prefix: String[0..1];
+    suffix: String[0..1];
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toCharPrefixes():Pair<String, Function<{->Boolean[1]}>>[*]
+{
+    [
+        pair('FM', | true),
+        pair('FX', | toCharUnsupported('FX'); false;),
+        pair('TM', | toCharUnsupported('TM'); false;)
+    ]->sortBy(p | $p.first->length())->reverse();
+}
+
+//token -> supported
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toCharSuffixes():Pair<String, Function<{->Boolean[1]}>>[*]
+{
+    [
+        pair('TH', {| toCharUnsupported('TH'); false;}),
+        pair('th', {| toCharUnsupported('th'); false;}),
+        pair('SP', {| toCharUnsupported('SP'); false;})
+    ]->sortBy(p | $p.first->length())->reverse();
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toChar(format:String[1], result:ValueSpecification[0..1], arg:ValueSpecification[1]):ValueSpecification[1]
+{
+    let formats = toCharFormats();
+    let prefixes = toCharPrefixes();
+    let suffixes = toCharSuffixes();
+
+    if ($format->length() == 0,
+        | if ($result->isEmpty(), | $arg, | $result->toOne()),
+        |
+        let prefix = $prefixes->fold({p, acc |
+            if ($format->startsWith($p.first) && $p.second->eval(), | pair($format->substring($p.first->length(), $format->length()), $p.first), | $acc);
+        }, pair($format, ''));
+
+        let scan = $formats->fold({token, acc |
+            if (!$acc.matched && $acc.format->startsWith($token.first),
+                |
+                let newFormat = $acc.format->substring($token.first->length(), $acc.format->length());
+
+                let suffix = $suffixes->fold({s, acc |
+                    if ($newFormat->startsWith($s.first) && $s.second->eval(), | pair($newFormat->substring($s.first->length(), $newFormat->length()), $s.first), | $acc);
+                }, pair($newFormat, ''));
+
+                let newResult = toCharCombine($result->evaluateAndDeactivate(), $token.second->eval($acc.prefix, if ($suffix.second != '', | $suffix.second, | []), $arg));
+
+                ^$acc(format = $suffix.first, result = $newResult->evaluateAndDeactivate(), matched = true);,
+                | $acc);
+        }, ^ToCharContext(format = $prefix.first, result = $result->evaluateAndDeactivate(), matched = false, prefix = if ($prefix.second != '', | $prefix.second, | [])));
+
+        if ($scan.matched,
+            | toChar($scan.format, $scan.result->evaluateAndDeactivate(), $arg),
+            | toChar($scan.format->substring(1, $scan.format->length()), toCharCombine($scan.result->evaluateAndDeactivate(), iv($scan.format->chunk(1)->at(0))), $arg));
+    );
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toCharCombine(v:ValueSpecification[0..1], v2:ValueSpecification[1]):ValueSpecification[1]
+{
+    if ($v->isEmpty(), | $v2, | sfe(plus_String_MANY__String_1_, iv([$v->toOne(), $v2])))->evaluateAndDeactivate()
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toCharUnsupported(func:String[1]):ValueSpecification[1]
+{
+    fail($func + ' is not a supported to_char token');
+    iv(1);
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toChar(func:Function<Any>[1], arg:ValueSpecification[1], prefix:String[0..1], suffix:String[0..1]):ValueSpecification[1]
+{
+    toChar($func, $arg, false, false, $prefix, $suffix)
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toChar(func:Function<Any>[1], arg:ValueSpecification[1], upper:Boolean[1], lower:Boolean[1], prefix:String[0..1], suffix:String[0..1]):ValueSpecification[1]
+{
+    toChar($func, $arg, $upper, $lower, [], [], [], [], $prefix, $suffix)
+}
+
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::toChar(func:Function<Any>[1], arg:ValueSpecification[1], upper:Boolean[1], lower:Boolean[1], lpadLength:Integer[0..1], lpadChar:String[0..1], rpadLength:Integer[0..1], rpadChar:String[0..1], prefix:String[0..1], suffix:String[0..1]):ValueSpecification[1]
+{
+    let f = nullOrSfe($func, $arg->evaluateAndDeactivate())->evaluateAndDeactivate();
+
+    let type = $func->functionReturnType().rawType->toOne();
+    let str = if ($type != String, | nullOrSfe(toString_Any_1__String_1_, $f), | $f);
+
+    let cased = if ($upper,
+        | nullOrSfe(toUpper_String_1__String_1_, $str),
+        | if ($lower,
+            | nullOrSfe(toLower_String_1__String_1_, $str),
+            | $str));
+
+    if ($prefix == 'FM',
+        | $cased,
+        |
+        let lpad = if ($lpadLength->isNotEmpty() && $lpadChar->isNotEmpty(), | processPad([$cased, iv($lpadLength), iv($lpadChar)], true), | $cased);
+        if ($rpadLength->isNotEmpty() && $rpadChar->isNotEmpty(), | processPad([$cased, iv($rpadLength), iv($rpadChar)], false), | $lpad););
+}
+
 function meta::external::query::sql::transformation::queryToPure::processPercentile(name:String[1], args:ValueSpecification[*], continuous:Boolean[1], fc:FunctionCall[1], expContext:SqlTransformExpressionContext[1]):ValueSpecification[1]
 {
     assert($fc.group->isNotEmpty(), | $name + ' must specify group');
@@ -2077,6 +2316,20 @@ function meta::external::query::sql::transformation::queryToPure::processTrim(fu
     nullOrSfe($func, $args->at(0));
 }
 
+function meta::external::query::sql::transformation::queryToPure::processPad(args:ValueSpecification[*], left:Boolean[1]):ValueSpecification[1]
+{
+    assert($args->size() == 2 || $args->size() == 3, | 'incorrect number of args for pad function');
+
+    let func = [
+        pair($left && $args->size() == 2, lpad_String_1__Integer_1__String_1_),
+        pair($left && $args->size() == 3, lpad_String_1__Integer_1__String_1__String_1_),
+        pair(!$left && $args->size() == 2, rpad_String_1__Integer_1__String_1_),
+        pair(!$left && $args->size() == 3, rpad_String_1__Integer_1__String_1__String_1_)
+    ]->getValue(true);
+
+    nullOrSfe($func, $args);
+}
+
 function meta::external::query::sql::transformation::queryToPure::processHash(args:ValueSpecification[*], type:meta::pure::functions::hash::HashType[1]):ValueSpecification[1]
 {
     assert($args->size() == 1, 'incorrect number of args');
@@ -2105,7 +2358,7 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
     debug('processNegativeExpression', $context.debug);
 
     let value = $n.value->processExpression($expContext, $context);
-    sfe(minus_Number_MANY__Number_1_, $value);
+    createMinus($value.genericType.rawType, false, $value);
 }
 
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::getLiteralType(literal:Literal[1]):Type[1]
@@ -2207,28 +2460,50 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
     let type = if ($leftType->isNotEmpty() && $leftType == $rightType, | $leftType->toOne(), | Number);
 
     let expression = [
-        pair(ArithmeticType.ADD, | sfe([
-            pair(Integer, plus_Integer_MANY__Integer_1_),
-            pair(Float, plus_Float_MANY__Float_1_),
-            pair(Float, plus_Decimal_MANY__Decimal_1_)
-        ]->getValue($type, plus_Number_MANY__Number_1_), iv($left->concatenate($right)))),
-        pair(ArithmeticType.SUBTRACT, | sfe([
+        pair(ArithmeticType.ADD, | createPlus($type, false, $left->concatenate($right))),
+        pair(ArithmeticType.SUBTRACT, | createMinus($type, false, $left->concatenate($right))),
+        pair(ArithmeticType.MULTIPLY, | createTimes($type, false, $left->concatenate($right))),
+        pair(ArithmeticType.DIVIDE, | nullOrSfe(divide_Number_1__Number_1__Float_1_, [$left, $right])),
+        pair(ArithmeticType.MODULUS, | nullOrSfe(mod_Integer_1__Integer_1__Integer_1_, [$left, $right])),
+        pair(ArithmeticType.POWER, | nullOrSfe(pow_Number_1__Number_1__Number_1_, [$left, $right]))
+    ]->getValue($a.type)->eval();
+
+    if ($left->isNull() || $right->isNull(), | iv([], $expression->evaluateAndDeactivate().genericType), | $expression);
+}
+
+function meta::external::query::sql::transformation::queryToPure::createMinus(type:Type[0..1], null:Boolean[1], args:ValueSpecification[*]):ValueSpecification[1]
+{
+    createTypedFunction([
         pair(Integer, minus_Integer_MANY__Integer_1_),
         pair(Float, minus_Float_MANY__Float_1_),
         pair(Decimal, minus_Decimal_MANY__Decimal_1_)
-    ]->getValue($type, minus_Number_MANY__Number_1_), iv($left->concatenate($right)))),
-    pair(ArithmeticType.MULTIPLY, | sfe([
+    ], minus_Number_MANY__Number_1_, $type, $null, iv($args));
+}
+
+function meta::external::query::sql::transformation::queryToPure::createPlus(type:Type[0..1], null:Boolean[1], args:ValueSpecification[*]):ValueSpecification[1]
+{
+    createTypedFunction([
+        pair(Integer, plus_Integer_MANY__Integer_1_),
+        pair(Float, plus_Float_MANY__Float_1_),
+        pair(Decimal, plus_Decimal_MANY__Decimal_1_)
+    ], plus_Number_MANY__Number_1_, $type, $null, iv($args));
+}
+
+function meta::external::query::sql::transformation::queryToPure::createTimes(type:Type[0..1], null:Boolean[1], args:ValueSpecification[*]):ValueSpecification[1]
+{
+    createTypedFunction([
         pair(Integer, times_Integer_MANY__Integer_1_),
         pair(Float, times_Float_MANY__Float_1_),
         pair(Decimal, times_Decimal_MANY__Decimal_1_)
-    ]->getValue($type, times_Number_MANY__Number_1_), iv($left->concatenate($right)))),
-    pair(ArithmeticType.DIVIDE, | nullOrSfe(divide_Number_1__Number_1__Float_1_, [$left, $right])),
-    pair(ArithmeticType.MODULUS, | nullOrSfe(mod_Integer_1__Integer_1__Integer_1_, [$left, $right])),
-    pair(ArithmeticType.POWER, | nullOrSfe(pow_Number_1__Number_1__Number_1_, [$left, $right]))
-    ]->getValue($a.type)->eval();
+    ], times_Number_MANY__Number_1_, $type, $null, iv($args));
+}
 
-    if ($left->isNull() || $right->isNull(), | iv([], $expression->evaluateAndDeactivate().genericType), | $expression);
+function <<access.private>> meta::external::query::sql::transformation::queryToPure::createTypedFunction(pairs:Pair<Type, Function<Any>>[*], default:Function<Any>[1], type:Type[0..1], null:Boolean[1], args:ValueSpecification[*]):ValueSpecification[1]
+{
+    let func = if ($type->isEmpty(), | $default, | $pairs->getValue($type->toOne(), $default));
+    if ($null, | nullOrSfe($func, $args), | sfe($func, $args));
 }
 
 //purely internal class to ensure we can handle expressions in the interval calculation logic
@@ -2248,14 +2523,19 @@ Class <<access.private>> meta::external::query::sql::transformation::queryToPure
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::simplifyDateArithmetic(e:meta::external::query::sql::metamodel::Expression[1], expContext:SqlTransformExpressionContext[1], context:SqlTransformContext[1]):meta::external::query::sql::metamodel::Expression[1]
 {
     $e->match([
-        a:ArithmeticExpression[1] |
+        ae:ArithmeticExpression[1] |
+            let a = ^$ae(
+                left = if ($ae.left->instanceOf(StringLiteral), | ^Cast(expression = $ae.left, type = ^ColumnType(name = 'TIMESTAMP')), | $ae.left),
+                right = if ($ae.right->instanceOf(StringLiteral), | ^Cast(expression = $ae.right, type = ^ColumnType(name = 'TIMESTAMP')), | $ae.right)
+            );
+
             if ($a.right->instanceOf(NullLiteral) || $a.left->instanceOf(NullLiteral),
                 | ^NullLiteral(),
                 | if ($a.type == ArithmeticType.MULTIPLY && $a.left->instanceOf(IntervalLiteral),
                     | multiplyIntervalLiteral($a.left->cast(@IntervalLiteral), $a.right, $expContext, $context),
                     | if ($a.type == ArithmeticType.MULTIPLY && $a.right->instanceOf(IntervalLiteral),
                         | multiplyIntervalLiteral($a.right->cast(@IntervalLiteral), $a.left, $expContext, $context),
-                        | ^$a(left = $a.left->simplifyDateArithmetic($expContext, $context), right = $a.right->simplifyDateArithmetic($expContext, $context))))),
+                        | ^$a(left = $a.left->simplifyDateArithmetic($expContext, $context), right = $a.right->simplifyDateArithmetic($expContext, $context)))));,
         i:IntervalLiteral[1] | ^IntervalLiteralWrapper(ago = $i.ago, years = $i.years->ivIfNotEmpty(), months = $i.months->ivIfNotEmpty(), weeks = $i.weeks->ivIfNotEmpty(), days = $i.days->ivIfNotEmpty(), hours = $i.hours->ivIfNotEmpty(), minutes = $i.minutes->ivIfNotEmpty(), seconds = $i.seconds->ivIfNotEmpty()),
         e:meta::external::query::sql::metamodel::Expression[1] | $e
@@ -2291,19 +2571,28 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
     assert($simplified.type == ArithmeticType.ADD || $simplified.type == ArithmeticType.SUBTRACT, | 'arithmetic type ' + $simplified.type.name + ' not currently supported for dates');
 
     //note we are making assumption here that Any is fine. This results from a function call that is generic return type (e.g. max)
-    assert($leftTypeNormalized == Date || $leftTypeNormalized == Any, | 'left side of date arithmetic must be non interval date');
-    assert($rightTypeNormalized == Number || $simplified.right->instanceOf(IntervalLiteralWrapper) || $simplified.right->instanceOf(NullLiteral), | 'right side of date arithmetic must be numeric or interval');
+    assert($leftTypeNormalized == Date || $leftTypeNormalized == String || $leftTypeNormalized == Any, | 'left side of date arithmetic must be non interval date');
+
+    assert($rightTypeNormalized == Number || $rightTypeNormalized == String || $simplified.right->instanceOf(IntervalLiteralWrapper) || $simplified.right->instanceOf(NullLiteral) || ($rightTypeNormalized == Date && $simplified.type == ArithmeticType.SUBTRACT), | 'right side of date arithmetic must be numeric or interval');
 
     let negate = $simplified.type == ArithmeticType.SUBTRACT;
 
     [
         pair($simplified.right->instanceOf(IntervalLiteralWrapper) && !$simplified.left->instanceOf(NullLiteral), {|
             let left = $simplified.left->processExpression($expContext, $context);
-            processIntervalToAdjust($left, $simplified.right->cast(@IntervalLiteralWrapper), $negate);}),
+            processIntervalToAdjust($left, $simplified.right->cast(@IntervalLiteralWrapper), $negate);
+        }),
         pair($simplified.right->instanceOf(NullLiteral) || $simplified.left->instanceOf(NullLiteral), {|
             let left = $simplified.left->processExpression($expContext, $context);
             let cast = ^Cast(expression = ^NullLiteral(), type = ^ColumnType(name = 'DATE'));
-            processCastAsCast($cast, processExpression($cast, $expContext, $context), $expContext, $context);})
+            processCastAsCast($cast, processExpression($cast, $expContext, $context), $expContext, $context);
+        }),
+        pair(($leftTypeNormalized == Date || $leftTypeNormalized == String) && ($rightTypeNormalized == Date || $rightTypeNormalized == String) && $simplified.type == ArithmeticType.SUBTRACT && !$simplified.right->instanceOf(IntervalLiteralWrapper), {|
+            let left = $simplified.left->processExpression($expContext, $context);
+            let right = $simplified.right->processExpression($expContext, $context);
+
+            sfe(dateDiff_Date_1__Date_1__DurationUnit_1__Integer_1_, [$left, $right, processExtractEnumValue(DurationUnit, DurationUnit.DAYS.name)]);
+        })
     ]->getValue(true, {|
         let left = $simplified.left->processExpression($expContext, $context);
         let right = $simplified.right->processExpression($expContext, $context);
@@ -2536,7 +2825,11 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 {
     if ($amount->isNotEmpty(),
         |
-        let adjustedAmount = if ($ago, | nullOrSfe(minus_Number_MANY__Number_1_, $amount->toOne()), | $amount);
+        let adjustedAmount = if ($ago,
+            |
+            let type = $amount->evaluateAndDeactivate().genericType.rawType;
+            createMinus($type, true, $amount->toOne());,
+            | $amount);
 
         nullOrSfe(adjust_Date_1__Integer_1__DurationUnit_1__Date_1_, [$input, iv($adjustedAmount->toOne()), processExtractEnumValue(DurationUnit, $unit.name)]);,
         | $input)
 }
@@ -2556,6 +2849,13 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
 }
 
+function meta::external::query::sql::transformation::queryToPure::appendTdsFunc(lambda:LambdaFunction<Any>[1], execFunc: meta::pure::metamodel::function::Function<Any>[1], args: List<Any>[*]): LambdaFunction<Any>[1]
+{
+    ^$lambda(
+        expressionSequence = appendTdsFunc($lambda.expressionSequence->last()->toOne()->cast(@FunctionExpression), $execFunc, $args)
+    )
+}
+
 function <<access.private>> meta::external::query::sql::transformation::queryToPure::appendTdsFunc(func: FunctionExpression[1], execFunc: meta::pure::metamodel::function::Function<Any>[1], args: List<Any>[*]): FunctionExpression[1]
 {
     let pvs = $args->fold(
@@ -2683,6 +2983,8 @@ function <<access.private>> meta::external::query::sql::transformation::queryToP
             ^Multiplicity(lowerBound = $mv, upperBound = $mv);
     ]);
 
+
+
     ^InstanceValue(multiplicity = $multiplicity, genericType = $genericType, values = $value);
 }
@@ -2760,7 +3062,7 @@ Class meta::external::query::sql::transformation::queryToPure::SqlTransformConte
             $indent + 'name: ' + if ($this.name->isEmpty(), | '[]', | $this.name->toOne()) + '\n' +
             $indent + 'root: ' + $this.root->toString() + '\n' +
             if ($this.aliases->isNotEmpty(), | $indent + 'aliases: \n' + $indent + '  ' + $this.aliases.toString()->joinStrings('\n' + $indent + '  ') + '\n', | '') +
-            if ($this.contexts->isNotEmpty(), | $indent + 'contexts: \n' + $indent + $this.contexts->map(c | $c.toString($indent + '  '))->joinStrings('\n' + $indent), | '') +
+            if ($this.contexts->isNotEmpty(), | $indent + 'contexts: \n' + $this.contexts->map(c | $c.toString($indent + '  '))->joinStrings('\n'), | '') +
             if ($this.assignments->isNotEmpty(), | $indent + 'assigments: ' + $this.assignments->size()->toString(), | '')
     }:String[1];
@@ -2774,13 +3076,14 @@ Class meta::external::query::sql::transformation::queryToPure::SqlTransformConte
     }:SqlTransformContext[1];
 
     columns(){
         if ($this.expression->isNotEmpty(),
-            | meta::pure::tds::schema::resolveSchema($this.lambda(), $this.extensions),
+            | meta::pure::tds::schema::resolveSchema($this.lambda(), $this.extensions);,
            | [])
     }: TDSColumn[*];
 
     columnByNameParts(parts:String[*], failIfNotFound:Boolean[1]) {
         let name = if ($parts->size() > 1, | $parts->last(), | $parts)->joinStrings('.');
         let contextName = $parts->init()->joinStrings('.');
+
         let foundContext = $this.contexts->filter(c | $c.name == $contextName);
 
         let context = if ($contextName->isEmpty() || $foundContext->isEmpty(), | $this, | $foundContext->toOne());
@@ -2797,14 +3100,14 @@ Class meta::external::query::sql::transformation::queryToPure::SqlTransformConte
     }: TDSColumn[0..1];
 
     alias(nameParts:String[*], alias:String[0..1], failIfNotFound:Boolean[1]){
-        let name = $nameParts->last()->toOne();
-        let contexts = if ($nameParts->size() > 1, | $this.context($nameParts->at(0)), | $this->concatenate($this.contexts));
+        let contextName = $nameParts->at(0);
+        let contexts = if ($nameParts->size() > 1, | $this.context($contextName), | $this->concatenate($this.contexts));
 
         let aliases = $contexts.aliases->removeDuplicates();
 
-        let filter = if ($nameParts->size() > 1,
-            | {a:SQLColumnAlias[1] | $a.name == $name || $a.alias == $name},
+        let filter = if ($nameParts->size() > 1 && $contextName != $this.name,
+            | {a:SQLColumnAlias[1] | ($a.alias->isEmpty() && $a.name == $name) || ($a.alias == $name)},
             | {a:SQLColumnAlias[1] | $a.name == $name && $a.alias == $alias});
 
         let found = $aliases->filter($filter);
@@ -2923,5 +3226,10 @@ function meta::external::query::sql::transformation::queryToPure::getParameters(
 function meta::external::query::sql::transformation::queryToPure::debug(a:String[1], debug:DebugContext[1]):Any[0]
 {
-    if ($debug.debug, | println($debug.space + $a), | []);
+    debug({|$a}, $debug)
 }
+
+function meta::external::query::sql::transformation::queryToPure::debug(f:FunctionDefinition<{->String[1]}>[1], debug:DebugContext[1]):Any[0]
+{
+    if ($debug.debug, | println($debug.space + $f->eval()), | []);
+}
\ No newline at end of file
diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/tests/testTranspile.pure b/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/tests/testTranspile.pure
index 1d745b94da8..926356cd1e2 100644
--- a/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/tests/testTranspile.pure
+++ b/legend-engine-xts-sql/legend-engine-xt-sql-pure/src/main/resources/core_external_query_sql/binding/fromPure/tests/testTranspile.pure
@@ -157,6 +157,19 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
     })
 }
 
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectWithTableAlias():Boolean[1]
+{
+    test(
+        'SELECT t1."String" AS "str" FROM service."/service/service1" t1',
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->restrict('String')->renameColumns(pair('String', 'str'))
+        }
+    )
+}
+
 function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnMultiTimes():Boolean[1]
 {
     test(
@@ -174,7 +187,10 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
 function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnMultiTimesRealiasToExisting():Boolean[1]
 {
     test(
-        'SELECT String as "str", String as "String" FROM service."/service/service1"',
+        [
+            'SELECT String as "str", String as "String" FROM service."/service/service1"',
+            'SELECT "t1"."String" as "str", "t1".String as "String" FROM service."/service/service1" t1'
+        ],
 
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
@@ -185,18 +201,72 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
     })
 }
 
-function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnAliasedAsUnusedTableColumnName():Boolean[1]
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnMultiTimesRealiasToExisting2():Boolean[1]
 {
     test(
-        'SELECT Integer as "String" FROM service."/service/service1"',
+        [
+            'SELECT String as "str", String as "Integer" FROM service."/service/service1"',
+            'SELECT "t1"."String" as "str", "t1".String as "Integer" FROM service."/service/service1" t1'
+        ],
 
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
-            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
-            ->restrict('Integer')->renameColumns(pair('Integer', 'String'))
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])->project([
+                col(row:TDSRow[1] | $row.getString('String'), 'str'),
+                col(row:TDSRow[1] | $row.getString('String'), 'Integer')
+            ])
     })
 }
 
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnMultiTimesRealiasToExistingWithGroupBy():Boolean[1]
+{
+    test(
+        'SELECT ' +
+            't1.String AS "col1", ' +
+            't1.String AS "col2", ' +
+            't1.StrictDate AS "StrictDate", ' +
+            't1.String AS "String", ' +
+            'SUM(t1.Integer) AS "sum" ' +
+        'FROM (select * from service."/service/service1") "t1" GROUP BY 3, 4',
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->extend([
+                col(row:TDSRow[1] | $row.getString('String'), 'col1'),
+                col(row:TDSRow[1] | $row.getString('String'), 'col2'),
+                col(row:TDSRow[1] | $row.getStrictDate('StrictDate'), 'StrictDate_1'),
+                col(row:TDSRow[1] | $row.getString('String'), 'String_1')
+            ])->groupBy(
+                ['StrictDate_1', 'String_1', 'col1', 'col2'],
+                agg('sum', row | $row.getInteger('Integer'), y | $y->sum())
+            )->renameColumns([
+                pair('StrictDate_1', 'StrictDate'),
+                pair('String_1', 'String')
+            ])->restrict([
+                'col1', 'col2', 'StrictDate', 'String', 'sum'
+            ])
+        }, false
+    )
+}
+
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::selectStarFromRealisedColumnSubQuery():Boolean[1]
+{
+    test(
+        [
+            'SELECT * FROM (select String as "S" from service."/service/service1")',
+            'SELECT t1.* FROM (select String as "S" from service."/service/service1") t1'
+        ],
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->restrict('String')
+            ->renameColumns(pair('String', 'S'))
+        }
+    )
+}
+
 function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testSelectColumnMultiTimesGroupBy():Boolean[1]
 {
     test(
@@ -204,8 +274,7 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
-            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])->project([
-                col(row:TDSRow[1] | $row.getString('String'), 'String'),
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])->extend([
                 col(row:TDSRow[1] | $row.getString('String'), 'str')
             ])->restrict(['String', 'str'])->distinct()
     })
@@ -411,6 +480,26 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
     })
 }
 
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testOrderByWithExtendAlias():Boolean[1]
+{
+    test(
+        'SELECT Calc AS Calc, Calc AS Calc2 FROM (select 1 AS Calc from service."/service/service1") GROUP BY 1 ORDER BY 1, Calc2 ASC',
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->project(col(row:TDSRow[1] | 1, 'Calc'))
+            ->extend([
+                col(row:TDSRow[1] | $row.getInteger('Calc'), 'Calc_1'),
+                col(row:TDSRow[1] | $row.getInteger('Calc'), 'Calc2')
+            ])
+            ->restrict(['Calc_1', 'Calc2'])
+            ->distinct()
+            ->sort([asc('Calc_1'), asc('Calc2')])
+            ->renameColumns(pair('Calc_1', 'Calc'))
+        })
+}
+
 //WHERE
 function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testWhere():Boolean[1]
 {
@@ -626,7 +715,9 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
             [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]
-        )->renameColumns(pair('Boolean', 'bool'))
+        )->extend([
+            col(row:TDSRow[1] | $row.getBoolean('Boolean'), 'bool')
+        ])
         ->groupBy(['String', 'bool'], agg('sum', row | $row.getInteger('Integer'), y | $y->sum()))
         ->restrict(['String', 'sum', 'bool'])
     }, false)
@@ -640,7 +731,9 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
             [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]
-        )->renameColumns(pair('Boolean', 'bool'))
+        )->extend([
+            col(row:TDSRow[1] | $row.getBoolean('Boolean'), 'bool')
+        ])
         ->groupBy(['String', 'bool'], agg('sum', row | $row.getInteger('Integer'), y | $y->sum()))
         ->restrict(['String', 'sum', 'bool'])
     }, false)
@@ -715,21 +808,53 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
     }, false)
 }
 
-function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testGroupByAggWithinCase():Boolean[1]
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testGroupByAggWithinFunc():Boolean[1]
 {
-    test('SELECT CASE WHEN(SUM("Integer") <10) THEN max("Integer") ELSE min("Integer") END AS "MIN/MAX", ' +
-        'CASE WHEN(SUM("Integer") < 10) THEN \'LOW\' ELSE \'HIGH\' END AS "HIGH/LOW" FROM service."/service/service1"',
+    test('SELECT ' +
+        'CASE WHEN(SUM("Integer") <10) THEN max("Integer") ELSE min("Integer") END AS "MIN/MAX", ' +
+        'CASE WHEN(SUM("Integer") < 10) THEN \'LOW\' ELSE \'HIGH\' END AS "HIGH/LOW", ' +
+        'cast(sum("Integer") AS VARCHAR) AS "CAST", ' +
+        'floor(sum("Integer")) AS "FUNC" ' +
+        'FROM service."/service/service1"',
 
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
             [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]
         )->groupBy([], [
             agg('MIN/MAX', row | $row.getInteger('Integer'), y | if ($y->sum() < 10, | max($y), | min($y))),
-            agg('HIGH/LOW', row | $row.getInteger('Integer'), y | if ($y->sum() < 10, | 'LOW', | 'HIGH'))
+            agg('HIGH/LOW', row | $row.getInteger('Integer'), y | if ($y->sum() < 10, | 'LOW', | 'HIGH')),
+            agg('CAST', row | $row.getInteger('Integer'), y | $y->sum()->toString()),
+            agg('FUNC', row | $row.getInteger('Integer'), y | floor($y->sum()))
         ])
     }, false)
 }
 
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testAggregationWithConstantSelectItemNoGroupBy():Boolean[1]
+{
+    test(
+        'SELECT count("Integer") AS "Count", count("Float") AS "Float Count", \'abc\' AS "String", cast("Float" AS VARCHAR) AS "Float", Float AS "Original Float" from service."/service/service1"',
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->extend([
+                col(row:TDSRow[1] | 'abc', 'String_1'),
+                col(row:TDSRow[1] | $row.getFloat('Float')->toString(), 'Float_1'),
+                col(row:TDSRow[1] | $row.getFloat('Float'), 'Original Float')
+            ])
+            ->groupBy(['String_1', 'Float_1', 'Original Float'], [
+                agg('Count', row | $row.getInteger('Integer'), y | $y->count()),
+                agg('Float Count', row | $row.getFloat('Float'), y | $y->count())
+            ])
+            ->renameColumns([
+                pair('String_1', 'String'),
+                pair('Float_1', 'Float')
+            ])
+            ->restrict(['Count', 'Float Count', 'String', 'Float', 'Original Float'])
+        }, false
+    )
+}
+
 //HAVING
 function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testHaving():Boolean[1]
 {
@@ -796,9 +921,9 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
                 pair('DateTime', 'DateTime_table2'),
                 pair('String', 'String_table2')
             ]), meta::relational::metamodel::join::JoinType.LEFT_OUTER, ['String'])
-            ->renameColumns([
-                pair('String_table2', 'str')
-            ])
+            ->extend(
+                col(row:TDSRow[1] | $row.getString('String_table2'), 'str')
+            )
             ->groupBy([
                 'Boolean_table1', 'Integer_table1', 'Float_table1', 'Decimal_table1', 'StrictDate_table1', 'DateTime_table1', 'String_table1', 'str'
@@ -839,8 +964,8 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
         ' CAST(String AS DOUBLE PRECISION) AS "double", CAST(String AS NUMERIC) AS "numeric",' +
         ' CAST(String AS TIMESTAMP) AS "timestamp", CAST(Integer AS TEXT) AS "integerText", CAST(Integer AS VARCHAR) AS "integerString", CAST(Integer AS Integer) AS "expression",' +
         ' CAST(String AS VARCHAR(2)) AS "stringChars", CAST(Integer AS VARCHAR(2)) AS "integerStringChars",' +
-        ' CAST(Float AS NUMERIC) AS "floatNumeric", CAST(Decimal AS NUMERIC) AS "decimalNumeric", CAST(Decimal AS DOUBLE PRECISION) AS "decimalDoublePrecision",' +
-        ' CAST(Float AS DOUBLE PRECISION) AS "floatDoublePrecision", CAST(String AS NUMERIC(4, 2)) AS "numericParams" FROM service."/service/service1"',
+        ' CAST(Float AS NUMERIC) AS "floatNumeric", CAST(Decimal AS NUMERIC) AS "decimalNumeric", CAST(1 + 1.1 AS NUMERIC) AS "numberNumeric", CAST(Decimal AS DOUBLE PRECISION) AS "decimalDoublePrecision",' +
+        ' CAST(Float AS DOUBLE PRECISION) AS "floatDoublePrecision", CAST(1 + 1.1 AS DOUBLE PRECISION) AS "numberDoublePrecision", CAST(String AS NUMERIC(4, 2)) AS "numericParams" FROM service."/service/service1"',
 
         {| FlatInput.all()->project(
             [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
@@ -864,8 +989,10 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
                 col(row:TDSRow[1] | substring(toString($row.getInteger('Integer')), 1, 2), 'integerStringChars'),
                 col(row:TDSRow[1] | toDecimal($row.getFloat('Float')), 'floatNumeric'),
                 col(row:TDSRow[1] | $row.getDecimal('Decimal'), 'decimalNumeric'),
+                col(row:TDSRow[1] | toDecimal(1 + 1.1), 'numberNumeric'),
                 col(row:TDSRow[1] | toFloat($row.getDecimal('Decimal')), 'decimalDoublePrecision'),
                 col(row:TDSRow[1] | $row.getFloat('Float'), 'floatDoublePrecision'),
+                col(row:TDSRow[1] | toFloat(1 + 1.1), 'numberDoublePrecision'),
                 col(row:TDSRow[1] | round(parseDecimal($row.getString('String')), 2), 'numericParams')
             ])
     })
@@ -1189,6 +1316,83 @@ function <<test.Test>> meta::external::query::sql::transformation::queryToPure::
         ])}, false)
 }
 
+function <<test.Test>> meta::external::query::sql::transformation::queryToPure::tests::testJoinWithAliasRenamingInSubQueries():Boolean[1]
+{
+    test(
+        'SELECT "t0"."String" AS "String", "t1"."measure" AS "sum" FROM (SELECT "t3"."String" AS "String" FROM service."/service/service1" "t3" GROUP BY 1) "t0" CROSS JOIN (SELECT SUM("t3"."Integer") AS "measure" FROM service."/service/service1" "Staples" HAVING (COUNT(1) > 0)) "t1"',
+
+        {| FlatInput.all()->project(
+            [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ],
+            [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ])
+            ->extend([
+                col(row:TDSRow[1]|$row.getString('String'), 'String_1')
+            ])
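+            //the subquery's GROUP BY column is realiased to 'String_1' to avoid clashing with the incoming 'String' column; it is renamed back below and then suffixed with its table alias ('String_t0') so the join columns stay unambiguous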
+ ->restrict('String_1')->distinct() + ->renameColumns(pair('String_1', 'String')) + ->renameColumns(pair('String', 'String_t0')) + ->join(FlatInput.all()->project( + [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ], + ['Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]) + ->groupBy([], [ + agg('measure', row | $row.getInteger('Integer'), y | $y->sum()), + agg('COUNT(1)', row | 1, y | $y->count()) + ]) + ->filter(row|($row.getInteger('COUNT(1)') > 0)) + ->restrict('measure') + ->renameColumns(pair('measure', 'measure_t1')), + meta::relational::metamodel::join::JoinType.INNER, {row1:TDSRow[1], row2:TDSRow[1] | true}) + ->restrict(['String_t0', 'measure_t1']) + ->renameColumns([ + pair('String_t0', 'String'), + pair('measure_t1', 'sum') + ]) + }, false + ) +} + +function <> meta::external::query::sql::transformation::queryToPure::tests::testJoinWithMultiCommonTableAliases():Boolean[1] +{ + test( + 'SELECT "t0"."int" AS "int",' + + ' "t1"."Integer" AS "Integer"' + + 'FROM service."/service/service1" "t1"' + + 'INNER JOIN (' + + ' SELECT "t1"."Integer" AS "int",' + + ' "t1"."String" AS "str"' + + ' FROM service."/service/service2" "t1"' + + ') "t0" ON (("t1"."Integer" = "t0"."int") AND ("t1"."String" = "t0"."str"))', + + {|meta::external::query::sql::transformation::queryToPure::tests::FlatInput.all() + ->project([x|$x.booleanIn, x|$x.integerIn, x|$x.floatIn, x|$x.decimalIn, x|$x.strictDateIn, x|$x.dateTimeIn, x|$x.stringIn], + ['Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String']) + ->renameColumns([ + pair('Boolean', 'Boolean_t1'), + pair('Integer', 'Integer_t1'), + pair('Float', 'Float_t1'), + pair('Decimal', 'Decimal_t1'), + pair('StrictDate', 'StrictDate_t1'), + pair('DateTime', 'DateTime_t1'), + pair('String', 'String_t1')]) + ->join(meta::external::query::sql::transformation::queryToPure::tests::FlatInput.all() + ->project([x|$x.idIn, x|$x.integerIn, x|$x.stringIn], ['ID', 'Integer', 'String']) + ->restrict(['Integer', 'String']) + ->renameColumns([ + pair('Integer', 'int'), + pair('String', 'str')]) + ->renameColumns([ + pair('int', 'int_t0'), + pair('str', 'str_t0')]), + meta::relational::metamodel::join::JoinType->extractEnumValue('INNER'), + {row1, row2|(($row2.getInteger('Integer_t1') == $row2.getInteger('int_t0')) && ($row2.getString('String_t1') == $row2.getString('str_t0')))}) + ->restrict(['int_t0', 'Integer_t1']) + ->renameColumns([ + pair('int_t0', 'int'), + pair('Integer_t1', 'Integer')]) + }, false + ) +} + + //UNION function <> meta::external::query::sql::transformation::queryToPure::tests::testUnion():Boolean[1] @@ -1211,6 +1415,42 @@ function <> meta::external::query::sql::transformation::queryToPure:: }) } +function <> meta::external::query::sql::transformation::queryToPure::tests::testSelectFromAliasedUnion():Boolean[1] +{ + test('SELECT 1 AS "Number of Records",' + + '"t0"."String" AS "String"' + + 'FROM (SELECT "t1"."String" AS "String"' + + ' FROM (' + + ' SELECT "s1"."Integer" AS "int",' + + ' \'Value\' AS "String"' + + ' FROM service."/service/service1" "s1") "t1"' + + ' UNION ALL' + + ' SELECT "s1"."String" AS "String"' + + ' FROM (SELECT "s1"."Integer" AS "int",' + + ' \'Value2\' AS "String"' + + ' FROM service."/service/service1" "s1") "t2") "t0" LIMIT 1000', + + {|meta::external::query::sql::transformation::queryToPure::tests::FlatInput.all() + ->project([x|$x.booleanIn, x|$x.integerIn, x|$x.floatIn, x|$x.decimalIn, 
x|$x.strictDateIn, x|$x.dateTimeIn, x|$x.stringIn], + ['Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String']) + ->project([ + col(row:TDSRow[1] |$row.getInteger('Integer'), 'int'), + col(row:TDSRow[1]| 'Value', 'String')]) + ->restrict('String') + ->concatenate(meta::external::query::sql::transformation::queryToPure::tests::FlatInput.all() + ->project([x|$x.booleanIn, x|$x.integerIn, x|$x.floatIn, x|$x.decimalIn, x|$x.strictDateIn, x|$x.dateTimeIn, x|$x.stringIn], + ['Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String']) + ->project([ + col(row:TDSRow[1]| $row.getInteger('Integer'), 'int'), + col(row:TDSRow[1]|'Value2', 'String')]) + ->restrict('String') + )->project([ + col(row:TDSRow[1]|1, 'Number of Records'), + col(row:TDSRow[1]| $row.getString('String'), 'String')]) + ->limit(1000)} + ) +} + //CURRENT TIME function <> meta::external::query::sql::transformation::queryToPure::tests::testCurrentTime():Boolean[1] { @@ -1328,7 +1568,11 @@ function <> meta::external::query::sql::transformation::queryToPure:: 'StrictDate + NULL + INTERVAL \'1 YEAR 3 WEEKS 2 DAYS\' AS "INTERVAL_ADD_NULL", ' + '(CAST(\'2023-01-01\' AS DATE) + 2 * INTERVAL \'1 DAY\') + 3 * INTERVAL \'2 DAY\' AS "INTERVAL_MIX", ' + 'StrictDate + EXTRACT(\'year\' FROM StrictDate) * INTERVAL \'2 YEAR 3 DAYS\' AS "INTERVAL_MIX2", ' + - 'CAST((DATE_TRUNC( \'DAY\', CAST("StrictDate" AS DATE) ) + (EXTRACT(DOW FROM "StrictDate") * INTERVAL \'1 DAY\')) AS DATE) AS "INTERVAL_MIX3" ' + + 'CAST((DATE_TRUNC( \'DAY\', CAST("StrictDate" AS DATE) ) + (EXTRACT(DOW FROM "StrictDate") * INTERVAL \'1 DAY\')) AS DATE) AS "INTERVAL_MIX3", ' + + 'StrictDate - DateTime AS "DATE_SUBTRACT", ' + + 'StrictDate - INTERVAL \'1 DAY\' AS "INTERVAL_SUBTRACT", ' + + 'StrictDate - \'2023-01-01\' AS "STRING_SUBSTRACT", ' + + '\'2023-01-01\' - StrictDate AS "STRING_SUBSTRACT2" ' + 'FROM service."/service/service1"', {| @@ -1345,7 +1589,11 @@ function <> meta::external::query::sql::transformation::queryToPure:: col(row:TDSRow[1] | []->cast(@StrictDate), 'INTERVAL_ADD_NULL'), col(row:TDSRow[1] | parseDate('2023-01-01')->adjust(2 * 1, DurationUnit.DAYS)->adjust(3 * 2, DurationUnit.DAYS), 'INTERVAL_MIX'), col(row:TDSRow[1] | $row.getStrictDate('StrictDate')->adjust(year($row.getStrictDate('StrictDate')) * 2, DurationUnit.YEARS)->adjust(year($row.getStrictDate('StrictDate')) * 3, DurationUnit.DAYS), 'INTERVAL_MIX2'), - col(row:TDSRow[1] | $row.getStrictDate('StrictDate')->firstHourOfDay()->adjust(($row.getStrictDate('StrictDate')->dayOfWeekNumber() * 1), DurationUnit.DAYS), 'INTERVAL_MIX3') + col(row:TDSRow[1] | $row.getStrictDate('StrictDate')->firstHourOfDay()->adjust(($row.getStrictDate('StrictDate')->dayOfWeekNumber() * 1), DurationUnit.DAYS), 'INTERVAL_MIX3'), + col(row:TDSRow[1] | dateDiff($row.getStrictDate('StrictDate'), $row.getDateTime('DateTime'), DurationUnit.DAYS), 'DATE_SUBTRACT'), + col(row:TDSRow[1] | adjust($row.getStrictDate('StrictDate'), -1, DurationUnit.DAYS), 'INTERVAL_SUBTRACT'), + col(row:TDSRow[1] | dateDiff($row.getStrictDate('StrictDate'), parseDate('2023-01-01'), DurationUnit.DAYS), 'STRING_SUBSTRACT'), + col(row:TDSRow[1] | dateDiff(parseDate('2023-01-01'), $row.getStrictDate('StrictDate'), DurationUnit.DAYS), 'STRING_SUBSTRACT2') ]) }) } @@ -1358,7 +1606,8 @@ function <> meta::external::query::sql::transformation::queryToPure:: 'char_length(String) AS "CHAR_LENGTH", length(String) AS "LENGTH", ltrim(String) AS "LTRIM", ltrim(String, \' \') AS "LTRIM2", md5(String) AS "MD5", upper(String) AS 
"UPPER", ' + 'lower(String) AS "LOWER", repeat(String, 2) AS "REPEAT", replace(String, \'A\', \'a\') AS "REPLACE", starts_with(String, \'a\') AS "STARTSWITH", strpos(String, \'abc\') AS "STRPOS",' + 'reverse(String) AS "REVERSE", rtrim(String) AS "RTRIM", rtrim(String, \' \') AS "RTRIM2", sha256(String) AS "SHA256", split_part(String, \',\', 1) AS "SPLITPART", ' + - 'split_part(String, \',\', Integer) AS "SPLITPART2", substring(String, 1) AS "SUBSTRING", substr(String, 1, 2) AS "SUBSTR", btrim(String) AS "TRIM", btrim(String, \' \') AS "TRIM2" FROM service."/service/service1"', + 'split_part(String, \',\', Integer) AS "SPLITPART2", substring(String, 1) AS "SUBSTRING", substr(String, 1, 2) AS "SUBSTR", btrim(String) AS "TRIM", btrim(String, \' \') AS "TRIM2",' + + 'lpad(String, 2) AS "LPAD", lpad(String, 2, \'a\') AS "LPAD2", rpad(String, 2) AS "RPAD", rpad(String, 2, \'a\') AS "RPAD2" FROM service."/service/service1"', {| FlatInput.all() @@ -1386,12 +1635,16 @@ function <> meta::external::query::sql::transformation::queryToPure:: col(row:TDSRow[1] | rtrim($row.getString('String')), 'RTRIM'), col(row:TDSRow[1] | rtrim($row.getString('String')), 'RTRIM2'), col(row:TDSRow[1] | hash($row.getString('String'), HashType.SHA256), 'SHA256'), - col(row:TDSRow[1] | splitPart($row.getString('String'), ',', 2), 'SPLITPART'), - col(row:TDSRow[1] | splitPart($row.getString('String'), ',', $row.getInteger('Integer') + 1), 'SPLITPART2'), + col(row:TDSRow[1] | splitPart($row.getString('String'), ',', 0), 'SPLITPART'), + col(row:TDSRow[1] | splitPart($row.getString('String'), ',', $row.getInteger('Integer') - 1), 'SPLITPART2'), col(row:TDSRow[1] | substring($row.getString('String'), 1), 'SUBSTRING'), col(row:TDSRow[1] | substring($row.getString('String'), 1, 2), 'SUBSTR'), col(row:TDSRow[1] | trim($row.getString('String')), 'TRIM'), - col(row:TDSRow[1] | trim($row.getString('String')), 'TRIM2') + col(row:TDSRow[1] | trim($row.getString('String')), 'TRIM2'), + col(row:TDSRow[1] | lpad($row.getString('String'), 2), 'LPAD'), + col(row:TDSRow[1] | lpad($row.getString('String'), 2, 'a'), 'LPAD2'), + col(row:TDSRow[1] | rpad($row.getString('String'), 2), 'RPAD'), + col(row:TDSRow[1] | rpad($row.getString('String'), 2, 'a'), 'RPAD2') ]) }) } @@ -1402,7 +1655,8 @@ function <> meta::external::query::sql::transformation::queryToPure:: 'SELECT ascii(NULL) AS "ASCII", chr(NULL) AS "CHR", regexp_like(NULL, \'test\') AS "MATCH", char_length(NULL) AS "CHAR_LENGTH", length(NULL) AS "LENGTH", ltrim(NULL) AS "LTRIM", ' + 'ltrim(NULL, \' \') AS "LTRIM2", md5(NULL) AS "MD5", upper(NULL) AS "UPPER", lower(NULL) AS "LOWER", replace(NULL, \'A\', \'a\') AS "REPLACE", starts_with(NULL, \'a\') AS "STARTSWITH", ' + 'strpos(NULL, \'abc\') AS "STRPOS", reverse(NULL) AS "REVERSE", rtrim(NULL) AS "RTRIM", rtrim(NULL, \' \') AS "RTRIM2", sha256(NULL) AS "SHA256", substring(NULL, 1) AS "SUBSTRING", ' + - 'substr(NULL, 1, 2) AS "SUBSTR", btrim(NULL) AS "TRIM", btrim(NULL, \' \') AS "TRIM2" FROM service."/service/service1"', + 'substr(NULL, 1, 2) AS "SUBSTR", btrim(NULL) AS "TRIM", btrim(NULL, \' \') AS "TRIM2", lpad(NULL, 2) AS "LPAD", lpad(String, NULL) AS "LPAD2", lpad(String, 2, NULL) AS "LPAD3",' + + 'rpad(NULL, 2) AS "RPAD", rpad(String, NULL) AS "RPAD2", rpad(String, 2, NULL) AS "RPAD3" FROM service."/service/service1"', {| FlatInput.all() @@ -1430,12 +1684,133 @@ function <> meta::external::query::sql::transformation::queryToPure:: col(row:TDSRow[1] | []->cast(@String), 'SUBSTRING'), col(row:TDSRow[1] | []->cast(@String), 'SUBSTR'), 
col(row:TDSRow[1] | []->cast(@String), 'TRIM'), - col(row:TDSRow[1] | []->cast(@String), 'TRIM2') + col(row:TDSRow[1] | []->cast(@String), 'TRIM2'), + col(row:TDSRow[1] | []->cast(@String), 'LPAD'), + col(row:TDSRow[1] | []->cast(@String), 'LPAD2'), + col(row:TDSRow[1] | []->cast(@String), 'LPAD3'), + col(row:TDSRow[1] | []->cast(@String), 'RPAD'), + col(row:TDSRow[1] | []->cast(@String), 'RPAD2'), + col(row:TDSRow[1] | []->cast(@String), 'RPAD3') ]) }) } + + +Class meta::external::query::sql::transformation::queryToPure::tests::ToCharTestConfig +{ + sql: String[1]; + col: LambdaFunction<{TDSRow[1]->Any[*]}>[0..1]; + name : String[1]; + value: String[1]; +} + +function meta::external::query::sql::transformation::queryToPure::tests::tc(sql:String[1], col: LambdaFunction<{TDSRow[1]->Any[*]}>[0..1], name:String[1], value:String[1]):ToCharTestConfig[1] +{ + ^ToCharTestConfig(sql = $sql, col = $col, name = $name, value = $value); +} + +function meta::external::query::sql::transformation::queryToPure::tests::toCharTest(items:ToCharTestConfig[*], compareLambda:Boolean[1]):Boolean[1] +{ + let sqlString = 'SELECT ' + $items->map(i | $i.sql + ' AS "' + $i.name + '"')->joinStrings(', ') + ' FROM service."/service/service1"'; + + let sqlTransformContext = $sqlString->processQuery(); + let csv = 'default\n'+ + 'flat\n'+ + 'id,boolean,integer,float,decimal,strictDate,dateTime,string,enum,type\n'+ + '1,1,10,1.5,2.5,2023-10-02,2023-04-01T09:08:07,test,V1,T1\n'; + + let runtime = ^meta::core::runtime::Runtime( + connectionStores = ^meta::core::runtime::ConnectionStore( + element = meta::external::query::sql::transformation::queryToPure::tests::dummyDatabase, + connection = ^meta::external::store::relational::runtime::TestDatabaseConnection( + type=meta::relational::runtime::DatabaseType.H2, + testDataSetupCsv = $csv + )) + ); + + let sqlStatements = meta::alloy::service::execution::setUpDataSQLs($csv, meta::external::query::sql::transformation::queryToPure::tests::dummyDatabase); + $sqlStatements->map(sql| + meta::relational::functions::database::executeInDb($sql, $runtime.connectionStores->at(0), false); + )->size(); // DO NOT REMOVE: size() called to inhibit lazy evaluation, ensuring the map is activated + + + if ($compareLambda, + | + let cols = $items->map(c | createCol($c.col->toOne(), $c.name)); + + let expected = {| + FlatInput.all() + ->project( + [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ], + [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]) + }->appendTdsFunc(project_TabularDataSet_1__ColumnSpecification_MANY__TabularDataSet_1_, list(iv($cols))); + + + assertLambdaEquals($expected, $sqlTransformContext.lambda());, + | []); + + + let tds = meta::pure::router::execute($sqlTransformContext.lambda()->toOne()->cast(@LambdaFunction<{->TabularDataSet[1]}>), dummyMapping, $runtime, relationalExtensions()).values; + let values = $tds.rows->at(0).values; + + $items->forAll(i | + assertEquals($i.value, $values->at($items->indexOf($i)), | 'expected ' + $values->at($items->indexOf($i))->toString() + ' to equal ' + $i.value + ' sql: ' + $i.sql); + ); + +} + +function <> meta::external::query::sql::transformation::queryToPure::tests::testToChar():Boolean[1] +{ + toCharTest([ + tc('to_char(DateTime, \'HH24\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->hour()->toString()->lpad(2, '0')}, 'HH24', '09'), + tc('to_char(DateTime, \'FMHH24\')', {row:TDSRow[1] | 
$row.getDateTime('DateTime')->hour()->toString()}, 'FMHH24', '9'), + tc('to_char(DateTime, \'MI\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->minute()->toString()->lpad(2, '0')}, 'MI', '08'), + tc('to_char(DateTime, \'FMMI\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->minute()->toString()}, 'FMMI', '8'), + tc('to_char(DateTime, \'SS\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->second()->toString()->lpad(2, '0')}, 'SS', '07'), + tc('to_char(DateTime, \'FMSS\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->second()->toString()}, 'FMSS', '7'), + tc('to_char(DateTime, \'YYYY\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->year()->toString()}, 'YYYY', '2023'), + tc('to_char(DateTime, \'YYY\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->year()->toString()->substring(2, 3)}, 'YYY', '023'), + tc('to_char(DateTime, \'YY\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->year()->toString()->substring(3, 2)}, 'YY', '23'), + tc('to_char(DateTime, \'Y\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->year()->toString()->substring(4, 1)}, 'Y', '3'), + tc('to_char(DateTime, \'MONTH\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()->toUpper()->rpad(9, ' ')}, 'MONTHUPPER', 'APRIL '), + tc('to_char(DateTime, \'FMMONTH\')',{row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()->toUpper()}, 'FMMONTHUPPER', 'APRIL'), + tc('to_char(DateTime, \'Month\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()->rpad(9, ' ')}, 'Month', 'April '), + tc('to_char(DateTime, \'FMMonth\')',{row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()}, 'FMMonth', 'April'), + tc('to_char(DateTime, \'month\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()->toLower()->rpad(9, ' ')}, 'monthLower', 'april '), + tc('to_char(DateTime, \'FMmonth\')',{row:TDSRow[1] | $row.getDateTime('DateTime')->month()->toString()->toLower()}, 'FMmonthLower', 'april'), + tc('to_char(DateTime, \'MM\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->monthNumber()->toString()->lpad(2, '0')}, 'MM', '04'), + tc('to_char(DateTime, \'FMMM\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->monthNumber()->toString()}, 'FMMM', '4'), + tc('to_char(DateTime, \'DAY\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()->toUpper()->rpad(9, ' ')}, 'DAYUPPER', 'SATURDAY '), + tc('to_char(DateTime, \'FMDAY\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()->toUpper()}, 'FMDAYUPPER', 'SATURDAY'), + tc('to_char(DateTime, \'Day\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()->rpad(9, ' ')}, 'Day', 'Saturday '), + tc('to_char(DateTime, \'FMDay\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()}, 'FMDay', 'Saturday'), + tc('to_char(DateTime, \'day\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()->toLower()->rpad(9, ' ')}, 'dayLower', 'saturday '), + tc('to_char(DateTime, \'FMday\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeek()->toString()->toLower()}, 'FMdayLower', 'saturday'), + tc('to_char(DateTime, \'DDD\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfYear()->toString()->lpad(3, '0')}, 'DDD', '091'), + tc('to_char(DateTime, \'FMDDD\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfYear()->toString()}, 'FMDDD', '91'), + tc('to_char(DateTime, \'DD\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfMonth()->toString()->lpad(2, '0')}, 'DD', '01'), + tc('to_char(DateTime, \'FMDD\')', {row:TDSRow[1] | 
$row.getDateTime('DateTime')->dayOfMonth()->toString()}, 'FMDD', '1'), + tc('to_char(DateTime, \'D\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->dayOfWeekNumber()->toString()}, 'D', '7'), + tc('to_char(DateTime, \'WW\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->weekOfYear()->toString()->lpad(2, '0')}, 'WW', '13'), + tc('to_char(DateTime, \'FMWW\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->weekOfYear()->toString()}, 'FMWW', '13'), + tc('to_char(DateTime, \'Q\')', {row:TDSRow[1] | $row.getDateTime('DateTime')->quarterNumber()->toString()}, 'Q', '2') + ], true); +} + +function <> meta::external::query::sql::transformation::queryToPure::tests::testToCharCombined():Boolean[1] +{ + toCharTest([ + tc('to_char(DateTime, \'HH24-FMHH24-MI-FMMI-SS-FMSS-YYYY-YYY-YY-MONTH-FMMONTH-month-FMmonth-DAY-FMDAY-Day-FMDay-day-FMday-DDD-FMDDD-DD-FMDD-D-WW-FMWW-Q\')', [], + 'a-l-l', '09-9-08-8-07-7-2023-023-23-APRIL -APRIL-april -april-SATURDAY -SATURDAY-Saturday -Saturday-saturday -saturday-091-91-01-1-7-13-13-2'), + tc('to_char(DateTime, \'HH24FMHH24MIFMMISSFMSSYYYY-YYY-YYMONTHFMMONTHmonthFMmonthDAYFMDAYDayFMDaydayFMdayDDDFMDD-D-DDFMDDDWWFMWWQ\')', [], + 'all', '0990880772023-023-23APRIL APRILapril aprilSATURDAY SATURDAYSaturday Saturdaysaturday saturday0911-7-019113132'), + tc('to_char(DateTime, \'YYYY-MM-DD\')', [], 'yyyymmdd', '2023-04-01') + ], false); +} + + //MATH FUNCTIONS function <> meta::external::query::sql::transformation::queryToPure::tests::testMathFunctions():Boolean[1] { @@ -1582,7 +1957,9 @@ function <> meta::external::query::sql::transformation::queryToPure:: ->project( [ x | $x.booleanIn, x | $x.integerIn, x | $x.floatIn, x | $x.decimalIn, x | $x.strictDateIn, x | $x.dateTimeIn, x | $x.stringIn ], [ 'Boolean', 'Integer', 'Float', 'Decimal', 'StrictDate', 'DateTime', 'String' ]) - ->renameColumns(pair('String', 'string')) + ->extend([ + col(row:TDSRow[1] | $row.getString('String'), 'string') + ]) ->olapGroupBy(['String'], asc('Integer'), y | $y->meta::pure::functions::math::olap::rowNumber(), 'ROW') ->olapGroupBy(['String'], desc('Integer'), y | $y->meta::pure::functions::math::olap::denseRank(), 'DENSE RANK') ->olapGroupBy(['String'], asc('Integer'), y | $y->meta::pure::functions::math::olap::rank(), 'RANK') @@ -1983,23 +2360,25 @@ function meta::external::query::sql::transformation::queryToPure::tests::testSou ] } -function meta::external::query::sql::transformation::queryToPure::tests::test(sql:String[1], expected:FunctionDefinition[1]):Boolean[1] +function meta::external::query::sql::transformation::queryToPure::tests::test(sqls:String[*], expected:FunctionDefinition[1]):Boolean[1] { - test($sql, $expected, true); + test($sqls, $expected, true); } -function meta::external::query::sql::transformation::queryToPure::tests::test(sql:String[1], expected:FunctionDefinition[1], assertJSON:Boolean[1]):Boolean[1] +function meta::external::query::sql::transformation::queryToPure::tests::test(sqls:String[*], expected:FunctionDefinition[1], assertJSON:Boolean[1]):Boolean[1] { - test($sql, $expected, testSources(), false, true, $assertJSON); + test($sqls, $expected, testSources(), false, true, $assertJSON); } -function meta::external::query::sql::transformation::queryToPure::tests::test(sql:String[1], expected:FunctionDefinition[1], sources:SQLSource[*], scopeWithFrom:Boolean[1], assertLambda:Boolean[1], assertJSON:Boolean[1]):Boolean[1] +function meta::external::query::sql::transformation::queryToPure::tests::test(sqls:String[*], expected:FunctionDefinition[1], 
sources:SQLSource[*], scopeWithFrom:Boolean[1], assertLambda:Boolean[1], assertJSON:Boolean[1]):Boolean[1] { - let sqlTransformContext = $sql->processQuery($sources, $scopeWithFrom); - let actual = $sqlTransformContext.lambda(); + $sqls->forAll(sql | + let sqlTransformContext = $sql->processQuery($sources, $scopeWithFrom); + let actual = $sqlTransformContext.lambda(); - if ($assertLambda, | assertLambdaEquals($expected, $actual), | true); - if ($assertJSON, | assertLambdaJSONEquals($expected, $actual), | true); + if ($assertLambda, | assertLambdaEquals($expected, $actual), | true); + if ($assertJSON, | assertLambdaJSONEquals($expected, $actual), | true); + ) } function meta::external::query::sql::transformation::queryToPure::tests::processQuery(sql: String[1]): SqlTransformContext[1] diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/pom.xml b/legend-engine-xts-sql/legend-engine-xt-sql-query/pom.xml index c1e0cd03af2..e45230c06ce 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/pom.xml +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-sql - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -96,6 +96,10 @@ org.finos.legend.engine legend-engine-xt-sql-compiler + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLExecutor.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLExecutor.java index 798afbdf56b..74bf4bbf455 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLExecutor.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLExecutor.java @@ -47,13 +47,11 @@ import org.finos.legend.engine.protocol.sql.metamodel.Query; import org.finos.legend.engine.protocol.sql.schema.metamodel.MetamodelToProtocolTranslator; import org.finos.legend.engine.protocol.sql.schema.metamodel.Schema; -import org.finos.legend.engine.query.sql.api.sources.SQLContext; -import org.finos.legend.engine.query.sql.api.sources.SQLSource; -import org.finos.legend.engine.query.sql.api.sources.SQLSourceProvider; -import org.finos.legend.engine.query.sql.api.sources.SQLSourceResolvedContext; -import org.finos.legend.engine.query.sql.api.sources.SQLSourceTranslator; -import org.finos.legend.engine.query.sql.api.sources.TableSource; -import org.finos.legend.engine.query.sql.api.sources.TableSourceExtractor; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; import org.finos.legend.engine.shared.core.ObjectMapperFactory; import org.finos.legend.engine.shared.core.operational.errorManagement.EngineException; import org.finos.legend.engine.shared.core.operational.logs.LogInfo; @@ -89,9 +87,10 @@ public class SQLExecutor private final PlanExecutor planExecutor; private final Function> routerExtensions; private final Iterable transformers; - private final MutableMap providers; + private final MutableMap providers; - public SQLExecutor(ModelManager modelManager, PlanExecutor planExecutor, + public SQLExecutor(ModelManager 
modelManager, + PlanExecutor planExecutor, Function> routerExtensions, List providers, Iterable transformers) @@ -128,7 +127,7 @@ public Result execute(Query query, String user, SQLContext context, MutableList< { Root_meta_pure_executionPlan_ExecutionPlan l = PlanPlatform.JAVA.bindPlan(p._plan(), null, pureModel, routerExtensions.apply(pureModel)); SingleExecutionPlan m = transformExecutionPlan(l, pureModel, PureClientVersions.production, profiles, routerExtensions.apply(pureModel), transformers); - result = planExecutor.execute(m, Maps.mutable.empty(), "pentej", profiles); + result = planExecutor.execute(m, Maps.mutable.empty(), user, profiles); } return Tuples.pair(p._name(), result); @@ -165,7 +164,7 @@ public SingleExecutionPlan plan(Query query, SQLContext context, MutableList profiles) { - SQLContext context = new SQLContext(query, Maps.mutable.of()); + SQLContext context = new SQLContext(query); return process(query, (t, pm, sources) -> { Root_meta_external_query_sql_schema_metamodel_Schema schema = core_external_query_sql_binding_fromPure_fromPure.Root_meta_external_query_sql_transformation_queryToPure_getSchema_SqlTransformContext_1__Schema_1_(t, pm.getExecutionSupport()); @@ -216,7 +215,7 @@ private Pair, PureModelContext> getSourcesAndModel(Query if (!schemasValid) { - throw new IllegalArgumentException("Unsupported schema types " + String.join(", ", grouped.keySet().select(k -> !providers.containsKey(k)))); + throw new IllegalArgumentException("Unsupported schema types [" + String.join(", ", grouped.keySet().select(k -> !providers.containsKey(k))) + "], supported types: [" + String.join(", ", providers.keySet()) + "]"); } RichIterable resolved = grouped.keySet().collect(k -> resolve(grouped.get(k), context, providers.get(k), profiles)); diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceTranslator.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLSourceTranslator.java similarity index 96% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceTranslator.java rename to legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLSourceTranslator.java index ede0ac21bba..64ae7757879 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/SQLSourceTranslator.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/SQLSourceTranslator.java @@ -13,7 +13,7 @@ // limitations under the License. 
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.api; import org.eclipse.collections.api.RichIterable; import org.eclipse.collections.impl.list.mutable.FastList; @@ -25,6 +25,9 @@ import org.finos.legend.engine.language.pure.compiler.toPureGraph.PureModel; import org.finos.legend.engine.protocol.pure.v1.model.executionOption.ExecutionOption; import org.finos.legend.engine.protocol.pure.v1.model.valueSpecification.raw.executionContext.ExecutionContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; + import org.finos.legend.pure.generated.*; import java.util.Objects; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractor.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/TableSourceExtractor.java similarity index 98% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractor.java rename to legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/TableSourceExtractor.java index ad1dd23d4b4..5d05297574d 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractor.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/TableSourceExtractor.java @@ -13,12 +13,14 @@ // limitations under the License. // -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.api; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Sets; import org.eclipse.collections.impl.utility.ListIterate; import org.finos.legend.engine.protocol.sql.metamodel.*; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.core.TableSourceArgument; import java.util.Collection; import java.util.Collections; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/execute/SqlExecute.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/execute/SqlExecute.java index 9d463443298..fca71827a75 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/execute/SqlExecute.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/main/java/org/finos/legend/engine/query/sql/api/execute/SqlExecute.java @@ -35,8 +35,8 @@ import org.finos.legend.engine.protocol.sql.metamodel.Query; import org.finos.legend.engine.protocol.sql.schema.metamodel.Schema; import org.finos.legend.engine.query.sql.api.SQLExecutor; -import org.finos.legend.engine.query.sql.api.sources.SQLContext; -import org.finos.legend.engine.query.sql.api.sources.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; import org.finos.legend.engine.shared.core.kerberos.ProfileManagerHelper; import org.finos.legend.engine.shared.core.operational.logs.LoggingEventType; import org.finos.legend.pure.generated.Root_meta_pure_extension_Extension; @@ -101,7 +101,7 @@ public Response executeSql(@Context HttpServletRequest 
request, Query query, @De @ApiParam(hidden = true) @Pac4JProfileManager ProfileManager pm, @Context UriInfo uriInfo) { MutableList profiles = ProfileManagerHelper.extractProfiles(pm); - SQLContext context = new SQLContext(query, Maps.mutable.of()); + SQLContext context = new SQLContext(query); Result result = this.executor.execute(query, request.getRemoteUser(), context, profiles); @@ -129,7 +129,7 @@ public Lambda generateLambda(@Context HttpServletRequest request, Query query, @ { MutableList profiles = ProfileManagerHelper.extractProfiles(pm); - SQLContext context = new SQLContext(query, Maps.mutable.of()); + SQLContext context = new SQLContext(query); return executor.lambda(query, context, profiles); } @@ -151,7 +151,7 @@ public ExecutionPlan generatePlan(@Context HttpServletRequest request, Query que { MutableList profiles = ProfileManagerHelper.extractProfiles(pm); - SQLContext context = new SQLContext(query, Maps.mutable.of()); + SQLContext context = new SQLContext(query); return this.executor.plan(query, context, profiles); } diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractorTest.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TableSourceExtractorTest.java similarity index 94% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractorTest.java rename to legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TableSourceExtractorTest.java index 9df7972debf..b92133ae745 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TableSourceExtractorTest.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TableSourceExtractorTest.java @@ -13,12 +13,14 @@ // limitations under the License. 
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.api; import org.eclipse.collections.api.factory.Sets; import org.eclipse.collections.impl.list.mutable.FastList; import org.finos.legend.engine.language.sql.grammar.from.SQLGrammarParser; import org.finos.legend.engine.protocol.sql.metamodel.Statement; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.core.TableSourceArgument; import org.junit.Assert; import org.junit.Test; @@ -26,7 +28,6 @@ public class TableSourceExtractorTest { - private static final TableSource TABLE_1 = new TableSource("service", FastList.newListWith(new TableSourceArgument(null, 0, "table1"))); private static final TableSource TABLE_2 = new TableSource("service", FastList.newListWith(new TableSourceArgument(null, 0, "table2"))); diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TestSQLSourceProvider.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TestSQLSourceProvider.java similarity index 89% rename from legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TestSQLSourceProvider.java rename to legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TestSQLSourceProvider.java index b09021383c9..8268e162f5d 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/sources/TestSQLSourceProvider.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/TestSQLSourceProvider.java @@ -13,7 +13,7 @@ // limitations under the License. 
// -package org.finos.legend.engine.query.sql.api.sources; +package org.finos.legend.engine.query.sql.api; import org.eclipse.collections.api.LazyIterable; import org.eclipse.collections.api.list.MutableList; @@ -24,6 +24,13 @@ import org.finos.legend.engine.protocol.pure.v1.model.context.PureModelContextData; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.PureSingleExecution; import org.finos.legend.engine.protocol.pure.v1.model.packageableElement.service.Service; +import org.finos.legend.engine.query.sql.providers.core.SQLContext; +import org.finos.legend.engine.query.sql.providers.core.SQLSource; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceArgument; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceProvider; +import org.finos.legend.engine.query.sql.providers.core.SQLSourceResolvedContext; +import org.finos.legend.engine.query.sql.providers.core.TableSource; +import org.finos.legend.engine.query.sql.providers.core.TableSourceArgument; import org.pac4j.core.profile.CommonProfile; import java.io.BufferedReader; diff --git a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/execute/SqlExecuteTest.java b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/execute/SqlExecuteTest.java index 408501ec7c3..c2f383ee6e5 100644 --- a/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/execute/SqlExecuteTest.java +++ b/legend-engine-xts-sql/legend-engine-xt-sql-query/src/test/java/org/finos/legend/engine/query/sql/api/execute/SqlExecuteTest.java @@ -39,7 +39,7 @@ import org.finos.legend.engine.pure.code.core.PureCoreExtensionLoader; import org.finos.legend.engine.query.sql.api.CatchAllExceptionMapper; import org.finos.legend.engine.query.sql.api.MockPac4jFeature; -import org.finos.legend.engine.query.sql.api.sources.TestSQLSourceProvider; +import org.finos.legend.engine.query.sql.api.TestSQLSourceProvider; import org.finos.legend.engine.shared.core.api.grammar.RenderStyle; import org.finos.legend.engine.shared.core.deployment.DeploymentMode; import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory; diff --git a/legend-engine-xts-sql/pom.xml b/legend-engine-xts-sql/pom.xml index b9297f87eee..c4bcbf14f1d 100644 --- a/legend-engine-xts-sql/pom.xml +++ b/legend-engine-xts-sql/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 @@ -34,6 +34,7 @@ legend-engine-xt-sql-grammar-integration legend-engine-xt-sql-postgres-server legend-engine-xt-sql-protocol + legend-engine-xt-sql-providers legend-engine-xt-sql-pure legend-engine-xt-sql-pure-metamodel legend-engine-xt-sql-query diff --git a/legend-engine-xts-text/legend-engine-xt-text-compiler/pom.xml b/legend-engine-xts-text/legend-engine-xt-text-compiler/pom.xml index 0f3f8adcec6..73563850afd 100644 --- a/legend-engine-xts-text/legend-engine-xt-text-compiler/pom.xml +++ b/legend-engine-xts-text/legend-engine-xt-text-compiler/pom.xml @@ -18,7 +18,7 @@ legend-engine-xts-text org.finos.legend.engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-text/legend-engine-xt-text-grammar/pom.xml b/legend-engine-xts-text/legend-engine-xt-text-grammar/pom.xml index c287fc5642e..c236fc6356d 100644 --- a/legend-engine-xts-text/legend-engine-xt-text-grammar/pom.xml +++ b/legend-engine-xts-text/legend-engine-xt-text-grammar/pom.xml @@ -18,7 +18,7 @@ 
org.finos.legend.engine legend-engine-xts-text - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-text/legend-engine-xt-text-protocol/pom.xml b/legend-engine-xts-text/legend-engine-xt-text-protocol/pom.xml index 5bfd598aea5..a790bcdcae9 100644 --- a/legend-engine-xts-text/legend-engine-xt-text-protocol/pom.xml +++ b/legend-engine-xts-text/legend-engine-xt-text-protocol/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine-xts-text - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-text/legend-engine-xt-text-pure-metamodel/pom.xml b/legend-engine-xts-text/legend-engine-xt-text-pure-metamodel/pom.xml index 867ed175c09..772144c3eb0 100644 --- a/legend-engine-xts-text/legend-engine-xt-text-pure-metamodel/pom.xml +++ b/legend-engine-xts-text/legend-engine-xt-text-pure-metamodel/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-text - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-text/pom.xml b/legend-engine-xts-text/pom.xml index 61d1359d0b4..837f271672a 100644 --- a/legend-engine-xts-text/pom.xml +++ b/legend-engine-xts-text/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/legend-engine-xt-xml-javaPlatformBinding-pure/pom.xml b/legend-engine-xts-xml/legend-engine-xt-xml-javaPlatformBinding-pure/pom.xml index b46994939a5..a655017d4b3 100644 --- a/legend-engine-xts-xml/legend-engine-xt-xml-javaPlatformBinding-pure/pom.xml +++ b/legend-engine-xts-xml/legend-engine-xt-xml-javaPlatformBinding-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-xml - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/legend-engine-xt-xml-model/pom.xml b/legend-engine-xts-xml/legend-engine-xt-xml-model/pom.xml index d95936aef22..3fc0640c9df 100644 --- a/legend-engine-xts-xml/legend-engine-xt-xml-model/pom.xml +++ b/legend-engine-xts-xml/legend-engine-xt-xml-model/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-xml - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/legend-engine-xt-xml-pure/pom.xml b/legend-engine-xts-xml/legend-engine-xt-xml-pure/pom.xml index 72e70b7d74c..850e459f5e2 100644 --- a/legend-engine-xts-xml/legend-engine-xt-xml-pure/pom.xml +++ b/legend-engine-xts-xml/legend-engine-xt-xml-pure/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-xml - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/legend-engine-xt-xml-runtime/pom.xml b/legend-engine-xts-xml/legend-engine-xt-xml-runtime/pom.xml index 4d2f71269e9..1c6c2454b1f 100644 --- a/legend-engine-xts-xml/legend-engine-xt-xml-runtime/pom.xml +++ b/legend-engine-xts-xml/legend-engine-xt-xml-runtime/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-xml - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/legend-engine-xt-xml-shared/pom.xml b/legend-engine-xts-xml/legend-engine-xt-xml-shared/pom.xml index 84eb12e2a7b..24f172269df 100644 --- a/legend-engine-xts-xml/legend-engine-xt-xml-shared/pom.xml +++ b/legend-engine-xts-xml/legend-engine-xt-xml-shared/pom.xml @@ -19,7 +19,7 @@ org.finos.legend.engine legend-engine-xts-xml - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/legend-engine-xts-xml/pom.xml b/legend-engine-xts-xml/pom.xml index 9bd279755df..61a42aaac04 100644 --- a/legend-engine-xts-xml/pom.xml +++ b/legend-engine-xts-xml/pom.xml @@ -18,7 +18,7 @@ org.finos.legend.engine 
legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT 4.0.0 diff --git a/pom.xml b/pom.xml index 0c66a7c491d..609904e7d25 100644 --- a/pom.xml +++ b/pom.xml @@ -26,7 +26,7 @@ Legend Engine org.finos.legend.engine legend-engine - 4.32.1-SNAPSHOT + 4.35.4-SNAPSHOT pom @@ -75,6 +75,7 @@ legend-engine-xts-functionActivator legend-engine-xts-snowflakeApp + legend-engine-xts-bigqueryFunction legend-engine-xts-service legend-engine-xts-persistence legend-engine-xts-hostedService @@ -91,6 +92,7 @@ legend-engine-xts-authentication + legend-engine-xts-connection legend-engine-config @@ -156,6 +158,7 @@ 2.6 3.7 1.10.0 + 0.12.0 1.3.17-1 4.1.16 1.3.29 @@ -187,6 +190,7 @@ 1.2.17 0.1.5 1.2.3 + 8.5.5 4.4.0 3.12.8 4.6 @@ -579,6 +583,11 @@ legend-engine-xt-functionActivator-api ${project.version} + + org.finos.legend.engine + legend-engine-xt-functionActivator-deployment + ${project.version} + @@ -637,25 +646,54 @@ legend-engine-xt-snowflakeApp-api ${project.version} - org.finos.legend.engine - legend-engine-xt-authentication-pure + legend-engine-xt-snowflakeApp-generator ${project.version} org.finos.legend.engine - legend-engine-xt-authentication-protocol + legend-engine-xt-bigqueryFunction-protocol ${project.version} org.finos.legend.engine - legend-engine-xt-authentication-grammar + legend-engine-xt-bigqueryFunction-pure + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-compiler + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-grammar + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-bigqueryFunction-api + ${project.version} + + + + + + + + org.finos.legend.engine + legend-engine-xt-authentication-pure ${project.version} org.finos.legend.engine - legend-engine-xt-authentication-connection-factory + legend-engine-xt-authentication-protocol + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-authentication-grammar ${project.version} @@ -673,6 +711,31 @@ legend-engine-xt-authentication-implementation-gcp-federation ${project.version} + + org.finos.legend.engine + legend-engine-xt-connection-factory + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-connection-compiler + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-connection-grammar + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-connection-protocol + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-connection-pure-metamodel + ${project.version} + org.finos.legend.engine legend-engine-xt-graphQL-query @@ -1995,6 +2058,32 @@ ${project.version} test-jar + + org.finos.legend.engine + legend-engine-xt-sql-providers-core + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-sql-providers-shared + test-jar + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-sql-providers-relationalStore + ${project.version} + + + org.finos.legend.engine + legend-engine-xt-sql-providers-service + ${project.version} + org.finos.legend.engine legend-engine-xt-sql-grammar-integration @@ -2963,6 +3052,17 @@ docker-java-api 3.3.0 + + io.minio + minio + ${minio.version} + + + org.jetbrains + * + + + @@ -3084,6 +3184,19 @@ + + + io.deephaven + deephaven-csv + ${deephaven-csv.version} + + + io.deephaven + deephaven-csv-fast-double-parser + ${deephaven-csv.version} + runtime + + org.apache.arrow @@ -3127,7 +3240,6 @@ - @@ -3168,9 +3280,7 @@ vX_X_X v1_33_0 - -