From f45293e8fd3ab239104a6e2c18db4c91f4698ec6 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Wed, 21 Aug 2024 15:36:17 -0400 Subject: [PATCH 01/29] Fix OTEL traces not including JDBC statement SQL --- .../hasura/ndc/app/application/Application.kt | 7 +++++++ .../ndc/app/services/AgroalDataSourceService.kt | 17 ++++------------- .../dataConnectors/BaseDataConnectorService.kt | 2 +- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Application.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Application.kt index 55a7837..f50b84b 100644 --- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Application.kt +++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Application.kt @@ -15,9 +15,16 @@ object Application { class MyApp : QuarkusApplication { override fun run(vararg args: String): Int { + disableOtelJdbcSanitizer() Quarkus.waitForExit() return 0 } + + // Disables stripping of whitespace and replacement of literals in SQL queries with "?" + // See: https://github.com/open-telemetry/opentelemetry-java-instrumentation/issues/6610 + private fun disableOtelJdbcSanitizer() { + System.setProperty("otel.instrumentation.common.db-statement-sanitizer.enabled", "false") + } } } diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/AgroalDataSourceService.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/AgroalDataSourceService.kt index e030290..24a1f62 100644 --- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/AgroalDataSourceService.kt +++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/AgroalDataSourceService.kt @@ -9,6 +9,8 @@ import io.agroal.api.security.SimplePassword import io.hasura.ndc.common.ConnectorConfiguration import io.opentelemetry.instrumentation.annotations.WithSpan import io.opentelemetry.instrumentation.jdbc.datasource.OpenTelemetryDataSource +import io.quarkus.agroal.runtime.AgroalOpenTelemetryWrapper +import io.quarkus.agroal.runtime.OpenTelemetryAgroalDataSource import io.smallrye.config.ConfigMapping import io.smallrye.config.WithDefault import io.smallrye.config.WithName @@ -140,7 +142,7 @@ class AgroalDataSourceService { fun createDataSource( connConfig: ConnectorConfiguration - ): DataSource { + ): AgroalDataSource { val configSupplier = mkAgroalDataSourceConfigurationSupplier(connConfig.jdbcUrl, connConfig.jdbcProperties) val ds = AgroalDataSource.from(configSupplier).apply { loginTimeout = config.connectionFactoryConfiguration().loginTimeout().toSeconds().toInt() @@ -154,18 +156,7 @@ class AgroalDataSourceService { connConfig: ConnectorConfiguration ): DataSource { val agroalDs = createDataSource(connConfig) - return OpenTelemetryDataSource(agroalDs) - } - - @WithSpan - fun createDataSourceFromConnInfo( - connConfig: ConnectorConfiguration, - tracing: Boolean = true - ): DataSource { - return when (tracing) { - true -> createTracingDataSource(connConfig) - false -> createDataSource(connConfig) - } + return OpenTelemetryAgroalDataSource(agroalDs) } private object PoolInterceptor : AgroalPoolInterceptor { diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt index 6d5d341..b072360 100644 --- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt +++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt @@ -38,7 +38,7 @@ class 
JDBCDataSourceProvider : IDataSourceProvider {

     override fun getDataSource(config: ConnectorConfiguration): DataSource {
         if (dataSource == null) {
-            dataSource = agroalDataSourceService.createDataSource(config)
+            dataSource = agroalDataSourceService.createTracingDataSource(config)
         }
         return dataSource!!
     }

From 02d6e690087e3c9edc567a37b28918e19e40d865 Mon Sep 17 00:00:00 2001
From: Jonathan Weiss 
Date: Thu, 22 Aug 2024 16:18:18 -0700
Subject: [PATCH 02/29] fix for empty schemas option in the CLI

---
 ndc-cli/src/main/kotlin/io/hasura/cli/main.kt | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
index 137db7b..6e32314 100644
--- a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
+++ b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
@@ -53,10 +53,9 @@ class CLI {
             names = ["-s", "--schemas"],
             arity = "0..*",
             split = ",",
-            defaultValue = "",
             description = ["Comma-separated list of schemas to introspect"]
         )
-        schemas: List<String> = emptyList()
+        schemas: List<String>?
     ) {
         val configGenerator = when (database) {
@@ -67,7 +66,7 @@

         val config = configGenerator.getConfig(
             jdbcUrl = jdbcUrl,
-            schemas = schemas
+            schemas = schemas ?: emptyList()
         )

         val file = File(outfile)

From 0a0f941fa5906e16fafe93a5cf74c20b58b777bd Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Fri, 23 Aug 2024 14:06:26 -0400
Subject: [PATCH 03/29] Update connector metadata for v1.0.1, plus build scripts

---
 create-github-release.sh                      | 32 +++++++++++++------
 gradle.properties                             |  2 +-
 .../.hasura-connector/connector-metadata.yaml |  2 +-
 .../.hasura-connector/connector-metadata.yaml |  2 +-
 .../.hasura-connector/connector-metadata.yaml |  2 +-
 5 files changed, 26 insertions(+), 14 deletions(-)

diff --git a/create-github-release.sh b/create-github-release.sh
index 6360ab6..5586342 100755
--- a/create-github-release.sh
+++ b/create-github-release.sh
@@ -1,18 +1,30 @@
 #!/bin/bash
+set -euo pipefail
+set -x

 # Variables
 OWNER="hasura"
 REPO="ndc-jvm-mono"
-VERSION="v0.1.0"
+VERSION="v1.0.1"

-SUBDIRS=("ndc-connector-oracle" "ndc-connector-mysql" "ndc-connector-snowflake")
+CONNECTORS=("ndc-connector-oracle" "ndc-connector-mysql" "ndc-connector-snowflake")

-# Loop through each subdirectory and create a release
-for SUBDIR in "${SUBDIRS[@]}"; do
-    TAG="${SUBDIR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0
-    RELEASE_NAME="${SUBDIR#ndc-connector-} Release ${VERSION}"
-    RELEASE_DESCRIPTION="Release for ${SUBDIR#ndc-connector-} version ${VERSION}"
-    FILE_PATH="${SUBDIR}/package.tar.gz"
+# Loop through each connector and create a release
+for CONNECTOR in "${CONNECTORS[@]}"; do
+
+    # First, build and push the Docker images
+    export IMAGE_TAG="${VERSION}"
+    docker compose build "${CONNECTOR}"
+    docker compose push "${CONNECTOR}"
+
+    # Create the .tar.gz with the connector's ".hasura-connector" directory
+    tar -czvf "${CONNECTOR}/package.tar.gz" -C "${CONNECTOR}" .hasura-connector
+
+    # Now, create a GitHub release
+    TAG="${CONNECTOR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0
+    RELEASE_NAME="${CONNECTOR#ndc-connector-} Release ${VERSION}"
+    RELEASE_DESCRIPTION="Release for ${CONNECTOR#ndc-connector-} version ${VERSION}"
+    FILE_PATH="${CONNECTOR}/package.tar.gz"

     # Create a new release
     gh release create "$TAG" \
         --repo "$OWNER/$REPO" \
         --title "$RELEASE_NAME" \
         --notes "$RELEASE_DESCRIPTION" \
         "$FILE_PATH"

     # Verify the release
     if [ $? -eq 0 ]; then
-        echo "Release ${RELEASE_NAME} created and file uploaded successfully for ${SUBDIR}."
+        echo "Release ${RELEASE_NAME} created and file uploaded successfully for ${CONNECTOR}."
     else
-        echo "Failed to create release ${RELEASE_NAME} or upload file for ${SUBDIR}."
+        echo "Failed to create release ${RELEASE_NAME} or upload file for ${CONNECTOR}."
     fi
 done
\ No newline at end of file
diff --git a/gradle.properties b/gradle.properties
index 3e69b01..2f4752a 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,4 +1,4 @@
-version=0.1.0
+version=1.0.1

 #Gradle properties
 quarkusPluginId=io.quarkus
diff --git a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml
index 12edee2..d748bbd 100644
--- a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml
+++ b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml
@@ -1,6 +1,6 @@
 packagingDefinition:
   type: PrebuiltDockerImage
-  dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v0.1.0"
+  dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v1.0.1"
 supportedEnvironmentVariables:
   - name: JDBC_URL
     description: "The JDBC URL to connect to the database"
diff --git a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml
index f175e4f..74dadaa 100644
--- a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml
+++ b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml
@@ -1,6 +1,6 @@
 packagingDefinition:
   type: PrebuiltDockerImage
-  dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v0.1.0"
+  dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v1.0.1"
 supportedEnvironmentVariables:
   - name: JDBC_URL
     description: "The JDBC URL to connect to the database"
diff --git a/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml b/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
index 2453f37..aa99c14 100644
--- a/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
+++ b/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
@@ -1,6 +1,6 @@
 packagingDefinition:
   type: PrebuiltDockerImage
-  dockerImage: "ghcr.io/hasura/ndc-jvm-snowflake:v0.1.0"
+  dockerImage: "ghcr.io/hasura/ndc-jvm-snowflake:v1.0.1"
 supportedEnvironmentVariables:
   - name: JDBC_URL
     description: "The JDBC URL to connect to the database"

From 1c9c2c024f754b48440094b0157387dc135fabf3 Mon Sep 17 00:00:00 2001
From: Nihad 
Date: Wed, 18 Sep 2024 10:31:20 +0530
Subject: [PATCH 04/29] fix: Modified the DSL.jsonEntry (closes #7)

Previously the jsonEntry was built as:
```
is ColumnField -> {
    DSL.jsonEntry(
        alias,
        DSL.field(DSL.name(field.column))
    )
}
```
Modified it so that the aliasing issue is resolved:
```
is ColumnField -> {
    DSL.jsonEntry(
        alias,
        DSL.field(DSL.name(alias))
    )
}
```
---
 .../src/main/kotlin/io/hasura/mysql/JSONGenerator.kt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt
index 3821791..7edc89e 100644
--- a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt
+++ b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt
@@ -56,7 +56,7 @@ object JsonQueryGenerator : BaseQueryGenerator() {
                                         is ColumnField -> {
                                             DSL.jsonEntry(
                                                 alias,
-                                                DSL.field(DSL.name(field.column))
+                                                DSL.field(DSL.name(alias))
                                             )
                                         }

From ef3dd9d851e62c18fc1fbee5d049b61065f37f72 Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Wed, 25 Sep 2024 11:35:24 -0400
Subject: [PATCH 05/29] Oracle: Patch generating "null" when no relation rows

---
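Note: the hunk below wraps each relationship subquery in DSL.coalesce(...) so that a relationship with no matching rows serializes as an empty "rows" object instead of SQL NULL (which previously surfaced as a literal null in responses). A minimal sketch of the SQL shape this aims for, assuming Oracle's JSON functions; the ARTIST/ALBUM tables and columns are illustrative only, not taken from this patch:

```sql
-- Hypothetical example: ARTIST/ALBUM names are placeholders.
SELECT ar.NAME,
       COALESCE(
         (SELECT JSON_OBJECT('rows' VALUE JSON_ARRAYAGG(a.TITLE) RETURNING CLOB)
            FROM ALBUM a
           WHERE a.ARTISTID = ar.ARTISTID
           GROUP BY a.ARTISTID),  -- zero groups (hence NULL) when the artist has no albums
         JSON_OBJECT('rows' VALUE JSON_ARRAY() RETURNING CLOB)  -- fallback: {"rows":[]}
       ) AS albums
  FROM ARTIST ar;
```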
 .../main/kotlin/io/hasura/oracle/JSONGenerator.kt | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
index 69784c3..5171f27 100644
--- a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
+++ b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
@@ -85,8 +85,15 @@ object JsonQueryGenerator : BaseQueryGenerator() {

                             DSL.jsonEntry(
                                 alias,
-                                DSL.select(
-                                    subQuery.asField(alias)
+                                DSL.coalesce(
+                                    DSL.select(
+                                        subQuery.asField(alias)
+                                    ), DSL.jsonObject(
+                                        DSL.jsonEntry(
+                                            "rows",
+                                            DSL.jsonArray().returning(SQLDataType.CLOB)
+                                        )
+                                    ).returning(SQLDataType.CLOB)
                                 )
                             )
                         }
@@ -286,7 +293,7 @@ object JsonQueryGenerator : BaseQueryGenerator() {
     )

     private fun getTableName(collection: String): String {
-        return collection.split('.').last()
+        return collection.split('.').last()
     }
 }

From f062e7ffe7db463bbae7c5d1b1d70f279bf81226 Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Thu, 17 Oct 2024 12:39:08 -0400
Subject: [PATCH 06/29] WIP Snowflake work

---
 create-github-release.sh                      |  71 ++--
 docker-compose.yaml                           |   6 +-
 .../.hasura-connector/connector-metadata.yaml |   4 +-
 .../.hasura-connector/connector-metadata.yaml |   4 +-
 ndc-connector-phoenix.dockerfile              |  25 ++
 ndc-connector-phoenix/requests.http           |  50 +++
 ndc-connector-snowflake/rsa_key.p8            |  30 ++
 ndc-connector-snowflake/rsa_key.pub           |   9 +
 .../io/hasura/snowflake/CTEQueryGenerator.kt  | 371 ++++++++++++++++++
 .../SnowflakeDataConnectorService.kt          |  18 +-
 .../io/hasura/ndc/sqlgen/BaseGenerator.kt     | 126 ++----
 .../hasura/ndc/sqlgen/BaseQueryGenerator.kt   |  10 +-
 requests.http                                 | 126 ++++++
 rsa_key.p8                                    |  30 ++
 rsa_key.pub                                   |   9 +
 15 files changed, 749 insertions(+), 140 deletions(-)
 create mode 100644 ndc-connector-phoenix.dockerfile
 create mode 100644 ndc-connector-phoenix/requests.http
 create mode 100644 ndc-connector-snowflake/rsa_key.p8
 create mode 100644 ndc-connector-snowflake/rsa_key.pub
 create mode 100644 ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt
 create mode 100644 requests.http
 create mode 100644 rsa_key.p8
 create mode 100644 rsa_key.pub

diff --git a/create-github-release.sh b/create-github-release.sh
index 5586342..cfa7974 100755
--- a/create-github-release.sh
+++ b/create-github-release.sh
@@ -1,42 +1,39 @@
 #!/bin/bash
+
+# Check if the correct number of arguments is provided
+if [ "$#" -ne 2 ]; then
+    echo "Usage: $0 <subdirectory> <version>"
+    exit 1
+fi

 # Variables
 OWNER="hasura"
 REPO="ndc-jvm-mono"
-VERSION="v1.0.1"

-CONNECTORS=("ndc-connector-oracle" "ndc-connector-mysql" "ndc-connector-snowflake")
+SUBDIR="$1"   # Subdirectory passed as the first argument
+VERSION="$2"  # Version passed as the second argument

-# Loop through each connector and create a release
-for CONNECTOR in "${CONNECTORS[@]}"; do
-
-    # First, build and push the Docker images
-    export IMAGE_TAG="${VERSION}"
-    docker compose build "${CONNECTOR}"
-    docker compose push "${CONNECTOR}"
-
-    # Create the .tar.gz with the connector's ".hasura-connector" directory
-    tar -czvf "${CONNECTOR}/package.tar.gz" -C "${CONNECTOR}" .hasura-connector
-
-    # Now, create a GitHub release
-    TAG="${CONNECTOR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0
-    RELEASE_NAME="${CONNECTOR#ndc-connector-} Release ${VERSION}"
-    RELEASE_DESCRIPTION="Release for ${CONNECTOR#ndc-connector-} version ${VERSION}"
-    FILE_PATH="${CONNECTOR}/package.tar.gz"
+# Create tag, release name, and description
+TAG="${SUBDIR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0
+RELEASE_NAME="${SUBDIR#ndc-connector-} Release ${VERSION}"
+RELEASE_DESCRIPTION="Release for ${SUBDIR#ndc-connector-} version ${VERSION}"
+FILE_PATH="${SUBDIR}/package.tar.gz"
+
+# Build connector package tarball
+pushd "$SUBDIR"
+tar -czf package.tar.gz ./.hasura-connector
+echo "Created ${SUBDIR}/package.tar.gz"
+popd
+
+# Create a new release
+gh release create "$TAG" \
+    --repo "$OWNER/$REPO" \
+    --title "$RELEASE_NAME" \
--notes "$RELEASE_DESCRIPTION" \ - "$FILE_PATH" - - # Verify the release - if [ $? -eq 0 ]; then - echo "Release ${RELEASE_NAME} created and file uploaded successfully for ${CONNECTOR}." - else - echo "Failed to create release ${RELEASE_NAME} or upload file for ${CONNECTOR}." - fi -done \ No newline at end of file +SUBDIR="$1" # Subdirectory passed as the second argument +VERSION="$2" # Version passed as the first argument + +# Create tag, release name, and description +TAG="${SUBDIR#ndc-connector-}/${VERSION}" # Create tag like oracle/v1.0.0 +RELEASE_NAME="${SUBDIR#ndc-connector-} Release ${VERSION}" +RELEASE_DESCRIPTION="Release for ${SUBDIR#ndc-connector-} version ${VERSION}" +FILE_PATH="${SUBDIR}/package.tar.gz" + +# Build connector package tarball +pushd "$SUBDIR" +tar -czf package.tar.gz ./.hasura-connector +echo "Created ${SUBDIR}-package.tar.gz" +popd + +# Create a new release +gh release create "$TAG" \ + --repo "$OWNER/$REPO" \ + --title "$RELEASE_NAME" \ + --notes "$RELEASE_DESCRIPTION" \ + "$FILE_PATH" + +# Verify the release +if [ $? -eq 0 ]; then + echo "Release ${RELEASE_NAME} created and file uploaded successfully for ${SUBDIR}." +else + echo "Failed to create release ${RELEASE_NAME} or upload file for ${SUBDIR}." +fi \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index a609d21..80c9262 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -5,7 +5,7 @@ services: ### NDC Connectors ################## ndc-connector-oracle: - image: ghcr.io/hasura/ndc-jvm-oracle:${IMAGE_TAG} + image: hasuraci/ndc-oracle-connector:${IMAGE_TAG_ORACLE} build: context: "./" dockerfile: "ndc-connector-oracle.dockerfile" @@ -18,7 +18,7 @@ services: - ./ndc-connector-oracle/configuration.json:/etc/connector/configuration.json ndc-connector-mysql: - image: ghcr.io/hasura/ndc-jvm-mysql:${IMAGE_TAG} + image: ghcr.io/hasura/ndc-jvm-mysql:${IMAGE_TAG_MYSQL} build: context: "./" dockerfile: "ndc-connector-mysql.dockerfile" @@ -28,7 +28,7 @@ services: - ./ndc-connector-mysql/configuration.json:/etc/connector/configuration.json ndc-connector-snowflake: - image: ghcr.io/hasura/ndc-jvm-snowflake:${IMAGE_TAG} + image: ghcr.io/hasura/ndc-jvm-snowflake:${IMAGE_TAG_SNOWFLAKE} build: context: "./" dockerfile: "ndc-connector-snowflake.dockerfile" diff --git a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml index d748bbd..059a203 100644 --- a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml +++ b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml @@ -1,6 +1,6 @@ packagingDefinition: type: PrebuiltDockerImage - dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v1.0.1" + dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v1.0.2" supportedEnvironmentVariables: - name: JDBC_URL description: "The JDBC URL to connect to the database" @@ -9,7 +9,7 @@ commands: docker run \ -e HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH \ -v ${HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH}:/app/output \ - ghcr.io/hasura/ndc-jvm-cli:v0.1.0 update $JDBC_URL \ + ghcr.io/hasura/ndc-jvm-cli:v0.1.1 update $JDBC_URL \ --database MYSQL \ --schemas $JDBC_SCHEMAS \ --outfile /app/output/configuration.json diff --git a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml index 74dadaa..53b19cb 100644 --- a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml +++ b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml @@ -1,6 +1,6 @@ 
packagingDefinition: type: PrebuiltDockerImage - dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v1.0.1" + dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v1.0.2" supportedEnvironmentVariables: - name: JDBC_URL description: "The JDBC URL to connect to the database" @@ -11,7 +11,7 @@ commands: docker run \ -e HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH \ -v ${HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH}:/app/output \ - ghcr.io/hasura/ndc-jvm-cli:v0.1.0 update $JDBC_URL \ + ghcr.io/hasura/ndc-jvm-cli:v0.1.1 update $JDBC_URL \ --database ORACLE \ --schemas $JDBC_SCHEMAS \ --outfile /app/output/configuration.json diff --git a/ndc-connector-phoenix.dockerfile b/ndc-connector-phoenix.dockerfile new file mode 100644 index 0000000..85bd6b1 --- /dev/null +++ b/ndc-connector-phoenix.dockerfile @@ -0,0 +1,25 @@ +# Build stage +FROM registry.access.redhat.com/ubi9/openjdk-21:1.20-2 AS build + +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' + +WORKDIR /build +COPY . /build + +# Run Gradle build +USER root +RUN ./gradlew :ndc-connector-phoenix:build --no-daemon --console=plain -x test + +# Final stage +FROM registry.access.redhat.com/ubi9/openjdk-21:1.20-2 + +ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +ENV JAVA_OPTS_APPEND="-Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.lang=ALL-UNNAMED" +ENV JAVA_APP_JAR="/app/quarkus-run.jar" + +WORKDIR /app + +COPY --from=build /build/ndc-connector-phoenix/build/quarkus-app /app + +EXPOSE 8080 5005 +ENTRYPOINT [ "/opt/jboss/container/java/run/run-java.sh" ] \ No newline at end of file diff --git a/ndc-connector-phoenix/requests.http b/ndc-connector-phoenix/requests.http new file mode 100644 index 0000000..ed0c44f --- /dev/null +++ b/ndc-connector-phoenix/requests.http @@ -0,0 +1,50 @@ +POST http://localhost:8080/query +Content-Type: application/json + +{ + "collection": "US_POPULATION", + "query": { + "aggregates": null, + "fields": { + "CITY": { + "type": "column", + "column": "CITY" + }, + "STATE": { + "type": "column", + "column": "STATE" + }, + "POPULATION": { + "type": "column", + "column": "POPULATION" + } + }, + "limit": null, + "offset": null, + "order_by": null, + "predicate": { + "type": "binary_comparison_operator", + "operator": "_eq", + "column": { + "type": "column", + "name": "STATE", + "path": [] + }, + "value": { + "type": "variable", + "name": "STATE" + } + } + }, + "arguments": {}, + "collection_relationships": {}, + "variables": [ + { + "STATE": "NY" + }, + { + "STATE": "CA" + } + ], + "root_collection": "US_POPULATION" +} \ No newline at end of file diff --git a/ndc-connector-snowflake/rsa_key.p8 b/ndc-connector-snowflake/rsa_key.p8 new file mode 100644 index 0000000..d6ad158 --- /dev/null +++ b/ndc-connector-snowflake/rsa_key.p8 @@ -0,0 +1,30 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIFHDBOBgkqhkiG9w0BBQ0wQTApBgkqhkiG9w0BBQwwHAQIT5DlQO7K/8ACAggA +MAwGCCqGSIb3DQIJBQAwFAYIKoZIhvcNAwcECN52Tor4Zy19BIIEyJ3PRIrNFOrc +UFg8x841OmVPXHZkLO+ABdKcM6kMIcUQHeJ15FAh+yCpTul1dE4GkKoiKNLd4TWW +oJxa7C92Pli4EffIUND+SCL5umoujmfHi+B9huaWhhBo7y0OgbvpLQvLoyux2r9H +gUSq+SC3jKQKgp9XLqlb8ggTFEPr1+zq1z9B06TVwRTMOv2cHLzaV87Etfi23U26 +o7xC88UnPZfBM6O07M9I+28NPoKJd5VFrYZqzqpfkOb5UgySapXyZD5E1tPyg5tN +TPF3PjoxumBwEfP+uKg9zf15bR9/yTtGzyWOdVF8frUE8BgPlUvc4pWA+NNk5Tp7 +I4ACeRmf9LfHM85xWRIli3lXLhHgaoDFePmVX9yuXpzjkoX6Bx3tkH2/RZJGvay4 +32GIm2Q2Z06MNkjvUp4y+kpeGaBhWxqDILUf5/JHg8MKWnCMkj7vNdc42AxKqCyZ +CAhUq4vdIRvzDO6TqCewgqFkSE/mAGibk9WgkyWZKBKHTfOp0BcZWkD2IuUAVYfY 
+S0pfxRmBjRQEJHHXp3eJP/eOIeiTjlWWqQvPhnd1Utp99RuvaNz+pbVo6kKzr5jN
+V7nmAPpx8OreX+kHufyD7cWuJdFFKzYwBS0oE8fMUUhUVnKkHiL330WcvfZwhNM8
+ZI3pCY5IiBJ2p08zZHYQoatH3Q9hLdBUJNbbgZJXbX9nCbInhn06BXh3E3GMOz+c
+GXV8spUZHpMuwY03pdZycn82iF7y2ztFDBqH5i730dCLs/lhU+syhnX3vTZZVX6W
+lvZYBTOwcx8DfRR3SnvqQbpa0ALUOZEX30NaqwzK5EtK4oPIFASaoD0W65g7gaou
+8/1ne9YJwqkNoKfT0+VoLKbrzMT9vsYYcD77X/+ksGo5M/gGZRMeAgVrImGsuTlV
+3BcQMRFWLi/voU1P05Zf2dpHQluqBV+dcWA/o8N4eIg7AcodP2NTkA76OaxM9313
+DmiBAi3SaHeH10Eee5hMEqOYXutkAsGDCA+XxBXbrTTi6oIV/CTSPlbyygwmOYtG
+Y27dv7fULDc0iMAy5r0KiE0qxM7q9BRoUTN9qa4OdKJaopRIfPNzEyqdk7FbBtOi
+THLQHyUwG4yCBT+iurhWb2uSLvbzSZi96JND2/qzvzvORXgFc2sAWuvEx38B5KVL
+FX4pCnz2xKfxchTpEMOxLGYY0zVnO4qEUI85YDovNTMGlFg4/XuGodOoxHSZbkDF
+u8wguslcbtqqQnASN0ccjcGF9o3IYraa3BkrsN4D9+4sQdFIZlvh1cfq4wUn95RM
+Vei9sfRSwdQjne62hneM4UA+U+wHZ8hJcEh4aaScppAmqWTGVAPIGEPQqf7hG1Zy
+BEiME0+mSNt8J4uWR/kFda9vBiMoIblvwBNEOdzUf7spKy4h5qB21CkVcumphHcX
+Sj8VeT5i1bouvExY3+BxSQSvveWP8tfGJuWyndssmlx3IYzisJoceepjH+vIyzHF
+AeF8mdaq3V2OAiXihh88cHqU469HQrymUvdqWdNRTiovBPnMYmUy1rebH9yhGze9
+knD1XXGDku9dVc4Nm4RU6/eBtYXOdPrGTEoFVsCuU1pCe3IbKcE9oQb0IHAj46tG
+W9tJsNdts5lH0I53pbGe5w==
+-----END ENCRYPTED PRIVATE KEY-----
diff --git a/ndc-connector-snowflake/rsa_key.pub b/ndc-connector-snowflake/rsa_key.pub
new file mode 100644
index 0000000..a466a4c
--- /dev/null
+++ b/ndc-connector-snowflake/rsa_key.pub
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxzVyIqmfTXxY8f0zuG/f
+07VVF+iDq/4klM7rgMszQoQH3yXReWLB2QcBsTizQ10VIMWFNpgh7B/AChk2rw7z
+KwZlFAwIOyYBIgYiyhtbOBP0mM4J7hGXWjV98f72GhS0DPTN6fIU492qAxY9xHIr
+pll1XyjLARBejDkghjT+MYrnZRnVmSK6ntF16CGRoOehrsVy/UppMqmFYmkwfT7n
+RUoLK4A0lognathg0JFMY82pnpi8RQmSiu9BUA78oefMQtOfklD2iOAlrxtszlaU
+II2afigG5gZRxm+qfURiJ5IEOgRG3KEKjeHtmRjv4NS/08ZK3mC08JZ1UZbkutYi
+UwIDAQAB
+-----END PUBLIC KEY-----
diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt
new file mode 100644
index 0000000..7bfb2bb
--- /dev/null
+++ b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt
@@ -0,0 +1,371 @@
+package io.hasura
+
+import io.hasura.ndc.ir.*
+import io.hasura.ndc.ir.extensions.isVariablesRequest
+import io.hasura.ndc.sqlgen.BaseQueryGenerator
+import io.hasura.ndc.sqlgen.BaseQueryGenerator.Companion.INDEX
+import io.hasura.ndc.sqlgen.BaseQueryGenerator.Companion.ROWS_AND_AGGREGATES
+import org.jooq.*
+import org.jooq.impl.DSL
+import io.hasura.ndc.ir.Field as IRField
+
+
+object SnowflakeDSL {
+    fun rlike(col: Field<String>, field: Field<String>): Condition = DSL.condition("? 
rlike(?)", col, field) +} + +object CTEQueryGenerator : BaseQueryGenerator() { + override fun queryRequestToSQL( + request: QueryRequest + ): Select<*> { + return buildCTEs(request) + .select(DSL.jsonArrayAgg(DSL.field(DSL.name(listOf("data", ROWS_AND_AGGREGATES))))) + .from(buildSelections(request).asTable("data")) + } + + override fun buildComparison( + col: Field, + operator: ApplyBinaryComparisonOperator, + value: Field + ): Condition { + return when (operator) { + ApplyBinaryComparisonOperator.EQ -> col.eq(value) + ApplyBinaryComparisonOperator.GT -> col.gt(value) + ApplyBinaryComparisonOperator.GTE -> col.ge(value) + ApplyBinaryComparisonOperator.LT -> col.lt(value) + ApplyBinaryComparisonOperator.LTE -> col.le(value) + ApplyBinaryComparisonOperator.IN -> col.`in`(value) + ApplyBinaryComparisonOperator.IS_NULL -> col.isNull + ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field) + } + } + + + override fun forEachQueryRequestToSQL(request: QueryRequest): Select<*> { + return buildCTEs(request, listOf(buildVarsCTE(request))) + .select( + DSL.jsonArrayAgg( + DSL.coalesce( + DSL.field(DSL.name(listOf("data", ROWS_AND_AGGREGATES))) as Field<*>, + DSL.jsonObject("rows", DSL.jsonArray()) + ) + ) + .orderBy(DSL.field(DSL.name(listOf(VARS, INDEX)))) + ) + .from(buildSelections(request).asTable("data")) + .rightJoin(DSL.name(VARS)) + .on( + DSL.field(DSL.name("data", INDEX)) + .eq(DSL.field(DSL.name(VARS, INDEX))) + ) + } + + private fun buildCTEs(request: QueryRequest, varCTE: List> = emptyList()): WithStep { + return DSL.with( + varCTE + + forEachQueryLevelRecursively(request, CTEQueryGenerator::buildCTE).distinct() + ) + } + + private fun getCollectionAsjOOQName(collection: String): Name { + return DSL.name(collection.split(".")) + } + + private fun buildCTE( + request: QueryRequest, + relationship: Relationship?, + relSource: String? 
+    ): CommonTableExpression<*> {
+        return DSL.name(genCTEName(request.collection)).`as`(
+            DSL.select(DSL.asterisk())
+                .from(
+                    DSL.select(DSL.table(getCollectionAsjOOQName(request.collection)).asterisk(),
+                        DSL.rowNumber().over(
+                            DSL.partitionBy(
+                                mkJoinKeyFields(
+                                    relationship, request.collection
+                                )
+                            ).orderBy(
+                                run {
+                                    val orderByFields = translateIROrderByField(request)
+
+                                    if (request.isVariablesRequest()) listOf(
+                                        DSL.field(
+                                            DSL.name(
+                                                listOf(
+                                                    VARS,
+                                                    INDEX
+                                                )
+                                            )
+                                        )
+                                    ) else emptyList() + orderByFields.distinct().ifEmpty { listOf(DSL.trueCondition()) }
+                                }
+                            )
+                        ).`as`(getRNName(request.collection))
+
+                    )
+                        .apply {
+                            if (request.isVariablesRequest())
+                                this.select(DSL.table(DSL.name(VARS)).asterisk())
+                        }
+                        .apply {
+                            if (relationship != null
+                                && (relationship.column_mapping.isNotEmpty() || relationship.arguments.isNotEmpty())
+                            ) {
+                                from(DSL.name(genCTEName(relSource ?: request.collection)))
+                                    .innerJoin(DSL.name(relationship.target_collection))
+                                    .on(
+                                        mkSQLJoin(
+                                            relationship,
+                                            sourceCollection = genCTEName(relSource ?: request.collection)
+                                        )
+                                    )
+                            } else from(getCollectionAsjOOQName(request.collection))
+                        }
+                        .apply {
+                            addJoinsRequiredForOrderByFields(
+                                this as SelectJoinStep<*>,
+                                request,
+                                sourceCollection = request.collection
+                            )
+                        }
+                        .apply { // cross join "vars" if this request contains variables
+                            if (request.isVariablesRequest())
+                                (this as SelectJoinStep<*>).crossJoin(DSL.name(VARS))
+                        }
+                        .apply {
+                            addJoinsRequiredForPredicate(
+                                request,
+                                this as SelectJoinStep<*>
+                            )
+                        }
+                        .where(getWhereConditions(request))
+                        .asTable(request.collection.split(".").joinToString("_"))
+            ).where(mkOffsetLimit(request, DSL.field(DSL.name(getRNName(request.collection)))))
+        )
+    }
+
+    private fun <T> forEachQueryLevelRecursively(
+        request: QueryRequest,
+        elementFn: (request: QueryRequest, rel: Relationship?, relSource: String?) -> T
+    ): List<T> {
+
+        fun recur(
+            request: QueryRequest,
+            relationship: Relationship?,
+            relSource: String? = null
+        ): List<T> = buildList {
+            add(elementFn(request, relationship, relSource))
+
+            getQueryRelationFields(request.query.fields ?: emptyMap()).flatMapTo(this) {
+                val rel = request.collection_relationships[it.value.relationship]!! 
+                val args =
+                    if (rel.arguments.isEmpty() && rel.column_mapping.isEmpty() && it.value.arguments.isNotEmpty()) {
+                        it.value.arguments
+                    } else rel.arguments
+
+                recur(
+                    request = request.copy(
+                        collection = rel.target_collection,
+                        query = it.value.query
+                    ),
+                    relationship = rel.copy(arguments = args),
+                    request.collection
+                )
+            }
+        }
+
+        return recur(request, null)
+    }
+
+
+    private fun genCTEName(collection: String) = "${collection}_CTE".split(".").joinToString("_")
+    private fun getRNName(collection: String) = "${collection}_RN".split(".").joinToString("_")
+
+    private fun buildRows(request: QueryRequest): Field<*> {
+        val isObjectTarget = isTargetOfObjRel(request)
+        val agg = if (isObjectTarget) DSL::jsonArrayAggDistinct else DSL::jsonArrayAgg
+        return DSL.coalesce(
+            agg(buildRow(request))
+                .orderBy(
+                    setOrderBy(request, isObjectTarget)
+                ),
+            DSL.jsonArray()
+        )
+    }
+
+    private fun buildVariableRows(request: QueryRequest): Field<*> {
+        return DSL.arrayAgg(buildRow(request))
+            .over(DSL.partitionBy(DSL.field(DSL.name(listOf(genCTEName(request.collection), INDEX)))))
+    }
+
+    private fun buildRow(request: QueryRequest): Field<*> {
+        return DSL.jsonObject(
+            (request.query.fields?.map { (alias, field) ->
+                when (field) {
+                    is IRField.ColumnField ->
+                        DSL.jsonEntry(
+                            alias,
+                            DSL.field(DSL.name(genCTEName(request.collection), field.column))
+                        )
+
+                    is IRField.RelationshipField -> {
+                        val relation = request.collection_relationships[field.relationship]!!
+
+                        DSL.jsonEntry(
+                            alias,
+                            DSL.coalesce(
+                                DSL.field(
+                                    DSL.name(
+                                        createAlias(
+                                            relation.target_collection,
+                                            isAggOnlyRelationField(field)
+                                        ),
+                                        ROWS_AND_AGGREGATES
+                                    )
+                                ) as Field<*>,
+                                setRelFieldDefaults(field)
+                            )
+                        )
+                    }
+                }
+            } ?: emptyList<JSONEntry<*>>())
+        )
+    }
+
+    private fun isTargetOfObjRel(request: QueryRequest): Boolean {
+        return request.collection_relationships.values.find {
+            it.target_collection == request.collection && it.relationship_type == RelationshipType.Object
+        } != null
+    }
+
+    private fun setRelFieldDefaults(field: IRField.RelationshipField): Field<*> {
+        return if (isAggOnlyRelationField(field))
+            DSL.jsonObject("aggregates", setAggregateDefaults(field))
+        else if (isAggRelationField(field))
+            DSL.jsonObject(
+                DSL.jsonEntry("rows", DSL.jsonArray()),
+                DSL.jsonEntry("aggregates", setAggregateDefaults(field))
+            )
+        else DSL.jsonObject("rows", DSL.jsonArray())
+    }
+
+
+    private fun isAggRelationField(field: IRField.RelationshipField) = !field.query.aggregates.isNullOrEmpty()
+
+    private fun isAggOnlyRelationField(field: IRField.RelationshipField) =
+        field.query.fields == null && isAggRelationField(field)
+
+    private fun setAggregateDefaults(field: IRField.RelationshipField): Field<*> =
+        getDefaultAggregateJsonEntries(field.query.aggregates)
+
+    private fun setOrderBy(request: QueryRequest, isObjectTarget: Boolean): List<Field<*>> {
+        return if (isObjectTarget /* || request.isNativeQuery() */) emptyList()
+        else listOf(DSL.field(DSL.name(getRNName(request.collection))) as Field<*>)
+    }
+
+    private fun buildSelections(request: QueryRequest): Select<*> {
+        val selects = forEachQueryLevelRecursively(request, CTEQueryGenerator::buildSelect)
+
+        // this is a non-relational query so just return the single select
+        if (selects.size == 1) return selects.first().second
+
+
+        selects.forEachIndexed() { idx, (request, select) ->
+            val relationships = getQueryRelationFields(request.query.fields).values.map {
+                val rel = request.collection_relationships[it.relationship]!! 
+                val args = if (rel.arguments.isEmpty() && rel.column_mapping.isEmpty() && it.arguments.isNotEmpty()) {
+                    it.arguments
+                } else rel.arguments
+                rel.copy(arguments = args)
+            }
+
+            relationships.forEach { relationship ->
+
+                val innerSelects =
+                    selects.minus(selects[idx]).filter { it.first.collection == relationship.target_collection }
+
+                innerSelects.forEach { (innerRequest, innerSelect) ->
+                    val innerAlias = createAlias(
+                        innerRequest.collection, isAggregateOnlyRequest(innerRequest)
+                    )
+
+                    run {
+                        select
+                            .leftJoin(
+                                innerSelect.asTable(innerAlias)
+                            )
+                            .on(
+                                mkSQLJoin(
+                                    relationship,
+                                    sourceCollection = genCTEName(request.collection),
+                                    targetTableNameTransform = { innerAlias }
+                                )
+                            )
+                    }
+                }
+            }
+        }
+        return selects.first().second
+    }
+
+    private fun getVarCols(request: QueryRequest): List<Field<*>> {
+        fun getVars(e: Expression): List<Field<*>> {
+            return when (e) {
+                is Expression.And -> e.expressions.flatMap { getVars(it) }
+                is Expression.Or -> e.expressions.flatMap { getVars(it) }
+                is Expression.Not -> getVars(e.expression)
+                is Expression.ApplyBinaryArrayComparison ->
+                    if (e.values.filterIsInstance<ComparisonValue.VariableComp>().isNotEmpty())
+                        listOf(DSL.field(DSL.name(e.column.name)))
+                    else emptyList()
+
+                is Expression.ApplyBinaryComparison ->
+                    if (e.value is ComparisonValue.VariableComp)
+                        listOf(DSL.field(DSL.name(e.column.name)))
+                    else emptyList()
+
+                else -> emptyList()
+            }
+        }
+
+        return request.query.predicate?.let { getVars(it) } ?: emptyList()
+    }
+
+    private fun buildSelect(
+        request: QueryRequest,
+        relationship: Relationship? = null,
+        relSource: String? = null
+    ): Pair<QueryRequest, SelectJoinStep<Record>> {
+        val joinFields = if (relationship != null)
+            mkJoinKeyFields(relationship, genCTEName(relationship.target_collection))
+        else emptyList()
+
+        return Pair(
+            request,
+            DSL.selectDistinct(
+                listOf(
+                    buildOuterStructure(
+                        request,
+                        if (request.isVariablesRequest()) CTEQueryGenerator::buildVariableRows else CTEQueryGenerator::buildRows
+                    ).`as`(ROWS_AND_AGGREGATES)
+                ) +
+                        if (request.isVariablesRequest())
+                            (getVarCols(request) + listOf(DSL.field(DSL.name(INDEX))))
+                        else emptyList()
+            )
+                .apply {
+                    this.select(joinFields)
+                }
+                .from(DSL.name(genCTEName(request.collection)))
+                .apply {
+                    if (joinFields.isNotEmpty()) groupBy(joinFields)
+                }
+        )
+    }
+
+    private fun createAlias(collection: String, isAggregateOnly: Boolean): String {
+        return "$collection${if (isAggregateOnly) "_AGG" else ""}".replace(".", "_")
+    }
+
+}
\ No newline at end of file
diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
index 42fde11..ed1a6b9 100644
--- a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
+++ b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
@@ -2,6 +2,7 @@ package io.hasura.snowflake
 
 import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
 import com.fasterxml.jackson.module.kotlin.readValue
+import io.hasura.CTEQueryGenerator
 import io.hasura.ndc.app.interfaces.IDataSourceProvider
 import io.hasura.ndc.app.services.ConnectorConfigurationLoader
 import io.hasura.ndc.app.services.dataConnectors.BaseDataConnectorService
@@ -65,7 +66,12 @@ class SnowflakeDataConnectorService @Inject constructor(
         println(ConnectorConfigurationLoader.config)
 
         val dslCtx = mkDSLCtx()
-        val query = JsonQueryGenerator.queryRequestToSQL(request)
+
+        val query = if (!request.variables.isNullOrEmpty()) {
CTEQueryGenerator.forEachQueryRequestToSQL(request)
+        } else {
+            CTEQueryGenerator.queryRequestToSQL(request)
+        }
 
         println(
             dslCtx
@@ -74,18 +80,14 @@ class SnowflakeDataConnectorService @Inject constructor(
         val rows = executeDbQuery(query, dslCtx)
 
         val json = rows.getValue(0, 0).toString()
-        val rowset = objectMapper.readValue<RowSet?>(json)
-        return if (rowset == null) {
-            listOf(RowSet(rows = emptyList(), aggregates = emptyMap()))
-        } else {
-            listOf(rowset)
-        }
+        val rowsets = objectMapper.readValue<List<RowSet>>(json)
+        return rowsets
     }
 
     override val jooqDialect = SQLDialect.SNOWFLAKE
     override val jooqSettings =
-        commonDSLContextSettings.withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_UNQUOTED)
+        commonDSLContextSettings.withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_QUOTED)
     override val sqlGenerator = JsonQueryGenerator
     override val mutationTranslator = MutationTranslator
 }
diff --git a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
index b15bf14..0708774 100644
--- a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
+++ b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
@@ -81,36 +81,33 @@ sealed interface BaseGenerator {
         request: QueryRequest
     ): Condition {
 
+        fun splitCollectionName(collectionName: String): List<String> {
+            return collectionName.split(".")
+        }
+
         return when (e) {
-            // The negation of a single subexpression
-            is Expression.Not -> DSL.not(expressionToCondition(e.expression,request))
+            is Expression.Not -> DSL.not(expressionToCondition(e.expression, request))
 
-            // A conjunction of several subexpressions
             is Expression.And -> when (e.expressions.size) {
                 0 -> DSL.trueCondition()
-                else -> DSL.and(e.expressions.map { expressionToCondition(it,request) })
+                else -> DSL.and(e.expressions.map { expressionToCondition(it, request) })
             }
 
-            // A disjunction of several subexpressions
             is Expression.Or -> when (e.expressions.size) {
                 0 -> DSL.falseCondition()
-                else -> DSL.or(e.expressions.map { expressionToCondition(it,request) })
+                else -> DSL.or(e.expressions.map { expressionToCondition(it, request) })
             }
 
-            // Test the specified column against a single value using a particular binary comparison operator
             is Expression.ApplyBinaryComparison -> {
                 val column = DSL.field(
                     DSL.name(
-                        listOf(
-                            getCollectionForCompCol(e.column, request),
-                            e.column.name
-                        )
+                        splitCollectionName(getCollectionForCompCol(e.column, request)) + e.column.name
                     )
                 )
                 val comparisonValue = when (val v = e.value) {
                     is ComparisonValue.ColumnComp -> {
-                        val col = getCollectionForCompCol(v.column, request)
-                        DSL.field(DSL.name(listOf(col, v.column.name)))
+                        val col = splitCollectionName(getCollectionForCompCol(v.column, request))
+                        DSL.field(DSL.name(col + v.column.name))
                     }
 
                     is ComparisonValue.ScalarComp -> DSL.inline(v.value)
@@ -119,91 +116,53 @@ sealed interface BaseGenerator {
                 return buildComparison(column, e.operator, comparisonValue)
             }
 
-            // Test the specified column against a particular unary comparison operator
             is Expression.ApplyUnaryComparison -> {
-                val baseCond = run {
-                    val column = DSL.field(DSL.name(listOf(request.collection, e.column)))
-                    when (e.operator) {
-                        ApplyUnaryComparisonOperator.IS_NULL -> column.isNull
-                    }
+                val column = DSL.field(DSL.name(splitCollectionName(request.collection) + e.column))
+                when (e.operator) {
+                    ApplyUnaryComparisonOperator.IS_NULL -> column.isNull
                 }
-                baseCond
             }
 
-            // Test the specified column against an array of values using a particular binary comparison operator
             is 
Expression.ApplyBinaryArrayComparison -> { - val baseCond = run { - val column = DSL.field( - DSL.name( - listOf( - getCollectionForCompCol(e.column, request), - e.column.name - ) - ) + val column = DSL.field( + DSL.name( + splitCollectionName(getCollectionForCompCol(e.column, request)) + e.column.name ) - when (e.operator) { - ApplyBinaryArrayComparisonOperator.IN -> { - when { - // Generate "IN (SELECT NULL WHERE 1 = 0)" for easier debugging - e.values.isEmpty() -> column.`in`( - DSL.select(DSL.nullCondition()) - .where(DSL.inline(1).eq(DSL.inline(0))) - ) - - else -> { + ) + when (e.operator) { + ApplyBinaryArrayComparisonOperator.IN -> { + when { + e.values.isEmpty() -> column.`in`( + DSL.select(DSL.nullCondition()) + .where(DSL.inline(1).eq(DSL.inline(0))) + ) - // TODO: swtich map to local context as map will need to be separate for the select column comparisions - // TODO: is it safe to assume that cols will all be from one collections? - column.`in`(DSL.list(e.values.map { - when (it) { - is ComparisonValue.ScalarComp -> DSL.inline(it.value) - is ComparisonValue.VariableComp -> DSL.field(DSL.name(listOf("vars", it.name))) - is ComparisonValue.ColumnComp -> { - val col = getCollectionForCompCol(it.column, request) - DSL.field(DSL.name(listOf(col, it.column.name))) - } - } - })) + else -> column.`in`(DSL.list(e.values.map { + when (it) { + is ComparisonValue.ScalarComp -> DSL.inline(it.value) + is ComparisonValue.VariableComp -> DSL.field(DSL.name(listOf("vars", it.name))) + is ComparisonValue.ColumnComp -> { + val col = splitCollectionName(getCollectionForCompCol(it.column, request)) + DSL.field(DSL.name(col + it.column.name)) + } } - } + })) } } } - baseCond } - // Test if a row exists that matches the where subexpression in the specified table (in_table) - // - // where ( - // exists ( - // select 1 "one" - // from "AwsDataCatalog"."chinook"."album" - // where ( - // "AwsDataCatalog"."chinook"."album"."artistid" = "artist_base_fields_0"."artistid" - // and "AwsDataCatalog"."chinook"."album"."title" = 'For Those About To Rock We Salute You' - // and exists ( - // select 1 "one" - // from "AwsDataCatalog"."chinook"."track" - // where ( - // "AwsDataCatalog"."chinook"."track"."albumid" = "albumid" - // and "AwsDataCatalog"."chinook"."track"."name" = 'For Those About To Rock (We Salute You)' - // ) - // ) - // ) - // ) - // ) is Expression.Exists -> { when (val inTable = e.in_collection) { - // The table is related to the current table via the relationship name specified in relationship - // (this means it should be joined to the current table via the relationship) is ExistsInCollection.Related -> { - val relOrig = request.collection_relationships[inTable.relationship] ?: throw Exception("Exists relationship not found") + val relOrig = request.collection_relationships[inTable.relationship] + ?: throw Exception("Exists relationship not found") val rel = relOrig.copy(arguments = relOrig.arguments + inTable.arguments) DSL.exists( DSL .selectOne() .from( - DSL.table(DSL.name(rel.target_collection)) + DSL.table(DSL.name(splitCollectionName(rel.target_collection))) ) .where( DSL.and( @@ -214,17 +173,15 @@ sealed interface BaseGenerator { rel.target_collection ) ) + - rel.column_mapping.map { (sourceCol, targetCol) -> - DSL.field(DSL.name(listOf(request.collection, sourceCol))) - .eq(DSL.field(DSL.name(listOf(rel.target_collection, targetCol)))) - } + rel.arguments.map {argumentToCondition(request, it, rel.target_collection) } + rel.column_mapping.map { (sourceCol, targetCol) -> + 
DSL.field(DSL.name(splitCollectionName(request.collection) + sourceCol))
+                                            .eq(DSL.field(DSL.name(splitCollectionName(rel.target_collection) + targetCol)))
+                                    } + rel.arguments.map { argumentToCondition(request, it, rel.target_collection) }
                             )
                         )
                 )
             }
 
-            // The table specified by table is unrelated to the current table and therefore is not explicitly joined to the current table
-            // (this means it should be joined to the current table via a subquery)
             is ExistsInCollection.Unrelated -> {
                 val condition = mkSQLJoin(
                     Relationship(
@@ -239,7 +196,7 @@
                 DSL
                     .selectOne()
                     .from(
-                        DSL.table(DSL.name(inTable.collection))
+                        DSL.table(DSL.name(splitCollectionName(inTable.collection)))
                     )
                     .where(
                         listOf(
@@ -256,4 +213,5 @@
         }
     }
 }
+
 }
diff --git a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
index cd944b7..0f9a2f1 100644
--- a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
+++ b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
@@ -28,7 +28,7 @@ abstract class BaseQueryGenerator : BaseGenerator {
             .mapValues { it.value as IRField.ColumnField }
     }
 
-    fun getQueryRelationFields(fields: Map<String, IRField>?): Map<String, IRField.RelationshipField> {
+    protected fun getQueryRelationFields(fields: Map<String, IRField>?): Map<String, IRField.RelationshipField> {
         return fields
             ?.filterValues { it is IRField.RelationshipField }
             ?.mapValues { it.value as IRField.RelationshipField }
@@ -373,7 +373,7 @@ abstract class BaseQueryGenerator : BaseGenerator {
         val fields = request.variables!!.flatMap { it.keys }.toSet()
         return DSL
             .name(VARS + suffix)
-            .fields(*fields.toTypedArray().plus(INDEX))
+            .fields(*fields.plus(INDEX).map { DSL.quotedName(it) }.toTypedArray())
             .`as`(
                 request.variables!!.mapIndexed { idx, variable ->
                     val f = variable.values.map { value ->
@@ -407,7 +407,9 @@ abstract class BaseQueryGenerator : BaseGenerator {
                 e = where,
                 request
             )
-        } ?: DSL.noCondition()))
+        } ?: DSL.noCondition())).also {
+            println("Where conditions: $it")
+        }
     }
 
     protected fun getDefaultAggregateJsonEntries(aggregates: Map<String, Aggregate>?): Field<*> {
@@ -429,7 +431,7 @@ abstract class BaseQueryGenerator : BaseGenerator {
         const val MAX_QUERY_ROWS = 2147483647
         const val FOREACH_ROWS = "foreach_rows"
         const val VARS = "vars"
-        const val INDEX = "index"
+        const val INDEX = "idx"
         const val ROWS_AND_AGGREGATES = "rows_and_aggregates"
     }
 }
diff --git a/requests.http b/requests.http
new file mode 100644
index 0000000..ff17d50
--- /dev/null
+++ b/requests.http
@@ -0,0 +1,126 @@
+POST http://0.0.0.0:8080/query
+Content-Type: application/json
+
+{
+    "collection": "CHINOOK.PUBLIC.ARTIST",
+    "query": {
+        "fields": {
+            "artistid": {
+                "type": "column",
+                "column": "ARTISTID",
+                "fields": null
+            },
+            "name": {
+                "type": "column",
+                "column": "NAME",
+                "fields": null
+            },
+            "chinookAlbums": {
+                "type": "relationship",
+                "query": {
+                    "fields": {
+                        "albumid": {
+                            "type": "column",
+                            "column": "ALBUMID",
+                            "fields": null
+                        },
+                        "artistid": {
+                            "type": "column",
+                            "column": "ARTISTID",
+                            "fields": null
+                        },
+                        "title": {
+                            "type": "column",
+                            "column": "TITLE",
+                            "fields": null
+                        }
+                    },
+                    "limit": 1,
+                    "order_by": {
+                        "elements": [
+                            {
+                                "order_direction": "asc",
+                                "target": {
+                                    "type": "column",
+                                    "name": "ALBUMID",
+                                    "path": []
+                                }
+                            }
+                        ]
+                    }
+                },
+                "relationship": "app___ChinookArtist__chinookAlbums",
+                "arguments": {}
+            }
+        },
+        "limit": 25,
+        "order_by": {
+            "elements": [
+                {
+                    "order_direction": "asc",
+                    "target": {
+                        "type": "column",
+                        "name": "ARTISTID",
+                        "path": 
[] + } + } + ] + } + }, + "arguments": {}, + "collection_relationships": { + "app___ChinookArtist__chinookAlbums": { + "column_mapping": { + "ARTISTID": "ARTISTID" + }, + "relationship_type": "array", + "target_collection": "CHINOOK.PUBLIC.ALBUM", + "arguments": {} + } + } +} + +### +POST http://0.0.0.0:8080/query +Content-Type: application/json + +{ + "collection": "CHINOOK.PUBLIC.ARTIST", + "query": { + "fields": { + "artistid": { + "type": "column", + "column": "ARTISTID", + "fields": null + }, + "name": { + "type": "column", + "column": "NAME", + "fields": null + } + }, + "predicate": { + "type": "binary_comparison_operator", + "operator": "_eq", + "column": { + "type": "column", + "name": "ARTISTID", + "path": [] + }, + "value": { + "type": "variable", + "name": "$ARTISTID" + } + } + }, + "arguments": {}, + "variables": [ + { + "$ARTISTID": 1 + }, + { + "$ARTISTID": 2 + } + ], + "root_collection": "CHINOOK.PUBLIC.ARTIST" +} \ No newline at end of file diff --git a/rsa_key.p8 b/rsa_key.p8 new file mode 100644 index 0000000..d6ad158 --- /dev/null +++ b/rsa_key.p8 @@ -0,0 +1,30 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIFHDBOBgkqhkiG9w0BBQ0wQTApBgkqhkiG9w0BBQwwHAQIT5DlQO7K/8ACAggA +MAwGCCqGSIb3DQIJBQAwFAYIKoZIhvcNAwcECN52Tor4Zy19BIIEyJ3PRIrNFOrc +UFg8x841OmVPXHZkLO+ABdKcM6kMIcUQHeJ15FAh+yCpTul1dE4GkKoiKNLd4TWW +oJxa7C92Pli4EffIUND+SCL5umoujmfHi+B9huaWhhBo7y0OgbvpLQvLoyux2r9H +gUSq+SC3jKQKgp9XLqlb8ggTFEPr1+zq1z9B06TVwRTMOv2cHLzaV87Etfi23U26 +o7xC88UnPZfBM6O07M9I+28NPoKJd5VFrYZqzqpfkOb5UgySapXyZD5E1tPyg5tN +TPF3PjoxumBwEfP+uKg9zf15bR9/yTtGzyWOdVF8frUE8BgPlUvc4pWA+NNk5Tp7 +I4ACeRmf9LfHM85xWRIli3lXLhHgaoDFePmVX9yuXpzjkoX6Bx3tkH2/RZJGvay4 +32GIm2Q2Z06MNkjvUp4y+kpeGaBhWxqDILUf5/JHg8MKWnCMkj7vNdc42AxKqCyZ +CAhUq4vdIRvzDO6TqCewgqFkSE/mAGibk9WgkyWZKBKHTfOp0BcZWkD2IuUAVYfY +S0pfxRmBjRQEJHHXp3eJP/eOIeiTjlWWqQvPhnd1Utp99RuvaNz+pbVo6kKzr5jN +V7nmAPpx8OreX+kHufyD7cWuJdFFKzYwBS0oE8fMUUhUVnKkHiL330WcvfZwhNM8 +ZI3pCY5IiBJ2p08zZHYQoatH3Q9hLdBUJNbbgZJXbX9nCbInhn06BXh3E3GMOz+c +GXV8spUZHpMuwY03pdZycn82iF7y2ztFDBqH5i730dCLs/lhU+syhnX3vTZZVX6W +lvZYBTOwcx8DfRR3SnvqQbpa0ALUOZEX30NaqwzK5EtK4oPIFASaoD0W65g7gaou +8/1ne9YJwqkNoKfT0+VoLKbrzMT9vsYYcD77X/+ksGo5M/gGZRMeAgVrImGsuTlV +3BcQMRFWLi/voU1P05Zf2dpHQluqBV+dcWA/o8N4eIg7AcodP2NTkA76OaxM9313 +DmiBAi3SaHeH10Eee5hMEqOYXutkAsGDCA+XxBXbrTTi6oIV/CTSPlbyygwmOYtG +Y27dv7fULDc0iMAy5r0KiE0qxM7q9BRoUTN9qa4OdKJaopRIfPNzEyqdk7FbBtOi +THLQHyUwG4yCBT+iurhWb2uSLvbzSZi96JND2/qzvzvORXgFc2sAWuvEx38B5KVL +FX4pCnz2xKfxchTpEMOxLGYY0zVnO4qEUI85YDovNTMGlFg4/XuGodOoxHSZbkDF +u8wguslcbtqqQnASN0ccjcGF9o3IYraa3BkrsN4D9+4sQdFIZlvh1cfq4wUn95RM +Vei9sfRSwdQjne62hneM4UA+U+wHZ8hJcEh4aaScppAmqWTGVAPIGEPQqf7hG1Zy +BEiME0+mSNt8J4uWR/kFda9vBiMoIblvwBNEOdzUf7spKy4h5qB21CkVcumphHcX +Sj8VeT5i1bouvExY3+BxSQSvveWP8tfGJuWyndssmlx3IYzisJoceepjH+vIyzHF +AeF8mdaq3V2OAiXihh88cHqU469HQrymUvdqWdNRTiovBPnMYmUy1rebH9yhGze9 +knD1XXGDku9dVc4Nm4RU6/eBtYXOdPrGTEoFVsCuU1pCe3IbKcE9oQb0IHAj46tG +W9tJsNdts5lH0I53pbGe5w== +-----END ENCRYPTED PRIVATE KEY----- diff --git a/rsa_key.pub b/rsa_key.pub new file mode 100644 index 0000000..a466a4c --- /dev/null +++ b/rsa_key.pub @@ -0,0 +1,9 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxzVyIqmfTXxY8f0zuG/f +07VVF+iDq/4klM7rgMszQoQH3yXReWLB2QcBsTizQ10VIMWFNpgh7B/AChk2rw7z +KwZlFAwIOyYBIgYiyhtbOBP0mM4J7hGXWjV98f72GhS0DPTN6fIU492qAxY9xHIr +pll1XyjLARBejDkghjT+MYrnZRnVmSK6ntF16CGRoOehrsVy/UppMqmFYmkwfT7n +RUoLK4A0lognathg0JFMY82pnpi8RQmSiu9BUA78oefMQtOfklD2iOAlrxtszlaU +II2afigG5gZRxm+qfURiJ5IEOgRG3KEKjeHtmRjv4NS/08ZK3mC08JZ1UZbkutYi +UwIDAQAB 
+-----END PUBLIC KEY-----

From 2fc2de01e0258535cf905e59ab14cc9eeaa76aac Mon Sep 17 00:00:00 2001
From: Jonathan Weiss 
Date: Mon, 28 Oct 2024 14:47:55 -0400
Subject: [PATCH 07/29] changed return status on /health from 204 to 200

---
 .../io/hasura/ndc/app/controllers/DataConnectorResource.kt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/controllers/DataConnectorResource.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/controllers/DataConnectorResource.kt
index 52605fc..8cdd890 100644
--- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/controllers/DataConnectorResource.kt
+++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/controllers/DataConnectorResource.kt
@@ -22,7 +22,7 @@ class DataConnectorResource @Inject constructor(
         val canConnectToDB = dataConnectorService.runHealthCheckQuery()
         if (canConnectToDB) {
             return Response
-                .status(Response.Status.NO_CONTENT)
+                .status(Response.Status.OK)
                 .build()
         } else {
             throw RuntimeException("Unable to connect to DB")

From 023cd0c1b63317599fe93f19ece48b7361b2c570 Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Tue, 29 Oct 2024 10:17:03 -0400
Subject: [PATCH 08/29] Amend temporary image name swap used for testing

---
 docker-compose.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker-compose.yaml b/docker-compose.yaml
index 80c9262..fb1eabf 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -5,7 +5,7 @@ services:
   ### NDC Connectors
   ##################
   ndc-connector-oracle:
-    image: hasuraci/ndc-oracle-connector:${IMAGE_TAG_ORACLE}
+    image: ghcr.io/hasura/ndc-jvm-oracle:${IMAGE_TAG_ORACLE}
     build:
       context: "./"
       dockerfile: "ndc-connector-oracle.dockerfile"

From 024d06f337329fc9a7a6534ac20381b7b47ec4d7 Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Tue, 29 Oct 2024 14:52:52 -0400
Subject: [PATCH 09/29] Release Snowflake v1.0.2

---
 .../.hasura-connector/connector-metadata.yaml            | 2 +-
 .../io/hasura/snowflake/SnowflakeDataConnectorService.kt | 9 +--------
 2 files changed, 2 insertions(+), 9 deletions(-)

diff --git a/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml b/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
index aa99c14..a8b09d5 100644
--- a/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
+++ b/ndc-connector-snowflake/.hasura-connector/connector-metadata.yaml
@@ -1,6 +1,6 @@
 packagingDefinition:
   type: PrebuiltDockerImage
-  dockerImage: "ghcr.io/hasura/ndc-jvm-snowflake:v1.0.1"
+  dockerImage: "ghcr.io/hasura/ndc-jvm-snowflake:v1.0.2"
 supportedEnvironmentVariables:
   - name: JDBC_URL
     description: "The JDBC URL to connect to the database"
diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
index ed1a6b9..886f3ef 100644
--- a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
+++ b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt
@@ -63,8 +63,6 @@ class SnowflakeDataConnectorService @Inject constructor(
     )
 
     override fun handleQuery(request: QueryRequest): List<RowSet> {
-        println(ConnectorConfigurationLoader.config)
-
         val dslCtx = mkDSLCtx()
 
         val query = if (!request.variables.isNullOrEmpty()) {
@@ -73,11 +71,6 @@ class SnowflakeDataConnectorService @Inject constructor(
             CTEQueryGenerator.queryRequestToSQL(request)
         }
 
-        println(
-            dslCtx
-                .renderInlined(query)
-        )
-
         val rows = executeDbQuery(query, 
dslCtx)
 
         val json = rows.getValue(0, 0).toString()
 
@@ -88,6 +81,6 @@ class SnowflakeDataConnectorService @Inject constructor(
     override val jooqDialect = SQLDialect.SNOWFLAKE
     override val jooqSettings =
         commonDSLContextSettings.withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_QUOTED)
-    override val sqlGenerator = JsonQueryGenerator
+    override val sqlGenerator = CTEQueryGenerator
     override val mutationTranslator = MutationTranslator
 }

From 33e5b325290dfb85f72b8872aecbe67b5a261521 Mon Sep 17 00:00:00 2001
From: Gavin Ray 
Date: Tue, 12 Nov 2024 09:52:26 -0500
Subject: [PATCH 10/29] Update MySQL, preserve NQs in CLI, misc cleanups around
 codebase

---
 .../io/hasura/ndc/app/application/Filters.kt  |   3 +-
 .../services/ConnectorConfigurationLoader.kt  |  31 --
 .../BaseDataConnectorService.kt               |   5 +-
 ndc-cli/src/main/kotlin/io/hasura/cli/main.kt |  31 +-
 .../kotlin/io/hasura/mysql/JSONGenerator.kt   | 372 +++++++++---------
 .../hasura/mysql/MySQLDataConnectorService.kt |  50 +--
 .../kotlin/io/hasura/oracle/JSONGenerator.kt  |  20 +-
 .../oracle/OracleDataConnectorService.kt      |  29 +-
 .../io/hasura/snowflake/CTEQueryGenerator.kt  |  17 -
 .../io/hasura/snowflake/JSONGenerator.kt      | 319 ---------------
 .../SnowflakeDataConnectorService.kt          |  25 +-
 .../ndc/common/ConnectorConfiguration.kt      |  33 +-
 .../src/main/kotlin/io/hasura/ndc/ir/Query.kt |   2 +
 .../io/hasura/ndc/sqlgen/BaseGenerator.kt     |  43 +-
 .../hasura/ndc/sqlgen/BaseQueryGenerator.kt   |  39 ++
 15 files changed, 347 insertions(+), 672 deletions(-)
 delete mode 100644 ndc-app/src/main/kotlin/io/hasura/ndc/app/services/ConnectorConfigurationLoader.kt
 delete mode 100644 ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/JSONGenerator.kt

diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Filters.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Filters.kt
index 0b69c94..cd9658a 100644
--- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Filters.kt
+++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/application/Filters.kt
@@ -15,7 +15,8 @@ class Filters {
     @ServerRequestFilter(priority = 0)
     fun logBodyFilter(info: UriInfo, request: HttpServerRequest, ctx: ContainerRequestContext) {
         request.body { b ->
-            logger.debug("INCOMING IR: ${b.result()}")
+            logger.debug("INCOMING IR:")
+            logger.debug(b.result())
         }
     }
 }
diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/ConnectorConfigurationLoader.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/ConnectorConfigurationLoader.kt
deleted file mode 100644
index 9149449..0000000
--- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/ConnectorConfigurationLoader.kt
+++ /dev/null
@@ -1,31 +0,0 @@
-package io.hasura.ndc.app.services
-
-import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
-import io.hasura.ndc.common.ConnectorConfiguration
-import java.io.File
-import java.nio.file.Path
-
-object ConnectorConfigurationLoader {
-    private val mapper = jacksonObjectMapper()
-
-    private val DEFAULT_CONFIG_DIRECTORY = "/etc/connector"
-    private val ENV_SUPPLIED_CONFIG_DIRECTORY = System.getenv("HASURA_CONFIGURATION_DIRECTORY")
-    private val CONFIG_FILE_NAME = "configuration.json"
-
-    val config: ConnectorConfiguration
-
-    init {
-        val configPath = getConfigPath()
-        println("Loading configuration from $configPath")
-        config = loadConfigFile(configPath)
-    }
-
-    private fun getConfigPath(): Path {
-        val configDirectory = ENV_SUPPLIED_CONFIG_DIRECTORY ?: DEFAULT_CONFIG_DIRECTORY
-        return Path.of(configDirectory, CONFIG_FILE_NAME)
-    }
-
-    private fun loadConfigFile(path: Path): 
ConnectorConfiguration {
-        return mapper.readValue(File(path.toString()), ConnectorConfiguration::class.java)
-    }
-}
diff --git a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt
index b072360..0dd3e88 100644
--- a/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt
+++ b/ndc-app/src/main/kotlin/io/hasura/ndc/app/services/dataConnectors/BaseDataConnectorService.kt
@@ -9,7 +9,6 @@ import io.hasura.ndc.app.interfaces.ISchemaGenerator
 import io.hasura.ndc.common.ConnectorConfiguration
 import io.hasura.ndc.app.models.ExplainResponse
 import io.hasura.ndc.app.services.AgroalDataSourceService
-import io.hasura.ndc.app.services.ConnectorConfigurationLoader
 import io.opentelemetry.api.trace.Tracer
 import io.opentelemetry.instrumentation.annotations.WithSpan
 import jakarta.enterprise.inject.Produces
@@ -106,14 +105,14 @@ abstract class BaseDataConnectorService(
 
     @WithSpan
     open fun mkDSLCtx(): DSLContext {
-        val config = ConnectorConfigurationLoader.config
+        val config = ConnectorConfiguration.Loader.config
         val ds = dataSourceProvider.getDataSource(config)
         return DSL.using(ds, jooqDialect, jooqSettings)
     }
 
     @WithSpan
     override fun getSchema(): SchemaResponse {
-        return schemaGenerator.getSchema(ConnectorConfigurationLoader.config)
+        return schemaGenerator.getSchema(ConnectorConfiguration.Loader.config)
     }
 
 
diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
index 6e32314..6a47906 100644
--- a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
+++ b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt
@@ -1,10 +1,11 @@
 package io.hasura.cli
 
 import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
+import io.hasura.ndc.common.ConnectorConfiguration
 import picocli.CommandLine
 import picocli.CommandLine.*
-import java.io.File
 import java.nio.file.Files
+import java.nio.file.Path
 import java.nio.file.attribute.PosixFilePermissions
 import kotlin.system.exitProcess
 
@@ -57,6 +58,15 @@ class CLI {
         )
         schemas: List<String>? 
) { + val configFilePath = ConnectorConfiguration.Loader.getConfigFilePath() + val existingConfig = ConnectorConfiguration.Loader.config + + println("Checking for configuration file at $configFilePath") + if (existingConfig == ConnectorConfiguration()) { + println("Non-existent or empty configuration file detected") + } else { + println("Existing configuration file detected") + } val configGenerator = when (database) { DatabaseType.ORACLE -> OracleConfigGenerator @@ -64,14 +74,21 @@ class CLI { DatabaseType.SNOWFLAKE -> SnowflakeConfigGenerator } - val config = configGenerator.getConfig( + println("Generating configuration for $database database...") + val introspectedConfig = configGenerator.getConfig( jdbcUrl = jdbcUrl, schemas = schemas ?: emptyList() ) + val mergedConfigWithNativeQueries = introspectedConfig.copy( + nativeQueries = existingConfig.nativeQueries + ) - val file = File(outfile) + val outfilePath = Path.of(ConnectorConfiguration.Loader.CONFIG_DIRECTORY, outfile) + println("Writing configuration to file: $configFilePath") + + val file = configFilePath.toFile() try { - mapper.writerWithDefaultPrettyPrinter().writeValue(file, config) + mapper.writerWithDefaultPrettyPrinter().writeValue(file, mergedConfigWithNativeQueries) } catch (e: Exception) { println("Error writing configuration to file: ${e.message}") @@ -96,9 +113,3 @@ class CLI { } } } - -fun main(args: Array) { - val cli = CommandLine(CLI()) - val exitCode = cli.execute(*args) - exitProcess(exitCode) -} diff --git a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt index 7edc89e..db1f61f 100644 --- a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt +++ b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/JSONGenerator.kt @@ -1,7 +1,9 @@ package io.hasura.mysql -import io.hasura.ndc.app.services.ConnectorConfigurationLoader +import io.hasura.ndc.common.ConnectorConfiguration import io.hasura.ndc.common.NDCScalar +import io.hasura.ndc.common.NativeQueryInfo +import io.hasura.ndc.common.NativeQueryPart import io.hasura.ndc.ir.* import io.hasura.ndc.ir.Field.ColumnField import io.hasura.ndc.ir.Field as IRField @@ -14,25 +16,11 @@ import org.jooq.impl.SQLDataType object JsonQueryGenerator : BaseQueryGenerator() { - override fun buildComparison( - col: Field, - operator: ApplyBinaryComparisonOperator, - value: Field - ): Condition { - return when (operator) { - ApplyBinaryComparisonOperator.EQ -> col.eq(value) - ApplyBinaryComparisonOperator.GT -> col.gt(value) - ApplyBinaryComparisonOperator.GTE -> col.ge(value) - ApplyBinaryComparisonOperator.LT -> col.lt(value) - ApplyBinaryComparisonOperator.LTE -> col.le(value) - ApplyBinaryComparisonOperator.IN -> col.`in`(value) - ApplyBinaryComparisonOperator.IS_NULL -> col.isNull - ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field) - } - } override fun forEachQueryRequestToSQL(request: QueryRequest): Select<*> { - TODO("Not yet implemented") + return DSL.with(buildVarsCTE(request)) + .select() + .from(queryRequestToSQLInternal(request), DSL.table(DSL.name("vars"))) } override fun queryRequestToSQL(request: QueryRequest): Select<*> { @@ -41,144 +29,196 @@ object JsonQueryGenerator : BaseQueryGenerator() { fun queryRequestToSQLInternal( request: QueryRequest, + ): SelectSelectStep<*> { + // If the QueryRequest "collection" references the name of a Native Query defined in the configuration.json, + // we need to prefix the generated query with a CTE named 
identically to the Native Query, containing the Native Query itself + val isNativeQuery = ConnectorConfiguration.Loader.config.nativeQueries.containsKey(request.collection) + + return if (isNativeQuery) { + mkNativeQueryCTE(request).select( + jsonArrayAgg( + buildJSONSelectionForQueryRequest(request) + ) + ) + } else { + DSL.select( + jsonArrayAgg( + buildJSONSelectionForQueryRequest(request) + ) + ) + } + } + + fun buildJSONSelectionForQueryRequest( + request: QueryRequest, parentTable: String? = null, - parentRelationship: Relationship? = null, - ): SelectHavingStep> { - return DSL.select( - DSL.jsonObject( - DSL.jsonEntry( - "rows", - DSL.select( - jsonArrayAgg( - DSL.jsonObject( - (request.query.fields ?: emptyMap()).map { (alias, field) -> - when (field) { - is ColumnField -> { - DSL.jsonEntry( - alias, - DSL.field(DSL.name(alias)) - ) - } + parentRelationship: Relationship? = null + ): JSONObjectNullStep<*> { - is IRField.RelationshipField -> { - val relationship = - request.collection_relationships[field.relationship] - ?: error("Relationship ${field.relationship} not found") - - val subQuery = queryRequestToSQLInternal( - parentTable = request.collection, - parentRelationship = relationship, - request = QueryRequest( - collection = relationship.target_collection, - collection_relationships = request.collection_relationships, - query = field.query, - arguments = field.arguments, - variables = null - ) - ) - - DSL.jsonEntry( - alias, - DSL.select( - subQuery.asField(alias) - ) - ) - } - } - } - ) + val baseSelection = DSL.select( + DSL.table(DSL.name(request.collection)).asterisk() + ).from( + if (request.query.predicate == null) { + DSL.table(DSL.name(request.collection)) + } else { + val table = DSL.table(DSL.name(request.collection)) + val requiredJoinTables = collectRequiredJoinTablesForWhereClause( + where = request.query.predicate!!, + collectionRelationships = request.collection_relationships + ) + requiredJoinTables.foldIndexed(table) { index, acc, relationship -> + val parentTable = if (index == 0) { + request.collection + } else { + requiredJoinTables.elementAt(index - 1).target_collection + } + + val joinTable = DSL.table(DSL.name(relationship.target_collection)) + acc.join(joinTable).on( + mkJoinWhereClause( + sourceTable = parentTable, + parentRelationship = relationship ) - ).from( - DSL.select( - (request.query.fields ?: emptyMap()) - .filter { (_, field) -> field is ColumnField } - .map { (alias, field) -> - field as ColumnField - DSL.field(DSL.name(request.collection, field.column)).`as`(alias) - } + - collectColumnsReferencedByRelationships( - fields = request.query.fields ?: emptyMap(), - collectionRelationships = request.collection_relationships - ).map { - DSL.field(DSL.name(request.collection, it)) - } - ).from( - run> { - val table = DSL.table(DSL.name(request.collection)) - if (request.query.predicate == null) { - table - } else { - val requiredJoinTables = collectRequiredJoinTablesForWhereClause( - where = request.query.predicate!!, - collectionRelationships = request.collection_relationships - ) + ) + } + } + ).apply { + if (request.query.predicate != null) { + where(getWhereConditions(request)) + } + if (parentRelationship != null) { + where( + mkJoinWhereClause( + sourceTable = parentTable ?: error("parentTable is null"), + parentRelationship = parentRelationship + ) + ) + } + if (request.query.order_by != null) { + orderBy( + translateIROrderByField( + orderBy = request.query.order_by, + currentCollection = getTableName(request.collection), + 
relationships = request.collection_relationships + ) + ) + } + if (request.query.limit != null) { + limit(request.query.limit) + } + if (request.query.offset != null) { + offset(request.query.offset) + } + }.asTable( + DSL.name(getTableName(request.collection)) + ) - requiredJoinTables.foldIndexed(table) { index, acc, relationship -> - val parentTable = if (index == 0) { - request.collection - } else { - requiredJoinTables.elementAt(index - 1).target_collection - } + return DSL.jsonObject( + buildList { + if (!request.query.fields.isNullOrEmpty()) { + add( + DSL.jsonEntry( + "rows", + DSL.select( + jsonArrayAgg( + DSL.jsonObject( + (request.query.fields ?: emptyMap()).map { (alias, field) -> + when (field) { + is ColumnField -> { + DSL.jsonEntry( + alias, + DSL.field( + DSL.name(field.column), + // columnTypeTojOOQType(request.collection, field) + ) + ) + } - val joinTable = DSL.table(DSL.name(relationship.target_collection)) - acc.join(joinTable).on( - mkJoinWhereClause( - sourceTable = parentTable, - parentRelationship = relationship - ) - ) - } - } - } - ).apply { - if (request.query.predicate != null) { - where(getWhereConditions(request)) - } - if (parentRelationship != null) { - where( - mkJoinWhereClause( - sourceTable = parentTable ?: error("parentTable is null"), - parentRelationship = parentRelationship + is IRField.RelationshipField -> { + val relationship = + request.collection_relationships[field.relationship] + ?: error("Relationship ${field.relationship} not found") + + val subQuery = buildJSONSelectionForQueryRequest( + parentTable = request.collection, + parentRelationship = relationship, + request = QueryRequest( + collection = relationship.target_collection, + collection_relationships = request.collection_relationships, + query = field.query, + arguments = field.arguments, + variables = null + ) + ) + + DSL.jsonEntry( + alias, + DSL.coalesce( + DSL.select(subQuery), + DSL.jsonObject( + DSL.jsonEntry( + "rows", + DSL.jsonArray() + ) + ) + ) + ) + } + } + } ) ) - } - if (request.query.fields != null) { - val selectedColumns = request.query.fields!!.values.filterIsInstance().map { - DSL.field(DSL.name(request.collection, it.column)) - } - val relFieldColumns = collectColumnsReferencedByRelationships( - fields = request.query.fields ?: emptyMap(), - collectionRelationships = request.collection_relationships - ).map { - DSL.field(DSL.name(request.collection, it)) - } - groupBy(selectedColumns + relFieldColumns) - } - if (request.query.order_by != null) { - orderBy( - translateIROrderByField( - orderBy = request.query.order_by, - currentCollection = getTableName(request.collection), - relationships = request.collection_relationships - ) + ).from( + baseSelection + ) + ) + ) + } + if (!request.query.aggregates.isNullOrEmpty()) { + add( + DSL.jsonEntry( + "aggregates", + DSL.select( + DSL.jsonObject( + (request.query.aggregates ?: emptyMap()).map { (alias, aggregate) -> + DSL.jsonEntry( + alias, + getAggregatejOOQFunction(aggregate) + ) + } ) - } - if (request.query.limit != null) { - limit(request.query.limit) - } - if (request.query.offset != null) { - offset(request.query.offset) - } - }.asTable( - DSL.name(getTableName(request.collection)) + ).from( + baseSelection + ) ) ) - ) - ) + } + } ) } - fun jsonArrayAgg(field: JSONObjectNullStep) = CustomField.of("mysql_json_arrayagg", SQLDataType.JSON) { + fun renderNativeQuerySQL( + nativeQuery: NativeQueryInfo, + arguments: Map + ): String { + val sql = nativeQuery.sql + val parts = sql.parts + + return parts.joinToString("") { 
part -> + when (part) { + is NativeQueryPart.Text -> part.value + is NativeQueryPart.Parameter -> { + val argument = arguments[part.value] ?: error("Argument ${part.value} not found") + when (argument) { + is Argument.Literal -> argument.value.toString() + else -> error("Only literals are supported in Native Queries in this version") + } + } + } + } + } + + fun jsonArrayAgg(field: JSONObjectNullStep<*>) = CustomField.of("mysql_json_arrayagg", SQLDataType.JSON) { it.visit(DSL.field("json_arrayagg({0})", field)) } @@ -215,32 +255,20 @@ object JsonQueryGenerator : BaseQueryGenerator() { } } - // Returns all the columns in a parent table which are referenced - // by fields of type "relationship" and needed to join the two tables - // - // If a join column is already present in the requested fields, we skip it to avoid duplication - fun collectColumnsReferencedByRelationships( - fields: Map, - collectionRelationships: Map - ): Set { - val columnFields = fields.values - .filterIsInstance() - .map { it.column }.toSet() - - return fields.values - .filterIsInstance() - .mapNotNull { field -> - collectionRelationships[field.relationship] - ?.column_mapping - ?.values - ?.filterNot(columnFields::contains) - } - .flatten() - .toSet() + fun ndcScalarTypeToSQLDataType(scalarType: NDCScalar): DataType = when (scalarType) { + NDCScalar.BOOLEAN -> SQLDataType.BOOLEAN + NDCScalar.INT -> SQLDataType.INTEGER + NDCScalar.FLOAT -> SQLDataType.FLOAT + NDCScalar.STRING -> SQLDataType.CLOB + NDCScalar.DATE -> SQLDataType.DATE + NDCScalar.DATETIME -> SQLDataType.TIMESTAMP + NDCScalar.DATETIME_WITH_TIMEZONE -> SQLDataType.TIMESTAMP + NDCScalar.TIME -> SQLDataType.TIME + NDCScalar.TIME_WITH_TIMEZONE -> SQLDataType.TIME } private fun columnTypeTojOOQType(collection: String, field: ColumnField): org.jooq.DataType { - val connectorConfig = ConnectorConfigurationLoader.config + val connectorConfig = ConnectorConfiguration.Loader.config val table = connectorConfig.tables.find { it.tableName == collection } ?: error("Table $collection not found in connector configuration") @@ -249,17 +277,7 @@ object JsonQueryGenerator : BaseQueryGenerator() { ?: error("Column ${field.column} not found in table $collection") val scalarType = MySQLJDBCSchemaGenerator.mapScalarType(column.type, column.numeric_scale) - return when (scalarType) { - NDCScalar.BOOLEAN -> SQLDataType.BOOLEAN - NDCScalar.INT -> SQLDataType.INTEGER - NDCScalar.FLOAT -> SQLDataType.FLOAT - NDCScalar.STRING -> SQLDataType.CLOB - NDCScalar.DATE -> SQLDataType.DATE - NDCScalar.DATETIME -> SQLDataType.TIMESTAMP - NDCScalar.DATETIME_WITH_TIMEZONE -> SQLDataType.TIMESTAMP - NDCScalar.TIME -> SQLDataType.TIME - NDCScalar.TIME_WITH_TIMEZONE -> SQLDataType.TIME - } + return ndcScalarTypeToSQLDataType(scalarType) } private fun getAggregatejOOQFunction(aggregate: Aggregate) = when (aggregate) { diff --git a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/MySQLDataConnectorService.kt b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/MySQLDataConnectorService.kt index 52ba1f3..7d70b67 100644 --- a/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/MySQLDataConnectorService.kt +++ b/ndc-connector-mysql/src/main/kotlin/io/hasura/mysql/MySQLDataConnectorService.kt @@ -1,41 +1,17 @@ package io.hasura.mysql -import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.readValue import io.hasura.ndc.app.interfaces.IDataSourceProvider -import io.hasura.ndc.app.services.ConnectorConfigurationLoader import 
io.hasura.ndc.app.services.dataConnectors.BaseDataConnectorService
-import io.hasura.ndc.common.ConnectorConfiguration
-import io.opentelemetry.api.trace.Tracer
-import io.vertx.core.http.HttpServerRequest
-import jakarta.inject.Inject
-import jakarta.inject.Singleton
-import jakarta.ws.rs.container.ContainerRequestContext
-import jakarta.ws.rs.core.UriInfo
 import io.hasura.ndc.ir.*
 import io.hasura.ndc.sqlgen.MutationTranslator
+import io.opentelemetry.api.trace.Tracer
 import jakarta.annotation.Priority
 import jakarta.enterprise.inject.Alternative
-import org.jboss.resteasy.reactive.server.ServerRequestFilter
-import org.jooq.DSLContext
+import jakarta.inject.Inject
+import jakarta.inject.Singleton
 import org.jooq.SQLDialect
 import org.jooq.conf.RenderQuotedNames
-import org.jooq.conf.Settings
-import org.jooq.impl.DefaultDSLContext
-
-
-class Filters {
-
-    @ServerRequestFilter(priority = 0)
-    fun logBodyFilter(info: UriInfo, request: HttpServerRequest, ctx: ContainerRequestContext) {
-        request.body {
-            val text = it.result().toString()
-            // Print JSON string formatted with Jackson
-            val json = jacksonObjectMapper().readValue(text)
-            println(jacksonObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(json))
-        }
-    }
-}
 
 @Singleton
 @Alternative
@@ -66,25 +42,19 @@ class MySQLDataConnectorService @Inject constructor(
     )
 
     override fun handleQuery(request: QueryRequest): List<RowSet> {
-        println(ConnectorConfigurationLoader.config)
-
         val dslCtx = mkDSLCtx()
-        val query = JsonQueryGenerator.queryRequestToSQL(request)
-        println(
-            dslCtx
-                .renderInlined(query)
-        )
+        val query = if (!request.variables.isNullOrEmpty()) {
+            JsonQueryGenerator.forEachQueryRequestToSQL(request)
+        } else {
+            JsonQueryGenerator.queryRequestToSQL(request)
+        }
 
         val rows = executeDbQuery(query, dslCtx)
         val json = rows.getValue(0, 0).toString()
 
-        val rowset = objectMapper.readValue<RowSet>(json)
-        return if (rowset == null) {
-            listOf(RowSet(rows = emptyList(), aggregates = emptyMap()))
-        } else {
-            listOf(rowset)
-        }
+        val rowsets = objectMapper.readValue<List<RowSet>>(json)
+        return rowsets
     }
 
     override val jooqDialect = SQLDialect.MYSQL_8_0
diff --git a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
index 5171f27..03139e3 100644
--- a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
+++ b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/JSONGenerator.kt
@@ -1,6 +1,6 @@
 package io.hasura.oracle
 
-import io.hasura.ndc.app.services.ConnectorConfigurationLoader
+import io.hasura.ndc.common.ConnectorConfiguration
 import io.hasura.ndc.common.NDCScalar
 import io.hasura.ndc.ir.*
 import io.hasura.ndc.ir.Field.ColumnField
@@ -13,22 +13,6 @@ object JsonQueryGenerator : BaseQueryGenerator() {
 
-    override fun buildComparison(
-        col: Field<Any>,
-        operator: ApplyBinaryComparisonOperator,
-        value: Field<Any>
-    ): Condition {
-        return when (operator) {
-            ApplyBinaryComparisonOperator.EQ -> col.eq(value)
-            ApplyBinaryComparisonOperator.GT -> col.gt(value)
-            ApplyBinaryComparisonOperator.GTE -> col.ge(value)
-            ApplyBinaryComparisonOperator.LT -> col.lt(value)
-            ApplyBinaryComparisonOperator.LTE -> col.le(value)
-            ApplyBinaryComparisonOperator.IN -> col.`in`(value)
-            ApplyBinaryComparisonOperator.IS_NULL -> col.isNull
-            ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field<String>)
-        }
-    }
 
     override fun forEachQueryRequestToSQL(request: QueryRequest): Select<*> {
         TODO("Not yet implemented")
@@ -237,7 +221,7 @@ object 
JsonQueryGenerator : BaseQueryGenerator() { } private fun columnTypeTojOOQType(collection: String, field: ColumnField): org.jooq.DataType { - val connectorConfig = ConnectorConfigurationLoader.config + val connectorConfig = ConnectorConfiguration.Loader.config val table = connectorConfig.tables.find { it.tableName == collection } ?: error("Table $collection not found in connector configuration") diff --git a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/OracleDataConnectorService.kt b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/OracleDataConnectorService.kt index ae9a1e1..7a2889f 100644 --- a/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/OracleDataConnectorService.kt +++ b/ndc-connector-oracle/src/main/kotlin/io/hasura/oracle/OracleDataConnectorService.kt @@ -1,39 +1,19 @@ package io.hasura.oracle -import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.readValue import io.hasura.ndc.app.interfaces.IDataSourceProvider import io.hasura.ndc.app.services.dataConnectors.BaseDataConnectorService -import io.opentelemetry.api.trace.Tracer -import io.vertx.core.http.HttpServerRequest -import jakarta.inject.Inject -import jakarta.inject.Singleton -import jakarta.ws.rs.container.ContainerRequestContext -import jakarta.ws.rs.core.UriInfo import io.hasura.ndc.ir.* import io.hasura.ndc.sqlgen.MutationTranslator +import io.opentelemetry.api.trace.Tracer import jakarta.annotation.Priority import jakarta.enterprise.inject.Alternative -import org.jboss.resteasy.reactive.server.ServerRequestFilter +import jakarta.inject.Inject +import jakarta.inject.Singleton import org.jooq.SQLDialect import org.jooq.conf.RenderQuotedNames -import org.jooq.conf.Settings -import org.jooq.impl.DefaultDSLContext -class Filters { - - @ServerRequestFilter(priority = 0) - fun logBodyFilter(info: UriInfo, request: HttpServerRequest, ctx: ContainerRequestContext) { - request.body { - val text = it.result().toString() - // Print JSON string formatted with Jackson - val json = jacksonObjectMapper().readValue(text) - println(jacksonObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(json)) - } - } -} - @Singleton @Alternative @Priority(1) @@ -80,7 +60,8 @@ class OracleDataConnectorService @Inject constructor( } override val jooqDialect = SQLDialect.ORACLE21C - override val jooqSettings = commonDSLContextSettings.withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_UNQUOTED) + override val jooqSettings = + commonDSLContextSettings.withRenderQuotedNames(RenderQuotedNames.EXPLICIT_DEFAULT_UNQUOTED) override val sqlGenerator = JsonQueryGenerator override val mutationTranslator = MutationTranslator } diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt index 7bfb2bb..dd2a979 100644 --- a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt +++ b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/CTEQueryGenerator.kt @@ -23,23 +23,6 @@ object CTEQueryGenerator : BaseQueryGenerator() { .from(buildSelections(request).asTable("data")) } - override fun buildComparison( - col: Field, - operator: ApplyBinaryComparisonOperator, - value: Field - ): Condition { - return when (operator) { - ApplyBinaryComparisonOperator.EQ -> col.eq(value) - ApplyBinaryComparisonOperator.GT -> col.gt(value) - ApplyBinaryComparisonOperator.GTE -> col.ge(value) - ApplyBinaryComparisonOperator.LT -> col.lt(value) - 
ApplyBinaryComparisonOperator.LTE -> col.le(value) - ApplyBinaryComparisonOperator.IN -> col.`in`(value) - ApplyBinaryComparisonOperator.IS_NULL -> col.isNull - ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field) - } - } - override fun forEachQueryRequestToSQL(request: QueryRequest): Select<*> { return buildCTEs(request, listOf(buildVarsCTE(request))) diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/JSONGenerator.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/JSONGenerator.kt deleted file mode 100644 index 142427c..0000000 --- a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/JSONGenerator.kt +++ /dev/null @@ -1,319 +0,0 @@ -package io.hasura.snowflake - -import io.hasura.ndc.app.services.ConnectorConfigurationLoader -import io.hasura.ndc.common.NDCScalar -import io.hasura.ndc.ir.* -import io.hasura.ndc.ir.Field.ColumnField -import io.hasura.ndc.ir.Field as IRField -import io.hasura.ndc.sqlgen.BaseQueryGenerator -import org.gradle.internal.impldep.org.junit.platform.commons.util.FunctionUtils.where -import org.jooq.* -import org.jooq.Field -import org.jooq.impl.CustomField -import org.jooq.impl.DSL -import org.jooq.impl.DSL.jsonArrayAgg -import org.jooq.impl.DSL.orderBy -import org.jooq.impl.SQLDataType - - -object JsonQueryGenerator : BaseQueryGenerator() { - override fun buildComparison( - col: Field, - operator: ApplyBinaryComparisonOperator, - value: Field - ): Condition { - return when (operator) { - ApplyBinaryComparisonOperator.EQ -> col.eq(value) - ApplyBinaryComparisonOperator.GT -> col.gt(value) - ApplyBinaryComparisonOperator.GTE -> col.ge(value) - ApplyBinaryComparisonOperator.LT -> col.lt(value) - ApplyBinaryComparisonOperator.LTE -> col.le(value) - ApplyBinaryComparisonOperator.IN -> col.`in`(value) - ApplyBinaryComparisonOperator.IS_NULL -> col.isNull - ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field) - } - } - - override fun forEachQueryRequestToSQL(request: QueryRequest): Select<*> { - TODO("Not yet implemented") - } - - override fun queryRequestToSQL(request: QueryRequest): Select<*> { - return queryRequestToSQLInternal(request) - } - - fun queryRequestToSQLInternal( - request: QueryRequest, - parentTable: String? = null, - parentRelationship: Relationship? 
= null, - ): SelectHavingStep> { - val requiredJoinTables = if (request.query.predicate != null) { - collectRequiredJoinTablesForWhereClause( - where = request.query.predicate!!, - collectionRelationships = request.collection_relationships - ) - } else { - emptySet() - } - - val referencedColumns = collectColumnsReferencedByRelationships( - fields = request.query.fields ?: emptyMap(), - collectionRelationships = request.collection_relationships - ) - - return DSL.select( - DSL.jsonObject( - DSL.jsonEntry( - "rows", - DSL.select( - DSL.jsonArrayAgg( - DSL.jsonObject( - (request.query.fields ?: emptyMap()).map { (alias, field) -> - when (field) { - is ColumnField -> { - DSL.jsonEntry( - alias, - DSL.field(DSL.name(field.column)) - ) - } - - is IRField.RelationshipField -> { - val relationship = - request.collection_relationships[field.relationship] - ?: error("Relationship ${field.relationship} not found") - - val subQuery = queryRequestToSQLInternal( - parentTable = request.collection, - parentRelationship = relationship, - request = QueryRequest( - collection = relationship.target_collection, - collection_relationships = request.collection_relationships, - query = field.query, - arguments = field.arguments, - variables = null - ) - ) - - DSL.jsonEntry( - alias, - DSL.select( - subQuery.asField(alias) - ) - ) - } - } - } - ) - ).orderBy( - DSL.field(DSL.name("rn")) - ) - ).from( - DSL.select( - buildList> { - if (request.query.fields != null) { - addAll( - request.query.fields!!.filter { (_, field) -> field is ColumnField } - .map { (alias, field) -> - field as ColumnField - DSL.field(DSL.name(request.collection, field.column)).`as`(alias) - } - ) - } - if (referencedColumns.isNotEmpty()) { - addAll( - referencedColumns.map { - DSL.field(DSL.name(request.collection, it)) - } - ) - } - add( - DSL.rowNumber().over( - DSL.partitionBy( - parentRelationship?.column_mapping?.values?.map { - DSL.field(DSL.name(parentRelationship.target_collection, it)) - } ?: emptyList() - ).orderBy( - translateIROrderByField( - orderBy = request.query.order_by, - currentCollection = getTableName(request.collection), - relationships = request.collection_relationships - ) - ) - ).`as`( - DSL.name("rn") - ) - ) - } - ).from( - run> { - val table = DSL.table(DSL.name(request.collection)) - if (request.query.predicate == null) { - table - } else { - requiredJoinTables.foldIndexed(table) { index, acc, relationship -> - val parentTable = if (index == 0) { - request.collection - } else { - requiredJoinTables.elementAt(index - 1).target_collection - } - - val joinTable = DSL.table(DSL.name(relationship.target_collection)) - acc.join(joinTable).on( - mkJoinWhereClause( - sourceTable = parentTable, - parentRelationship = relationship - ) - ) - } - } - } - ).apply { - if (request.query.predicate != null) { - where(getWhereConditions(request)) - } - }.asTable( - DSL.name(getTableName(request.collection)) - ) - ).apply { - if (parentRelationship != null) { - where( - parentRelationship.column_mapping.map { (from, to) -> - DSL.field(DSL.name(to)).eq( - DSL.field(DSL.name(getTableName(parentTable!!), from)) - ) - } - ) - } - if (request.query.offset != null) { - where(DSL.field(DSL.name("rn")).gt(request.query.offset)) - } - if (request.query.limit != null) { - val offset = request.query.offset ?: 0 - val upperBound = offset + request.query.limit!! 
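-                        // "rn" is ROW_NUMBER() OVER (PARTITION BY <parent join columns> ORDER BY <order_by>),
-                        // so offset/limit pagination is applied per parent row:
-                        // rn > offset above, and rn <= offset + limit below.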
- where(DSL.field(DSL.name("rn")).le(upperBound)) - } - } - ) - ) - ) - } - - fun collectRequiredJoinTablesForWhereClause( - where: Expression, - collectionRelationships: Map, - previousTableName: String? = null - ): Set { - return when (where) { - is ExpressionOnColumn -> when (val column = where.column) { - is ComparisonColumn.Column -> { - column.path.fold(emptySet()) { acc, path -> - val relationship = collectionRelationships[path.relationship] - ?: error("Relationship ${path.relationship} not found") - - acc + relationship - } - } - - else -> emptySet() - } - - is Expression.And -> where.expressions.fold(emptySet()) { acc, expr -> - acc + collectRequiredJoinTablesForWhereClause(expr, collectionRelationships) - } - - is Expression.Or -> where.expressions.fold(emptySet()) { acc, expr -> - acc + collectRequiredJoinTablesForWhereClause(expr, collectionRelationships) - } - - is Expression.Not -> collectRequiredJoinTablesForWhereClause(where.expression, collectionRelationships) - - else -> emptySet() - } - } - - // Returns all the columns in a parent table which are referenced - // by fields of type "relationship" and needed to join the two tables - // - // If a join column is already present in the requested fields, we skip it to avoid duplication - fun collectColumnsReferencedByRelationships( - fields: Map, - collectionRelationships: Map - ): Set { - val columnFields = fields.values - .filterIsInstance() - .map { it.column }.toSet() - - return fields.values - .filterIsInstance() - .mapNotNull { field -> - collectionRelationships[field.relationship] - ?.column_mapping - ?.values - ?.filterNot(columnFields::contains) - } - .flatten() - .toSet() - } - - private fun columnTypeTojOOQType(collection: String, field: ColumnField): org.jooq.DataType { - val connectorConfig = ConnectorConfigurationLoader.config - - val table = connectorConfig.tables.find { it.tableName == collection } - ?: error("Table $collection not found in connector configuration") - - val column = table.columns.find { it.name == field.column } - ?: error("Column ${field.column} not found in table $collection") - - val scalarType = SnowflakeJDBCSchemaGenerator.mapScalarType(column.type, column.numeric_scale) - return when (scalarType) { - NDCScalar.BOOLEAN -> SQLDataType.BOOLEAN - NDCScalar.INT -> SQLDataType.INTEGER - NDCScalar.FLOAT -> SQLDataType.FLOAT - NDCScalar.STRING -> SQLDataType.CLOB - NDCScalar.DATE -> SQLDataType.DATE - NDCScalar.DATETIME -> SQLDataType.TIMESTAMP - NDCScalar.DATETIME_WITH_TIMEZONE -> SQLDataType.TIMESTAMP - NDCScalar.TIME -> SQLDataType.TIME - NDCScalar.TIME_WITH_TIMEZONE -> SQLDataType.TIME - } - } - - private fun getAggregatejOOQFunction(aggregate: Aggregate) = when (aggregate) { - is Aggregate.StarCount -> DSL.count() - is Aggregate.SingleColumn -> { - val col = DSL.field(DSL.name(aggregate.column)) as Field - when (aggregate.function) { - SingleColumnAggregateFunction.AVG -> DSL.avg(col) - SingleColumnAggregateFunction.MAX -> DSL.max(col) - SingleColumnAggregateFunction.MIN -> DSL.min(col) - SingleColumnAggregateFunction.SUM -> DSL.sum(col) - SingleColumnAggregateFunction.STDDEV_POP -> DSL.stddevPop(col) - SingleColumnAggregateFunction.STDDEV_SAMP -> DSL.stddevSamp(col) - SingleColumnAggregateFunction.VAR_POP -> DSL.varPop(col) - SingleColumnAggregateFunction.VAR_SAMP -> DSL.varSamp(col) - } - } - - is Aggregate.ColumnCount -> { - val col = DSL.field(DSL.name(aggregate.column)) - if (aggregate.distinct) DSL.countDistinct(col) else DSL.count(col) - } - } - - private fun mkJoinWhereClause( - 
sourceTable: String, - parentRelationship: Relationship, - parentTableAlias: String? = null - ) = DSL.and( - parentRelationship.column_mapping.map { (from, to) -> - val childField = DSL.field(DSL.name(getTableName(sourceTable), from)) - val parentField = DSL.field(DSL.name(parentTableAlias ?: parentRelationship.target_collection, to)) - childField.eq(parentField) - } - ) - - private fun getTableName(collection: String): String { - return collection.split('.').last() - } - -} diff --git a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt index 886f3ef..0718383 100644 --- a/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt +++ b/ndc-connector-snowflake/src/main/kotlin/io/hasura/snowflake/SnowflakeDataConnectorService.kt @@ -1,39 +1,20 @@ package io.hasura.snowflake -import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import com.fasterxml.jackson.module.kotlin.readValue import io.hasura.CTEQueryGenerator import io.hasura.ndc.app.interfaces.IDataSourceProvider -import io.hasura.ndc.app.services.ConnectorConfigurationLoader import io.hasura.ndc.app.services.dataConnectors.BaseDataConnectorService -import io.opentelemetry.api.trace.Tracer -import io.vertx.core.http.HttpServerRequest -import jakarta.inject.Inject -import jakarta.inject.Singleton -import jakarta.ws.rs.container.ContainerRequestContext -import jakarta.ws.rs.core.UriInfo import io.hasura.ndc.ir.* import io.hasura.ndc.sqlgen.MutationTranslator +import io.opentelemetry.api.trace.Tracer import jakarta.annotation.Priority import jakarta.enterprise.inject.Alternative -import org.jboss.resteasy.reactive.server.ServerRequestFilter +import jakarta.inject.Inject +import jakarta.inject.Singleton import org.jooq.SQLDialect import org.jooq.conf.RenderQuotedNames -class Filters { - - @ServerRequestFilter(priority = 0) - fun logBodyFilter(info: UriInfo, request: HttpServerRequest, ctx: ContainerRequestContext) { - request.body { - val text = it.result().toString() - // Print JSON string formatted with Jackson - val json = jacksonObjectMapper().readValue(text) - println(jacksonObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(json)) - } - } -} - @Singleton @Alternative @Priority(1) diff --git a/ndc-ir/src/main/kotlin/io/hasura/ndc/common/ConnectorConfiguration.kt b/ndc-ir/src/main/kotlin/io/hasura/ndc/common/ConnectorConfiguration.kt index 72e37b3..4e424a1 100644 --- a/ndc-ir/src/main/kotlin/io/hasura/ndc/common/ConnectorConfiguration.kt +++ b/ndc-ir/src/main/kotlin/io/hasura/ndc/common/ConnectorConfiguration.kt @@ -1,10 +1,39 @@ package io.hasura.ndc.common +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper +import com.fasterxml.jackson.module.kotlin.readValue +import java.io.File +import java.nio.file.Path + data class ConnectorConfiguration( - val jdbcUrl: String, + val jdbcUrl: String = "", val jdbcProperties: Map = emptyMap(), val schemas: List = emptyList(), val tables: List = emptyList(), val functions: List = emptyList(), val nativeQueries: Map = emptyMap() -) \ No newline at end of file +) { + + object Loader { + private val mapper = jacksonObjectMapper() + + private const val DEFAULT_CONFIG_DIRECTORY = "/etc/connector" + private const val CONFIG_FILE_NAME = "configuration.json" + + val CONFIG_DIRECTORY: String = System.getenv("HASURA_CONFIGURATION_DIRECTORY") ?: DEFAULT_CONFIG_DIRECTORY + val config: 
ConnectorConfiguration = loadConfigFile(getConfigFilePath())
+
+        fun getConfigFilePath(): Path {
+            return Path.of(CONFIG_DIRECTORY, CONFIG_FILE_NAME)
+        }
+
+        fun loadConfigFile(path: Path): ConnectorConfiguration {
+            val file = File(path.toString())
+            return if (!file.exists()) {
+                ConnectorConfiguration()
+            } else {
+                mapper.readValue(file)
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/ndc-ir/src/main/kotlin/io/hasura/ndc/ir/Query.kt b/ndc-ir/src/main/kotlin/io/hasura/ndc/ir/Query.kt
index d75247c..f0d24b6 100644
--- a/ndc-ir/src/main/kotlin/io/hasura/ndc/ir/Query.kt
+++ b/ndc-ir/src/main/kotlin/io/hasura/ndc/ir/Query.kt
@@ -178,6 +178,8 @@ enum class ApplyBinaryComparisonOperator {
     IS_NULL,
     @JsonProperty("_like")
     LIKE,
+    @JsonProperty("_contains")
+    CONTAINS,
 }
 
 enum class ApplyUnaryComparisonOperator {
diff --git a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
index 0708774..e172410 100644
--- a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
+++ b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseGenerator.kt
@@ -25,7 +25,23 @@ sealed interface BaseGenerator {
         )
     }
 
-    abstract fun buildComparison(col: Field<Any>, operator: ApplyBinaryComparisonOperator, value: Field<Any>): Condition
+    fun buildComparison(
+        col: Field<Any>,
+        operator: ApplyBinaryComparisonOperator,
+        value: Field<Any>
+    ): Condition {
+        return when (operator) {
+            ApplyBinaryComparisonOperator.EQ -> col.eq(value)
+            ApplyBinaryComparisonOperator.GT -> col.gt(value)
+            ApplyBinaryComparisonOperator.GTE -> col.ge(value)
+            ApplyBinaryComparisonOperator.LT -> col.lt(value)
+            ApplyBinaryComparisonOperator.LTE -> col.le(value)
+            ApplyBinaryComparisonOperator.IN -> col.`in`(value)
+            ApplyBinaryComparisonOperator.IS_NULL -> col.isNull
+            ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field<String>)
+            ApplyBinaryComparisonOperator.CONTAINS -> col.contains(value as Field<String>)
+        }
+    }
 
     private fun getCollectionForCompCol(
         col: ComparisonColumn,
@@ -38,7 +54,8 @@ sealed interface BaseGenerator {
         if (col.path.isNotEmpty()) {
             // Traverse the relationship path to get to the current collection name
             val targetCollection = col.path.fold("") { acc, pathElement ->
-                val rel = request.collection_relationships[pathElement.relationship] ?: throw Exception("Relationship not found")
+                val rel = request.collection_relationships[pathElement.relationship]
+                    ?: throw Exception("Relationship not found")
                 rel.target_collection
             }
             targetCollection
@@ -49,9 +66,13 @@ sealed interface BaseGenerator {
         }
     }
 
-    fun argumentToCondition(request: QueryRequest, argument: Map.Entry<String, Argument>, overrideCollection: String)
-        = argumentToCondition(request.copy(collection = overrideCollection), argument)
-    fun argumentToCondition(request: QueryRequest, argument: Map.Entry<String, Argument>) : Condition {
+    fun argumentToCondition(
+        request: QueryRequest,
+        argument: Map.Entry<String, Argument>,
+        overrideCollection: String
+    ) = argumentToCondition(request.copy(collection = overrideCollection), argument)
+
+    fun argumentToCondition(request: QueryRequest, argument: Map.Entry<String, Argument>): Condition {
         val compVal = when (val arg = argument.value) {
             is Argument.Variable -> ComparisonValue.VariableComp(arg.name)
             is Argument.Literal -> ComparisonValue.ScalarComp(arg.value)
@@ -66,8 +87,8 @@ sealed interface BaseGenerator {
     }
 
     // override request collection for expressionToCondition evaluation
-    fun expressionToCondition( e: Expression, request: QueryRequest, overrideCollection: String)
-        = expressionToCondition(e, request.copy(collection = 
overrideCollection))
+    fun expressionToCondition(e: Expression, request: QueryRequest, overrideCollection: String) =
+        expressionToCondition(e, request.copy(collection = overrideCollection))
 
 
     // Convert a WHERE-like expression IR into a JOOQ Condition
@@ -176,7 +197,13 @@ sealed interface BaseGenerator {
                         rel.column_mapping.map { (sourceCol, targetCol) ->
                             DSL.field(DSL.name(splitCollectionName(request.collection) + sourceCol))
                                 .eq(DSL.field(DSL.name(splitCollectionName(rel.target_collection) + targetCol)))
-                        } + rel.arguments.map { argumentToCondition(request, it, rel.target_collection) }
+                        } + rel.arguments.map {
+                            argumentToCondition(
+                                request,
+                                it,
+                                rel.target_collection
+                            )
+                        }
                     )
                 )
             )
diff --git a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
index 0f9a2f1..270b149 100644
--- a/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
+++ b/ndc-sqlgen/src/main/kotlin/io/hasura/ndc/sqlgen/BaseQueryGenerator.kt
@@ -1,5 +1,8 @@
 package io.hasura.ndc.sqlgen
 
+import io.hasura.ndc.common.ConnectorConfiguration
+import io.hasura.ndc.common.NativeQueryInfo
+import io.hasura.ndc.common.NativeQueryPart
 import io.hasura.ndc.ir.*
 import io.hasura.ndc.ir.extensions.isVariablesRequest
 import io.hasura.ndc.ir.Field as IRField
@@ -22,6 +25,42 @@ abstract class BaseQueryGenerator : BaseGenerator {
         throw NotImplementedError("Mutation not supported for this data source")
     }
 
+    fun mkNativeQueryCTE(
+        request: QueryRequest
+    ): org.jooq.WithStep {
+        val config = ConnectorConfiguration.Loader.config
+
+        fun renderNativeQuerySQL(
+            nativeQuery: NativeQueryInfo,
+            arguments: Map<String, Argument>
+        ): String {
+            val sql = nativeQuery.sql
+            val parts = sql.parts
+
+            return parts.joinToString("") { part ->
+                when (part) {
+                    is NativeQueryPart.Text -> part.value
+                    is NativeQueryPart.Parameter -> {
+                        val argument = arguments[part.value] ?: error("Argument ${part.value} not found")
+                        when (argument) {
+                            is Argument.Literal -> argument.value.toString()
+                            else -> error("Only literals are supported in Native Queries in this version")
+                        }
+                    }
+                }
+            }
+        }
+
+        val nativeQuery = config.nativeQueries[request.collection]!!
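+        // Note: arguments are spliced into the native query as literal SQL text rather
+        // than as JDBC bind parameters, which is why renderNativeQuerySQL() above
+        // rejects anything that is not an Argument.Literal.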
+ val nativeQuerySQL = renderNativeQuerySQL(nativeQuery, request.arguments) + + return DSL.with( + DSL.name(request.collection) + ).`as`( + DSL.resultQuery(nativeQuerySQL) + ) + } + fun getQueryColumnFields(fields: Map): Map { return fields .filterValues { it is IRField.ColumnField } From 19bf312fb503c7ed89d25eb50a9d6457553eb71b Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 12:40:50 -0500 Subject: [PATCH 11/29] Initial try at GH Actions workflow for builds --- .../workflows/build-connectors-action.yaml | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 .github/workflows/build-connectors-action.yaml diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml new file mode 100644 index 0000000..4084f71 --- /dev/null +++ b/.github/workflows/build-connectors-action.yaml @@ -0,0 +1,84 @@ +name: Build and Release Database Connector + +on: + push: + branches: + - "snowflake/*" + - "mysql/*" + - "oracle/*" + +jobs: + docker-build: + runs-on: ubuntu-latest + strategy: + matrix: + database: [snowflake, mysql, oracle] + + outputs: + release_tag: ${{ steps.extract_tag.outputs.tag }} + + steps: + - name: Checkout repository + if: contains(github.ref, matrix.database) + uses: actions/checkout@v3 + + - name: Extract version from branch name + if: contains(github.ref, matrix.database) + id: extract_tag + run: | + DB_TYPE=${{ matrix.database }} + VERSION=${GITHUB_REF#refs/heads/${DB_TYPE}/} + echo "tag=${DB_TYPE}/${VERSION}" >> $GITHUB_OUTPUT + + - name: Set up Docker Buildx + if: contains(github.ref, matrix.database) + uses: docker/setup-buildx-action@v2 + + - name: Cross-platform Docker build and push + if: contains(github.ref, matrix.database) + uses: docker/build-push-action@v5 + with: + context: . 
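+          # Build from the repository root so the shared Gradle modules
+          # (ndc-app, ndc-ir, ndc-sqlgen) are inside the Docker build context.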
+ push: true + tags: ghcr.io/hasura/ndc-jvm-${{ matrix.database }}:${{ steps.extract_tag.outputs.tag }} + platforms: linux/amd64,linux/arm64 + build-args: | + JOOQ_PRO_EMAIL=${{ secrets.JOOQ_PRO_EMAIL }} + JOOQ_PRO_LICENSE=${{ secrets.JOOQ_PRO_LICENSE }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + create-release: + needs: docker-build + runs-on: ubuntu-latest + strategy: + matrix: + database: [snowflake, mysql, oracle] + + steps: + - name: Checkout repository + if: contains(github.ref, matrix.database) + uses: actions/checkout@v3 + + - name: Update dockerImage in connector-metadata.yaml + if: contains(github.ref, matrix.database) + run: | + sed -i "s|^ dockerImage:.*| dockerImage: \"ghcr.io/hasura/ndc-jvm-${{ matrix.database }}:${{ needs.docker-build.outputs.release_tag }}\"|" ndc-connector-${{ matrix.database }}/.hasura-connector/connector-metadata.yaml + + - name: Compress Hasura Connector Metadata + if: contains(github.ref, matrix.database) + run: | + cd ndc-connector-${{ matrix.database }} + tar -czf package.tar.gz ./.hasura-connector + + - name: Upload package.tar.gz to GitHub Release + if: contains(github.ref, matrix.database) + uses: actions/upload-release-asset@v1 + with: + tag_name: ${{ needs.docker-build.outputs.release_tag }} + upload_url: ${{ steps.release.outputs.upload_url }} + asset_path: ndc-connector-${{ matrix.database }}/package.tar.gz + asset_name: package-${{ matrix.database }}.tar.gz + asset_content_type: application/gzip + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 8925a91e6915455b56d562c151746683cc48580f Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 12:49:38 -0500 Subject: [PATCH 12/29] Update GH Actions --- .github/workflows/build-connectors-action.yaml | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 4084f71..13e7bb1 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -26,9 +26,13 @@ jobs: if: contains(github.ref, matrix.database) id: extract_tag run: | + # Get the database type from the matrix DB_TYPE=${{ matrix.database }} + # Extract only the version part from the branch name, omitting the database type VERSION=${GITHUB_REF#refs/heads/${DB_TYPE}/} - echo "tag=${DB_TYPE}/${VERSION}" >> $GITHUB_OUTPUT + # Set the tag output as "v1.0.3" and the full Docker tag as "ghcr.io/hasura/ndc-jvm-mysql:v1.0.3" + echo "tag=${VERSION}" >> $GITHUB_OUTPUT + echo "docker_tag=ghcr.io/hasura/ndc-jvm-${DB_TYPE}:${VERSION}" >> $GITHUB_OUTPUT - name: Set up Docker Buildx if: contains(github.ref, matrix.database) @@ -39,8 +43,9 @@ jobs: uses: docker/build-push-action@v5 with: context: . 
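+          # Each connector is built from its own top-level dockerfile,
+          # e.g. ndc-connector-mysql.dockerfile.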
+ file: ndc-connector-${{ matrix.database }}.dockerfile push: true - tags: ghcr.io/hasura/ndc-jvm-${{ matrix.database }}:${{ steps.extract_tag.outputs.tag }} + tags: ${{ steps.extract_tag.outputs.docker_tag }} platforms: linux/amd64,linux/arm64 build-args: | JOOQ_PRO_EMAIL=${{ secrets.JOOQ_PRO_EMAIL }} @@ -63,7 +68,8 @@ jobs: - name: Update dockerImage in connector-metadata.yaml if: contains(github.ref, matrix.database) run: | - sed -i "s|^ dockerImage:.*| dockerImage: \"ghcr.io/hasura/ndc-jvm-${{ matrix.database }}:${{ needs.docker-build.outputs.release_tag }}\"|" ndc-connector-${{ matrix.database }}/.hasura-connector/connector-metadata.yaml + # Use the full Docker tag from the docker-build job output + sed -i "s|^ dockerImage:.*| dockerImage: \"${{ needs.docker-build.outputs.docker_tag }}\"|" ndc-connector-${{ matrix.database }}/.hasura-connector/connector-metadata.yaml - name: Compress Hasura Connector Metadata if: contains(github.ref, matrix.database) @@ -75,7 +81,7 @@ jobs: if: contains(github.ref, matrix.database) uses: actions/upload-release-asset@v1 with: - tag_name: ${{ needs.docker-build.outputs.release_tag }} + tag_name: ${{ needs.docker-build.outputs.tag }} # Use the correct tag output upload_url: ${{ steps.release.outputs.upload_url }} asset_path: ndc-connector-${{ matrix.database }}/package.tar.gz asset_name: package-${{ matrix.database }}.tar.gz From d2de46a82fb30f4fdfb3af2668cdcfe190b3fd9c Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 12:53:54 -0500 Subject: [PATCH 13/29] Update GH Action --- .github/workflows/build-connectors-action.yaml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 13e7bb1..79f41a6 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -28,10 +28,9 @@ jobs: run: | # Get the database type from the matrix DB_TYPE=${{ matrix.database }} - # Extract only the version part from the branch name, omitting the database type - VERSION=${GITHUB_REF#refs/heads/${DB_TYPE}/} - # Set the tag output as "v1.0.3" and the full Docker tag as "ghcr.io/hasura/ndc-jvm-mysql:v1.0.3" - echo "tag=${VERSION}" >> $GITHUB_OUTPUT + # Use sed to remove the database prefix and get only the version part + VERSION=$(echo "${GITHUB_REF#refs/heads/}" | sed "s|^${DB_TYPE}/||") + # Construct the Docker tag echo "docker_tag=ghcr.io/hasura/ndc-jvm-${DB_TYPE}:${VERSION}" >> $GITHUB_OUTPUT - name: Set up Docker Buildx From 9c6541504a1fdf9b64202f2e81010687f7665a4f Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 13:02:46 -0500 Subject: [PATCH 14/29] Update MySQL dockerfile --- ndc-connector-mysql.dockerfile | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ndc-connector-mysql.dockerfile b/ndc-connector-mysql.dockerfile index 3087166..fc910a3 100644 --- a/ndc-connector-mysql.dockerfile +++ b/ndc-connector-mysql.dockerfile @@ -1,7 +1,13 @@ # Build stage FROM registry.access.redhat.com/ubi9/openjdk-21:1.20-2 AS build +ARG JOOQ_PRO_EMAIL +ARG JOOQ_PRO_LICENSE + ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +ENV JOOQ_PRO_EMAIL=${JOOQ_PRO_EMAIL} +ENV JOOQ_PRO_LICENSE=${JOOQ_PRO_LICENSE} + WORKDIR /build COPY . 
/build From fca94bc1dc5f539da56869575f3d32ac14739d78 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 13:54:54 -0500 Subject: [PATCH 15/29] Add packages perms to GH Action workflow --- .github/workflows/build-connectors-action.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 79f41a6..6310c4c 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -7,6 +7,11 @@ on: - "mysql/*" - "oracle/*" +permissions: + contents: read + packages: write # Allows pushing to GHCR + id-token: write # Required for authenticating with GHCR + jobs: docker-build: runs-on: ubuntu-latest From b2209afc771aecc73f6df1d283f66517b9bf12b6 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 15:06:33 -0500 Subject: [PATCH 16/29] Update GH action --- .github/workflows/build-connectors-action.yaml | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 6310c4c..f668568 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -7,11 +7,6 @@ on: - "mysql/*" - "oracle/*" -permissions: - contents: read - packages: write # Allows pushing to GHCR - id-token: write # Required for authenticating with GHCR - jobs: docker-build: runs-on: ubuntu-latest @@ -22,6 +17,11 @@ jobs: outputs: release_tag: ${{ steps.extract_tag.outputs.tag }} + permissions: + contents: read + packages: write # Allows pushing to GHCR + id-token: write # Required for authenticating with GHCR + steps: - name: Checkout repository if: contains(github.ref, matrix.database) @@ -38,6 +38,13 @@ jobs: # Construct the Docker tag echo "docker_tag=ghcr.io/hasura/ndc-jvm-${DB_TYPE}:${VERSION}" >> $GITHUB_OUTPUT + - name: Log in to the Container registry + uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Set up Docker Buildx if: contains(github.ref, matrix.database) uses: docker/setup-buildx-action@v2 From 7615e2e3b5fd555ee2d02fd0a1064165a46ac19c Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 15:36:37 -0500 Subject: [PATCH 17/29] Update GH Action --- .github/workflows/build-connectors-action.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index f668568..2a9e380 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -41,7 +41,7 @@ jobs: - name: Log in to the Container registry uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 with: - registry: ${{ env.REGISTRY }} + registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} @@ -61,8 +61,6 @@ jobs: build-args: | JOOQ_PRO_EMAIL=${{ secrets.JOOQ_PRO_EMAIL }} JOOQ_PRO_LICENSE=${{ secrets.JOOQ_PRO_LICENSE }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} create-release: needs: docker-build From 7a43f8cd545badd4a3d5febabc6ede5c7f5ccd90 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 16:09:43 -0500 Subject: [PATCH 18/29] Clean up CLI main method --- ndc-cli/src/main/kotlin/io/hasura/cli/main.kt | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 
deletions(-) diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt index 6a47906..9b3392c 100644 --- a/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt +++ b/ndc-cli/src/main/kotlin/io/hasura/cli/main.kt @@ -4,8 +4,8 @@ import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper import io.hasura.ndc.common.ConnectorConfiguration import picocli.CommandLine import picocli.CommandLine.* +import java.io.File import java.nio.file.Files -import java.nio.file.Path import java.nio.file.attribute.PosixFilePermissions import kotlin.system.exitProcess @@ -58,14 +58,17 @@ class CLI { ) schemas: List? ) { - val configFilePath = ConnectorConfiguration.Loader.getConfigFilePath() - val existingConfig = ConnectorConfiguration.Loader.config + val file = File(outfile) - println("Checking for configuration file at $configFilePath") - if (existingConfig == ConnectorConfiguration()) { - println("Non-existent or empty configuration file detected") - } else { - println("Existing configuration file detected") + println("Checking for configuration file at ${file.absolutePath}") + val existingConfig = file.let { + if (it.exists()) { + println("Existing configuration file detected") + mapper.readValue(it, ConnectorConfiguration::class.java) + } else { + println("Non-existent or empty configuration file detected") + ConnectorConfiguration() + } } val configGenerator = when (database) { @@ -83,17 +86,14 @@ class CLI { nativeQueries = existingConfig.nativeQueries ) - val outfilePath = Path.of(ConnectorConfiguration.Loader.CONFIG_DIRECTORY, outfile) - println("Writing configuration to file: $configFilePath") - - val file = configFilePath.toFile() try { + println("Writing configuration to ${file.absolutePath}") mapper.writerWithDefaultPrettyPrinter().writeValue(file, mergedConfigWithNativeQueries) } catch (e: Exception) { println("Error writing configuration to file: ${e.message}") val parentDir = file.parentFile - val permissions = Files.getPosixFilePermissions(parentDir.toPath()) + val permissions = Files.getPosixFilePermissions(parentDir.toPath()) val posixPermissions = PosixFilePermissions.toString(permissions) println("Parent directory: ${parentDir.absolutePath}") From 084bde9aac2e42c2782b110e15b82ef4bd473b79 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Tue, 12 Nov 2024 17:08:09 -0500 Subject: [PATCH 19/29] Update MySQL connector-metadata.yaml to v1.0.3 --- ndc-connector-mysql/.hasura-connector/connector-metadata.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml index 059a203..f06bff7 100644 --- a/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml +++ b/ndc-connector-mysql/.hasura-connector/connector-metadata.yaml @@ -1,6 +1,6 @@ packagingDefinition: type: PrebuiltDockerImage - dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v1.0.2" + dockerImage: "ghcr.io/hasura/ndc-jvm-mysql:v1.0.3" supportedEnvironmentVariables: - name: JDBC_URL description: "The JDBC URL to connect to the database" @@ -9,7 +9,7 @@ commands: docker run \ -e HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH \ -v ${HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH}:/app/output \ - ghcr.io/hasura/ndc-jvm-cli:v0.1.1 update $JDBC_URL \ + ghcr.io/hasura/ndc-jvm-cli:v0.1.3 update $JDBC_URL \ --database MYSQL \ --schemas $JDBC_SCHEMAS \ --outfile /app/output/configuration.json From d51e106a19f33db6e140d1e93204f6a043f1f0e8 Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: 
Wed, 20 Nov 2024 10:16:30 -0500 Subject: [PATCH 20/29] updated oracle metadata --- .../.hasura-connector/connector-metadata.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml index 53b19cb..5c5a5df 100644 --- a/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml +++ b/ndc-connector-oracle/.hasura-connector/connector-metadata.yaml @@ -1,6 +1,6 @@ packagingDefinition: type: PrebuiltDockerImage - dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v1.0.2" + dockerImage: "ghcr.io/hasura/ndc-jvm-oracle:v1.0.3" supportedEnvironmentVariables: - name: JDBC_URL description: "The JDBC URL to connect to the database" @@ -11,8 +11,7 @@ commands: docker run \ -e HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH \ -v ${HASURA_PLUGIN_CONNECTOR_CONTEXT_PATH}:/app/output \ - ghcr.io/hasura/ndc-jvm-cli:v0.1.1 update $JDBC_URL \ + ghcr.io/hasura/ndc-jvm-cli:v0.1.3 update $JDBC_URL \ --database ORACLE \ --schemas $JDBC_SCHEMAS \ --outfile /app/output/configuration.json - \ No newline at end of file From 007f696f87384f38caf051b53babdae53c5b5888 Mon Sep 17 00:00:00 2001 From: Gavin Ray Date: Fri, 22 Nov 2024 16:30:39 -0500 Subject: [PATCH 21/29] Add Phoenix Query Server client --- ndc-cli/build.gradle.kts | 1 + ndc-connector-phoenix/build.gradle.kts | 1 + 2 files changed, 2 insertions(+) diff --git a/ndc-cli/build.gradle.kts b/ndc-cli/build.gradle.kts index 93ac493..9e3332e 100644 --- a/ndc-cli/build.gradle.kts +++ b/ndc-cli/build.gradle.kts @@ -28,6 +28,7 @@ dependencies { implementation("net.snowflake:snowflake-jdbc:3.16.1") implementation("org.apache.phoenix:phoenix-client-hbase-2.4:5.1.1") + implementation("org.apache.phoenix:phoenix-queryserver-client:5.0.0-HBase-2.0") } tasks.withType { diff --git a/ndc-connector-phoenix/build.gradle.kts b/ndc-connector-phoenix/build.gradle.kts index 32466da..9c788a4 100644 --- a/ndc-connector-phoenix/build.gradle.kts +++ b/ndc-connector-phoenix/build.gradle.kts @@ -28,6 +28,7 @@ dependencies { // Phoenix JDBC driver implementation("org.apache.phoenix:phoenix-client-hbase-2.4:5.1.1") + implementation("org.apache.phoenix:phoenix-queryserver-client:5.0.0-HBase-2.0") implementation("org.jooq:jooq:3.19.8") } From 36a8637fc432c8cfb20c651451cff8c012cb7699 Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 13:46:31 -0500 Subject: [PATCH 22/29] added translation for Phoenix types to Java sql types for the thin client --- .../io/hasura/cli/PhoenixConfigGenerator.kt | 65 ++++++++++++++++++- 1 file changed, 63 insertions(+), 2 deletions(-) diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt index 5babfb8..812ce84 100644 --- a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt +++ b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt @@ -6,14 +6,73 @@ import io.hasura.ndc.common.TableSchemaRow import io.hasura.ndc.common.TableType import org.jooq.impl.DSL import java.sql.JDBCType +import java.sql.Types object PhoenixConfigGenerator : IConfigGenerator { + + + private fun translatePhoenixDataTypeToSqlType(phoenixDataType: Int, isThinClient: Boolean = true): String { + + val sqlType = if (!isThinClient) { + phoenixDataType + } else { + when (phoenixDataType) { + 0 -> Types.TINYINT // UNSIGNED_TINYINT + 1 -> Types.SMALLINT // UNSIGNED_SMALLINT + 2 -> Types.INTEGER // UNSIGNED_INT + 3 -> Types.BIGINT // UNSIGNED_LONG 
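+                // Phoenix's internal type codes (including the unsigned variants in this
+                // mapping) do not line up with java.sql.Types constants, so the thin
+                // client's raw codes are translated before the JDBCType.valueOf() lookup.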
+ 4 -> Types.FLOAT // UNSIGNED_FLOAT + 5 -> Types.DOUBLE // UNSIGNED_DOUBLE + 6 -> Types.BINARY // BINARY + 7 -> Types.CHAR // CHAR + 8 -> Types.VARCHAR // VARCHAR + 9 -> Types.VARBINARY // VARBINARY + 10 -> Types.DECIMAL // DECIMAL + 11 -> Types.TIMESTAMP // TIMESTAMP + 12 -> Types.DATE // DATE + 13 -> Types.TIME // TIME + 14 -> Types.TIME // UNSIGNED_TIME + 15 -> Types.DATE // UNSIGNED_DATE + 16 -> Types.TIMESTAMP // UNSIGNED_TIMESTAMP + 17 -> Types.ARRAY // ARRAY + 18 -> Types.BOOLEAN // BOOLEAN + 19 -> Types.TINYINT // TINYINT + 20 -> Types.SMALLINT // SMALLINT + 21 -> Types.INTEGER // INTEGER + 22 -> Types.BIGINT // BIGINT + 23 -> Types.FLOAT // FLOAT + 24 -> Types.DOUBLE // DOUBLE + 25 -> Types.ARRAY // UNSIGNED_ARRAY + 26 -> Types.BINARY // UUID + else -> + if (JDBCType.valueOf(phoenixDataType) != null) { + phoenixDataType + } else { + throw IllegalArgumentException("Unknown Phoenix data type: $phoenixDataType") + } + } + } + return JDBCType.valueOf(sqlType).name + } + + fun main() { + // Example usage: + val phoenixDataType = 8 // Example: VARCHAR + val sqlType = translatePhoenixDataTypeToSqlType(phoenixDataType) + + println("Phoenix Data Type: $phoenixDataType") + println("Java SQL Type: $sqlType") // Should output: 12 (VARCHAR) + } + + override fun getConfig( jdbcUrl: String, schemas: List ): ConnectorConfiguration { val ctx = DSL.using(jdbcUrl) + val isThinClient = jdbcUrl.contains("phoenix:thin", ignoreCase = true) + val result = ctx.fetch(""" SELECT * FROM SYSTEM.CATALOG WHERE TABLE_SCHEM != 'SYSTEM' OR TABLE_SCHEM IS NULL @@ -28,10 +87,12 @@ object PhoenixConfigGenerator : IConfigGenerator { val columns = records.filter { it["COLUMN_NAME"] != null }.map { val columnFamily = it["COLUMN_FAMILY"] as String? val columnName = it["COLUMN_NAME"] as String + + ColumnSchemaRow( name = if (columnFamily != null && columnFamily != "0") "$columnFamily.$columnName" else columnName, description = null, - type = JDBCType.valueOf(it["DATA_TYPE"] as Int).name, + type = translatePhoenixDataTypeToSqlType(it["DATA_TYPE"] as Int, isThinClient), numeric_scale = null, nullable = it["NULLABLE"] == 1, auto_increment = it["IS_AUTOINCREMENT"] == "YES", @@ -59,4 +120,4 @@ object PhoenixConfigGenerator : IConfigGenerator { functions = emptyList() ) } -} \ No newline at end of file +} From dbd72d6a9e9aa0a164b043d43b00cbdae66f2e73 Mon Sep 17 00:00:00 2001 From: gneeri Date: Mon, 25 Nov 2024 13:49:35 -0500 Subject: [PATCH 23/29] Update build-connectors-action.yaml --- .github/workflows/build-connectors-action.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 2a9e380..5624fc6 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -6,6 +6,7 @@ on: - "snowflake/*" - "mysql/*" - "oracle/*" + - "phoenix/*" jobs: docker-build: From 2f511f23604674ad452e8a9d1029e3fdbe8cde0e Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 13:55:44 -0500 Subject: [PATCH 24/29] added phoenix build to action --- .github/workflows/build-connectors-action.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-connectors-action.yaml b/.github/workflows/build-connectors-action.yaml index 5624fc6..fdf162a 100644 --- a/.github/workflows/build-connectors-action.yaml +++ b/.github/workflows/build-connectors-action.yaml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - database: [snowflake, mysql, 
oracle] + database: [snowflake, mysql, oracle, phoenix] outputs: release_tag: ${{ steps.extract_tag.outputs.tag }} @@ -68,7 +68,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - database: [snowflake, mysql, oracle] + database: [snowflake, mysql, oracle, phoenix] steps: - name: Checkout repository From 38629ba038938488c7ec6a8a1ee45e473f133f7b Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 14:09:37 -0500 Subject: [PATCH 25/29] updated docker file with jooq keys --- ndc-connector-phoenix.dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ndc-connector-phoenix.dockerfile b/ndc-connector-phoenix.dockerfile index 84cec91..dfde389 100644 --- a/ndc-connector-phoenix.dockerfile +++ b/ndc-connector-phoenix.dockerfile @@ -2,6 +2,8 @@ FROM registry.access.redhat.com/ubi9/openjdk-21:1.20-2 AS build ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' +ENV JOOQ_PRO_EMAIL=${JOOQ_PRO_EMAIL} +ENV JOOQ_PRO_LICENSE=${JOOQ_PRO_LICENSE} WORKDIR /build COPY . /build From 8fdda8c27f1ab3ddfe99bf29071cc4f454d764b9 Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 14:16:45 -0500 Subject: [PATCH 26/29] added jooq args to docker file --- ndc-connector-phoenix.dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ndc-connector-phoenix.dockerfile b/ndc-connector-phoenix.dockerfile index dfde389..ca44646 100644 --- a/ndc-connector-phoenix.dockerfile +++ b/ndc-connector-phoenix.dockerfile @@ -1,6 +1,9 @@ # Build stage FROM registry.access.redhat.com/ubi9/openjdk-21:1.20-2 AS build +ARG JOOQ_PRO_EMAIL +ARG JOOQ_PRO_LICENSE + ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' ENV JOOQ_PRO_EMAIL=${JOOQ_PRO_EMAIL} ENV JOOQ_PRO_LICENSE=${JOOQ_PRO_LICENSE} From 24df3471ae2afb0cd7f89ab7edadffcaa9dfe51d Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 14:41:05 -0500 Subject: [PATCH 27/29] updates in line with code merged in from main --- .../io/hasura/phoenix/NoRelationshipsQueryGenerator.kt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ndc-connector-phoenix/src/main/kotlin/io/hasura/phoenix/NoRelationshipsQueryGenerator.kt b/ndc-connector-phoenix/src/main/kotlin/io/hasura/phoenix/NoRelationshipsQueryGenerator.kt index 3029d98..bce00f4 100644 --- a/ndc-connector-phoenix/src/main/kotlin/io/hasura/phoenix/NoRelationshipsQueryGenerator.kt +++ b/ndc-connector-phoenix/src/main/kotlin/io/hasura/phoenix/NoRelationshipsQueryGenerator.kt @@ -1,6 +1,6 @@ package io.hasura.phoenix -import io.hasura.ndc.app.services.ConnectorConfigurationLoader +import io.hasura.ndc.common.ConnectorConfiguration import io.hasura.ndc.common.NDCScalar import io.hasura.ndc.ir.* import io.hasura.ndc.ir.Field.ColumnField @@ -26,6 +26,7 @@ object NoRelationshipsQueryGenerator : BaseQueryGenerator() { ApplyBinaryComparisonOperator.IN -> col.`in`(value) ApplyBinaryComparisonOperator.IS_NULL -> col.isNull ApplyBinaryComparisonOperator.LIKE -> col.like(value as Field) + else -> throw Exception("Unsupported operator: $operator") } } @@ -130,7 +131,7 @@ object NoRelationshipsQueryGenerator : BaseQueryGenerator() { } fun columnTypeTojOOQType(collection: String, field: ColumnField): DataType { - val connectorConfig = ConnectorConfigurationLoader.config + val connectorConfig = ConnectorConfiguration.Loader.config val table = connectorConfig.tables.find { it.tableName == collection } ?: error("Table $collection not found in connector configuration") From 40b0f4e0aa2d5f37fa7cdeea3e31f5dbada33247 Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 16:01:24 -0500 
Subject: [PATCH 28/29] clean up: removed AI added function --- .../kotlin/io/hasura/cli/PhoenixConfigGenerator.kt | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt index 812ce84..095c548 100644 --- a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt +++ b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt @@ -55,16 +55,6 @@ object PhoenixConfigGenerator : IConfigGenerator { return JDBCType.valueOf(sqlType).name } - fun main() { - // Example usage: - val phoenixDataType = 8 // Example: VARCHAR - val sqlType = translatePhoenixDataTypeToSqlType(phoenixDataType) - - println("Phoenix Data Type: $phoenixDataType") - println("Java SQL Type: $sqlType") // Should output: 12 (VARCHAR) - } - - override fun getConfig( jdbcUrl: String, schemas: List From 132b07cf9af18033e21f5a4a4bef5be1148a2597 Mon Sep 17 00:00:00 2001 From: Jonathan Weiss Date: Mon, 25 Nov 2024 17:13:03 -0500 Subject: [PATCH 29/29] updated code mappings to comport with latest findings --- .../io/hasura/cli/PhoenixConfigGenerator.kt | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt index 095c548..da782d9 100644 --- a/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt +++ b/ndc-cli/src/main/kotlin/io/hasura/cli/PhoenixConfigGenerator.kt @@ -11,39 +11,37 @@ import java.sql.Types object PhoenixConfigGenerator : IConfigGenerator { - private fun translatePhoenixDataTypeToSqlType(phoenixDataType: Int, isThinClient: Boolean = true): String { + private fun translatePhoenixDataTypeToSqlType(phoenixDataType: Int?, isThinClient: Boolean = true): String { val sqlType = if (!isThinClient) { - phoenixDataType + phoenixDataType ?: Types.OTHER } else { when (phoenixDataType) { - 0 -> Types.TINYINT // UNSIGNED_TINYINT - 1 -> Types.SMALLINT // UNSIGNED_SMALLINT - 2 -> Types.INTEGER // UNSIGNED_INT - 3 -> Types.BIGINT // UNSIGNED_LONG - 4 -> Types.FLOAT // UNSIGNED_FLOAT - 5 -> Types.DOUBLE // UNSIGNED_DOUBLE - 6 -> Types.BINARY // BINARY - 7 -> Types.CHAR // CHAR - 8 -> Types.VARCHAR // VARCHAR - 9 -> Types.VARBINARY // VARBINARY - 10 -> Types.DECIMAL // DECIMAL - 11 -> Types.TIMESTAMP // TIMESTAMP - 12 -> Types.DATE // DATE - 13 -> Types.TIME // TIME - 14 -> Types.TIME // UNSIGNED_TIME - 15 -> Types.DATE // UNSIGNED_DATE - 16 -> Types.TIMESTAMP // UNSIGNED_TIMESTAMP - 17 -> Types.ARRAY // ARRAY - 18 -> Types.BOOLEAN // BOOLEAN - 19 -> Types.TINYINT // TINYINT - 20 -> Types.SMALLINT // SMALLINT - 21 -> Types.INTEGER // INTEGER - 22 -> Types.BIGINT // BIGINT - 23 -> Types.FLOAT // FLOAT - 24 -> Types.DOUBLE // DOUBLE - 25 -> Types.ARRAY // UNSIGNED_ARRAY - 26 -> Types.BINARY // UUID + null -> Types.OTHER // Handle null data_type + -6 -> Types.TINYINT // TINYINT + -5 -> Types.BIGINT // BIGINT + -3 -> Types.VARBINARY // VARBINARY + -2 -> Types.BINARY // BINARY + 1 -> Types.CHAR // CHAR + 3 -> Types.DECIMAL // DECIMAL + 4 -> Types.INTEGER // INTEGER + 5 -> Types.SMALLINT // SMALLINT + 6 -> Types.FLOAT // FLOAT + 8 -> Types.DOUBLE // DOUBLE + 9 -> Types.VARCHAR // VARCHAR + 10 -> Types.SMALLINT // UNSIGNED_SMALLINT (maps to SMALLINT) + 11 -> Types.FLOAT // UNSIGNED_FLOAT (maps to FLOAT) + 12 -> Types.VARCHAR // VARCHAR (Phoenix specific) + 13 -> Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR) + 14 -> 
Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR) + 15 -> Types.VARCHAR // (Custom/Unsupported, mapped to VARCHAR) + 16 -> Types.BOOLEAN // BOOLEAN + 18 -> Types.ARRAY // ARRAY + 19 -> Types.VARBINARY // VARBINARY (Phoenix specific) + 20 -> Types.VARBINARY // VARBINARY (Phoenix specific) + 91 -> Types.DATE // DATE + 92 -> Types.TIME // TIME + 93 -> Types.TIMESTAMP // TIMESTAMP else -> if (JDBCType.valueOf(phoenixDataType) != null) { phoenixDataType @@ -63,10 +61,12 @@ object PhoenixConfigGenerator : IConfigGenerator { val isThinClient = jdbcUrl.contains("phoenix:thin", ignoreCase = true) - val result = ctx.fetch(""" + val result = ctx.fetch( + """ SELECT * FROM SYSTEM.CATALOG WHERE TABLE_SCHEM != 'SYSTEM' OR TABLE_SCHEM IS NULL - """) + """ + ) val groupedBySchema = result.groupBy { it["TABLE_SCHEM"] as String? } @@ -82,7 +82,7 @@ object PhoenixConfigGenerator : IConfigGenerator { ColumnSchemaRow( name = if (columnFamily != null && columnFamily != "0") "$columnFamily.$columnName" else columnName, description = null, - type = translatePhoenixDataTypeToSqlType(it["DATA_TYPE"] as Int, isThinClient), + type = translatePhoenixDataTypeToSqlType(it["DATA_TYPE"] as? Int, isThinClient), numeric_scale = null, nullable = it["NULLABLE"] == 1, auto_increment = it["IS_AUTOINCREMENT"] == "YES", @@ -91,7 +91,7 @@ object PhoenixConfigGenerator : IConfigGenerator { } TableSchemaRow( - tableName = if (schema != null) "$schema.$tableName" else tableName, + tableName = if (schema != null) "$schema.$tableName" else tableName, tableType = if (records.any { it["TABLE_TYPE"] == "u" }) TableType.TABLE else TableType.VIEW, description = null, columns = columns,
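
A note on the thin-client mapping above, with a minimal sketch. Assumptions flagged: `thinClientTypeName` below is a hypothetical standalone helper that mirrors only a subset of PATCH 29's `translatePhoenixDataTypeToSqlType` (the real function is private to `PhoenixConfigGenerator` and covers the full code table); the integer codes shown are taken from the patch, and `java.sql.Types` / `JDBCType.valueOf(Int)` are standard JDK APIs. The CLI writes the resulting `JDBCType` name (not the raw code) into `configuration.json`, so downstream consumers see a uniform type string whether introspection ran against the thick or thin client.

    import java.sql.JDBCType
    import java.sql.Types

    // Hypothetical standalone mirror of a subset of the thin-client mapping
    // introduced in PATCH 29. Codes 10 and 11 are the interesting cases:
    // Phoenix's unsigned variants land on plain SMALLINT / FLOAT.
    fun thinClientTypeName(code: Int?): String {
        val sqlType = when (code) {
            null -> Types.OTHER     // SYSTEM.CATALOG rows with no DATA_TYPE
            -5 -> Types.BIGINT
            4 -> Types.INTEGER
            10 -> Types.SMALLINT    // UNSIGNED_SMALLINT
            11 -> Types.FLOAT       // UNSIGNED_FLOAT
            12 -> Types.VARCHAR
            16 -> Types.BOOLEAN
            93 -> Types.TIMESTAMP
            else -> code            // assume an already-standard java.sql.Types code
        }
        return JDBCType.valueOf(sqlType).name  // e.g. "VARCHAR", "TIMESTAMP"
    }

    fun main() {
        listOf<Int?>(null, -5, 10, 11, 93).forEach { code ->
            // prints: null -> OTHER, -5 -> BIGINT, 10 -> SMALLINT,
            //         11 -> FLOAT, 93 -> TIMESTAMP
            println("$code -> ${thinClientTypeName(code)}")
        }
    }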