diff --git a/bindings/java/.mvn/wrapper/maven-wrapper.jar b/bindings/java/.mvn/wrapper/maven-wrapper.jar new file mode 100644 index 00000000..cb28b0e3 Binary files /dev/null and b/bindings/java/.mvn/wrapper/maven-wrapper.jar differ diff --git a/bindings/java/.mvn/wrapper/maven-wrapper.properties b/bindings/java/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 00000000..ac184013 --- /dev/null +++ b/bindings/java/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar diff --git a/bindings/java/Cargo.toml b/bindings/java/Cargo.toml new file mode 100644 index 00000000..94542165 --- /dev/null +++ b/bindings/java/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "bendsql-java" +publish = false + +edition.workspace = true +version.workspace = true +license.workspace = true +authors.workspace = true +categories.workspace = true +keywords.workspace = true +repository.workspace = true + +[lib] +crate-type = ["cdylib"] +doc = false + +[dependencies] +anyhow = "1.0.71" +jni = "0.21.1" +once_cell = "1.19.0" +# this crate won't be published, we always use the local version +databend-driver = { version = ">=0", path = "../../driver"} +databend-driver-core = { version = ">=0", path = "../../sql"} +tokio = { version = "1.28.1", features = ["full"] } +tokio-stream = "0.1.11" +uuid = "1.3" +serde_json = "1.0.134" diff --git a/bindings/java/mvnw b/bindings/java/mvnw new file mode 100755 index 00000000..8d937f4c --- /dev/null +++ b/bindings/java/mvnw @@ -0,0 +1,308 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.2.0 +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /usr/local/etc/mavenrc ] ; then + . /usr/local/etc/mavenrc + fi + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "$(uname)" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + JAVA_HOME="$(/usr/libexec/java_home)"; export JAVA_HOME + else + JAVA_HOME="/Library/Java/Home"; export JAVA_HOME + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=$(java-config --jre-home) + fi +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$JAVA_HOME" ] && + JAVA_HOME=$(cygpath --unix "$JAVA_HOME") + [ -n "$CLASSPATH" ] && + CLASSPATH=$(cygpath --path --unix "$CLASSPATH") +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$JAVA_HOME" ] && [ -d "$JAVA_HOME" ] && + JAVA_HOME="$(cd "$JAVA_HOME" || (echo "cannot cd into $JAVA_HOME."; exit 1); pwd)" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="$(which javac)" + if [ -n "$javaExecutable" ] && ! [ "$(expr "\"$javaExecutable\"" : '\([^ ]*\)')" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=$(which readlink) + if [ ! "$(expr "$readLink" : '\([^ ]*\)')" = "no" ]; then + if $darwin ; then + javaHome="$(dirname "\"$javaExecutable\"")" + javaExecutable="$(cd "\"$javaHome\"" && pwd -P)/javac" + else + javaExecutable="$(readlink -f "\"$javaExecutable\"")" + fi + javaHome="$(dirname "\"$javaExecutable\"")" + javaHome=$(expr "$javaHome" : '\(.*\)/bin') + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="$(\unset -f command 2>/dev/null; \command -v java)" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=$(cd "$wdir/.." || exit 1; pwd) + fi + # end of workaround + done + printf '%s' "$(cd "$basedir" || exit 1; pwd)" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + # Remove \r in case we run on Windows within Git Bash + # and check out the repository with auto CRLF management + # enabled. Otherwise, we may read lines that are delimited with + # \r\n and produce $'-Xarg\r' rather than -Xarg due to word + # splitting rules. + tr -s '\r\n' ' ' < "$1" + fi +} + +log() { + if [ "$MVNW_VERBOSE" = true ]; then + printf '%s\n' "$1" + fi +} + +BASE_DIR=$(find_maven_basedir "$(dirname "$0")") +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}; export MAVEN_PROJECTBASEDIR +log "$MAVEN_PROJECTBASEDIR" + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +wrapperJarPath="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" +if [ -r "$wrapperJarPath" ]; then + log "Found $wrapperJarPath" +else + log "Couldn't find $wrapperJarPath, downloading it ..." + + if [ -n "$MVNW_REPOURL" ]; then + wrapperUrl="$MVNW_REPOURL/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" + else + wrapperUrl="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" + fi + while IFS="=" read -r key value; do + # Remove '\r' from value to allow usage on windows as IFS does not consider '\r' as a separator ( considers space, tab, new line ('\n'), and custom '=' ) + safeValue=$(echo "$value" | tr -d '\r') + case "$key" in (wrapperUrl) wrapperUrl="$safeValue"; break ;; + esac + done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" + log "Downloading from: $wrapperUrl" + + if $cygwin; then + wrapperJarPath=$(cygpath --path --windows "$wrapperJarPath") + fi + + if command -v wget > /dev/null; then + log "Found wget ... using wget" + [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--quiet" + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget $QUIET "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" + else + wget $QUIET --http-user="$MVNW_USERNAME" --http-password="$MVNW_PASSWORD" "$wrapperUrl" -O "$wrapperJarPath" || rm -f "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + log "Found curl ... 
using curl" + [ "$MVNW_VERBOSE" = true ] && QUIET="" || QUIET="--silent" + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl $QUIET -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" + else + curl $QUIET --user "$MVNW_USERNAME:$MVNW_PASSWORD" -o "$wrapperJarPath" "$wrapperUrl" -f -L || rm -f "$wrapperJarPath" + fi + else + log "Falling back to using Java to download" + javaSource="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.java" + javaClass="$MAVEN_PROJECTBASEDIR/.mvn/wrapper/MavenWrapperDownloader.class" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaSource=$(cygpath --path --windows "$javaSource") + javaClass=$(cygpath --path --windows "$javaClass") + fi + if [ -e "$javaSource" ]; then + if [ ! -e "$javaClass" ]; then + log " - Compiling MavenWrapperDownloader.java ..." + ("$JAVA_HOME/bin/javac" "$javaSource") + fi + if [ -e "$javaClass" ]; then + log " - Running MavenWrapperDownloader.java ..." + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$wrapperUrl" "$wrapperJarPath") || rm -f "$wrapperJarPath" + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +# If specified, validate the SHA-256 sum of the Maven wrapper jar file +wrapperSha256Sum="" +while IFS="=" read -r key value; do + case "$key" in (wrapperSha256Sum) wrapperSha256Sum=$value; break ;; + esac +done < "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.properties" +if [ -n "$wrapperSha256Sum" ]; then + wrapperSha256Result=false + if command -v sha256sum > /dev/null; then + if echo "$wrapperSha256Sum $wrapperJarPath" | sha256sum -c > /dev/null 2>&1; then + wrapperSha256Result=true + fi + elif command -v shasum > /dev/null; then + if echo "$wrapperSha256Sum $wrapperJarPath" | shasum -a 256 -c > /dev/null 2>&1; then + wrapperSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." + echo "Please install either command, or disable validation by removing 'wrapperSha256Sum' from your maven-wrapper.properties." + exit 1 + fi + if [ $wrapperSha256Result = false ]; then + echo "Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised." >&2 + echo "Investigate or delete $wrapperJarPath to attempt a clean download." >&2 + echo "If you updated your Maven version, you need to update the specified wrapperSha256Sum property." >&2 + exit 1 + fi +fi + +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$JAVA_HOME" ] && + JAVA_HOME=$(cygpath --path --windows "$JAVA_HOME") + [ -n "$CLASSPATH" ] && + CLASSPATH=$(cygpath --path --windows "$CLASSPATH") + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=$(cygpath --path --windows "$MAVEN_PROJECTBASEDIR") +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. 
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $*" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +# shellcheck disable=SC2086 # safe args +exec "$JAVACMD" \ + $MAVEN_OPTS \ + $MAVEN_DEBUG_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/bindings/java/mvnw.cmd b/bindings/java/mvnw.cmd new file mode 100644 index 00000000..c4586b56 --- /dev/null +++ b/bindings/java/mvnw.cmd @@ -0,0 +1,205 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.2.0 +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%USERPROFILE%\mavenrc_pre.bat" call "%USERPROFILE%\mavenrc_pre.bat" %* +if exist "%USERPROFILE%\mavenrc_pre.cmd" call "%USERPROFILE%\mavenrc_pre.cmd" %* +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. 
+echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set WRAPPER_URL="https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" + +FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET WRAPPER_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + if "%MVNW_VERBOSE%" == "true" ( + echo Found %WRAPPER_JAR% + ) +) else ( + if not "%MVNW_REPOURL%" == "" ( + SET WRAPPER_URL="%MVNW_REPOURL%/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar" + ) + if "%MVNW_VERBOSE%" == "true" ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... 
+ echo Downloading from: %WRAPPER_URL% + ) + + powershell -Command "&{"^ + "$webclient = new-object System.Net.WebClient;"^ + "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ + "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ + "}"^ + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%WRAPPER_URL%', '%WRAPPER_JAR%')"^ + "}" + if "%MVNW_VERBOSE%" == "true" ( + echo Finished downloading %WRAPPER_JAR% + ) +) +@REM End of extension + +@REM If specified, validate the SHA-256 sum of the Maven wrapper jar file +SET WRAPPER_SHA_256_SUM="" +FOR /F "usebackq tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperSha256Sum" SET WRAPPER_SHA_256_SUM=%%B +) +IF NOT %WRAPPER_SHA_256_SUM%=="" ( + powershell -Command "&{"^ + "$hash = (Get-FileHash \"%WRAPPER_JAR%\" -Algorithm SHA256).Hash.ToLower();"^ + "If('%WRAPPER_SHA_256_SUM%' -ne $hash){"^ + " Write-Output 'Error: Failed to validate Maven wrapper SHA-256, your Maven wrapper might be compromised.';"^ + " Write-Output 'Investigate or delete %WRAPPER_JAR% to attempt a clean download.';"^ + " Write-Output 'If you updated your Maven version, you need to update the specified wrapperSha256Sum property.';"^ + " exit 1;"^ + "}"^ + "}" + if ERRORLEVEL 1 goto error +) + +@REM Provide a "standardized" way to retrieve the CLI args that will +@REM work with both Windows and non-Windows executions. +set MAVEN_CMD_LINE_ARGS=%* + +%MAVEN_JAVA_EXE% ^ + %JVM_CONFIG_MAVEN_PROPS% ^ + %MAVEN_OPTS% ^ + %MAVEN_DEBUG_OPTS% ^ + -classpath %WRAPPER_JAR% ^ + "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" ^ + %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%"=="" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%USERPROFILE%\mavenrc_post.bat" call "%USERPROFILE%\mavenrc_post.bat" +if exist "%USERPROFILE%\mavenrc_post.cmd" call "%USERPROFILE%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%"=="on" pause + +if "%MAVEN_TERMINATE_CMD%"=="on" exit %ERROR_CODE% + +cmd /C exit /B %ERROR_CODE% diff --git a/bindings/java/pom.xml b/bindings/java/pom.xml new file mode 100644 index 00000000..61bea716 --- /dev/null +++ b/bindings/java/pom.xml @@ -0,0 +1,316 @@ + + + 4.0.0 + + com.databend + bendsql + 0.23.2 + + Databend™ + + Databend™ is a modern cloud data platform that allows users to + easily and efficiently retrieve data from various storage services in a unified way. 
+ + + https://databend.com + + + Develop List + dev-subscribe@opendal.apache.org + dev-unsubscribe@opendal.apache.org + dev@opendal.apache.org + https://lists.apache.org/list.html?dev@opendal.apache.org + + + + + + 10 + UTF-8 + 1.8 + 1.8 + + + dev + default + false + + ${os.detected.classifier} + + + 3.23.1 + 2.16.1 + 2.3.2 + 1.18.34 + 2.0.7 + 5.2 + + + 3.1.2 + 3.13.0 + 3.1.0 + 1.7.0 + 2.36.0 + 2.39.0 + 3.6.3 + 0.3.4 + 0.3.4 + + + + + + + org.junit + junit-bom + 5.9.2 + pom + import + + + org.assertj + assertj-core + ${assertj.version} + + + org.projectlombok + lombok + ${lombok.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + io.github.cdimascio + dotenv-java + ${dotenv.version} + + + commons-io + commons-io + ${commons-io.version} + test + + + org.apache.httpcomponents.client5 + httpclient5 + ${httpclient.version} + test + + + + + + + + com.databend + databend-jdbc + ${databend-jdbc.version} + + + + com.databend + databend-client + ${databend-client.version} + + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + + + + com.google.guava + guava + 32.1.3-jre + + + joda-time + joda-time + 2.13.0 + + + org.projectlombok + lombok + provided + + + + org.junit.platform + junit-platform-suite + test + + + org.junit.jupiter + junit-jupiter + test + + + org.assertj + assertj-core + test + + + org.slf4j + slf4j-simple + test + + + io.github.cdimascio + dotenv-java + test + + + commons-io + commons-io + test + + + org.apache.httpcomponents.client5 + httpclient5 + test + + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + + + + + + + src/main/resources + true + + + + + src/test/resources + true + + + + + + kr.motd.maven + os-maven-plugin + ${os-maven-plugin.version} + + + + + + exec-maven-plugin + org.codehaus.mojo + ${exec-maven-plugin.version} + + + compile-native-code + compile + + exec + + + python3 + + ${project.basedir}/tools/build.py + --classifier + ${jni.classifier} + --target + ${cargo-build.target} + --profile + ${cargo-build.profile} + --enable-zigbuild + ${cargo-build.enableZigbuild} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + + + org.projectlombok + lombok + ${lombok.version} + + + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven-surefire-plugin.version} + + + org.apache.maven.plugins + maven-jar-plugin + + + + default-jar + + + native/** + + + + + + native-jar + + jar + + + ${jni.classifier} + + native/** + + + + + + + + com.diffplug.spotless + spotless-maven-plugin + ${spotless.version} + + + + ${palantir-java-format.version} + + + + \#| + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + ${maven-javadoc-plugin.version} + + + Copyright © 2022-2024, The Apache Software Foundation. Apache OpenDAL, OpenDAL, and Apache + are either registered trademarks or trademarks of the Apache Software Foundation. + + + + + + diff --git a/bindings/java/src/connection.rs b/bindings/java/src/connection.rs new file mode 100644 index 00000000..db05745a --- /dev/null +++ b/bindings/java/src/connection.rs @@ -0,0 +1,175 @@ +// Copyright 2021 Datafuse Labs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::path::Path;
+
+use databend_driver::Connection;
+use jni::objects::JClass;
+use jni::objects::JString;
+use jni::sys::jlong;
+use jni::JNIEnv;
+
+use crate::error::Error;
+use crate::jni_utils::executor::executor_or_default;
+use crate::jni_utils::executor::Executor;
+
+use crate::jni_utils::jstring_to_string;
+use crate::Result;
+use databend_driver::rest_api::RestAPIConnection;
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeConnection_constructor(
+    mut env: JNIEnv,
+    _: JClass,
+    executor: *const Executor,
+    dsn: JString,
+) -> jlong {
+    intern_constructor(&mut env, executor, dsn).unwrap_or_else(|e| {
+        e.throw(&mut env);
+        0
+    })
+}
+
+fn intern_constructor(env: &mut JNIEnv, executor: *const Executor, dsn: JString) -> Result<jlong> {
+    let mut dsn = jstring_to_string(env, &dsn)?;
+    if dsn.starts_with("jdbc:") {
+        dsn = dsn[5..].to_string();
+    }
+    let result = executor_or_default(env, executor)?.block_on(async move {
+        let conn = RestAPIConnection::try_create(&dsn, "jdbc".to_string()).await;
+        let handle = conn
+            .map(|conn| Box::into_raw(Box::new(conn)) as jlong)
+            .map_err(|e| Error::from(e));
+        handle
+    })?;
+    Ok(result)
+}
+
+// #[no_mangle]
+// pub extern "system" fn Java_com_databend_bendsql_NativeConnection_execute(
+//     mut env: JNIEnv,
+//     _: JClass,
+//     connection: *mut RestAPIConnection,
+//     executor: *const Executor,
+//     sql: JString,
+// ) -> jlong {
+//     intern_execute(&mut env, connection, executor, sql).unwrap_or_else(|e| {
+//         e.throw(&mut env);
+//         0
+//     })
+// }
+
+// fn intern_execute(
+//     env: &mut JNIEnv,
+//     connection: *mut RestAPIConnection,
+//     executor: *const Executor,
+//     sql: JString,
+// ) -> Result<jlong> {
+//     let sql = jstring_to_string(env, &sql)?;
+//     let connection = unsafe { &mut *connection };
+
+//     let result = executor_or_default(env, executor)?.block_on(async move {
+//         connection
+//             .exec(&sql)
+//             .await
+//             .map(|result| Box::into_raw(Box::new(result)) as jlong)
+//             .map_err(Error::from)
+//     })?;
+//     Ok(result)
+// }
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeConnection_loadFile(
+    mut env: JNIEnv,
+    _: JClass,
+    connection: *mut RestAPIConnection,
+    executor: *const Executor,
+    sql: JString,
+    path: JString,
+) -> jlong {
+    intern_load_file(&mut env, connection, executor, sql, path).unwrap_or_else(|e| {
+        e.throw(&mut env);
+        0
+    })
+}
+
+fn intern_load_file(
+    env: &mut JNIEnv,
+    connection: *mut RestAPIConnection,
+    executor: *const Executor,
+    sql: JString,
+    path: JString,
+) -> Result<jlong> {
+    let sql = jstring_to_string(env, &sql)?;
+    let path = jstring_to_string(env, &path)?;
+    let connection = unsafe { &mut *connection };
+
+    let result = executor_or_default(env, executor)?.block_on(async move {
+        let path = Path::new(&path);
+        connection
+            .load_file(&sql, &path, None, None)
+            .await
+            .map(|result| Box::into_raw(Box::new(result)) as jlong)
+            .map_err(Error::from)
+    })?;
+    Ok(result)
+}
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeConnection_execute(
+    mut env: JNIEnv,
+    _: JClass,
+    connection: *mut RestAPIConnection,
+    executor: *const Executor,
+    sql: JString,
+) -> jlong {
+    intern_execute(&mut env, connection, executor, sql).unwrap_or_else(|e| {
+        e.throw(&mut env);
+        0
+    })
+}
+
+fn intern_execute(
+    env: &mut JNIEnv,
+    connection: *mut RestAPIConnection,
+    executor: *const Executor,
+    sql: JString,
+) -> Result<jlong> {
+    let sql = jstring_to_string(env, &sql)?;
+    let connection = unsafe { &mut *connection };
+
+    let it = executor_or_default(env, executor)?
+        .block_on(async move { connection.query_row_batch(&sql).await })?;
+    if it.schema().is_empty() {
+        Ok(0)
+    } else {
+        Ok(Box::into_raw(Box::new(it)) as jlong)
+    }
+}
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeConnection_disposeInternal(
+    env: &mut JNIEnv,
+    _class: JClass,
+    handle: jlong,
+    executor: *const Executor,
+) {
+    if handle != 0 {
+        let conn = unsafe { Box::from_raw(handle as *mut RestAPIConnection) };
+        executor_or_default(env, executor)
+            .unwrap()
+            .block_on(async move { conn.close().await })
+            .ok();
+    }
+}
diff --git a/bindings/java/src/error.rs b/bindings/java/src/error.rs
new file mode 100644
index 00000000..b9d9726d
--- /dev/null
+++ b/bindings/java/src/error.rs
@@ -0,0 +1,84 @@
+// Copyright 2021 Datafuse Labs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+use std::fmt::Debug;
+use std::fmt::Display;
+use std::fmt::Formatter;
+
+use jni::objects::JThrowable;
+use jni::objects::JValue;
+use jni::JNIEnv;
+
+pub(crate) struct Error {
+    inner: databend_driver::Error,
+}
+
+impl Error {
+    pub(crate) fn throw(&self, env: &mut JNIEnv) {
+        if let Err(err) = self.do_throw(env) {
+            match err {
+                jni::errors::Error::JavaException => {
+                    // another call has already thrown an exception; safely ignored
+                }
+                _ => env.fatal_error(err.to_string()),
+            }
+        }
+    }
+
+    pub(crate) fn to_exception<'local>(
+        &self,
+        env: &mut JNIEnv<'local>,
+    ) -> jni::errors::Result<JThrowable<'local>> {
+        let class = env.find_class("java/sql/SQLException")?;
+        let message = env.new_string(format!("{:?}", self.inner))?;
+        let exception =
+            env.new_object(class, "(Ljava/lang/String;)V", &[JValue::Object(&message)])?;
+        Ok(JThrowable::from(exception))
+    }
+
+    fn do_throw(&self, env: &mut JNIEnv) -> jni::errors::Result<()> {
+        let exception = self.to_exception(env)?;
+        env.throw(exception)
+    }
+}
+
+impl From<databend_driver::Error> for Error {
+    fn from(err: databend_driver::Error) -> Self {
+        Self { inner: err }
+    }
+}
+
+impl From<jni::errors::Error> for Error {
+    fn from(err: jni::errors::Error) -> Self {
+        databend_driver::Error::Unexpected(err.to_string()).into()
+    }
+}
+
+impl Debug for Error {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        Debug::fmt(&self.inner, f)
+    }
+}
+
+impl Display for Error {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        Display::fmt(&self.inner, f)
+    }
+}
+
+impl std::error::Error for Error {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        self.inner.source()
+    }
+}
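The `Java_com_databend_bendsql_NativeConnection_*` exports in connection.rs above imply a Java peer class along the lines of the sketch below. The declarations are inferred only from the JNI symbol names and parameter types; the actual class shipped in this PR may differ.

    package com.databend.bendsql;

    // Hypothetical sketch inferred from the JNI exports in connection.rs; not part of this diff.
    final class NativeConnection {
        // Opens a REST API connection from a DSN (a leading "jdbc:" prefix is stripped on the
        // Rust side) and returns a raw pointer to the boxed connection, or 0 if an SQLException
        // was thrown through the JNI layer.
        static native long constructor(long executor, String dsn);

        // Runs a statement; returns a pointer to a native row-batch iterator, or 0 when the
        // statement produces no result schema.
        static native long execute(long connection, long executor, String sql);

        // Executes a load-file (stage + copy) statement for the given local path and returns
        // a pointer to the native server response.
        static native long loadFile(long connection, long executor, String sql, String path);

        // Closes and frees the Rust-side connection; a 0 handle is ignored.
        static native void disposeInternal(long handle, long executor);
    }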
diff --git a/bindings/java/src/jni_utils/executor.rs b/bindings/java/src/jni_utils/executor.rs
new file mode 100644
index 00000000..061adcc6
--- /dev/null
+++ b/bindings/java/src/jni_utils/executor.rs
@@ -0,0 +1,180 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use std::cell::RefCell;
+use std::ffi::c_void;
+use std::future::Future;
+use std::num::NonZeroUsize;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::thread::available_parallelism;
+
+use jni::objects::JClass;
+use jni::objects::JObject;
+use jni::objects::JValue;
+use jni::sys::jlong;
+use jni::JNIEnv;
+use jni::JavaVM;
+use once_cell::sync::OnceCell;
+
+use crate::Result;
+
+static mut RUNTIME: OnceCell<Executor> = OnceCell::new();
+thread_local! {
+    static ENV: RefCell<Option<*mut jni::sys::JNIEnv>> = const { RefCell::new(None) };
+}
+
+/// # Safety
+///
+/// This function should only be called by the JVM when it unloads this lib.
+#[no_mangle]
+pub unsafe extern "system" fn JNI_OnUnload(_: JavaVM, _: *mut c_void) {
+    let _ = RUNTIME.take();
+}
+
+/// # Safety
+///
+/// This function may only be called when the lib is loaded and from within an executor thread.
+#[allow(dead_code)]
+pub(crate) unsafe fn get_current_env<'local>() -> JNIEnv<'local> {
+    let env = ENV
+        .with(|cell| *cell.borrow_mut())
+        .expect("env must be available");
+    JNIEnv::from_raw(env).expect("env must be valid")
+}
+
+pub enum Executor {
+    Tokio(tokio::runtime::Runtime),
+}
+
+impl Executor {
+    pub fn block_on<F>(&self, future: F) -> F::Output
+    where
+        F: Future + Send + 'static,
+        F::Output: Send + 'static,
+    {
+        match self {
+            Executor::Tokio(e) => e.block_on(future),
+        }
+    }
+}
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_AsyncExecutor_makeTokioExecutor(
+    mut env: JNIEnv,
+    _: JClass,
+    cores: usize,
+) -> jlong {
+    make_tokio_executor(&mut env, cores)
+        .map(|executor| Box::into_raw(Box::new(executor)) as jlong)
+        .unwrap_or_else(|e| {
+            e.throw(&mut env);
+            0
+        })
+}
+
+/// # Safety
+///
+/// This function should not be called before the AsyncExecutor is ready.
+#[no_mangle]
+pub unsafe extern "system" fn Java_com_databend_bendsql_AsyncExecutor_disposeInternal(
+    _: JNIEnv,
+    _: JObject,
+    executor: *mut Executor,
+) {
+    drop(Box::from_raw(executor));
+}
+
+pub(crate) fn make_tokio_executor(env: &mut JNIEnv, cores: usize) -> Result<Executor> {
+    let vm = env.get_java_vm().expect("JavaVM must be available");
+    let counter = AtomicUsize::new(0);
+    let executor = tokio::runtime::Builder::new_multi_thread()
+        .worker_threads(cores)
+        .thread_name_fn(move || {
+            let id = counter.fetch_add(1, Ordering::SeqCst);
+            format!("databend-jdbc-tokio-worker-{}", id)
+        })
+        .on_thread_start(move || {
+            ENV.with(|cell| {
+                let mut env = vm
+                    .attach_current_thread_as_daemon()
+                    .expect("attach thread must succeed");
+
+                set_current_thread_name(&mut env).expect("current thread name has been set above");
+
+                *cell.borrow_mut() = Some(env.get_raw());
+            })
+        })
+        .enable_all()
+        .build()
+        .map_err(|e| {
+            databend_driver::Error::Unexpected(format!("Failed to create tokio runtime: {e}"))
+            //.set_source(e)
+        })?;
+    Ok(Executor::Tokio(executor))
+}
+
+fn set_current_thread_name(env: &mut JNIEnv) -> Result<()> {
+    let current_thread = env
+        .call_static_method(
+            "java/lang/Thread",
+            "currentThread",
+            "()Ljava/lang/Thread;",
+            &[],
+        )?
+        .l()?;
+    let thread_name = match std::thread::current().name() {
+        Some(thread_name) => env.new_string(thread_name)?,
+        None => unreachable!("thread name must be set"),
+    };
+    env.call_method(
+        current_thread,
+        "setName",
+        "(Ljava/lang/String;)V",
+        &[JValue::Object(&thread_name)],
+    )?;
+    Ok(())
+}
+
+/// # Panics
+///
+/// Panics if the executor has been disposed.
+#[inline]
+pub(crate) fn executor_or_default<'a>(
+    env: &mut JNIEnv<'a>,
+    executor: *const Executor,
+) -> Result<&'a Executor> {
+    unsafe {
+        if executor.is_null() {
+            default_executor(env)
+        } else {
+            // SAFETY: executor must be valid
+            Ok(&*executor)
+        }
+    }
+}
+
+/// # Safety
+///
+/// This function may only be called when the lib is loaded.
+unsafe fn default_executor<'a>(env: &mut JNIEnv<'a>) -> Result<&'a Executor> {
+    RUNTIME.get_or_try_init(|| {
+        make_tokio_executor(
+            env,
+            available_parallelism().map(NonZeroUsize::get).unwrap_or(1),
+        )
+    })
+}
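The `Java_com_databend_bendsql_AsyncExecutor_*` exports above likewise imply a small Java-side owner for the Tokio runtime handle. A sketch, with the class name and shape inferred from the symbol names rather than taken from this diff:

    package com.databend.bendsql;

    // Hypothetical sketch inferred from the JNI exports in executor.rs; not part of this diff.
    public final class AsyncExecutor implements AutoCloseable {
        final long nativeHandle;

        public AsyncExecutor(int cores) {
            // int widens to long at the call site; the Rust side receives the worker-thread count.
            this.nativeHandle = makeTokioExecutor(cores);
        }

        @Override
        public void close() {
            disposeInternal(nativeHandle);
        }

        // Builds a multi-threaded Tokio runtime and returns a raw pointer to it.
        private static native long makeTokioExecutor(long cores);

        // Drops the runtime; must not race with in-flight native calls that still use it.
        private native void disposeInternal(long executor);
    }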
diff --git a/bindings/java/src/jni_utils/mod.rs b/bindings/java/src/jni_utils/mod.rs
new file mode 100644
index 00000000..07fb6e11
--- /dev/null
+++ b/bindings/java/src/jni_utils/mod.rs
@@ -0,0 +1,29 @@
+// Copyright 2021 Datafuse Labs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+pub(crate) mod executor;
+
+use jni::objects::JString;
+
+use crate::Result;
+use jni::JNIEnv;
+
+/// # Safety
+///
+/// The caller must guarantee that the Object passed in is an instance
+/// of `java.lang.String`; passing in anything else will lead to undefined behavior.
+pub(crate) fn jstring_to_string(env: &mut JNIEnv, s: &JString) -> Result<String> {
+    let res = unsafe { env.get_string_unchecked(s)? };
+    Ok(res.into())
+}
diff --git a/bindings/java/src/lib.rs b/bindings/java/src/lib.rs
new file mode 100644
index 00000000..8196751c
--- /dev/null
+++ b/bindings/java/src/lib.rs
@@ -0,0 +1,20 @@
+// Copyright 2021 Datafuse Labs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+mod connection;
+mod error;
+mod jni_utils;
+mod row_batch_iterator;
+
+pub(crate) type Result<T> = std::result::Result<T, error::Error>;
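Putting the native pieces together before the JDBC layer: `execute` returns 0 when a statement yields no result schema, and otherwise a pointer to the iterator implemented in the `row_batch_iterator` module (not shown in this section). A purely illustrative flow built on the two hypothetical sketches above — the DSN and every class name here are examples, not code from this PR, and a real implementation would also release the iterator handle:

    package com.databend.bendsql;

    // Illustrative only; relies on the hypothetical NativeConnection/AsyncExecutor sketches above.
    final class NativeSmokeTest {
        static void run() {
            try (AsyncExecutor executor = new AsyncExecutor(Runtime.getRuntime().availableProcessors())) {
                long conn = NativeConnection.constructor(executor.nativeHandle,
                        "databend://root:@localhost:8000/default?sslmode=disable");
                try {
                    long rows = NativeConnection.execute(conn, executor.nativeHandle, "SELECT 1");
                    // rows == 0 would mean "no result set"; a nonzero value is a native iterator
                    // handle that the ResultSet layer below consumes as Iterator<List<Object>>.
                } finally {
                    NativeConnection.disposeInternal(conn, executor.nativeHandle);
                }
            }
        }
    }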
diff --git a/bindings/java/src/main/java/com/databend/bendsql/AbstractDatabendResultSet.java b/bindings/java/src/main/java/com/databend/bendsql/AbstractDatabendResultSet.java
new file mode 100644
index 00000000..1920c373
--- /dev/null
+++ b/bindings/java/src/main/java/com/databend/bendsql/AbstractDatabendResultSet.java
@@ -0,0 +1,1173 @@
+package com.databend.bendsql;
+
+import com.databend.client.QueryRowField;
+import com.databend.client.data.ColumnTypeHandler;
+import com.databend.client.data.ColumnTypeHandlerFactory;
+import com.databend.client.data.DatabendRawType;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+import org.joda.time.DateTimeZone;
+import org.joda.time.LocalDate;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.ISODateTimeFormat;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.Ref;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Statement;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.databend.bendsql.DatabendColumnInfo.setTypeInfo;
+import static java.lang.Math.toIntExact;
+import static java.util.Locale.ENGLISH;
+import static java.util.Objects.requireNonNull;
+import static org.joda.time.DateTimeConstants.SECONDS_PER_DAY;
+
+abstract class AbstractDatabendResultSet extends TrivialResultSet {
+    static final DateTimeFormatter DATE_FORMATTER = ISODateTimeFormat.date();
+    private static final long[] POWERS_OF_TEN = {
+            1L,
+            10L,
+            100L,
+            1000L,
+            10_000L,
+            100_000L,
+            1_000_000L,
+            10_000_000L,
+            100_000_000L,
+            1_000_000_000L,
+            10_000_000_000L,
+            100_000_000_000L,
+            1_000_000_000_000L
+    };
+    private static final int MILLISECONDS_PER_SECOND = 1000;
+    private static final long NANOSECONDS_PER_SECOND = 1_000_000_000;
+    private static final int PICOSECONDS_PER_NANOSECOND = 1_000;
+    private static final Pattern DATETIME_PATTERN = Pattern.compile("" +
+            "(?<year>[-+]?\\d{4,})-(?<month>\\d{1,2})-(?<day>\\d{1,2})" +
+            "(?: (?<hour>\\d{1,2}):(?<minute>\\d{1,2})(?::(?<second>\\d{1,2})(?:\\.(?<fraction>\\d+))?)?)?" +
+            "\\s*(?<timezone>.+)?");
+    private static final Pattern TIME_PATTERN = Pattern
+            .compile("(?<hour>\\d{1,2}):(?<minute>\\d{1,2}):(?<second>\\d{1,2})(?:\\.(?<fraction>\\d+))?");
+    private static final long START_OF_MODERN_ERA_SECONDS = java.time.LocalDate.of(1901, 1, 1).toEpochDay()
+            * SECONDS_PER_DAY;
+    protected final Iterator<List<Object>> results;
+    private final Optional<Statement> statement;
+    private final AtomicReference<List<Object>> row = new AtomicReference<>();
+    private final AtomicLong currentRowNumber = new AtomicLong(); // Index into 'rows' of our current row (1-based)
+    private final AtomicBoolean wasNull = new AtomicBoolean();
+    private final Map<String, Integer> fieldMap;
+    private final List<DatabendColumnInfo> databendColumnInfoList;
+    private final ResultSetMetaData resultSetMetaData;
+    private final DateTimeZone resultTimeZone;
+
+    AbstractDatabendResultSet(Optional<Statement> statement, List<QueryRowField> schema, Iterator<List<Object>> results) {
+        this.statement = requireNonNull(statement, "statement is null");
+        this.fieldMap = getFieldMap(schema);
+        this.databendColumnInfoList = getColumnInfo(schema);
+        this.results = requireNonNull(results, "results is null");
+        this.resultSetMetaData = new DatabendResultSetMetaData(databendColumnInfoList);
+        this.resultTimeZone = DateTimeZone.forTimeZone(TimeZone.getDefault());
+    }
+
+    private static Map<String, Integer> getFieldMap(List<QueryRowField> columns) {
+        Map<String, Integer> map = Maps.newHashMapWithExpectedSize(columns.size());
+        for (int i = 0; i < columns.size(); i++) {
+            String name = columns.get(i).getName().toLowerCase(ENGLISH);
+            if (!map.containsKey(name)) {
+                map.put(name, i + 1);
+            }
+        }
+        return ImmutableMap.copyOf(map);
+    }
+
+    private static List<DatabendColumnInfo> getColumnInfo(List<QueryRowField> columns) {
+        ImmutableList.Builder<DatabendColumnInfo> list = ImmutableList.builderWithExpectedSize(columns.size());
+        for (QueryRowField column : columns) {
+            DatabendColumnInfo.Builder builder = new DatabendColumnInfo.Builder()
+                    .setCatalogName("") // TODO
+                    .setSchemaName("") // TODO
+                    .setTableName("") // TODO
+                    .setColumnLabel(column.getName())
+                    .setColumnName(column.getName()) // TODO
+                    .setColumnTypeSignature(column.getDataType())
+                    .setCurrency(false);
+            setTypeInfo(builder, column.getDataType());
+            list.add(builder.build());
+        }
+        return list.build();
+    }
+
+    private static Optional<BigDecimal> toBigDecimal(String value) {
+        try {
+            return Optional.of(new BigDecimal(value));
+        } catch (NumberFormatException ne) {
+            return Optional.empty();
+        }
+    }
+
+    private static BigDecimal parseBigDecimal(String value)
+            throws SQLException {
+        return toBigDecimal(String.valueOf(value))
+                .orElseThrow(() -> new SQLException("Value is not a number: " + value));
+    }
+
+    // static SQLException resultsException(QueryResponse results, String
+    // originalSQL) {
+    // QueryErrors error = requireNonNull(results.getError());
+    // String message = format("SQL: (%s) Query failed (#%s): %s", originalSQL,
+    // results.getQueryId(), error.getMessage());
+    // return new SQLException(message, String.valueOf(error.getCode()));
+    // }
+
+    private static Date parseDate(String value, DateTimeZone localTimeZone) {
+        long millis = DATE_FORMATTER.withZone(localTimeZone).parseMillis(String.valueOf(value));
+        if (millis >= START_OF_MODERN_ERA_SECONDS * MILLISECONDS_PER_SECOND) {
+            return new Date(millis);
+        }
+
+        // The
chronology used by default by Joda is not historically accurate for dates + // preceding the introduction of the Gregorian calendar and is not consistent + // with + // java.sql.Date (the same millisecond value represents a different + // year/month/day) + // before the 20th century. For such dates we are falling back to using the more + // expensive GregorianCalendar; note that Joda also has a chronology that works + // for + // older dates, but it uses a slightly different algorithm and yields results + // that + // are not compatible with java.sql.Date. + LocalDate localDate = DATE_FORMATTER.parseLocalDate(String.valueOf(value)); + Calendar calendar = new GregorianCalendar(localDate.getYear(), localDate.getMonthOfYear() - 1, + localDate.getDayOfMonth()); + calendar.setTimeZone(TimeZone.getTimeZone(ZoneId.of(localTimeZone.getID()))); + + return new Date(calendar.getTimeInMillis()); + } + + private static long rescale(long value, int fromPrecision, int toPrecision) { + if (value < 0) { + throw new IllegalArgumentException("value must be >= 0"); + } + + if (fromPrecision <= toPrecision) { + value *= scaleFactor(fromPrecision, toPrecision); + } else { + value = roundDiv(value, scaleFactor(toPrecision, fromPrecision)); + } + + return value; + } + + private static long scaleFactor(int fromPrecision, int toPrecision) { + if (fromPrecision > toPrecision) { + throw new IllegalArgumentException("fromPrecision must be <= toPrecision"); + } + + return POWERS_OF_TEN[toPrecision - fromPrecision]; + } + + private static long roundDiv(long value, long factor) { + + if (value >= 0) { + return (value + (factor / 2)) / factor; + } + + return (value - (factor / 2)) / factor; + } + + private static Time parseTime(String value, ZoneId localTimeZone) { + Matcher matcher = TIME_PATTERN.matcher(value); + if (!matcher.matches()) { + throw new IllegalArgumentException("Invalid time: " + value); + } + + int hour = Integer.parseInt(matcher.group("hour")); + int minute = Integer.parseInt(matcher.group("minute")); + int second = matcher.group("second") == null ? 
0 : Integer.parseInt(matcher.group("second"));
+
+        if (hour > 23 || minute > 59 || second > 59) {
+            throw new IllegalArgumentException("Invalid time: " + value);
+        }
+
+        int precision = 0;
+        String fraction = matcher.group("fraction");
+        long fractionValue = 0;
+        if (fraction != null) {
+            precision = fraction.length();
+            fractionValue = Long.parseLong(fraction);
+        }
+
+        long picosOfSecond = rescale(fractionValue, precision, 12); // maximum precision
+        // We eventually truncate to millis, so truncate picos to nanos for consistency
+        // TODO (https://github.com/trinodb/trino/issues/6205) reconsider
+        int nanosOfSecond = toIntExact(picosOfSecond / PICOSECONDS_PER_NANOSECOND);
+        long epochMilli = ZonedDateTime.of(1970, 1, 1, hour, minute, second, nanosOfSecond, localTimeZone)
+                .toInstant()
+                .toEpochMilli();
+
+        return new Time(epochMilli);
+    }
+
+    private static Timestamp parseTimestampAsSqlTimestamp(String value, ZoneId localTimeZone) {
+        requireNonNull(localTimeZone, "localTimeZone is null");
+
+        ParsedTimestamp parsed = parseTimestamp(value);
+        return toTimestamp(value, parsed, timezone -> {
+            if (timezone.isPresent()) {
+                throw new IllegalArgumentException("Invalid timestamp: " + value);
+            }
+            return localTimeZone;
+        });
+    }
+
+    private static Timestamp toTimestamp(String originalValue, ParsedTimestamp parsed,
+            Function<Optional<String>, ZoneId> timeZoneParser) {
+        int year = parsed.year;
+        int month = parsed.month;
+        int day = parsed.day;
+        int hour = parsed.hour;
+        int minute = parsed.minute;
+        int second = parsed.second;
+        long picosOfSecond = parsed.picosOfSecond;
+        ZoneId zoneId = timeZoneParser.apply(parsed.timezone);
+
+        long epochSecond = LocalDateTime.of(year, month, day, hour, minute, second, 0)
+                .atZone(zoneId)
+                .toEpochSecond();
+
+        if (epochSecond < START_OF_MODERN_ERA_SECONDS) {
+            // slower path, but accurate for historical dates
+            GregorianCalendar calendar = new GregorianCalendar(year, month - 1, day, hour, minute, second);
+            calendar.setTimeZone(TimeZone.getTimeZone(zoneId));
+            epochSecond = calendar.getTimeInMillis() / MILLISECONDS_PER_SECOND;
+        }
+
+        int nanoOfSecond = (int) rescale(picosOfSecond, 12, 9);
+        if (nanoOfSecond == NANOSECONDS_PER_SECOND) {
+            epochSecond++;
+            nanoOfSecond = 0;
+        }
+
+        Timestamp timestamp = new Timestamp(epochSecond * MILLISECONDS_PER_SECOND);
+        timestamp.setNanos(nanoOfSecond);
+        return timestamp;
+    }
+
+    private static ParsedTimestamp parseTimestamp(String value) {
+        Matcher matcher = DATETIME_PATTERN.matcher(value);
+        if (!matcher.matches()) {
+            throw new IllegalArgumentException("Invalid timestamp: " + value);
+        }
+
+        int year = Integer.parseInt(matcher.group("year"));
+        int month = Integer.parseInt(matcher.group("month"));
+        int day = Integer.parseInt(matcher.group("day"));
+        int hour = Integer.parseInt(matcher.group("hour"));
+        int minute = Integer.parseInt(matcher.group("minute"));
+        int second = Integer.parseInt(matcher.group("second"));
+        String fraction = matcher.group("fraction");
+        Optional<String> timezone = Optional.ofNullable(matcher.group("timezone"));
+
+        long picosOfSecond = 0;
+        if (fraction != null) {
+            int precision = fraction.length();
+            long fractionValue = Long.parseLong(fraction);
+            picosOfSecond = rescale(fractionValue, precision, 12);
+        }
+
+        return new ParsedTimestamp(year, month, day, hour, minute, second, picosOfSecond, timezone);
+    }
+
+    private void checkOpen()
+            throws SQLException {
+        if (isClosed()) {
+            throw new SQLException("ResultSet is closed");
+        }
+    }
+
+    @Override
+    public boolean next() throws SQLException {
+
checkOpen(); + try { + if (!results.hasNext()) { + row.set(null); + currentRowNumber.set(0); + return false; + } + row.set(results.next()); + currentRowNumber.incrementAndGet(); + return true; + } catch (RuntimeException e) { + if (e.getCause() instanceof SQLException) { + throw (SQLException) e.getCause(); + } + throw new SQLException("error fetching results", e); + } + } + + @Override + public boolean wasNull() throws SQLException { + return wasNull.get(); + } + + private void checkValidRow() + throws SQLException { + if (row.get() == null) { + throw new SQLException("Not on a valid row"); + } + } + + private Object column(int index) + throws SQLException { + checkOpen(); + checkValidRow(); + if ((index <= 0) || (index > resultSetMetaData.getColumnCount())) { + throw new SQLException("Invalid column index: " + index); + } + Object value = null; + value = row.get().get(index - 1); + if (value == null || value.toString().equals("NULL")) { + wasNull.set(true); + return null; + } else { + wasNull.set(false); + } + + return value; + } + + @Override + public String getString(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean getBoolean(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return false; + } + return (Boolean) value; + } + + @Override + public byte getByte(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + return ((Number) value).byteValue(); + } + + @Override + public short getShort(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + return ((Number) value).shortValue(); + } + + @Override + public int getInt(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + try { + return ((Number) value).intValue(); + } catch (ClassCastException e) { + // try to parse Number + try { + return Integer.parseInt(value.toString()); + } catch (NumberFormatException ex) { + // handler exception + throw new SQLException("Value at columnIndex " + columnIndex + " is not a number."); + } + } + } + + @Override + public long getLong(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + try { + return ((Number) value).longValue(); + } catch (ClassCastException e) { + // try to parse Long + try { + return Long.parseLong(value.toString()); + } catch (NumberFormatException ex) { + throw new SQLException("Value at columnIndex " + columnIndex + " is not a valid long."); + } + } + } + + @Override + public float getFloat(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + try { + return ((Number) value).floatValue(); + } catch (ClassCastException e) { + // try to parse Float + try { + return Float.parseFloat(value.toString()); + } catch (NumberFormatException ex) { + throw new SQLException("Value at columnIndex " + columnIndex + " is not a valid float."); + } + } + } + + @Override + public double getDouble(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return 0; + } + try { + return ((Number) value).doubleValue(); + } catch (ClassCastException e) { + // try to parse Double + try { + return Double.parseDouble(value.toString()); + } catch 
(NumberFormatException ex) { + throw new SQLException("Value at columnIndex " + columnIndex + " is not a valid double."); + } + } + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + try { + BigDecimal bigDecimal = (BigDecimal) value; + return bigDecimal.setScale(scale, BigDecimal.ROUND_HALF_UP); + } catch (ClassCastException e) { + // try to parse bigDecimal + try { + BigDecimal bigDecimal = new BigDecimal(value.toString()); + return bigDecimal.setScale(scale, BigDecimal.ROUND_HALF_UP); + } catch (NumberFormatException ex) { + throw new SQLException("Value at columnIndex " + columnIndex + " is not a valid BigDecimal."); + } + } + } + + @Override + public byte[] getBytes(int columnIndex) + throws SQLException { + final Object value = column(columnIndex); + if (value == null) { + return null; + } + if (value instanceof byte[]) { + return (byte[]) value; + } + throw new SQLException("Value is not a byte array: " + value); + } + + @Override + public Date getDate(int columnIndex) + throws SQLException { + return getDate(columnIndex, resultTimeZone); + } + + private Date getDate(int columnIndex, DateTimeZone localTimeZone) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + + try { + return parseDate(String.valueOf(value), localTimeZone); + } catch (IllegalArgumentException e) { + throw new SQLException("Expected value to be a date but is: " + value, e); + } + } + + @Override + public Time getTime(int columnIndex) + throws SQLException { + return getTime(columnIndex, resultTimeZone); + } + + private Time getTime(int columnIndex, DateTimeZone localTimeZone) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + + try { + return parseTime((String) value, ZoneId.of(localTimeZone.getID())); + } catch (IllegalArgumentException e) { + throw new SQLException("Invalid time from server: " + value, e); + } + } + + @Override + public Timestamp getTimestamp(int columnIndex) + throws SQLException { + return getTimestamp(columnIndex, resultTimeZone); + } + + private Timestamp getTimestamp(int columnIndex, DateTimeZone localTimeZone) + throws SQLException { + Object value = column(columnIndex); + if (value == null || value.toString().equalsIgnoreCase("null")) { + return new Timestamp(0); + } + + return parseTimestampAsSqlTimestamp((String) value, ZoneId.of(localTimeZone.getID())); + } + + @Override + public InputStream getAsciiStream(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + if (!(value instanceof String)) { + throw new SQLException("Value is not a string: " + value); + } + // TODO: a stream returned here should get implicitly closed + // on any subsequent invocation of a ResultSet getter method. + return new ByteArrayInputStream(((String) value).getBytes(StandardCharsets.US_ASCII)); + } + + @Override + public InputStream getUnicodeStream(int columnIndex) + throws SQLException { + throw new SQLFeatureNotSupportedException("getUnicodeStream"); + } + + @Override + public InputStream getBinaryStream(int columnIndex) + throws SQLException { + byte[] value = getBytes(columnIndex); + if (value == null) { + return null; + } + // TODO: a stream returned here should get implicitly closed + // on any subsequent invocation of a ResultSet getter method. 
+ return new ByteArrayInputStream(value); + } + + public int columnIndex(String label) + throws SQLException { + if (label == null) { + throw new SQLException("Column label is null"); + } + Integer index = fieldMap.get(label.toLowerCase(ENGLISH)); + if (index == null) { + throw new SQLException( + "Invalid column label: " + label + ". Valid column labels are: " + fieldMap.keySet()); + } + return index; + } + + @Override + public String getString(String columnLabel) + throws SQLException { + return getString(columnIndex(columnLabel)); + } + + @Override + public boolean getBoolean(String columnLabel) + throws SQLException { + return getBoolean(columnIndex(columnLabel)); + } + + @Override + public byte getByte(String columnLabel) + throws SQLException { + return getByte(columnIndex(columnLabel)); + } + + @Override + public short getShort(String columnLabel) + throws SQLException { + return getShort(columnIndex(columnLabel)); + } + + @Override + public int getInt(String columnLabel) + throws SQLException { + return getInt(columnIndex(columnLabel)); + } + + @Override + public long getLong(String columnLabel) + throws SQLException { + return getLong(columnIndex(columnLabel)); + } + + @Override + public float getFloat(String columnLabel) + throws SQLException { + return getFloat(columnIndex(columnLabel)); + } + + @Override + public double getDouble(String columnLabel) + throws SQLException { + return getDouble(columnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) + throws SQLException { + return getBigDecimal(columnIndex(columnLabel), scale); + } + + @Override + public byte[] getBytes(String columnLabel) + throws SQLException { + return getBytes(columnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel) + throws SQLException { + return getDate(columnIndex(columnLabel)); + } + + @Override + public Time getTime(String columnLabel) + throws SQLException { + return getTime(columnIndex(columnLabel)); + } + + @Override + public Timestamp getTimestamp(String columnLabel) + throws SQLException { + return getTimestamp(columnIndex(columnLabel)); + } + + @Override + public InputStream getAsciiStream(String columnLabel) + throws SQLException { + return getAsciiStream(columnIndex(columnLabel)); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("getUnicodeStream"); + } + + @Override + public InputStream getBinaryStream(String columnLabel) + throws SQLException { + return getBinaryStream(columnIndex(columnLabel)); + } + + @Override + public SQLWarning getWarnings() + throws SQLException { + checkOpen(); + return null; + } + + @Override + public void clearWarnings() + throws SQLException { + checkOpen(); + } + + @Override + public String getCursorName() + throws SQLException { + throw new SQLFeatureNotSupportedException("getCursorName"); + } + + @Override + public ResultSetMetaData getMetaData() + throws SQLException { + return resultSetMetaData; + } + + @Override + public Object getObject(int columnIndex) + throws SQLException { + return column(columnIndex); + } + + @Override + public Object getObject(String columnLabel) + throws SQLException { + return getObject(columnIndex(columnLabel)); + } + + @Override + public int findColumn(String columnLabel) + throws SQLException { + checkOpen(); + return columnIndex(columnLabel); + } + + @Override + public Reader getCharacterStream(int columnIndex) + throws SQLException { + throw new 
SQLFeatureNotSupportedException("ResultSet", "getCharacterStream"); + } + + @Override + public Reader getCharacterStream(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("ResultSet", "getCharacterStream"); + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + + return parseBigDecimal(String.valueOf(value)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) + throws SQLException { + return getBigDecimal(columnIndex(columnLabel)); + } + + @Override + public boolean isBeforeFirst() + throws SQLException { + throw new SQLFeatureNotSupportedException("isBeforeFirst"); + } + + @Override + public boolean isAfterLast() + throws SQLException { + throw new SQLFeatureNotSupportedException("isAfterLast"); + } + + @Override + public boolean isFirst() + throws SQLException { + throw new SQLFeatureNotSupportedException("isFirst"); + } + + @Override + public boolean isLast() + throws SQLException { + throw new SQLFeatureNotSupportedException("isLast"); + } + + @Override + public void beforeFirst() + throws SQLException { + throw new SQLFeatureNotSupportedException("beforeFirst"); + } + + @Override + public void afterLast() + throws SQLException { + throw new SQLFeatureNotSupportedException("afterLast"); + } + + @Override + public boolean first() + throws SQLException { + throw new SQLFeatureNotSupportedException("first"); + } + + @Override + public boolean last() + throws SQLException { + throw new SQLFeatureNotSupportedException("last"); + } + + @Override + public int getRow() + throws SQLException { + checkOpen(); + + long rowNumber = currentRowNumber.get(); + if (rowNumber < 0 || rowNumber > Integer.MAX_VALUE) { + throw new SQLException("Current row exceeds limit of 2147483647"); + } + + return (int) rowNumber; + } + + @Override + public boolean absolute(int row) + throws SQLException { + throw new SQLFeatureNotSupportedException("absolute"); + } + + @Override + public boolean relative(int rows) + throws SQLException { + throw new SQLFeatureNotSupportedException("relative"); + } + + @Override + public boolean previous() + throws SQLException { + throw new SQLFeatureNotSupportedException("previous"); + } + + @Override + public int getFetchDirection() + throws SQLException { + checkOpen(); + return FETCH_FORWARD; + } + + @Override + public void setFetchDirection(int direction) + throws SQLException { + checkOpen(); + if (direction != FETCH_FORWARD) { + throw new SQLException("Fetch direction must be FETCH_FORWARD"); + } + } + + @Override + public int getFetchSize() + throws SQLException { + checkOpen(); + // fetch size is ignored + return 0; + } + + @Override + public void setFetchSize(int rows) + throws SQLException { + checkOpen(); + if (rows < 0) { + throw new SQLException("Rows is negative"); + } + // fetch size is ignored + } + + @Override + public int getType() + throws SQLException { + checkOpen(); + return TYPE_FORWARD_ONLY; + } + + @Override + public int getConcurrency() + throws SQLException { + checkOpen(); + return CONCUR_READ_ONLY; + } + + @Override + public boolean rowUpdated() + throws SQLException { + throw new SQLFeatureNotSupportedException("rowUpdated"); + } + + @Override + public boolean rowInserted() + throws SQLException { + throw new SQLFeatureNotSupportedException("rowInserted"); + } + + @Override + public Statement getStatement() + throws SQLException { + if (statement.isPresent()) { + return 
statement.get(); + } + + throw new SQLException("Statement not available"); + } + + @Override + public Object getObject(int columnIndex, Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException("getObject"); + } + + @Override + public Ref getRef(int columnIndex) + throws SQLException { + throw new SQLFeatureNotSupportedException("getRef"); + } + + @Override + public Blob getBlob(int columnIndex) + throws SQLException { + throw new SQLFeatureNotSupportedException("getBlob"); + } + + @Override + public Clob getClob(int columnIndex) + throws SQLException { + throw new SQLFeatureNotSupportedException("getClob"); + } + + @Override + public Array getArray(int columnIndex) + throws SQLException { + // TODO support it + throw new SQLFeatureNotSupportedException("getArray"); + } + + @Override + public Object getObject(String columnLabel, Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException("getObject"); + } + + @Override + public Ref getRef(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("getRef"); + } + + @Override + public Blob getBlob(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("getBlob"); + } + + @Override + public Clob getClob(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("getClob"); + } + + @Override + public Array getArray(String columnLabel) + throws SQLException { + return getArray(columnIndex(columnLabel)); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) + throws SQLException { + // cal into joda local timezone + DateTimeZone timeZone = DateTimeZone.forTimeZone(cal.getTimeZone()); + return getDate(columnIndex, timeZone); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) + throws SQLException { + return getDate(columnIndex(columnLabel), cal); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) + throws SQLException { + // cal into joda local timezone + DateTimeZone timeZone = DateTimeZone.forTimeZone(cal.getTimeZone()); + return getTime(columnIndex, timeZone); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) + throws SQLException { + return getTime(columnIndex(columnLabel), cal); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) + throws SQLException { + // cal into joda local timezone + DateTimeZone timeZone = DateTimeZone.forTimeZone(cal.getTimeZone()); + return getTimestamp(columnIndex, timeZone); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) + throws SQLException { + return getTimestamp(columnIndex(columnLabel), cal); + } + + @Override + public URL getURL(int columnIndex) + throws SQLException { + throw new SQLFeatureNotSupportedException("getURL"); + } + + @Override + public URL getURL(String columnLabel) + throws SQLException { + throw new SQLFeatureNotSupportedException("getURL"); + } + + @Override + public int getHoldability() throws SQLException { + throw new SQLFeatureNotSupportedException("getHoldability"); + } + + @Override + public abstract boolean isClosed() + throws SQLException; + + @Override + public String getNString(int columnIndex) + throws SQLException { + Object value = column(columnIndex); + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public String getNString(String columnLabel) + throws SQLException { + return getNString(columnIndex(columnLabel)); + } + + @Override + public T getObject(int 
columnIndex, Class type) + throws SQLException { + if (type == null) { + throw new SQLException("type is null"); + } + String columnTypeStr = this.resultSetMetaData.getColumnTypeName(columnIndex); + DatabendRawType databendRawType = new DatabendRawType(columnTypeStr); + ColumnTypeHandler columnTypeHandler = ColumnTypeHandlerFactory.getTypeHandler(databendRawType); + + Object object = column(columnIndex); + if (object == null) { + return null; + } + return (T) columnTypeHandler.parseValue(object); + } + + @Override + public T getObject(String columnLabel, Class type) + throws SQLException { + return getObject(columnIndex(columnLabel), type); + } + + @Override + public NClob getNClob(int columnIndex) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getNClob'"); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getNClob'"); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getSQLXML'"); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getSQLXML'"); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getNCharacterStream'"); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getNCharacterStream'"); + } + + @Override + public T unwrap(Class iface) + throws SQLException { + if (isWrapperFor(iface)) { + return (T) this; + } + throw new SQLException("No wrapper for " + iface); + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException { + return iface.isInstance(this); + } + + private static class ParsedTimestamp { + private final int year; + private final int month; + private final int day; + private final int hour; + private final int minute; + private final int second; + private final long picosOfSecond; + private final Optional timezone; + + public ParsedTimestamp(int year, int month, int day, int hour, int minute, int second, long picosOfSecond, + Optional timezone) { + this.year = year; + this.month = month; + this.day = day; + this.hour = hour; + this.minute = minute; + this.second = second; + this.picosOfSecond = picosOfSecond; + this.timezone = requireNonNull(timezone, "timezone is null"); + } + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/BatchToRowIterator.java b/bindings/java/src/main/java/com/databend/bendsql/BatchToRowIterator.java new file mode 100644 index 00000000..8ae7990a --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/BatchToRowIterator.java @@ -0,0 +1,84 @@ +package com.databend.bendsql; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.stream.Collectors; + +import com.databend.client.QueryRowField; +import com.databend.client.data.ColumnTypeHandler; +import com.databend.client.data.ColumnTypeHandlerFactory; +import com.google.common.collect.ImmutableList; +import static java.util.Collections.unmodifiableList; + + +public class BatchToRowIterator implements Iterator> { + private final Iterator>> iterator; + private final List schema; + + private List> buffer; + private int 
bufferIndex; + + public BatchToRowIterator(Iterator>> iterator, List schema) { + this.iterator = iterator; + this.buffer = null; + this.bufferIndex = 0; + this.schema = schema; + } + + boolean isBufferEmpty() { + return buffer == null || buffer.isEmpty() || bufferIndex >= buffer.size(); + } + + @Override + public boolean hasNext() { + if (isBufferEmpty() && iterator.hasNext()) { + List> stringLists = iterator.next(); + List> objectLists = stringLists.stream() + .map(list -> new ArrayList(list)) + .collect(Collectors.toList()); + buffer = parseRawData(schema, objectLists); + } + return !isBufferEmpty(); + } + + @Override + public List next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + return new ArrayList<>(buffer.get(bufferIndex++)); + } + + + public static List> parseRawData(List schema, List> data) { + if (data == null || schema == null) { + return null; + } + int index = 0; + ColumnTypeHandler[] typeHandlers = new ColumnTypeHandler[schema.size()]; + for (QueryRowField field : schema) { + typeHandlers[index++] = ColumnTypeHandlerFactory.getTypeHandler(field.getDataType()); + } + // ensure parsed data is thread safe + ImmutableList.Builder> rows = ImmutableList.builderWithExpectedSize(data.size()); + for (List row : data) { + if (row.size() != typeHandlers.length) { + throw new IllegalArgumentException("row / column does not match schema"); + } + ArrayList newRow = new ArrayList<>(typeHandlers.length); + int column = 0; + for (Object value : row) { + if (value != null) { + value = typeHandlers[column].parseValue(value); + } + newRow.add(value); + column++; + } + rows.add(unmodifiableList(newRow)); // allow nulls in list + } + return rows.build(); + } +} + diff --git a/bindings/java/src/main/java/com/databend/bendsql/BendSQLException.java b/bindings/java/src/main/java/com/databend/bendsql/BendSQLException.java new file mode 100644 index 00000000..aa5eaccd --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/BendSQLException.java @@ -0,0 +1,20 @@ +/* + * Copyright 2021 Datafuse Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.databend.bendsql; + +public class BendSQLException extends RuntimeException { +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendColumnInfo.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendColumnInfo.java new file mode 100644 index 00000000..7170d227 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendColumnInfo.java @@ -0,0 +1,418 @@ +/* + * Copyright 2021 Datafuse Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
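The BatchToRowIterator defined above adapts batches of raw string rows from the native layer into typed rows, applying one ColumnTypeHandler per schema field. A usage sketch, assuming the generic signatures elided in this rendering are Iterator<List<List<String>>> for the batches and List<QueryRowField> for the schema; the sample schema and values are illustrative only:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import com.databend.bendsql.BatchToRowIterator;
import com.databend.client.QueryRowField;
import com.databend.client.data.DatabendRawType;

class BatchToRowIteratorExample {
    public static void main(String[] args) {
        List<QueryRowField> schema = Arrays.asList(
                new QueryRowField("id", new DatabendRawType("Int32")),
                new QueryRowField("name", new DatabendRawType("String")));

        // Two batches of raw string rows, as the native layer would hand them over.
        List<List<String>> batch1 = Arrays.asList(Arrays.asList("1", "alice"), Arrays.asList("2", "bob"));
        List<List<String>> batch2 = Arrays.asList(Arrays.asList("3", "carol"));
        Iterator<List<List<String>>> batches = Arrays.asList(batch1, batch2).iterator();

        Iterator<List<Object>> rows = new BatchToRowIterator(batches, schema);
        while (rows.hasNext()) {
            System.out.println(rows.next()); // each row holds values parsed by the type handlers
        }
    }
}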
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.databend.bendsql; + +import com.databend.client.data.DatabendDataType; +import com.databend.client.data.DatabendRawType; +import com.databend.client.data.DatabendTypes; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.sql.Types; + +import static java.util.Objects.requireNonNull; + +public class DatabendColumnInfo { + private static final int VARBINARY_MAX = 1024 * 1024 * 1024; + private static final int TIMESTAMP_MAX = "yyyy-MM-dd HH:mm:ss.SSS".length(); + private static final int DATE_MAX = "yyyy-MM-dd".length(); + + private final int columnType; + private final String columnName; + private final List columnParameterTypes; + private final DatabendRawType type; + private final Nullable nullable; + private final boolean currency; + private final boolean signed; + private final int precision; + private final int scale; + private final int columnDisplaySize; + private final String columnLabel; + + private final String tableName; + private final String schemaName; + private final String catalogName; + + + public DatabendColumnInfo(int columnType, List columnParameterTypes, DatabendRawType type, Nullable nullable, boolean currency, boolean signed, int precision, int scale, int columnDisplaySize, String columnLabel, String columnName, String tableName, String schemaName, String catalogName) { + this.columnType = columnType; + this.columnParameterTypes = columnParameterTypes; + this.type = type; + this.nullable = nullable; + this.currency = currency; + this.signed = signed; + this.precision = precision; + this.scale = scale; + this.columnDisplaySize = columnDisplaySize; + this.columnLabel = columnLabel; + this.columnName = columnName; + this.tableName = tableName; + this.schemaName = schemaName; + this.catalogName = catalogName; + } + + public static DatabendColumnInfo of(String name, DatabendRawType type) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(name), "Provided name is null or empty"); + Builder builder = newBuilder(name, type); + setTypeInfo(builder, type); + return builder.build(); + } + + public static void setTypeInfo(Builder builder, DatabendRawType type) { + builder.setColumnType(type.getDataType().getSqlType()); + boolean isNullable = type.isNullable(); + builder.setNullable(isNullable ? 
Nullable.NULLABLE : Nullable.NO_NULLS); + switch (type.getDataType().getDisplayName()) { + case DatabendTypes.BOOLEAN: + builder.setColumnDisplaySize(5); + break; + case DatabendTypes.UINT8: + builder.setSigned(false); + builder.setPrecision(3); + builder.setColumnDisplaySize(4); + builder.setScale(0); + break; + case DatabendTypes.INT8: + builder.setSigned(true); + builder.setPrecision(4); + builder.setColumnDisplaySize(5); + builder.setScale(0); + break; + case DatabendTypes.UINT16: + builder.setSigned(false); + builder.setPrecision(5); + builder.setColumnDisplaySize(6); + builder.setScale(0); + break; + case DatabendTypes.INT16: + builder.setSigned(true); + builder.setPrecision(5); + builder.setColumnDisplaySize(6); + builder.setScale(0); + break; + case DatabendTypes.UINT32: + builder.setSigned(false); + builder.setPrecision(10); + builder.setColumnDisplaySize(11); + builder.setScale(0); + break; + case DatabendTypes.INT32: + builder.setSigned(true); + builder.setPrecision(10); + builder.setColumnDisplaySize(11); + builder.setScale(0); + break; + case DatabendTypes.UINT64: + builder.setSigned(false); + builder.setPrecision(19); + builder.setColumnDisplaySize(20); + builder.setScale(0); + break; + case DatabendTypes.INT64: + builder.setSigned(true); + builder.setPrecision(19); + builder.setColumnDisplaySize(20); + builder.setScale(0); + break; + case DatabendTypes.FLOAT32: + builder.setSigned(true); + builder.setPrecision(9); + builder.setColumnDisplaySize(16); + builder.setScale(0); + break; + case DatabendTypes.FLOAT64: + builder.setSigned(true); + builder.setPrecision(17); + builder.setColumnDisplaySize(24); + builder.setScale(0); + break; + case DatabendTypes.STRING: + builder.setSigned(false); + builder.setScale(0); + builder.setPrecision(VARBINARY_MAX); + builder.setColumnDisplaySize(VARBINARY_MAX); + break; + case DatabendTypes.DATE: + builder.setSigned(false); + builder.setScale(0); + builder.setPrecision(DATE_MAX); + builder.setColumnDisplaySize(DATE_MAX); + break; + case DatabendTypes.DATETIME: + builder.setSigned(false); + builder.setScale(0); + builder.setPrecision(TIMESTAMP_MAX); + builder.setColumnDisplaySize(TIMESTAMP_MAX); + break; + case DatabendTypes.DATETIME64: + builder.setSigned(false); + builder.setScale(0); + builder.setPrecision(TIMESTAMP_MAX); + builder.setColumnDisplaySize(TIMESTAMP_MAX); + break; + case DatabendTypes.DECIMAL: + builder.setSigned(true); + builder.setScale(type.getDecimalDigits()); + builder.setPrecision(type.getColumnSize()); + builder.setColumnDisplaySize(type.getColumnSize()); + break; + } + + } + + public static Builder newBuilder(String name, DatabendRawType type) { + return (new Builder()) + .setColumnName(name) + .setDatabendRawType(type); + } + + public int getColumnType() { + return columnType; + } + + public List getColumnParameterTypes() { + return columnParameterTypes; + } + + public DatabendRawType getType() { + return type; + } + + public Nullable getNullable() { + return nullable; + } + + public String getColumnTypeName() { + return type.getDataType().getDisplayName(); + } + + public boolean isCurrency() { + return currency; + } + + public boolean isSigned() { + return signed; + } + + public int getPrecision() { + return precision; + } + + public int getScale() { + return scale; + } + + public int getColumnDisplaySize() { + return columnDisplaySize; + } + + public String getColumnLabel() { + return columnLabel; + } + + public String getColumnName() { + return columnName; + } + + public String getTableName() { + return tableName; + 
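DatabendColumnInfo.of combines the builder with setTypeInfo above, which derives the JDBC-facing precision, scale, display size, and signedness from the Databend type name. A small sketch of reading that metadata back, assuming the "Int32" type string used elsewhere in this patch is accepted by DatabendRawType; the column name is a placeholder:

import com.databend.bendsql.DatabendColumnInfo;
import com.databend.client.data.DatabendRawType;

class ColumnInfoExample {
    public static void main(String[] args) {
        DatabendColumnInfo col = DatabendColumnInfo.of("user_id", new DatabendRawType("Int32"));

        System.out.println(col.getColumnTypeName());    // Databend display name of the type
        System.out.println(col.getPrecision());         // 10 for Int32, per setTypeInfo
        System.out.println(col.getColumnDisplaySize()); // 11 for Int32, per setTypeInfo
        System.out.println(col.getScale());             // 0 for integer types
        System.out.println(col.toSqlType());            // mapped java.sql.Types constant
    }
}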
} + + public String getSchemaName() { + return schemaName; + } + + public String getCatalogName() { + return catalogName; + } + + + public enum Nullable { + NO_NULLS, NULLABLE, UNKNOWN + } + + // builder + public static final class Builder { + private int columnType; + private List columnParameterTypes; + private DatabendRawType type; + private Nullable nullable; + private boolean currency; + private boolean signed; + private int precision; + private int scale; + private int columnDisplaySize; + private String columnLabel; + private String columnName; + private String tableName; + private String schemaName; + private String catalogName; + + Builder() { + } + + public Builder setColumnType(int columnType) { + this.columnType = columnType; + return this; + } + + public Builder setDatabendRawType(DatabendRawType type) { + this.type = type; + return this; + } + + public void setColumnParameterTypes(List columnParameterTypes) { + this.columnParameterTypes = ImmutableList.copyOf(requireNonNull(columnParameterTypes, "columnParameterTypes is null")); + } + + public Builder setColumnTypeSignature(DatabendRawType columnTypeSignature) { + this.type = columnTypeSignature; + return this; + } + + public Builder setNullable(Nullable nullable) { + this.nullable = nullable; + return this; + } + + public Builder setCurrency(boolean currency) { + this.currency = currency; + return this; + } + + public Builder setSigned(boolean signed) { + this.signed = signed; + return this; + } + + public Builder setPrecision(int precision) { + this.precision = precision; + return this; + } + + public Builder setScale(int scale) { + this.scale = scale; + return this; + } + + public Builder setColumnDisplaySize(int columnDisplaySize) { + this.columnDisplaySize = columnDisplaySize; + return this; + } + + public Builder setColumnLabel(String columnLabel) { + this.columnLabel = columnLabel; + return this; + } + + public Builder setColumnName(String columnName) { + this.columnName = columnName; + return this; + } + + public Builder setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public Builder setSchemaName(String schemaName) { + this.schemaName = schemaName; + return this; + } + + public Builder setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public DatabendColumnInfo build() { + return new DatabendColumnInfo( + columnType, + columnParameterTypes, + type, + nullable, + currency, + signed, + precision, + scale, + columnDisplaySize, + columnLabel, + columnName, + tableName, + schemaName, + catalogName); + } + } + + public int toSqlType() { + DatabendDataType dataType = getType().getDataType(); + int sqlType = Types.OTHER; + switch (dataType) { + case BOOLEAN: + sqlType = Types.BOOLEAN; + break; + case INT_8: + sqlType = Types.TINYINT; + break; + case INT_16: + sqlType = Types.SMALLINT; + break; + case INT_32: + sqlType = Types.INTEGER; + break; + case INT_64: + sqlType = Types.BIGINT; + break; + case FLOAT: + sqlType = Types.FLOAT; + break; + case DOUBLE: + sqlType = Types.DOUBLE; + break; + case DECIMAL: + sqlType = Types.DECIMAL; + break; + case STRING: + sqlType = Types.VARCHAR; + break; + case DATE: + sqlType = Types.DATE; + break; + case TIMESTAMP: + sqlType = Types.TIMESTAMP; + break; + case ARRAY: + sqlType = Types.ARRAY; + break; + case VARIANT: + sqlType = Types.VARCHAR; + break; + case TUPLE: + sqlType = Types.STRUCT; + break; + case NULL: + sqlType = Types.NULL; + break; + default: + break; + } + return sqlType; + } + +} diff --git 
a/bindings/java/src/main/java/com/databend/bendsql/DatabendConnection.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendConnection.java new file mode 100644 index 00000000..e09e361b --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendConnection.java @@ -0,0 +1,119 @@ +package com.databend.bendsql; + +import java.sql.*; +import java.util.Properties; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +public class DatabendConnection extends TrivialConnection { + private final NativeConnection nativeConnection; + + private final AtomicBoolean isClosed; + private final AtomicBoolean autoCommit; + private final AtomicReference schema = new AtomicReference<>(); + + + public DatabendConnection(String url, Properties info) throws SQLException { + try { + this.nativeConnection = NativeConnection.of(url); + this.isClosed = new AtomicBoolean(false); + this.autoCommit = new AtomicBoolean(true); + } catch (Exception e) { + throw new SQLException("Failed to create connection: " + e.getMessage(), e); + } + } + + @Override + public DatabaseMetaData getMetaData() + throws SQLException { + return new DatabendDatabaseMetaData(this); + } + + @Override + public Statement createStatement() throws SQLException { + checkClosed(); + return new DatabendStatement(this); + } + + @Override + public PreparedStatement prepareStatement(String sql) throws SQLException { + checkClosed(); + return new DatabendPreparedStatement(this, sql); + } + + @Override + public void close() throws SQLException { + if (isClosed.compareAndSet(false, true)) { + nativeConnection.close(); + } + } + + @Override + public boolean isClosed() throws SQLException { + return isClosed.get(); + } + + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + this.autoCommit.set(autoCommit); + } + + @Override + public boolean getAutoCommit() throws SQLException { + return autoCommit.get(); + } + + private void checkClosed() throws SQLException { + if (isClosed.get()) { + throw new SQLException("Connection is closed"); + } + } + + @Override + public void commit() throws SQLException { + throw new SQLFeatureNotSupportedException("commit is not supported"); + } + + @Override + public void rollback() throws SQLException { + throw new SQLFeatureNotSupportedException("rollback is not supported"); + } + + public NativeConnection getNativeConnection() { + return nativeConnection; + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (iface.isAssignableFrom(getClass())) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return iface.isAssignableFrom(getClass()); + } + + @Override + public String getSchema() + throws SQLException { + checkClosed(); + return schema.get(); + } + + @Override + public void setSchema(String schema) + throws SQLException { + checkClosed(); + this.schema.set(schema); + //TODO: this.startQuery("use " + schema); + } + + public Object getURI() { + // TODO + throw new UnsupportedOperationException("Unimplemented method 'getURI'"); + } + +} \ No newline at end of file diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendDatabaseMetaData.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendDatabaseMetaData.java new file mode 100644 index 00000000..3f66939c --- /dev/null +++ 
b/bindings/java/src/main/java/com/databend/bendsql/DatabendDatabaseMetaData.java @@ -0,0 +1,1592 @@ +package com.databend.bendsql; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Statement; +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import com.databend.client.QueryRowField; +import com.databend.client.data.DatabendDataType; +import com.databend.client.data.DatabendRawType; +import com.google.common.base.Joiner; + +import static java.util.Objects.requireNonNull; + +public class DatabendDatabaseMetaData implements DatabaseMetaData { + private static final String DRIVER_NAME = null; + private static final int DRIVER_VERSION_MAJOR = 0; + private static final int DRIVER_VERSION_MINOR = 0; + private static final String DRIVER_VERSION = null; + private final DatabendConnection connection; + + public DatabendDatabaseMetaData(DatabendConnection connection) + throws SQLException { + requireNonNull(connection, "connection is null"); + this.connection = connection; + } + + private static void buildFilters(StringBuilder out, List filters) { + if (!filters.isEmpty()) { + out.append("\nWHERE "); + Joiner.on(" AND ").appendTo(out, filters); + } + } + + private static void optionalStringLikeFilter(List filters, String columnName, String value) { + if (value != null) { + filters.add(stringColumnLike(columnName, value)); + } + } + + private static void optionalStringInFilter(List filters, String columnName, String[] values) { + if (values == null || values.length == 0) { + return; + } + + StringBuilder filter = new StringBuilder(); + filter.append(columnName).append(" IN ("); + + for (int i = 0; i < values.length; i++) { + if (i > 0) { + filter.append(", "); + } + quoteStringLiteral(filter, values[i]); + } + + filter.append(")"); + filters.add(filter.toString()); + } + + private static void emptyStringEqualsFilter(List filters, String columnName, String value) { + if (value != null) { + if (value.isEmpty()) { + filters.add(columnName + " IS NULL"); + } else { + filters.add(stringColumnEquals(columnName, value)); + } + } + } + + private static void emptyStringLikeFilter(List filters, String columnName, String value) { + if (value != null) { + if (value.isEmpty()) { + filters.add(columnName + " IS NULL"); + } else { + filters.add(stringColumnLike(columnName, value)); + } + } + } + + private static String stringColumnEquals(String columnName, String value) { + StringBuilder filter = new StringBuilder(); + filter.append(columnName).append(" = "); + quoteStringLiteral(filter, value); + return filter.toString(); + } + + private static String stringColumnLike(String columnName, String pattern) { + StringBuilder filter = new StringBuilder(); + filter.append(columnName).append(" LIKE "); + quoteStringLiteral(filter, pattern); + + return filter.toString(); + } + + private static void quoteStringLiteral(StringBuilder out, String value) { + out.append('\''); + for (int i = 0; i < value.length(); i++) { + char c = value.charAt(i); + out.append(c); + if (c == '\'') { + out.append('\''); + } + } + out.append('\''); + } + + @Override + public boolean allProceduresAreCallable() + throws SQLException { + return true; + } + + @Override + public boolean allTablesAreSelectable() + throws SQLException { + return true; + } + + @Override + public String getURL() + throws SQLException { + return "jdbc:databend://" + 
connection.getURI().toString(); + } + + @Override + public String getUserName() + throws SQLException { + try (ResultSet rs = select("SELECT current_user()")) { + if (rs.next()) { + return rs.getString(1); + } + } + return null; + } + + @Override + public boolean isReadOnly() + throws SQLException { + return getConnection().isReadOnly(); + } + + @Override + public boolean nullsAreSortedHigh() + throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedLow() + throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedAtStart() + throws SQLException { + return false; + } + + @Override + public boolean nullsAreSortedAtEnd() + throws SQLException { + return false; + } + + @Override + public String getDatabaseProductName() + throws SQLException { + return "Databend"; + } + + @Override + public String getDatabaseProductVersion() + throws SQLException { + try (ResultSet rs = select("SELECT version()")) { + rs.next(); + return rs.getString(1); + } + } + + @Override + public String getDriverName() + throws SQLException { + return DRIVER_NAME; + } + + @Override + public String getDriverVersion() + throws SQLException { + return DRIVER_VERSION; + } + + @Override + public int getDriverMajorVersion() { + return DRIVER_VERSION_MAJOR; + } + + @Override + public int getDriverMinorVersion() { + return DRIVER_VERSION_MINOR; + } + + @Override + public boolean usesLocalFiles() + throws SQLException { + return false; + } + + @Override + public boolean usesLocalFilePerTable() + throws SQLException { + return false; + } + + @Override + public boolean supportsMixedCaseIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesUpperCaseIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesLowerCaseIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesMixedCaseIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesUpperCaseQuotedIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() + throws SQLException { + return false; + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() + throws SQLException { + return false; + } + + @Override + public String getIdentifierQuoteString() + throws SQLException { + return "\""; + } + + @Override + public String getSQLKeywords() + throws SQLException { + ArrayList keywords = new ArrayList<>(); + try (ResultSet rs = select("SELECT keywords FROM information_schema.keywords")) { + rs.next(); + keywords.add(rs.getString(1)); + } + return String.join(",", keywords); + } + + @Override + public String getNumericFunctions() + throws SQLException { + // https://databend.rs/doc/reference/functions/numeric-functions + return "abs,acos,asin,atan,atan2,ceil,cos,cot,degrees,e,exp,floor,ln,log,log10,mod,pi,power,radians,rand,round,sign,sin,sqrt,tan,truncate"; + } + + @Override + public String getStringFunctions() + throws SQLException { + // https://databend.rs/doc/reference/functions/string-functions + return "ascii,bin,bin_length,char,char_length,character_length,concat,concat_ws,elt,export_set,field,find_in_set,format,from_base64" + + + ",hex,insert,instr,lcase,left,length,like,locate,lower,lpad,mid,oct,octet_length,ord,position,quote,regexp,regexp_instr,regexp_like" + + + 
",regexp_replace,regexp_substr,repeat,replace,reverse,right,rlike,rpad,soundex,space,strcmp,substr,substring,substring_index,to_base64,trim,ucase,unhex,upper"; + } + + @Override + public String getSystemFunctions() + throws SQLException { + return "CLUSTERING_INFORMATION,FUSE_BLOCK,FUSE_SEGMENT,FUSE_SNAPSHOT,FUSE_STATISTIC"; + } + + @Override + public String getTimeDateFunctions() + throws SQLException { + // https://databend.rs/doc/reference/functions/datetime-functions + + return "addDays,addHours,addMinutes,addMonths,addQuarters,addSeconds,addWeeks,addYears,date_add,date_diff,date_sub,date_trunc,dateName,formatDateTime,FROM_UNIXTIME,fromModifiedJulianDay,fromModifiedJulianDayOrNull,now,subtractDays,subtractHours,subtractMinutes,subtractMonths,subtractQuarters,subtractSeconds,subtractWeeks,subtractYears,timeSlot,timeSlots,timestamp_add,timestamp_sub,timeZone,timeZoneOf,timeZoneOffset,today,toDayOfMonth,toDayOfWeek,toDayOfYear,toHour,toISOWeek,toISOYear,toMinute,toModifiedJulianDay,toModifiedJulianDayOrNull,toMonday,toMonth,toQuarter,toRelativeDayNum,toRelativeHourNum,toRelativeMinuteNum,toRelativeMonthNum,toRelativeQuarterNum,toRelativeSecondNum,toRelativeWeekNum,toRelativeYearNum,toSecond,toStartOfDay,toStartOfFifteenMinutes,toStartOfFiveMinute,toStartOfHour,toStartOfInterval,toStartOfISOYear,toStartOfMinute,toStartOfMonth,toStartOfQuarter,toStartOfSecond,toStartOfTenMinutes,toStartOfWeek,toStartOfYear,toTime,toTimeZone,toUnixTimestamp,toWeek,toYear,toYearWeek,toYYYYMM,toYYYYMMDD,toYYYYMMDDhhmmss,yesterday"; + } + + @Override + public String getSearchStringEscape() + throws SQLException { + return "\\"; + } + + @Override + public String getExtraNameCharacters() + throws SQLException { + return ""; + } + + @Override + public boolean supportsAlterTableWithAddColumn() + throws SQLException { + // https://github.com/datafuselabs/databend/issues/9441 + return true; + } + + @Override + public boolean supportsAlterTableWithDropColumn() + throws SQLException { + // https://github.com/datafuselabs/databend/issues/9441 + return true; + } + + @Override + public boolean supportsColumnAliasing() + throws SQLException { + return true; + } + + @Override + public boolean nullPlusNonNullIsNull() + throws SQLException { + return true; + } + + @Override + public boolean supportsConvert() + throws SQLException { + return false; + } + + @Override + public boolean supportsConvert(int i, int i1) + throws SQLException { + return false; + } + + @Override + public boolean supportsTableCorrelationNames() + throws SQLException { + return false; + } + + @Override + public boolean supportsDifferentTableCorrelationNames() + throws SQLException { + return false; + } + + @Override + public boolean supportsExpressionsInOrderBy() + throws SQLException { + return true; + } + + @Override + public boolean supportsOrderByUnrelated() + throws SQLException { + return true; + } + + @Override + public boolean supportsGroupBy() + throws SQLException { + return true; + } + + @Override + public boolean supportsGroupByUnrelated() + throws SQLException { + return true; + } + + @Override + public boolean supportsGroupByBeyondSelect() + throws SQLException { + return true; + } + + @Override + public boolean supportsLikeEscapeClause() + throws SQLException { + return true; + } + + @Override + public boolean supportsMultipleResultSets() + throws SQLException { + return false; + } + + @Override + public boolean supportsMultipleTransactions() + throws SQLException { + return false; + } + + @Override + public boolean 
supportsNonNullableColumns() + throws SQLException { + return true; + } + + @Override + public boolean supportsMinimumSQLGrammar() + throws SQLException { + return true; + } + + @Override + public boolean supportsCoreSQLGrammar() + throws SQLException { + return true; + } + + @Override + public boolean supportsExtendedSQLGrammar() + throws SQLException { + return true; + } + + @Override + public boolean supportsANSI92EntryLevelSQL() + throws SQLException { + return true; + } + + @Override + public boolean supportsANSI92IntermediateSQL() + throws SQLException { + return false; + } + + @Override + public boolean supportsANSI92FullSQL() + throws SQLException { + return false; + } + + @Override + public boolean supportsIntegrityEnhancementFacility() + throws SQLException { + return false; + } + + @Override + public boolean supportsOuterJoins() + throws SQLException { + return true; + } + + @Override + public boolean supportsFullOuterJoins() + throws SQLException { + return true; + } + + @Override + public boolean supportsLimitedOuterJoins() + throws SQLException { + return true; + } + + @Override + public String getSchemaTerm() + throws SQLException { + return "database"; + } + + @Override + public String getProcedureTerm() + throws SQLException { + return null; + } + + @Override + public String getCatalogTerm() + throws SQLException { + return null; + } + + @Override + public boolean isCatalogAtStart() + throws SQLException { + return false; + } + + @Override + public String getCatalogSeparator() + throws SQLException { + return null; + } + + @Override + public boolean supportsSchemasInDataManipulation() + throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInProcedureCalls() + throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInTableDefinitions() + throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInIndexDefinitions() + throws SQLException { + return false; + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() + throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInDataManipulation() + throws SQLException { + return true; + } + + @Override + public boolean supportsCatalogsInProcedureCalls() + throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInTableDefinitions() + throws SQLException { + return true; + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() + throws SQLException { + return false; + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() + throws SQLException { + return false; + } + + @Override + public boolean supportsPositionedDelete() + throws SQLException { + return false; + } + + @Override + public boolean supportsPositionedUpdate() + throws SQLException { + return false; + } + + @Override + public boolean supportsSelectForUpdate() + throws SQLException { + return false; + } + + @Override + public boolean supportsStoredProcedures() + throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInComparisons() + throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInExists() + throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInIns() + throws SQLException { + return false; + } + + @Override + public boolean supportsSubqueriesInQuantifieds() + throws SQLException { + return false; + } + + @Override + public boolean 
supportsCorrelatedSubqueries() + throws SQLException { + return false; + } + + @Override + public boolean supportsUnion() + throws SQLException { + return false; + } + + @Override + public boolean supportsUnionAll() + throws SQLException { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() + throws SQLException { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() + throws SQLException { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() + throws SQLException { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() + throws SQLException { + return false; + } + + @Override + public int getMaxBinaryLiteralLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxCharLiteralLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInGroupBy() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInIndex() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInOrderBy() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInSelect() + throws SQLException { + return 0; + } + + @Override + public int getMaxColumnsInTable() + throws SQLException { + return 0; + } + + @Override + public int getMaxConnections() + throws SQLException { + return 0; + } + + @Override + public int getMaxCursorNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxIndexLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxSchemaNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxProcedureNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxCatalogNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxRowSize() + throws SQLException { + return 0; + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() + throws SQLException { + return false; + } + + @Override + public int getMaxStatementLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxStatements() + throws SQLException { + return 0; + } + + @Override + public int getMaxTableNameLength() + throws SQLException { + return 0; + } + + @Override + public int getMaxTablesInSelect() + throws SQLException { + return 0; + } + + @Override + public int getMaxUserNameLength() + throws SQLException { + return 0; + } + + @Override + public int getDefaultTransactionIsolation() + throws SQLException { + return 0; + } + + @Override + public boolean supportsTransactions() + throws SQLException { + return false; + } + + @Override + public boolean supportsTransactionIsolationLevel(int i) + throws SQLException { + return false; + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() + throws SQLException { + return false; + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() + throws SQLException { + return false; + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() + throws SQLException { + return false; + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() + throws SQLException { + return false; + } + + @Override + public ResultSet getProcedures(String s, String s1, String s2) + throws SQLException { + return null; + } + + @Override + 
public ResultSet getProcedureColumns(String s, String s1, String s2, String s3) + throws SQLException { + return null; + } + + @Override + public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) + throws SQLException { + // getTables from information_schema.tables + StringBuilder sql = new StringBuilder("SELECT table_catalog as TABLE_CAT" + + ", table_schema as TABLE_SCHEM" + + ", table_name as TABLE_NAME" + + ", table_type as TABLE_TYPE" + + ", table_comment as REMARKS" + + ", '' as TYPE_CAT" + + ", engine as TYPE_SCHEM" + + ", engine as TYPE_NAME" + + ", '' as SELF_REFERENCING_COL_NAME" + + ", '' as REF_GENERATION" + + " FROM information_schema.tables"); + List filters = new ArrayList<>(); + emptyStringEqualsFilter(filters, "table_catalog", catalog); + emptyStringLikeFilter(filters, "table_schema", schemaPattern); + optionalStringLikeFilter(filters, "table_name", tableNamePattern); + if (types != null) { + // replace `TABLE` to `BASE TABLE`, `SYSTEM VIEW` to `SYSTEM TABLE` + for (int i = 0, size = types.length; i < size; i++) { + String type = types[i]; + if ("TABLE".equals(type)) { + types[i] = "BASE TABLE"; + } else if ("SYSTEM VIEW".equals(type)) { + types[i] = "SYSTEM TABLE"; + } + } + } + optionalStringInFilter(filters, "table_type", types); + buildFilters(sql, filters); + sql.append("\nORDER BY table_type, table_catalog, table_schema, table_name"); + + if (checkVersionAddView() && types != null && Arrays.stream(types).allMatch(t -> t.equalsIgnoreCase("VIEW"))) { + // add view + sql.append("\n union all "); + sql.append( + "\nselect database TABLE_CAT, database TABLE_SCHEM, name TABLE_NAME, 'VIEW' TABLE_TYPE, null REMARKS, "); + sql.append( + "'' as TYPE_CAT, engine as TYPE_SCHEM, engine as TYPE_NAME, '' as SELF_REFERENCING_COL_NAME, '' as REF_GENERATION "); + sql.append("from system.views "); + filters = new ArrayList<>(); + emptyStringEqualsFilter(filters, "database", catalog); + emptyStringLikeFilter(filters, "database", schemaPattern); + optionalStringLikeFilter(filters, "name", tableNamePattern); + buildFilters(sql, filters); + sql.append("\nORDER BY TABLE_CAT, TABLE_NAME, TABLE_TYPE"); + } + + return select(sql.toString()); + } + + // This handles bug that existed a while, views were not included in + // information_schema.tables + // https://github.com/datafuselabs/databend/issues/16039 + private boolean checkVersionAddView() throws SQLException { + // the same fix for python-sdk + // https://github.com/databendlabs/databend-sqlalchemy/blob/3226f10e0f8b6aa85185208583977037b33ec99f/databend_sqlalchemy/databend_dialect.py#L819 + String version = getDatabaseProductVersion(); + Pattern pattern = Pattern.compile("v(\\d+)\\.(\\d+)\\.(\\d+)"); + Matcher matcher = pattern.matcher(version); + if (matcher.find()) { + // > 1.2.410 and <= 1.2.566 + if (Integer.parseInt(matcher.group(1)) != 1) + return false; + if (Integer.parseInt(matcher.group(2)) != 2) + return false; + int minorVersion = Integer.parseInt(matcher.group(3)); + return minorVersion > 410 && minorVersion <= 566; + } + return false; + } + + @Override + public ResultSet getSchemas() + throws SQLException { + String sql = "SELECT schema_name as table_schema, catalog_name as table_catalog FROM information_schema.schemata ORDER BY catalog_name, schema_name"; + return select(sql); + } + + @Override + public ResultSet getCatalogs() + throws SQLException { + String sql = "SELECT catalog_name as table_cat FROM information_schema.schemata ORDER BY catalog_name"; + return select(sql); 
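The getTables, getSchemas, and getCatalogs implementations above all build SQL over information_schema and run it through select(). A usage sketch of how they surface through the standard JDBC API; the URL, host, and schema name are placeholders, and it is assumed here that the driver registers the jdbc:databend: prefix with DriverManager:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;

class MetaDataExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:databend://localhost:8000")) {
            DatabaseMetaData meta = conn.getMetaData();

            // "TABLE" is rewritten to "BASE TABLE" internally before querying information_schema.tables.
            try (ResultSet tables = meta.getTables(null, "default", "%", new String[] {"TABLE"})) {
                while (tables.next()) {
                    System.out.println(tables.getString("TABLE_SCHEM") + "." + tables.getString("TABLE_NAME"));
                }
            }
        }
    }
}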
+ } + + @Override + public ResultSet getTableTypes() + throws SQLException { + // Base on + // https://github.com/datafuselabs/databend/blob/main/src/query/storages/information-schema/src/tables_table.rs#L35 + // We just return 3 types: TABLE(BASE TABLE), VIEW, SYSTEM TABLE(SYSTEM VIEW) + List schema = new ArrayList<>(); + schema.add(new QueryRowField("TABLE_TYPE", new DatabendRawType("String"))); + List> results = new ArrayList<>(); + results.add(Collections.singletonList("TABLE")); + results.add(Collections.singletonList("VIEW")); + results.add(Collections.singletonList("SYSTEM TABLE")); + // Create NoQueryResultSet + return new DatabendUnboundQueryResultSet(Optional.ofNullable(connection.createStatement()), schema, + results.iterator()); + } + + /** + * Base on java.sql.DatabaseMetaData.getColumns method. + * It's a list with fixed fields, so we make a uniform variable. + */ + private static final List META_ROW_FIELDS = new ArrayList() { + { + add(new QueryRowField("TABLE_CAT", new DatabendRawType("String")));// 1 + add(new QueryRowField("TABLE_SCHEM", new DatabendRawType("String")));// 2 + // add(new QueryRowField("TABLE_TYPE", new DatabendRawType("String")));// 3 + add(new QueryRowField("TABLE_NAME", new DatabendRawType("String")));// 4 + add(new QueryRowField("COLUMN_NAME", new DatabendRawType("String")));// 5 + add(new QueryRowField("DATA_TYPE", new DatabendRawType("Int32")));// 6 + add(new QueryRowField("TYPE_NAME", new DatabendRawType("String")));// 7 + add(new QueryRowField("COLUMN_SIZE", new DatabendRawType("Int32")));// 8 + add(new QueryRowField("BUFFER_LENGTH", new DatabendRawType("Int32")));// 9 + add(new QueryRowField("DECIMAL_DIGITS", new DatabendRawType("Int32")));// 10 + add(new QueryRowField("NUM_PREC_RADIX", new DatabendRawType("Int32")));// 11 + add(new QueryRowField("NULLABLE", new DatabendRawType("Int32")));// 12 + add(new QueryRowField("REMARKS", new DatabendRawType("String")));// 13 + add(new QueryRowField("COLUMN_DEF", new DatabendRawType("String")));// 14 + add(new QueryRowField("SQL_DATA_TYPE", new DatabendRawType("Int32")));// 15 + add(new QueryRowField("SQL_DATETIME_SUB", new DatabendRawType("Int32")));// 16 + add(new QueryRowField("CHAR_OCTET_LENGTH", new DatabendRawType("Int32")));// 17 + add(new QueryRowField("ORDINAL_POSITION", new DatabendRawType("Int32")));// 18 + add(new QueryRowField("IS_NULLABLE", new DatabendRawType("String")));// 19 + add(new QueryRowField("SCOPE_CATALOG", new DatabendRawType("String")));// 20 + add(new QueryRowField("SCOPE_SCHEMA", new DatabendRawType("String")));// 21 + add(new QueryRowField("SCOPE_TABLE", new DatabendRawType("String")));// 22 + add(new QueryRowField("SOURCE_DATA_TYPE", new DatabendRawType("Int16")));// 23 + add(new QueryRowField("IS_AUTOINCREMENT", new DatabendRawType("String")));// 24 + add(new QueryRowField("IS_GENERATEDCOLUMN", new DatabendRawType("String")));// 25 + } + }; + + private static StringBuilder columnMetaSqlTemplate() { + StringBuilder sql = new StringBuilder("SELECT table_catalog as TABLE_CAT" + // 1 + ", table_schema as TABLE_SCHEM" + // 2 + ", table_name as TABLE_NAME" + // 3 + ", column_name as COLUMN_NAME" + // 4 + ", data_type as TYPE_NAME" + // 5 + ", nullable as NULLABLE" + // 6 + ", column_comment as REMARKS" + // 7 + ", `default` as COLUMN_DEF" + // 8 + ", ordinal_position as ORDINAL_POSITION" + // 9 + ", is_nullable as IS_NULLABLE" + // 10 + // ", 'NO' as IS_AUTOINCREMENT" + + // ", 'NO' as IS_GENERATEDCOLUMN" + + " FROM information_schema.columns"); + return sql; + } + + /** + * Get 
table columns meta data by meta sql + */ + private ResultSet getColumnsMetaDataBySQL(String sql) throws SQLException { + List> results = new ArrayList<>(); + // Get Query ResultSets + try (ResultSet rs = select(sql)) { + while (rs.next()) { + List result = new ArrayList<>(); + result.add(rs.getString(1));// TABLE_CAT + result.add(rs.getString(2));// TABLE_SCHEM + result.add(rs.getString(3));// TABLE_NAME + result.add(rs.getString(4));// COLUMN_NAME + String originType = rs.getString(5); + DatabendRawType rowType = new DatabendRawType(originType); + DatabendDataType dataType = rowType.getDataType(); + result.add(dataType.getSqlType());// DATA_TYPE + result.add(rowType.getType());// TYPE_NAME + result.add(rowType.getColumnSize());// COLUMN_SIZE + result.add(0);// BUFFER_LENGTH + result.add(rowType.getDecimalDigits());// DECIMAL_DIGITS + result.add(0);// NUM_PREC_RADIX + result.add(rs.getString(6));// COLUMN_NAME + result.add(rs.getObject(7));// REMARKS + result.add(rs.getString(8));// COLUMN_DEF + result.add(0);// SQL_DATA_TYPE + result.add(0);// SQL_DATETIME_SUB + // CHAR_OCTET_LENGTH (for char types the maximum number of bytes in the column) + if (dataType == DatabendDataType.STRING) { + result.add(dataType.getLength()); + } else { + result.add(null); + } + result.add(rs.getString(9));// ORDINAL_POSITION + result.add(rs.getString(10));// IS_NULLABLE + result.add(null); + result.add(null); + result.add(null); + result.add(null); + result.add("NO");// IS_AUTOINCREMENT + result.add("NO");// IS_GENERATEDCOLUMN + results.add(result); + } + } + return new DatabendUnboundQueryResultSet(Optional.ofNullable(connection.createStatement()), + // Set unmodifiable to prevent columns from being adjusted to affect other + // thread calls + Collections.unmodifiableList(META_ROW_FIELDS), results.iterator()); + } + + public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String[] columnNames) + throws SQLException { + StringBuilder sql = columnMetaSqlTemplate(); + List filters = new ArrayList<>(); + emptyStringEqualsFilter(filters, "table_catalog", catalog); + emptyStringLikeFilter(filters, "table_schema", schemaPattern); + optionalStringLikeFilter(filters, "table_name", tableNamePattern); + optionalStringInFilter(filters, "column_name", columnNames); + buildFilters(sql, filters); + sql.append("\nORDER BY table_catalog, table_schema, table_name, ordinal_position"); + return getColumnsMetaDataBySQL(sql.toString()); + } + + @Override + public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) + throws SQLException { + StringBuilder sql = columnMetaSqlTemplate(); + List filters = new ArrayList<>(); + emptyStringEqualsFilter(filters, "table_catalog", catalog); + emptyStringLikeFilter(filters, "table_schema", schemaPattern); + if (tableNamePattern != null) { + optionalStringLikeFilter(filters, "table_name", tableNamePattern.replace("\\", "")); + } else { + optionalStringLikeFilter(filters, "table_name", null); + } + optionalStringLikeFilter(filters, "column_name", columnNamePattern); + buildFilters(sql, filters); + sql.append("\nORDER BY table_catalog, table_schema, table_name, ordinal_position"); + return getColumnsMetaDataBySQL(sql.toString()); + } + + @Override + public ResultSet getColumnPrivileges(String s, String s1, String s2, String s3) + throws SQLException { + throw new SQLFeatureNotSupportedException("privileges not supported"); + } + + @Override + public ResultSet getTablePrivileges(String s, String s1, 
String s2) + throws SQLException { + throw new SQLFeatureNotSupportedException("privileges not supported"); + } + + @Override + public ResultSet getBestRowIdentifier(String s, String s1, String s2, int i, boolean b) + throws SQLException { + throw new SQLFeatureNotSupportedException("row identifiers not supported"); + } + + @Override + public ResultSet getVersionColumns(String s, String s1, String s2) + throws SQLException { + throw new SQLFeatureNotSupportedException("version columns not supported"); + } + + @Override + public ResultSet getPrimaryKeys(String s, String s1, String s2) + throws SQLException { + String query = "SELECT " + + " TRY_CAST(NULL AS varchar) table_cat, " + + " TRY_CAST(NULL AS varchar) table_schema, " + + " TRY_CAST(NULL AS varchar) table_name, " + + " TRY_CAST(NULL AS varchar) column_name, " + + " TRY_CAST(NULL AS smallint) key_seq, " + + " TRY_CAST(NULL AS varchar) pk_name " + + "WHERE false"; + return select(query); + } + + @Override + public ResultSet getImportedKeys(String s, String s1, String s2) + throws SQLException { + String query = "SELECT " + + " TRY_CAST(NULL AS varchar) PKTABLE_CAT, " + + " TRY_CAST(NULL AS varchar) PKTABLE_SCHEM, " + + " TRY_CAST(NULL AS varchar) PKTABLE_NAME, " + + " TRY_CAST(NULL AS varchar) PKCOLUMN_NAME, " + + " TRY_CAST(NULL AS varchar) FKTABLE_CAT, " + + " TRY_CAST(NULL AS varchar) FKTABLE_SCHEM, " + + " TRY_CAST(NULL AS varchar) FKTABLE_NAME, " + + " TRY_CAST(NULL AS varchar) FKCOLUMN_NAME, " + + " TRY_CAST(NULL AS smallint) KEY_SEQ, " + + " TRY_CAST(NULL AS smallint) UPDATE_RULE, " + + " TRY_CAST(NULL AS smallint) DELETE_RULE, " + + " TRY_CAST(NULL AS varchar) FK_NAME, " + + " TRY_CAST(NULL AS varchar) PK_NAME, " + + " TRY_CAST(NULL AS smallint) DEFERRABILITY " + + "WHERE false"; + return select(query); + } + + @Override + public ResultSet getExportedKeys(String s, String s1, String s2) + throws SQLException { + throw new SQLFeatureNotSupportedException("exported keys not supported"); + } + + @Override + public ResultSet getCrossReference(String s, String s1, String s2, String s3, String s4, String s5) + throws SQLException { + throw new SQLFeatureNotSupportedException("cross reference not supported"); + } + + @Override + public ResultSet getTypeInfo() + throws SQLException { + return select("SELECT " + + " TRY_CAST(NULL AS varchar) TYPE_NAME, " + + " TRY_CAST(NULL AS smallint) DATA_TYPE, " + + " TRY_CAST(NULL AS int) PRECISION, " + + " TRY_CAST(NULL AS varchar) LITERAL_PREFIX, " + + " TRY_CAST(NULL AS varchar) LITERAL_SUFFIX, " + + " TRY_CAST(NULL AS varchar) CREATE_PARAMS, " + + " TRY_CAST(NULL AS smallint) NULLABLE, " + + " TRY_CAST(NULL AS boolean) CASE_SENSITIVE, " + + " TRY_CAST(NULL AS smallint) SEARCHABLE, " + + " TRY_CAST(NULL AS boolean) UNSIGNED_ATTRIBUTE, " + + " TRY_CAST(NULL AS boolean) FIXED_PREC_SCALE, " + + " TRY_CAST(NULL AS boolean) AUTO_INCREMENT, " + + " TRY_CAST(NULL AS varchar) LOCAL_TYPE_NAME, " + + " TRY_CAST(NULL AS smallint) MINIMUM_SCALE, " + + " TRY_CAST(NULL AS smallint) MAXIMUM_SCALE, " + + " TRY_CAST(NULL AS int) SQL_DATA_TYPE, " + + " TRY_CAST(NULL AS int) SQL_DATETIME_SUB, " + + " TRY_CAST(NULL AS int) NUM_PREC_RADIX " + + "WHERE false"); + } + + @Override + public ResultSet getIndexInfo(String s, String s1, String s2, boolean b, boolean b1) + throws SQLException { + throw new SQLFeatureNotSupportedException("index info not supported"); + } + + @Override + public boolean supportsResultSetType(int type) + throws SQLException { + return type == ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + 
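Databend has no primary or foreign key constraints, so the key-metadata methods above deliberately return an empty but correctly typed result set (typed NULLs guarded by WHERE false) rather than throwing. A minimal sketch of what a caller observes; the table name is a placeholder:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

class KeyMetadataExample {
    // getPrimaryKeys()/getImportedKeys() above always yield zero rows,
    // so this check always comes back false for this driver.
    static boolean hasPrimaryKey(Connection conn, String table) throws SQLException {
        DatabaseMetaData meta = conn.getMetaData();
        try (ResultSet rs = meta.getPrimaryKeys(null, null, table)) {
            return rs.next();
        }
    }
}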
public boolean supportsResultSetConcurrency(int type, int concurrency) + throws SQLException { + return (type == ResultSet.TYPE_FORWARD_ONLY) && + (concurrency == ResultSet.CONCUR_READ_ONLY); + } + + @Override + public boolean ownUpdatesAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean ownDeletesAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean ownInsertsAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean othersUpdatesAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean othersDeletesAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean othersInsertsAreVisible(int i) + throws SQLException { + return false; + } + + @Override + public boolean updatesAreDetected(int i) + throws SQLException { + return false; + } + + @Override + public boolean deletesAreDetected(int i) + throws SQLException { + return false; + } + + @Override + public boolean insertsAreDetected(int i) + throws SQLException { + return false; + } + + @Override + public boolean supportsBatchUpdates() + throws SQLException { + return true; + } + + @Override + public ResultSet getUDTs(String s, String s1, String s2, int[] ints) + throws SQLException { + return select("SELECT " + + " TRY_CAST(NULL AS varchar) TYPE_CAT, " + + " TRY_CAST(NULL AS varchar) TYPE_SCHEM, " + + " TRY_CAST(NULL AS varchar) TYPE_NAME, " + + " TRY_CAST(NULL AS varchar) CLASS_NAME, " + + " TRY_CAST(NULL AS smallint) DATA_TYPE, " + + " TRY_CAST(NULL AS varchar) REMARKS, " + + " TRY_CAST(NULL AS smallint) BASE_TYPE " + + "WHERE false"); + } + + @Override + public Connection getConnection() + throws SQLException { + return connection; + } + + @Override + public boolean supportsSavepoints() + throws SQLException { + return false; + } + + @Override + public boolean supportsNamedParameters() + throws SQLException { + return true; + } + + @Override + public boolean supportsMultipleOpenResults() + throws SQLException { + return false; + } + + @Override + public boolean supportsGetGeneratedKeys() + throws SQLException { + return false; + } + + @Override + public ResultSet getSuperTypes(String s, String s1, String s2) + throws SQLException { + return select("SELECT " + + " CAST(NULL AS varchar) TYPE_CAT, " + + " CAST(NULL AS varchar) TYPE_SCHEM, " + + " CAST(NULL AS varchar) TYPE_NAME, " + + " CAST(NULL AS varchar) SUPERTYPE_CAT, " + + " CAST(NULL AS varchar) SUPERTYPE_SCHEM, " + + " CAST(NULL AS varchar) SUPERTYPE_NAME " + + "WHERE false"); + } + + @Override + public ResultSet getSuperTables(String s, String s1, String s2) + throws SQLException { + return select("SELECT " + + " CAST(NULL AS varchar) TABLE_CAT, " + + " CAST(NULL AS varchar) TABLE_SCHEM, " + + " CAST(NULL AS varchar) TABLE_NAME, " + + " CAST(NULL AS varchar) SUPERTABLE_NAME " + + "WHERE false"); + } + + @Override + public ResultSet getAttributes(String s, String s1, String s2, String s3) + throws SQLException { + return select("SELECT " + + " TRY_CAST(NULL AS varchar) TYPE_CAT, " + + " TRY_CAST(NULL AS varchar) TYPE_SCHEM, " + + " TRY_CAST(NULL AS varchar) TYPE_NAME, " + + " TRY_CAST(NULL AS varchar) ATTR_NAME, " + + " TRY_CAST(NULL AS int) DATA_TYPE, " + + " TRY_CAST(NULL AS varchar) ATTR_TYPE_NAME, " + + " TRY_CAST(NULL AS int) ATTR_SIZE, " + + " TRY_CAST(NULL AS int) DECIMAL_DIGITS, " + + " TRY_CAST(NULL AS int) NUM_PREC_RADIX, " + + " TRY_CAST(NULL AS smallint) NULLABLE, " + + " TRY_CAST(NULL AS 
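Since only forward-only, read-only result sets are reported as supported, a defensive caller can probe these capability flags before asking for anything richer; a small sketch:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class CapabilityCheckExample {
    // The driver only reports TYPE_FORWARD_ONLY / CONCUR_READ_ONLY, so this
    // helper falls back to the default statement settings at runtime.
    static Statement createStatement(Connection conn) throws SQLException {
        DatabaseMetaData meta = conn.getMetaData();
        if (meta.supportsResultSetConcurrency(
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
            return conn.createStatement(
                    ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        }
        return conn.createStatement(); // forward-only, read-only
    }
}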
varchar) REMARKS, " + + " TRY_CAST(NULL AS varchar) ATTR_DEF, " + + " TRY_CAST(NULL AS int) SQL_DATA_TYPE, " + + " TRY_CAST(NULL AS int) SQL_DATETIME_SUB, " + + " TRY_CAST(NULL AS int) CHAR_OCTET_LENGTH, " + + " TRY_CAST(NULL AS int) ORDINAL_POSITION, " + + " TRY_CAST(NULL AS varchar) IS_NULLABLE, " + + " TRY_CAST(NULL AS varchar) SCOPE_CATALOG, " + + " TRY_CAST(NULL AS varchar) SCOPE_SCHEMA, " + + " TRY_CAST(NULL AS varchar) SCOPE_TABLE, " + + " TRY_CAST(NULL AS smallint) SOURCE_DATA_TYPE " + + "WHERE false"); + } + + @Override + public boolean supportsResultSetHoldability(int holdability) throws SQLException { + return false; + } + + @Override + public int getResultSetHoldability() throws SQLException { + // N/A applicable as we do not support transactions + return 0; + } + + // input DatabendQuery + // v0.8.173-nightly-d66d905(rust-1.67.0-nightly-2023-01-03T08:02:54.266305248Z) + // return 8 use regex + @Override + public int getDatabaseMajorVersion() + throws SQLException { + String version = getDatabaseProductVersion(); + // regex matching v%d.%d.%d + Pattern pattern = Pattern.compile("v(\\d+)\\.(\\d+)\\.(\\d+)"); + Matcher matcher = pattern.matcher(version); + if (matcher.find()) { + return 10 * Integer.parseInt(matcher.group(1)) + Integer.parseInt(matcher.group(2)); + } + return -1; + } + + // return 173 + @Override + public int getDatabaseMinorVersion() + throws SQLException { + String version = getDatabaseProductVersion(); + // regex matching v%d.%d.%d + Pattern pattern = Pattern.compile("v(\\d+)\\.(\\d+)\\.(\\d+)"); + Matcher matcher = pattern.matcher(version); + if (matcher.find()) { + return Integer.parseInt(matcher.group(3)); + } + return -1; + } + + @Override + public int getJDBCMajorVersion() + throws SQLException { + return 0; + } + + @Override + public int getJDBCMinorVersion() + throws SQLException { + return 1; + } + + @Override + public int getSQLStateType() + throws SQLException { + return DatabaseMetaData.sqlStateSQL99; + } + + @Override + public boolean locatorsUpdateCopy() + throws SQLException { + return false; + } + + @Override + public boolean supportsStatementPooling() + throws SQLException { + return false; + } + + @Override + public RowIdLifetime getRowIdLifetime() + throws SQLException { + return RowIdLifetime.ROWID_UNSUPPORTED; + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) + throws SQLException { + // from information schema + StringBuilder sql = new StringBuilder("SELECT " + + "schema_name as TABLE_SCHEM, " + + "catalog_name as TABLE_CATALOG " + + "FROM information_schema.schemata "); + List filters = new ArrayList<>(); + emptyStringEqualsFilter(filters, "catalog_name", catalog); + emptyStringEqualsFilter(filters, "schema_name", schemaPattern); + buildFilters(sql, filters); + sql.append("\n ORDER BY catalog_name, schema_name"); + return select(sql.toString()); + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() + throws SQLException { + return false; + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() + throws SQLException { + return false; + } + + @Override + public ResultSet getClientInfoProperties() + throws SQLException { + return select("SELECT " + + " TRY_CAST(NULL AS varchar) NAME, " + + " TRY_CAST(NULL AS varchar) MAX_LEN, " + + " TRY_CAST(NULL AS varchar) DEFAULT_VALUE, " + + " TRY_CAST(NULL AS varchar) DESCRIPTION " + + "WHERE false"); + } + + @Override + public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) + throws 
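The regex-based version parsing above can be exercised in isolation; for the build string quoted in the comment it yields major 8 (computed as 10 * 0 + 8) and minor 173:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class VersionParseExample {
    public static void main(String[] args) {
        String version = "v0.8.173-nightly-d66d905(rust-1.67.0-nightly-2023-01-03T08:02:54.266305248Z)";
        Matcher m = Pattern.compile("v(\\d+)\\.(\\d+)\\.(\\d+)").matcher(version);
        if (m.find()) {
            int major = 10 * Integer.parseInt(m.group(1)) + Integer.parseInt(m.group(2)); // 8
            int minor = Integer.parseInt(m.group(3));                                     // 173
            System.out.println(major + "." + minor); // prints 8.173
        }
    }
}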
SQLException { + StringBuilder sql = new StringBuilder("SELECT " + + " current_database() as FUNCTION_CAT, " + + " 'system' as FUNCTION_SCHEMA, " + + " name as FUNCTION_NAME, " + + " description as REMARKS, " + + " 1 as FUNCTION_TYPE, " + + " name as SPECIFIC_NAME " + + "FROM system.functions"); + List filters = new ArrayList<>(); + optionalStringLikeFilter(filters, "function_name", functionNamePattern); + buildFilters(sql, filters); + sql.append("\n ORDER BY function_cat, function_schema, function_name"); + return select(sql.toString()); + } + + @Override + public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, + String columnNamePattern) + throws SQLException { + StringBuilder sql = new StringBuilder("SELECT " + + " current_database() as FUNCTION_CAT, " + + " 'system' as FUNCTION_SCHEMA, " + + " name as FUNCTION_NAME, " + + " TRY_CAST(NULL AS varchar) COLUMN_NAME, " + + " TRY_CAST(NULL AS smallint) COLUMN_TYPE, " + + " TRY_CAST(NULL AS smallint) DATA_TYPE, " + + " TRY_CAST(NULL AS varchar) TYPE_NAME, " + + " TRY_CAST(NULL AS int) PRECISION, " + + " TRY_CAST(NULL AS int) LENGTH, " + + " TRY_CAST(NULL AS int) SCALE, " + + " TRY_CAST(NULL AS int) RADIX, " + + " TRY_CAST(NULL AS smallint) NULLABLE, " + + " TRY_CAST(NULL AS varchar) REMARKS, " + + " TRY_CAST(NULL AS varchar) CHAR_OCTET_LENGTH, " + + " TRY_CAST(NULL AS int) ORDINAL_POSITION, " + + " TRY_CAST(NULL AS varchar) IS_NULLABLE, " + + " TRY_CAST(NULL AS varchar) SPECIFIC_NAME " + + "FROM system.functions"); + List filters = new ArrayList<>(); + optionalStringLikeFilter(filters, "function_name", functionNamePattern); + buildFilters(sql, filters); + sql.append("\n ORDER BY function_cat, function_schema, function_name"); + return select(sql.toString()); + } + + @Override + public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, + String columnNamePattern) + throws SQLException { + return select("SELECT " + + " TRY_CAST(NULL AS varchar) TABLE_CAT, " + + " TRY_CAST(NULL AS varchar) TABLE_SCHEM, " + + " TRY_CAST(NULL AS varchar) TABLE_NAME, " + + " TRY_CAST(NULL AS varchar) COLUMN_NAME, " + + " TRY_CAST(NULL AS smallint) DATA_TYPE, " + + " TRY_CAST(NULL AS varchar) COLUMN_SIZE, " + + " TRY_CAST(NULL AS int) DECIMAL_DIGITS, " + + " TRY_CAST(NULL AS int) NUM_PREC_RADIX, " + + " TRY_CAST(NULL AS smallint) COLUMN_USAGE, " + + " TRY_CAST(NULL AS varchar) REMARKS, " + + " TRY_CAST(NULL AS varchar) CHAR_OCTET_LENGTH, " + + " TRY_CAST(NULL AS int) ORDINAL_POSITION, " + + " TRY_CAST(NULL AS varchar) IS_NULLABLE, " + + " TRY_CAST(NULL AS varchar) SCOPE_CATALOG, " + + " TRY_CAST(NULL AS varchar) SCOPE_SCHEMA, " + + " TRY_CAST(NULL AS varchar) SCOPE_TABLE, " + + " TRY_CAST(NULL AS smallint) SOURCE_DATA_TYPE " + + "WHERE false"); + } + + @Override + public boolean generatedKeyAlwaysReturned() + throws SQLException { + return false; + } + + @SuppressWarnings("unchecked") + @Override + public T unwrap(Class iface) + throws SQLException { + if (isWrapperFor(iface)) { + return (T) this; + } + throw new SQLException("No wrapper for " + iface); + } + + @Override + public boolean isWrapperFor(Class iface) + throws SQLException { + return iface.isInstance(this); + } + + private ResultSet select(String sql) + throws SQLException { + Statement statement = getConnection().createStatement(); + DatabendResultSet resultSet; + try { + resultSet = (DatabendResultSet) statement.executeQuery(sql); + } catch (Throwable e) { + try { + statement.close(); + } catch (Throwable 
closeException) { + if (closeException != e) { + e.addSuppressed(closeException); + } + } + + throw e; + } + return (ResultSet) resultSet; + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendDriver.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendDriver.java new file mode 100644 index 00000000..2eb8381a --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendDriver.java @@ -0,0 +1,80 @@ +package com.databend.bendsql; + +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.util.Properties; +import java.util.logging.Level; +import java.util.logging.Logger; + +import com.databend.bendsql.jni_utils.NativeLibrary; + + +public class DatabendDriver implements Driver { + + static final String DRIVER_VERSION = "0.1.0"; + static final int DRIVER_VERSION_MAJOR = 0; + static final int DRIVER_VERSION_MINOR = 1; + + private static final String JDBC_URL_START = "jdbc:databend://"; + + static { + NativeLibrary.loadLibrary(); + try { + DriverManager.registerDriver(new DatabendDriver()); + } catch (SQLException e) { + Logger.getLogger(DatabendDriver.class.getPackage().getName()) + .log(Level.SEVERE, "Failed to register driver", e); + throw new RuntimeException("Failed to register DatabendDriver", e); + } + } + + @Override + public boolean acceptsURL(String url) + throws SQLException { + if (url == null) { + throw new SQLException("URL is null"); + } + return url.startsWith(JDBC_URL_START); + } + + @Override + public Connection connect(String url, Properties info) + throws SQLException { + if (!acceptsURL(url)) { + return null; + } + return new DatabendConnection(url, info); + } + + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) + throws SQLException { + throw new SQLFeatureNotSupportedException("setMaxRows is not supported"); + } + + @Override + public int getMajorVersion() { + return DRIVER_VERSION_MAJOR; + } + + @Override + public int getMinorVersion() { + return DRIVER_VERSION_MINOR; + } + + @Override + public boolean jdbcCompliant() { + return false; + } + + @Override + public Logger getParentLogger() + throws SQLFeatureNotSupportedException { + // TODO: support java.util.Logging + throw new SQLFeatureNotSupportedException(); + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendParameterMetaData.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendParameterMetaData.java new file mode 100644 index 00000000..531d1f59 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendParameterMetaData.java @@ -0,0 +1,100 @@ +package com.databend.bendsql; + +import java.sql.ParameterMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.List; +import java.util.Locale; + +import static com.databend.bendsql.DatabendResultSetMetaData.getTypeClassName; + +import static java.util.Objects.requireNonNull; + +public class DatabendParameterMetaData implements ParameterMetaData { + protected final List params; + + protected DatabendParameterMetaData(List params) { + this.params = requireNonNull(params, "connection is null"); + } + + protected DatabendColumnInfo getParameter(int param) throws SQLException { + if (param < 1 || param > params.size()) { + throw new RuntimeException(format("Parameter index should between 1 and %d but we got %d", params.size(), param)); + } + + return 
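A minimal end-to-end connection sketch using the driver registered above; the jdbc:databend:// prefix comes from the code, while host, port, database and credentials are placeholders, and it is assumed that user/password are honored through the usual DriverManager properties:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

class ConnectExample {
    public static void main(String[] args) throws Exception {
        // Loading the driver class triggers registration in its static block.
        String url = "jdbc:databend://localhost:8000/default";
        try (Connection conn = DriverManager.getConnection(url, "root", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
            while (rs.next()) {
                System.out.println(rs.getInt(1));
            }
        }
    }
}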
params.get(param - 1); + } + + public static String format(String template, Object... args) { + return String.format(Locale.ROOT, template, args); + } + + @Override + public int getParameterCount() throws SQLException { + return params.size(); + } + + @Override + public int isNullable(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + if (p == null) { + return ParameterMetaData.parameterNullableUnknown; + } + + return p.getType().isNullable() ? ParameterMetaData.parameterNullable : ParameterMetaData.parameterNoNulls; + } + + @Override + public boolean isSigned(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return p != null && p.isSigned(); + } + + @Override + public int getPrecision(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return p != null ? p.getPrecision() : 0; + } + + @Override + public int getScale(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return p != null ? p.getScale() : 0; + } + + @Override + public int getParameterType(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return p != null ? p.toSqlType() : Types.OTHER; + } + + @Override + public String getParameterTypeName(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return p != null ? p.getColumnTypeName() : ""; + } + + @Override + public String getParameterClassName(int param) throws SQLException { + DatabendColumnInfo p = getParameter(param); + return getTypeClassName(p.getColumnType()); + } + + @Override + public int getParameterMode(int param) throws SQLException { + return ParameterMetaData.parameterModeIn; + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (iface.isAssignableFrom(getClass())) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return iface.isAssignableFrom(getClass()); + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendPreparedStatement.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendPreparedStatement.java new file mode 100644 index 00000000..06ed1d41 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendPreparedStatement.java @@ -0,0 +1,904 @@ +package com.databend.bendsql; + +import com.databend.client.data.DatabendRawType; +import com.databend.jdbc.RawStatementWrapper; +import com.databend.jdbc.StatementInfoWrapper; +import com.databend.jdbc.StatementUtil; +import com.databend.jdbc.parser.BatchInsertUtils; +import com.solidfire.gson.Gson; +import lombok.NonNull; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.ISODateTimeFormat; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.ParameterMetaData; +import java.sql.PreparedStatement; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.SQLXML; +import 
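A caller-side sketch of the parameter metadata above; table and column names are placeholders, and how much type information comes back depends on what StatementUtil can infer from the SQL text, so some fields may be empty:

import java.sql.Connection;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class ParameterMetaDataExample {
    // Describes each '?' marker of a prepared statement.
    static void describeParameters(Connection conn) throws SQLException {
        String sql = "INSERT INTO my_table (id, name) VALUES (?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            ParameterMetaData pmd = ps.getParameterMetaData();
            for (int i = 1; i <= pmd.getParameterCount(); i++) {
                System.out.printf("param %d: %s (JDBC type %d)%n",
                        i, pmd.getParameterTypeName(i), pmd.getParameterType(i));
            }
        }
    }
}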
java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.OffsetTime; +import java.time.format.DateTimeFormatterBuilder; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Calendar; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.logging.Logger; +import java.util.stream.Collectors; + +import static com.databend.bendsql.ObjectCasts.*; +import static com.databend.jdbc.StatementUtil.replaceParameterMarksWithValues; +import static java.lang.String.format; +import static java.time.format.DateTimeFormatter.ISO_LOCAL_DATE; +import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; +import static java.util.Objects.requireNonNull; + +public class DatabendPreparedStatement extends DatabendStatement implements PreparedStatement { + private static final Logger logger = Logger.getLogger(DatabendPreparedStatement.class.getPackage().getName()); + static final DateTimeFormatter DATE_FORMATTER = ISODateTimeFormat.date(); + private final RawStatementWrapper rawStatement; + static final DateTimeFormatter TIME_FORMATTER = DateTimeFormat.forPattern("HH:mm:ss.SSS"); + static final DateTimeFormatter TIMESTAMP_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"); + private final DatabendParameterMetaData paramMetaData; + private static final java.time.format.DateTimeFormatter LOCAL_DATE_TIME_FORMATTER = + new DateTimeFormatterBuilder() + .append(ISO_LOCAL_DATE) + .appendLiteral(' ') + .append(ISO_LOCAL_TIME) + .toFormatter(); + private static final java.time.format.DateTimeFormatter OFFSET_TIME_FORMATTER = + new DateTimeFormatterBuilder() + .append(ISO_LOCAL_TIME) + .appendOffset("+HH:mm", "+00:00") + .toFormatter(); + private final String originalSql; + private final List batchValues; + private final Optional batchInsertUtils; + + DatabendPreparedStatement(DatabendConnection connection, String sql) { + super(connection); + this.originalSql = requireNonNull(sql, "sql is null"); + this.batchValues = new ArrayList<>(); + this.batchInsertUtils = BatchInsertUtils.tryParseInsertSql(sql); + this.rawStatement = StatementUtil.parseToRawStatementWrapper(sql); + Map params = StatementUtil.extractColumnTypes(sql); + List list = params.entrySet().stream().map(entry -> { + String type = entry.getValue(); + DatabendRawType databendRawType = new DatabendRawType(type); + return DatabendColumnInfo.of(entry.getKey().toString(), databendRawType); + }).collect(Collectors.toList()); + this.paramMetaData = new DatabendParameterMetaData(Collections.unmodifiableList(list)); + } + + private static String formatBooleanLiteral(boolean x) { + return Boolean.toString(x); + } + + private static String formatByteLiteral(byte x) { + return Byte.toString(x); + } + + private static String formatShortLiteral(short x) { + return Short.toString(x); + } + + private static String formatIntLiteral(int x) { + return Integer.toString(x); + } + + private static String formatLongLiteral(long x) { + return Long.toString(x); + } + + private static String formatFloatLiteral(float x) { + return Float.toString(x); + } + + private static String formatDoubleLiteral(double x) { + return Double.toString(x); + } + + private static String formatBigDecimalLiteral(BigDecimal x) { + if (x == null) { + return "null"; + } + + return x.toString(); + } + + + private static String formatBytesLiteral(byte[] x) { 
+ return new String(x, StandardCharsets.UTF_8); + } + + static IllegalArgumentException invalidConversion(Object x, String toType) { + return new IllegalArgumentException(format("Cannot convert instance of %s to %s", x.getClass().getName(), toType)); + } + + @Override + public void close() + throws SQLException { + super.close(); + } + + public int[] executeBatchByAttachment() throws SQLException { + int[] batchUpdateCounts = new int[batchValues.size()]; + if (!batchInsertUtils.isPresent() || batchValues == null || batchValues.isEmpty()) { +// super.execute(this.originalSql); + return batchUpdateCounts; + } + File saved = null; + try { + saved = batchInsertUtils.get().saveBatchToCSV(batchValues); + DatabendConnection c = (DatabendConnection) getConnection(); + c.getNativeConnection().execInsertWithAttachment(batchInsertUtils.get().getSql(), saved.getAbsolutePath()); + } finally { + if (saved != null) { + saved.delete(); + } + clearBatch(); + } + return batchUpdateCounts; + + } + + public int[] executeBatchDelete() throws SQLException { + if (!batchInsertUtils.isPresent() || batchValues == null || batchValues.isEmpty()) { + return new int[]{}; + } + int[] batchUpdateCounts = new int[batchValues.size()]; + try { + String sql = convertSQLWithBatchValues(this.originalSql, this.batchValues); + logger.fine(String.format("use copy into instead of normal insert, copy into SQL: %s", sql)); + super.execute(sql); + ResultSet r = getResultSet(); + while (r.next()) { + + } + return batchUpdateCounts; + } catch (RuntimeException e) { + throw new SQLException(e); + } + } + + public static String convertSQLWithBatchValues(String baseSql, List batchValues) { + StringBuilder convertedSqlBuilder = new StringBuilder(); + + if (batchValues != null && !batchValues.isEmpty()) { + for (String[] values : batchValues) { + if (values != null && values.length > 0) { + String convertedSql = baseSql; + for (int i = 0; i < values.length; i++) { + convertedSql = convertedSql.replaceFirst("\\?", values[i]); + } + convertedSqlBuilder.append(convertedSql).append(";\n"); + } + } + } + + return convertedSqlBuilder.toString(); + } + + + @Override + public int[] executeBatch() throws SQLException { + if (originalSql.toLowerCase().contains("delete from")) { + return executeBatchDelete(); + } + return executeBatchByAttachment(); + } + + @Override + public ResultSet executeQuery() + throws SQLException { + String sql = replaceParameterMarksWithValues(batchInsertUtils.get().getProvideParams(), this.originalSql) + .get(0) + .getSql(); + executeQuery(sql); + return getResultSet(); + } + + private List prepareSQL(@NonNull Map params) { + return replaceParameterMarksWithValues(params, this.rawStatement); + } + + @Override + public boolean execute() + throws SQLException { + boolean r; + try { + r = this.execute(prepareSQL(batchInsertUtils.get().getProvideParams())); + } catch (Exception e) { + throw new SQLException(e); + } finally { + clearBatch(); + } + return r; + } + + protected boolean execute(List statements) throws SQLException { + try { + for (int i = 0; i < statements.size(); i++) { + String sql = statements.get(i).getSql(); + if (sql.toLowerCase().contains("insert into") && !sql.toLowerCase().contains("select")) { + handleBatchInsert(); + } else { + execute(sql); + } + return true; + } + } catch (Exception e) { + throw new SQLException(e); + } finally { + } + return true; + } + + protected void handleBatchInsert() throws SQLException { + try { + addBatch(); + executeBatch(); + } catch (Exception e) { + throw new 
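Client code drives the attachment-based batch path above through the normal JDBC batch API: each addBatch() snapshots the bound values and executeBatch() ships them as a CSV attachment (DELETE statements are instead expanded inline). Table and column names in this sketch are placeholders:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class BatchInsertExample {
    // Stages three rows client-side, then writes them in one batched INSERT.
    static void insertRows(Connection conn) throws SQLException {
        String sql = "INSERT INTO events (id, message) VALUES (?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (int i = 0; i < 3; i++) {
                ps.setInt(1, i);
                ps.setString(2, "event-" + i);
                ps.addBatch();
            }
            ps.executeBatch();
        }
    }
}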
SQLException(e); + } + } + + @Override + public int executeUpdate() throws SQLException { + this.execute(prepareSQL(batchInsertUtils.get().getProvideParams())); + return batchInsertUtils.get().getProvideParams().size(); + } + + @Override + public void setNull(int i, int i1) + throws SQLException { + checkClosed(); + if (this.originalSql.toLowerCase().contains("insert") || + this.originalSql.toLowerCase().contains("replace")) { + // Databend uses \N as default null representation for csv and tsv format + // https://github.com/datafuselabs/databend/pull/6453 + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, "\\N")); + } else { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, "null")); + } + } + + @Override + public void setBoolean(int i, boolean b) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatBooleanLiteral(b))); + } + + @Override + public void setByte(int i, byte b) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatByteLiteral(b))); + } + + @Override + public void setShort(int i, short i1) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatShortLiteral(i1))); + } + + @Override + public void setInt(int i, int i1) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatIntLiteral(i1))); + } + + @Override + public void setLong(int i, long l) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatLongLiteral(l))); + } + + @Override + public void setFloat(int i, float v) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatFloatLiteral(v))); + } + + @Override + public void setDouble(int i, double v) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatDoubleLiteral(v))); + } + + @Override + public void setBigDecimal(int i, BigDecimal bigDecimal) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatBigDecimalLiteral(bigDecimal))); + } + + @Override + public void setString(int i, String s) + throws SQLException { + checkClosed(); + if (originalSql.toLowerCase().startsWith("insert") || + originalSql.toLowerCase().startsWith("replace")) { + String finalS1 = s; + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, finalS1)); + } else { + if (s.contains("'")) { + s = s.replace("'", "\\\'"); + } + String finalS = s; + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, String.format("%s%s%s", "'", finalS, "'"))); + } + } + + @Override + public void setBytes(int i, byte[] bytes) + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, formatBytesLiteral(bytes))); + } + + @Override + public void setDate(int i, Date date) + throws SQLException { + checkClosed(); + if (date == null) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, null)); + } else { + if (originalSql.toLowerCase().startsWith("select")) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, String.format("%s%s%s", "'", date, "'"))); + 
} else { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, toDateLiteral(date))); + } + } + } + + @Override + public void setTime(int i, Time time) + throws SQLException { + checkClosed(); + if (time == null) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, null)); + } else { + if (originalSql.toLowerCase().startsWith("select")) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, String.format("%s%s%s", "'", time, "'"))); + } else { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, toTimeLiteral(time))); + } + } + } + + @Override + public void setTimestamp(int i, Timestamp timestamp) + throws SQLException { + checkClosed(); + if (timestamp == null) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, null)); + } else { + if (originalSql.toLowerCase().startsWith("select")) { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, String.format("%s%s%s", "'", timestamp, "'"))); + } else { + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, toTimestampLiteral(timestamp))); + } + } + } + + @Override + public void setAsciiStream(int i, InputStream inputStream, int i1) + throws SQLException { + throw new SQLFeatureNotSupportedException("setAsciiStream not supported"); + } + + @Override + public void setUnicodeStream(int i, InputStream inputStream, int i1) + throws SQLException { + throw new SQLFeatureNotSupportedException("setUnicodeStream not supported"); + } + + @Override + public void setBinaryStream(int i, InputStream inputStream, int i1) + throws SQLException { + throw new SQLFeatureNotSupportedException("setBinaryStream not supported"); + } + + @Override + public void clearParameters() + throws SQLException { + checkClosed(); + batchInsertUtils.ifPresent(BatchInsertUtils::clean); + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType) + throws SQLException { + checkClosed(); + if (x == null) { + setNull(parameterIndex, Types.NULL); + return; + } + switch (targetSqlType) { + case Types.BOOLEAN: + case Types.BIT: + setBoolean(parameterIndex, castToBoolean(x, targetSqlType)); + return; + case Types.TINYINT: + setByte(parameterIndex, castToByte(x, targetSqlType)); + return; + case Types.SMALLINT: + setShort(parameterIndex, castToShort(x, targetSqlType)); + return; + case Types.INTEGER: + setInt(parameterIndex, castToInt(x, targetSqlType)); + return; + case Types.BIGINT: + setLong(parameterIndex, castToLong(x, targetSqlType)); + return; + case Types.FLOAT: + case Types.REAL: + setFloat(parameterIndex, castToFloat(x, targetSqlType)); + return; + case Types.DOUBLE: + setDouble(parameterIndex, castToDouble(x, targetSqlType)); + return; + case Types.DECIMAL: + case Types.NUMERIC: + setBigDecimal(parameterIndex, castToBigDecimal(x, targetSqlType)); + return; + case Types.CHAR: + case Types.NCHAR: + case Types.VARCHAR: + case Types.NVARCHAR: + case Types.LONGVARCHAR: + case Types.LONGNVARCHAR: + setString(parameterIndex, x.toString()); + return; + case Types.BINARY: + InputStream blobInputStream = new ByteArrayInputStream(x.toString().getBytes()); + setBinaryStream(parameterIndex, blobInputStream); + return; + case Types.VARBINARY: + case Types.LONGVARBINARY: + setBytes(parameterIndex, castToBinary(x, targetSqlType)); + return; + case Types.DATE: + setString(parameterIndex, toDateLiteral(x)); + return; + case Types.TIME: + setString(parameterIndex, toTimeLiteral(x)); + 
return; + case Types.TIME_WITH_TIMEZONE: + setString(parameterIndex, toTimeWithTimeZoneLiteral(x)); + return; + case Types.TIMESTAMP: + setString(parameterIndex, toTimestampLiteral(x)); + return; + case Types.TIMESTAMP_WITH_TIMEZONE: + setString(parameterIndex, toTimestampWithTimeZoneLiteral(x)); + return; + } + throw new SQLException("Unsupported target SQL type: " + targetSqlType); + } + + @Override + public void setObject(int parameterIndex, Object x) + throws SQLException { + checkClosed(); + if (x == null) { + setNull(parameterIndex, Types.NULL); + } else if (x instanceof Boolean) { + setBoolean(parameterIndex, (Boolean) x); + } else if (x instanceof Byte) { + setByte(parameterIndex, (Byte) x); + } else if (x instanceof Short) { + setShort(parameterIndex, (Short) x); + } else if (x instanceof Integer) { + setInt(parameterIndex, (Integer) x); + } else if (x instanceof Long) { + setLong(parameterIndex, (Long) x); + } else if (x instanceof Float) { + setFloat(parameterIndex, (Float) x); + } else if (x instanceof Double) { + setDouble(parameterIndex, (Double) x); + } else if (x instanceof BigDecimal) { + setBigDecimal(parameterIndex, (BigDecimal) x); + } else if (x instanceof String) { + setString(parameterIndex, (String) x); + } else if (x instanceof byte[]) { + setBytes(parameterIndex, (byte[]) x); + } else if (x instanceof Date) { + setDate(parameterIndex, (Date) x); + } else if (x instanceof LocalDate) { + setString(parameterIndex, toDateLiteral(x)); + } else if (x instanceof Time) { + setTime(parameterIndex, (Time) x); + } + // TODO (https://github.com/trinodb/trino/issues/6299) LocalTime -> setAsTime + else if (x instanceof OffsetTime) { + setString(parameterIndex, toTimeWithTimeZoneLiteral(x)); + } else if (x instanceof Timestamp) { + setTimestamp(parameterIndex, (Timestamp) x); + } else if (x instanceof Map) { + setString(parameterIndex, convertToJsonString((Map) x)); + } else if (x instanceof Array) { + setString(parameterIndex, convertArrayToString((Array) x)); + } else if (x instanceof ArrayList) { + setString(parameterIndex, convertArrayListToString((ArrayList) x)); + } else { + throw new SQLException("Unsupported object type: " + x.getClass().getName()); + } + } + + public static String convertToJsonString(Map map) { + Gson gson = new Gson(); + return gson.toJson(map); + } + + public static String convertArrayToString(Array array) { + return array.toString(); + } + + public static String convertArrayListToString(ArrayList arrayList) { + StringBuilder builder = new StringBuilder(); + builder.append("["); + for (int i = 0; i < arrayList.size(); i++) { + builder.append(arrayList.get(i)); + if (i < arrayList.size() - 1) { + builder.append(", "); + } + } + builder.append("]"); + + return builder.toString(); + } + + + @Override + public void addBatch() + throws SQLException { + checkClosed(); + if (batchInsertUtils.isPresent()) { + String[] val = batchInsertUtils.get().getValues(); + batchValues.add(val); + batchInsertUtils.get().clean(); + + } + } + + @Override + public void clearBatch() throws SQLException { + checkClosed(); + batchValues.clear(); + batchInsertUtils.ifPresent(BatchInsertUtils::clean); + } + + @Override + public void setCharacterStream(int i, Reader reader, int i1) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setCharacterStream"); + } + + @Override + public void setRef(int i, Ref ref) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setRef"); + } + + @Override + public void 
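The setObject() dispatch above serializes a Map parameter to a JSON string via Gson, which is convenient for VARIANT columns; a sketch with placeholder table and column names, assuming the target column accepts JSON text:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;

class SetObjectExample {
    // The Map bound here is turned into a JSON string by the dispatch above.
    static void insertJson(Connection conn) throws SQLException {
        Map<String, Object> payload = new HashMap<>();
        payload.put("user", "alice");
        payload.put("age", 30);
        try (PreparedStatement ps = conn.prepareStatement(
                "INSERT INTO json_table (doc) VALUES (?)")) {
            ps.setObject(1, payload);
            ps.executeUpdate();
        }
    }
}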
setBlob(int i, Blob x) + throws SQLException { + if (x != null) { + setBinaryStream(i, x.getBinaryStream()); + } else { + setNull(i, Types.BLOB); + } + } + + @Override + public void setClob(int i, Clob x) + throws SQLException { + if (x != null) { + setCharacterStream(i, x.getCharacterStream()); + } else { + setNull(i, Types.CLOB); + } + } + + @Override + public void setArray(int i, Array array) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setArray"); + } + + @Override + public ResultSetMetaData getMetaData() + throws SQLException { + return null; + } + + @Override + public void setDate(int i, Date date, Calendar calendar) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setDate"); + } + + @Override + public void setTime(int i, Time time, Calendar calendar) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setTime"); + } + + @Override + public void setTimestamp(int i, Timestamp timestamp, Calendar calendar) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setTimestamp"); + } + + @Override + public void setNull(int i, int i1, String s) + throws SQLException { + setNull(i, i1); + } + + @Override + public void setURL(int i, URL url) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setURL"); + } + + // If you want to use ps.getParameterMetaData().* methods, you need to use a valid sql such as + // insert into table_name (col1 type1, col2 typ2, col3 type3) values (?, ?, ?) + @Override + public ParameterMetaData getParameterMetaData() throws SQLException { + return paramMetaData; + } + + @Override + public void setRowId(int i, RowId rowId) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setRowId"); + } + + @Override + public void setNString(int i, String s) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNString"); + } + + @Override + public void setNCharacterStream(int i, Reader reader, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNCharacterStream"); + } + + @Override + public void setNClob(int i, NClob nClob) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNClob"); + } + + @Override + public void setClob(int i, Reader reader, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setClob"); + } + + @Override + public void setBlob(int i, InputStream inputStream, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setBlob"); + } + + @Override + public void setNClob(int i, Reader reader, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNClob"); + } + + @Override + public void setSQLXML(int i, SQLXML sqlxml) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setSQLXML"); + } + + @Override + public void setObject(int i, Object o, int i1, int i2) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setObject"); + } + + @Override + public void setAsciiStream(int i, InputStream inputStream, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setAsciiStream"); + } + + @Override + public void setBinaryStream(int i, InputStream inputStream, long l) + throws 
SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setBinaryStream"); + } + + @Override + public void setCharacterStream(int i, Reader reader, long l) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setCharacterStream"); + } + + @Override + public void setAsciiStream(int i, InputStream inputStream) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setAsciiStream"); + } + + @Override + public void setBinaryStream(int i, InputStream inputStream) + throws SQLException { + checkClosed(); + try { + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + int nRead; + byte[] data = new byte[1024]; + while ((nRead = inputStream.read(data, 0, data.length)) != -1) { + buffer.write(data, 0, nRead); + } + buffer.flush(); + byte[] bytes = buffer.toByteArray(); + // TODO: use base64 which is more efficent if server is new enough to support option + // before that, use the default hex + /// String textString = bytesToBase64(bytes); + String textString = bytesToHex(bytes); + batchInsertUtils.ifPresent(insertUtils -> insertUtils.setPlaceHolderValue(i, textString)); + } catch (IOException e) { + throw new SQLException("Error reading InputStream", e); + } + } + + private static String bytesToHex(byte[] bytes) { + StringBuilder sb = new StringBuilder(); + for (byte b : bytes) { + sb.append(String.format("%02x", b)); + } + return sb.toString(); + } + + private static String bytesToBase64(byte[] bytes) { + return Base64.getEncoder().encodeToString(bytes); + } + + @Override + public void setCharacterStream(int i, Reader reader) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setCharacterStream"); + } + + @Override + public void setNCharacterStream(int i, Reader reader) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNCharacterStream"); + } + + @Override + public void setClob(int i, Reader reader) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setClob"); + } + + @Override + public void setBlob(int i, InputStream inputStream) + throws SQLException { + setBinaryStream(i, inputStream); + } + + @Override + public void setNClob(int i, Reader reader) + throws SQLException { + throw new SQLFeatureNotSupportedException("PreparedStatement", "setNClob"); + } + + + private String toDateLiteral(Object value) throws IllegalArgumentException { + requireNonNull(value, "value is null"); + if (value instanceof java.util.Date) { + return DATE_FORMATTER.print(((java.util.Date) value).getTime()); + } + if (value instanceof LocalDate) { + return ISO_LOCAL_DATE.format(((LocalDate) value)); + } + if (value instanceof LocalDateTime) { + return ISO_LOCAL_DATE.format(((LocalDateTime) value)); + } + if (value instanceof String) { + // TODO validate proper format + return (String) value; + } + throw invalidConversion(value, "date"); + } + + private String toTimeLiteral(Object value) + throws IllegalArgumentException { + if (value instanceof java.util.Date) { + return TIME_FORMATTER.print(((java.util.Date) value).getTime()); + } + if (value instanceof LocalTime) { + return ISO_LOCAL_TIME.format((LocalTime) value); + } + if (value instanceof LocalDateTime) { + return ISO_LOCAL_TIME.format((LocalDateTime) value); + } + if (value instanceof String) { + // TODO validate proper format + return (String) value; + } + throw invalidConversion(value, "time"); + } + + private String toTimestampLiteral(Object 
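Binary parameters are currently sent as a hex string (base64 is noted above as a future option once the server supports it). A standalone sketch of the same encoding as the bytesToHex() helper:

import java.nio.charset.StandardCharsets;

class HexEncodingExample {
    // Two lowercase hex digits per byte, matching bytesToHex() above.
    static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        byte[] data = "abc".getBytes(StandardCharsets.UTF_8);
        System.out.println(toHex(data)); // prints 616263
    }
}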
value) + throws IllegalArgumentException { + if (value instanceof java.util.Date) { + return TIMESTAMP_FORMATTER.print(((java.util.Date) value).getTime()); + } + if (value instanceof LocalDateTime) { + return LOCAL_DATE_TIME_FORMATTER.format(((LocalDateTime) value)); + } + if (value instanceof String) { + // TODO validate proper format + return (String) value; + } + throw invalidConversion(value, "timestamp"); + } + + private String toTimestampWithTimeZoneLiteral(Object value) + throws SQLException { + if (value instanceof String) { + return (String) value; + } else if (value instanceof OffsetDateTime) { + return OFFSET_TIME_FORMATTER.format((OffsetDateTime) value); + } + throw invalidConversion(value, "timestamp with time zone"); + } + + private String toTimeWithTimeZoneLiteral(Object value) + throws SQLException { + if (value instanceof OffsetTime) { + return OFFSET_TIME_FORMATTER.format((OffsetTime) value); + } + if (value instanceof String) { + // TODO validate proper format + return (String) value; + } + throw invalidConversion(value, "time with time zone"); + } + +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSet.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSet.java new file mode 100644 index 00000000..920c83c0 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSet.java @@ -0,0 +1,47 @@ +package com.databend.bendsql; + +import java.sql.*; +import java.util.Optional; + + +public class DatabendResultSet extends AbstractDatabendResultSet { + private final Statement statement; + private final NativeRowBatchIterator iterator; + private boolean isClosed; + + public DatabendResultSet(Statement statement, NativeRowBatchIterator batchIterator) { + super(Optional.of(statement), batchIterator.getSchema(), new BatchToRowIterator(batchIterator, batchIterator.getSchema())); + this.statement = statement; + this.isClosed = false; + this.iterator = batchIterator; + } + + @Override + public void close() throws SQLException { + synchronized (this) { + if (isClosed) { + return; + } + isClosed = true; + iterator.close(); + } + } + + @Override + public boolean isClosed() throws SQLException { + return isClosed; + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (iface.isAssignableFrom(getClass())) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return iface.isAssignableFrom(getClass()); + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSetMetaData.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSetMetaData.java new file mode 100644 index 00000000..aa3e4ab6 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendResultSetMetaData.java @@ -0,0 +1,224 @@ +package com.databend.bendsql; + +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.List; + + +public class DatabendResultSetMetaData implements ResultSetMetaData { + private final List databendColumnInfo; + + DatabendResultSetMetaData(List databendColumnInfo) { + this.databendColumnInfo = databendColumnInfo; + } + + static String getTypeClassName(int type) { + // see javax.sql.rowset.RowSetMetaDataImpl + switch 
(type) { + case Types.NUMERIC: + case Types.DECIMAL: + return BigDecimal.class.getName(); + case Types.BOOLEAN: + case Types.BIT: + return Boolean.class.getName(); + case Types.TINYINT: + return Byte.class.getName(); + case Types.SMALLINT: + return Short.class.getName(); + case Types.INTEGER: + return Integer.class.getName(); + case Types.BIGINT: + return Long.class.getName(); + case Types.REAL: + return Float.class.getName(); + case Types.FLOAT: + case Types.DOUBLE: + return Double.class.getName(); + case Types.BINARY: + case Types.VARBINARY: + case Types.LONGVARBINARY: + return "byte[]"; + case Types.DATE: + return Date.class.getName(); + case Types.TIME: + return Time.class.getName(); + case Types.TIMESTAMP: + return Timestamp.class.getName(); + case Types.BLOB: + return Blob.class.getName(); + case Types.CLOB: + return Clob.class.getName(); + case Types.ARRAY: + return Array.class.getName(); + case Types.NULL: + return "unknown"; + } + return String.class.getName(); + } + + @Override + public int getColumnCount() + throws SQLException { + if (this.databendColumnInfo == null) { + return 0; + } + return this.databendColumnInfo.size(); + } + + @Override + public boolean isAutoIncrement(int i) + throws SQLException { + return false; + } + + @Override + public boolean isCaseSensitive(int i) + throws SQLException { + return false; + } + + @Override + public boolean isSearchable(int i) + throws SQLException { + return true; + } + + @Override + public boolean isCurrency(int i) + throws SQLException { + return false; + } + + @Override + public int isNullable(int i) + throws SQLException { + DatabendColumnInfo.Nullable nullable = column(i).getNullable(); + switch (nullable) { + case NO_NULLS: + return columnNoNulls; + case NULLABLE: + return columnNullable; + case UNKNOWN: + return columnNullableUnknown; + } + throw new SQLException("Unhandled nullable type: " + nullable); + } + + @Override + public boolean isSigned(int i) + throws SQLException { + return column(i).isSigned(); + } + + @Override + public int getColumnDisplaySize(int i) + throws SQLException { + return column(i).getColumnDisplaySize(); + } + + @Override + public String getColumnLabel(int i) + throws SQLException { + return column(i).getColumnLabel(); + } + + @Override + public String getColumnName(int i) + throws SQLException { + return column(i).getColumnName(); + } + + @Override + public String getSchemaName(int i) + throws SQLException { + return column(i).getSchemaName(); + } + + @Override + public int getPrecision(int i) + throws SQLException { + return column(i).getPrecision(); + } + + @Override + public int getScale(int i) + throws SQLException { + return column(i).getScale(); + } + + @Override + public String getTableName(int i) + throws SQLException { + return column(i).getTableName(); + } + + @Override + public String getCatalogName(int i) + throws SQLException { + return column(i).getCatalogName(); + } + + @Override + public int getColumnType(int i) + throws SQLException { + return column(i).getColumnType(); + } + + @Override + public String getColumnTypeName(int i) + throws SQLException { + return column(i).getColumnTypeName(); + } + + @Override + public boolean isReadOnly(int i) + throws SQLException { + return true; + } + + @Override + public boolean isWritable(int i) + throws SQLException { + return false; + } + + @Override + public boolean isDefinitelyWritable(int i) + throws SQLException { + return false; + } + + @Override + public String getColumnClassName(int i) + throws SQLException { + return 
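Assuming the abstract result set wires getMetaData() to the metadata class above (that wiring is not part of this diff), a caller can describe any query result like this; the query itself is illustrative:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

class DescribeResultExample {
    // Prints each column's label, Databend type name and mapped Java class.
    static void describe(Connection conn, String query) throws SQLException {
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(query)) {
            ResultSetMetaData md = rs.getMetaData();
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.printf("%s: %s -> %s%n",
                        md.getColumnLabel(i),
                        md.getColumnTypeName(i),
                        md.getColumnClassName(i));
            }
        }
    }
}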
getTypeClassName(column(i).getColumnType()); + } + + @Override + public T unwrap(Class aClass) + throws SQLException { + return null; + } + + @Override + public boolean isWrapperFor(Class aClass) + throws SQLException { + return false; + } + + private DatabendColumnInfo column(int column) + throws SQLException { + if ((column <= 0) || (column > this.databendColumnInfo.size())) { + throw new SQLException("Invalid column index: " + column); + } + return this.databendColumnInfo.get(column - 1); + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/DatabendStatement.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendStatement.java new file mode 100644 index 00000000..a2062d40 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendStatement.java @@ -0,0 +1,110 @@ +package com.databend.bendsql; + +import java.sql.*; + +public class DatabendStatement extends TrivialStatement { + private final DatabendConnection connection; + private ResultSet currentResultSet; + private boolean isClosed; + private int updateCount = 0; + + DatabendStatement(DatabendConnection connection) { + this.connection = connection; + this.isClosed = false; + } + + @Override + public ResultSet executeQuery(String sql) throws SQLException { + checkClosed(); + try { + NativeConnection nativeConnection = connection.getNativeConnection(); + NativeRowBatchIterator iterator = nativeConnection.execute(sql); + if (iterator == null) { + throw new SQLException("Query does not return result set: " + sql); + } + currentResultSet = new DatabendResultSet(this, iterator); + return currentResultSet; + } catch (Exception e) { + throw new SQLException("Failed to execute query: " + e.getMessage(), e); + } + } + + @Override + public boolean execute(String sql) throws SQLException { + checkClosed(); + try { + NativeConnection nativeConnection = connection.getNativeConnection(); + NativeRowBatchIterator iterator = nativeConnection.execute(sql); + if (iterator == null) { + return false; + } else { + currentResultSet = new DatabendResultSet(this, iterator); + return true; + } + } catch (Exception e) { + throw new SQLException("Failed to execute: " + e.getMessage(), e); + } + } + + @Override + public void close() throws SQLException { + if (!isClosed) { + if (currentResultSet != null) { + currentResultSet.close(); + } + isClosed = true; + } + } + + protected void checkClosed() throws SQLException { + if (isClosed) { + throw new SQLException("Statement is closed"); + } + } + + @Override + public boolean isClosed() throws SQLException { + return isClosed; + } + + @Override + public T unwrap(Class iface) throws SQLException { + if (isWrapperFor(iface)) { + return iface.cast(this); + } + throw new SQLException("Cannot unwrap to " + iface.getName()); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + return iface.isAssignableFrom(getClass()); + } + + @Override + public ResultSet getResultSet() throws SQLException { + checkClosed(); + return currentResultSet; + } + + @Override + public int getUpdateCount() throws SQLException { + checkClosed(); + return updateCount; + } + + protected final DatabendConnection connection() + throws SQLException { + if (connection == null) { + throw new SQLException("Statement is closed"); + } + if (connection.isClosed()) { + throw new SQLException("Connection is closed"); + } + return connection; + } + + @Override + public Connection getConnection() throws SQLException { + return connection(); + } +} \ No newline at end of file diff --git 
a/bindings/java/src/main/java/com/databend/bendsql/DatabendUnboundQueryResultSet.java b/bindings/java/src/main/java/com/databend/bendsql/DatabendUnboundQueryResultSet.java new file mode 100644 index 00000000..13a98646 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/DatabendUnboundQueryResultSet.java @@ -0,0 +1,27 @@ +package com.databend.bendsql; + +import java.sql.Statement; +import java.sql.SQLException; +import java.util.List; +import java.util.Iterator; +import java.util.Optional; + +import com.databend.client.QueryRowField; + +public class DatabendUnboundQueryResultSet extends AbstractDatabendResultSet { + private boolean closed = false; + + DatabendUnboundQueryResultSet(Optional statement, List schema, Iterator> results) { + super(statement, schema, results); + } + + @Override + public void close() throws SQLException { + this.closed = true; + } + + @Override + public boolean isClosed() { + return closed; + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/NativeConnection.java b/bindings/java/src/main/java/com/databend/bendsql/NativeConnection.java new file mode 100644 index 00000000..1ed21eac --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/NativeConnection.java @@ -0,0 +1,85 @@ +/* + * Copyright 2021 Datafuse Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.databend.bendsql; + + +import java.util.HashSet; + +import com.databend.bendsql.jni_utils.AsyncExecutor; +import com.databend.bendsql.jni_utils.NativeObject; + +public class NativeConnection extends NativeObject { + + private final HashSet resultHandles; + + public static NativeConnection of(String dsn) { + return of(dsn, null); + } + + public static NativeConnection of(String dsn, AsyncExecutor executor) { + final long executorHandle = executor != null ? executor.getNativeHandle() : 0; + final long nativeHandle = constructor(executorHandle, dsn); + return new NativeConnection(nativeHandle, executorHandle); + } + + private NativeConnection(long nativeHandle, long executorHandle) { + super(nativeHandle, executorHandle); + this.resultHandles = new HashSet<>(); + } + + public Long execInsertWithAttachment(String sql, String path) { + return loadFile(nativeHandle, executorHandle, sql, path); + } + + /** + * Execute a SQL query and return a result set iterator. 
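The JNI layer can also be used without going through JDBC; a minimal sketch, where the DSN and query are placeholders and the native library is assumed to have been loaded already (the JDBC driver does this in its static initializer):

import java.util.List;

import com.databend.bendsql.NativeConnection;
import com.databend.bendsql.NativeRowBatchIterator;

class NativeConnectionExample {
    public static void main(String[] args) {
        NativeConnection conn = NativeConnection.of("databend://root:@localhost:8000/default");
        try {
            NativeRowBatchIterator it = conn.execute("SELECT number FROM numbers(3)");
            if (it == null) {
                return; // the statement produced no result set
            }
            try {
                System.out.println("columns: " + it.getSchema().size());
                while (it.hasNext()) {
                    // each batch decodes to a list of rows of String values
                    for (List<String> row : it.next()) {
                        System.out.println(row);
                    }
                }
            } finally {
                it.close();
            }
        } finally {
            conn.close();
        }
    }
}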
+ * @param sql the SQL query to execute + * @return a result set iterator, null if the query does not return any result set (has no schema) + */ + public NativeRowBatchIterator execute(String sql) { + if (sql == null) { + throw new NullPointerException("SQL query is null"); + } + final long resultHandle = execute(nativeHandle, executorHandle, sql); + if (resultHandle == 0) { + return null; + } else { + this.resultHandles.add(resultHandle); + return new NativeRowBatchIterator(resultHandle, executorHandle, this); + } + } + + public void close_result(long resultHandle) { + this.resultHandles.remove(resultHandle); + } + + @Override + public void close() { + super.close(); + for (Long resultHandle : resultHandles) { + close_result(resultHandle); + } + this.resultHandles.clear(); + } + + @Override + protected native void disposeInternal(long handle, long executorHandle); + + private static native long constructor(long executorHandle, String dsn); + private static native long execute(long nativeHandle, long executorHandle, String sql); + private static native long loadFile(long nativeHandle, long executorHandle, String sql, String path); +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/NativeRowBatchIterator.java b/bindings/java/src/main/java/com/databend/bendsql/NativeRowBatchIterator.java new file mode 100644 index 00000000..2d77dc3b --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/NativeRowBatchIterator.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 Datafuse Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.databend.bendsql; + +import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.databend.bendsql.jni_utils.NativeObject; +import com.databend.client.QueryRowField; +import com.databend.client.data.DatabendRawType; + + +public class NativeRowBatchIterator extends NativeObject implements Iterator>> { + private final long connectionHandle; + private final NativeConnection connection; + + private boolean isFinished; + private String currentRowBatch; + private List schema; + ObjectMapper objectMapper = new ObjectMapper(); + + static class Field { + public String name; + public String type; + } + + public NativeRowBatchIterator(long nativeHandle, long executorHandle, NativeConnection connection) { + super(nativeHandle, executorHandle); + this.connectionHandle = nativeHandle; + this.connection = connection; + this.isFinished = false; + String schemaString = getSchema(nativeHandle); + if (schemaString!= null) { + try { + List schemaRaw = objectMapper.readValue(schemaString, new TypeReference>() { + }); + this.schema = schemaRaw.stream() + .map(field -> new QueryRowField(field.name, new DatabendRawType(field.type))) + .collect(Collectors.toList()); + } catch (JsonProcessingException e) { + e.printStackTrace(); + throw new RuntimeException("Failed to parse JSON schema: " + e.getMessage(), e); + } + } else { + this.schema = null; + } + } + + public List getSchema() { + return schema; + } + + public boolean hasNext() { + if (currentRowBatch == null) { + currentRowBatch = fetchNextRowBatch(nativeHandle, executorHandle); + } + if (currentRowBatch == null) { + isFinished = true; + return false; + } + return true; + } + + @Override + public void close() { + super.close(); + connection.close_result(nativeHandle); + } + + @Override + public List> next() { + if (!hasNext()) { + throw new NoSuchElementException(); + } + List> rows; + try { + rows = objectMapper.readValue(currentRowBatch, new TypeReference>>() { + }); + } catch (JsonProcessingException e) { + e.printStackTrace(); + throw new RuntimeException("Failed to parse JSON data: " + e.getMessage(), e); + } + currentRowBatch = null; + + return rows; + } + + private native String fetchNextRowBatch(long nativeHandle, long executorHandle); + + private native String getSchema(long nativeHandle); + + protected native void disposeInternal(long handle, long executorHandle); +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/NoUpdateDataSet.java b/bindings/java/src/main/java/com/databend/bendsql/NoUpdateDataSet.java new file mode 100644 index 00000000..14076169 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/NoUpdateDataSet.java @@ -0,0 +1,434 @@ +/* + * Copyright 2021 Datafuse Labs + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.databend.bendsql; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.sql.*; + +public abstract class NoUpdateDataSet implements ResultSet { + @Override + public void updateNull(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNull(int) not implemented"); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBoolean(int, boolean) not implemented"); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateByte(int, byte) not implemented"); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateShort(int, short) not implemented"); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateInt(int, int) not implemented"); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateLong(int, long) not implemented"); + } + + @Override + public void updateFloat(int columnIndex, float x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateFloat(int, float) not implemented"); + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateDouble(int, double) not implemented"); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBigDecimal(int, BigDecimal) not implemented"); + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateString(int, String) not implemented"); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBytes(int, byte[]) not implemented"); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateDate(int, Date) not implemented"); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateTime(int, Time) not implemented"); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateTimestamp(int, Timestamp) not implemented"); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(int, InputStream, int) not implemented"); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(int, InputStream, int) not implemented"); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(int, Reader, int) not implemented"); + } + + @Override + public void 
updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateObject(int, Object, int) not implemented"); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateObject(int, Object) not implemented"); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNull(String) not implemented"); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBoolean(String, boolean) not implemented"); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateByte(String, byte) not implemented"); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateShort(String, short) not implemented"); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateInt(String, int) not implemented"); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateLong(String, long) not implemented"); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateFloat(String, float) not implemented"); + } + + @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateDouble(String, double) not implemented"); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBigDecimal(String, BigDecimal) not implemented"); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateString(String, String) not implemented"); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBytes(String, byte[]) not implemented"); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateDate(String, Date) not implemented"); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateTime(String, Time) not implemented"); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateTimestamp(String, Timestamp) not implemented"); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(String, InputStream, int) not implemented"); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(String, InputStream, int) not 
implemented"); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(String, Reader, int) not implemented"); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateObject(String, Object, int) not implemented"); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateObject(String, Object) not implemented"); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateRef(int, Ref) not implemented"); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateRef(String, Ref) not implemented"); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(int, Blob) not implemented"); + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(String, Blob) not implemented"); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(int, Clob) not implemented"); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(String, Clob) not implemented"); + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateArray(int, Array) not implemented"); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateArray(String, Array) not implemented"); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateRowId(int, RowId) not implemented"); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateRowId(String, RowId) not implemented"); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNString(int, String) not implemented"); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNString(String, String) not implemented"); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(int, NClob) not implemented"); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(String, NClob) not implemented"); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateSQLXML(int, SQLXML) not implemented"); + } + + @Override + 
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateSQLXML(String, SQLXML) not implemented"); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNCharacterStream(int, Reader, long) not implemented"); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNCharacterStream(String, Reader, long) not implemented"); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(int, InputStream, long) not implemented"); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(int, InputStream, long) not implemented"); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(int, Reader, long) not implemented"); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(String, InputStream, long) not implemented"); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(String, InputStream, long) not implemented"); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(String, Reader, long) not implemented"); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(int, InputStream, long) not implemented"); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(String, InputStream, long) not implemented"); + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(int, Reader, long) not implemented"); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(String, Reader, long) not implemented"); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(int, Reader, long) not implemented"); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(String, Reader, long) not implemented"); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method 
updateNCharacterStream(int, Reader) not implemented"); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNCharacterStream(String, Reader) not implemented"); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(int, InputStream) not implemented"); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(int, InputStream) not implemented"); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(int, Reader) not implemented"); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateAsciiStream(String, InputStream) not implemented"); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBinaryStream(String, InputStream) not implemented"); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateCharacterStream(String, Reader) not implemented"); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(int, InputStream) not implemented"); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateBlob(String, InputStream) not implemented"); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(int, Reader) not implemented"); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateClob(String, Reader) not implemented"); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(int, Reader) not implemented"); + } + + @Override + public void updateNClob(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateNClob(String, Reader) not implemented"); + } +} \ No newline at end of file diff --git a/bindings/java/src/main/java/com/databend/bendsql/ObjectCasts.java b/bindings/java/src/main/java/com/databend/bendsql/ObjectCasts.java new file mode 100644 index 00000000..6f70eb2a --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/ObjectCasts.java @@ -0,0 +1,204 @@ +package com.databend.bendsql; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.sql.SQLException; + +import static java.lang.Byte.parseByte; +import static java.lang.Double.parseDouble; +import static java.lang.Float.parseFloat; +import static java.lang.Integer.parseInt; +import static java.lang.Long.parseLong; +import static java.lang.Short.parseShort; +import static java.lang.String.format; +import static 
java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Locale.ENGLISH; + +final class ObjectCasts { + private ObjectCasts() { + } + + public static boolean castToBoolean(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (Boolean) x; + } + try { + if (x instanceof Number) { + return ((Number) x).intValue() != 0; + } + if (x instanceof String) { + switch (((String) x).toLowerCase(ENGLISH)) { + case "0": + case "false": + return false; + case "1": + case "true": + return true; + } + throw new IllegalArgumentException("Invalid boolean value: " + x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static byte castToByte(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (byte) (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).byteValue(); + } + if (x instanceof String) { + return parseByte((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static short castToShort(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (short) (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).shortValue(); + } + if (x instanceof String) { + return parseShort((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static int castToInt(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).intValue(); + } + if (x instanceof String) { + return parseInt((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static long castToLong(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).longValue(); + } + if (x instanceof String) { + return parseLong((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static float castToFloat(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).floatValue(); + } + if (x instanceof String) { + return parseFloat((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static double castToDouble(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return (((Boolean) x) ? 1 : 0); + } + try { + if (x instanceof Number) { + return ((Number) x).doubleValue(); + } + if (x instanceof String) { + return parseDouble((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static BigDecimal castToBigDecimal(Object x, int targetSqlType) + throws SQLException { + if (x instanceof Boolean) { + return BigDecimal.valueOf(((Boolean) x) ? 
1 : 0); + } + if (x instanceof BigInteger) { + return new BigDecimal((BigInteger) x); + } + if (x instanceof BigDecimal) { + return (BigDecimal) x; + } + try { + if ((x instanceof Byte) || (x instanceof Short) || (x instanceof Integer) || (x instanceof Long)) { + return BigDecimal.valueOf(((Number) x).longValue()); + } + if ((x instanceof Float) || (x instanceof Double)) { + return BigDecimal.valueOf(((Number) x).doubleValue()); + } + if (x instanceof String) { + return new BigDecimal((String) x); + } + } catch (RuntimeException e) { + throw invalidConversion(x, targetSqlType, e); + } + throw invalidConversion(x, targetSqlType); + } + + public static byte[] castToBinary(Object x, int targetSqlType) + throws SQLException { + if (x instanceof byte[]) { + return (byte[]) x; + } + if (x instanceof String) { + return ((String) x).getBytes(UTF_8); + } + throw invalidConversion(x, targetSqlType); + } + + private static SQLException invalidConversion(Object x, int sqlType) { + return invalidConversion(x, sqlType, null); + } + + private static SQLException invalidConversion(Object x, int sqlType, Exception e) { + return new SQLException(format("Cannot convert instance of %s to SQL type %s", x.getClass().getName(), sqlType), e); + } + + static SQLException invalidConversion(Object x, String toType) { + return new SQLException(format("Cannot convert instance of %s to %s", x.getClass().getName(), toType)); + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/TrivialConnection.java b/bindings/java/src/main/java/com/databend/bendsql/TrivialConnection.java new file mode 100644 index 00000000..846c0677 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/TrivialConnection.java @@ -0,0 +1,214 @@ +package com.databend.bendsql; + +import java.sql.*; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Executor; + +public abstract class TrivialConnection implements Connection { + + @Override + public String nativeSQL(String sql) throws SQLException { + return sql; + } + + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + } + + @Override + public boolean isReadOnly() throws SQLException { + return false; + } + + @Override + public void setCatalog(String catalog) throws SQLException { + } + + @Override + public String getCatalog() throws SQLException { + return null; + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + throw new SQLFeatureNotSupportedException("setTransactionIsolation is not supported"); + } + + @Override + public int getTransactionIsolation() throws SQLException { + return Connection.TRANSACTION_NONE; + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException { + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareCall is not supported"); + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareCall is not supported"); + } + + @Override + public Map> getTypeMap() throws SQLException { + throw new SQLFeatureNotSupportedException("getTypeMap is not supported"); + } + + @Override + public void setTypeMap(Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException("setTypeMap is not supported"); + } + + @Override + public void 
setHoldability(int holdability) throws SQLException { + throw new SQLFeatureNotSupportedException("setHoldability is not supported"); + } + + @Override + public int getHoldability() throws SQLException { + return ResultSet.HOLD_CURSORS_OVER_COMMIT; + } + + @Override + public Savepoint setSavepoint() throws SQLException { + throw new SQLFeatureNotSupportedException("setSavepoint is not supported"); + } + + @Override + public Savepoint setSavepoint(String name) throws SQLException { + throw new SQLFeatureNotSupportedException("setSavepoint is not supported"); + } + + @Override + public void rollback(Savepoint savepoint) throws SQLException { + throw new SQLFeatureNotSupportedException("rollback with savepoint is not supported"); + } + + @Override + public void releaseSavepoint(Savepoint savepoint) throws SQLException { + throw new SQLFeatureNotSupportedException("releaseSavepoint is not supported"); + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException { + throw new SQLFeatureNotSupportedException("createStatement is not supported"); + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareStatement is not supported"); + } + + @Override + public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + throw new SQLFeatureNotSupportedException("createStatement is not supported"); + } + + @Override + public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareStatement is not supported"); + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareCall is not supported"); + } + + @Override + public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareStatement is not supported"); + } + + @Override + public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareStatement is not supported"); + } + + @Override + public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException { + throw new SQLFeatureNotSupportedException("prepareStatement is not supported"); + } + + @Override + public Clob createClob() throws SQLException { + throw new SQLFeatureNotSupportedException("createClob is not supported"); + } + + @Override + public Blob createBlob() throws SQLException { + throw new SQLFeatureNotSupportedException("createBlob is not supported"); + } + + @Override + public NClob createNClob() throws SQLException { + throw new SQLFeatureNotSupportedException("createNClob is not supported"); + } + + @Override + public SQLXML createSQLXML() throws SQLException { + throw new SQLFeatureNotSupportedException("createSQLXML is not supported"); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return !isClosed(); + } + + @Override + public void setClientInfo(String name, String value) throws SQLClientInfoException { + throw new UnsupportedOperationException("setClientInfo is not supported"); + } + + + @Override + public String 
getClientInfo(String name) throws SQLException { + throw new SQLFeatureNotSupportedException("getClientInfo is not supported"); + } + + @Override + public Properties getClientInfo() throws SQLException { + throw new SQLFeatureNotSupportedException("getClientInfo is not supported"); + } + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + throw new SQLFeatureNotSupportedException("createArrayOf is not supported"); + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + throw new SQLFeatureNotSupportedException("createStruct is not supported"); + } + + @Override + public int getNetworkTimeout() throws SQLException { + // zero means there is no limit + return 0; + } + + @Override + public void setClientInfo(Properties properties) throws SQLClientInfoException { + throw new UnsupportedOperationException("Unimplemented method 'setClientInfo'"); + } + + @Override + public void abort(Executor executor) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'abort'"); + } + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'setNetworkTimeout'"); + } +} \ No newline at end of file diff --git a/bindings/java/src/main/java/com/databend/bendsql/TrivialResultSet.java b/bindings/java/src/main/java/com/databend/bendsql/TrivialResultSet.java new file mode 100644 index 00000000..b7451305 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/TrivialResultSet.java @@ -0,0 +1,132 @@ +package com.databend.bendsql; + +import java.sql.*; + +public abstract class TrivialResultSet extends NoUpdateDataSet { + + @Override + public boolean rowDeleted() throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'rowDeleted'"); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getRowId'"); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw new UnsupportedOperationException("Unimplemented method 'getRowId'"); + } + + @Override + public boolean wasNull() throws SQLException { + throw new SQLFeatureNotSupportedException("Method wasNull() not implemented"); + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException("Method findColumn(String) not implemented"); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + throw new SQLFeatureNotSupportedException("Method isBeforeFirst() not implemented"); + } + + @Override + public boolean isAfterLast() throws SQLException { + throw new SQLFeatureNotSupportedException("Method isAfterLast() not implemented"); + } + + @Override + public boolean isFirst() throws SQLException { + throw new SQLFeatureNotSupportedException("Method isFirst() not implemented"); + } + + @Override + public boolean isLast() throws SQLException { + throw new SQLFeatureNotSupportedException("Method isLast() not implemented"); + } + + @Override + public void beforeFirst() throws SQLException { + throw new SQLFeatureNotSupportedException("Method beforeFirst() not implemented"); + } + + @Override + public void afterLast() throws SQLException { + throw new SQLFeatureNotSupportedException("Method afterLast() not implemented"); + } + + @Override + public boolean first() throws SQLException { + throw new 
SQLFeatureNotSupportedException("Method first() not implemented"); + } + + @Override + public boolean last() throws SQLException { + throw new SQLFeatureNotSupportedException("Method last() not implemented"); + } + + @Override + public boolean absolute(int row) throws SQLException { + throw new SQLFeatureNotSupportedException("Method absolute(int) not implemented"); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw new SQLFeatureNotSupportedException("Method relative(int) not implemented"); + } + + @Override + public boolean previous() throws SQLException { + throw new SQLFeatureNotSupportedException("Method previous() not implemented"); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + if (direction != ResultSet.FETCH_FORWARD) { + throw new SQLFeatureNotSupportedException("Only FETCH_FORWARD is supported"); + } + } + + @Override + public void setFetchSize(int rows) throws SQLException { + } + + @Override + public void insertRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method insertRow() not implemented"); + } + + @Override + public void updateRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method updateRow() not implemented"); + } + + @Override + public void deleteRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method deleteRow() not implemented"); + } + + @Override + public void refreshRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method refreshRow() not implemented"); + } + + @Override + public void cancelRowUpdates() throws SQLException { + throw new SQLFeatureNotSupportedException("Method cancelRowUpdates() not implemented"); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method moveToInsertRow() not implemented"); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw new SQLFeatureNotSupportedException("Method moveToCurrentRow() not implemented"); + } +} \ No newline at end of file diff --git a/bindings/java/src/main/java/com/databend/bendsql/TrivialStatement.java b/bindings/java/src/main/java/com/databend/bendsql/TrivialStatement.java new file mode 100644 index 00000000..2f928242 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/TrivialStatement.java @@ -0,0 +1,227 @@ +package com.databend.bendsql; + +import java.sql.*; + +public abstract class TrivialStatement implements Statement { + @Override + public int getMaxFieldSize() throws SQLException { + throw new SQLFeatureNotSupportedException("getMaxFieldSize is not supported"); + } + + @Override + public void setMaxFieldSize(int max) throws SQLException { + throw new SQLFeatureNotSupportedException("setMaxFieldSize is not supported"); + } + + @Override + public int getMaxRows() throws SQLException { + throw new SQLFeatureNotSupportedException("getMaxRows is not supported"); + } + + @Override + public void setMaxRows(int max) throws SQLException { + throw new SQLFeatureNotSupportedException("setMaxRows is not supported"); + } + + @Override + public void setEscapeProcessing(boolean enable) throws SQLException { + throw new SQLFeatureNotSupportedException("setEscapeProcessing is not supported"); + } + + @Override + public int getQueryTimeout() throws SQLException { + throw new SQLFeatureNotSupportedException("getQueryTimeout is not supported"); + } + + @Override + public void setQueryTimeout(int seconds) throws SQLException { + throw new 
SQLFeatureNotSupportedException("setQueryTimeout is not supported"); + } + + @Override + public void cancel() throws SQLException { + throw new SQLFeatureNotSupportedException("cancel is not supported"); + } + + @Override + public SQLWarning getWarnings() throws SQLException { + throw new SQLFeatureNotSupportedException("getWarnings is not supported"); + } + + @Override + public void clearWarnings() throws SQLException { + throw new SQLFeatureNotSupportedException("clearWarnings is not supported"); + } + + @Override + public void setCursorName(String name) throws SQLException { + throw new SQLFeatureNotSupportedException("setCursorName is not supported"); + } + + @Override + public boolean getMoreResults() throws SQLException { + throw new SQLFeatureNotSupportedException("getMoreResults is not supported"); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + throw new SQLFeatureNotSupportedException("setFetchDirection is not supported"); + } + + @Override + public int getFetchDirection() throws SQLException { + throw new SQLFeatureNotSupportedException("getFetchDirection is not supported"); + } + + @Override + public void setFetchSize(int rows) throws SQLException { + throw new SQLFeatureNotSupportedException("setFetchSize is not supported"); + } + + @Override + public int getFetchSize() throws SQLException { + throw new SQLFeatureNotSupportedException("getFetchSize is not supported"); + } + + @Override + public int getResultSetConcurrency() throws SQLException { + throw new SQLFeatureNotSupportedException("getResultSetConcurrency is not supported"); + } + + @Override + public int getResultSetType() throws SQLException { + throw new SQLFeatureNotSupportedException("getResultSetType is not supported"); + } + + @Override + public void addBatch(String sql) throws SQLException { + throw new SQLFeatureNotSupportedException("addBatch is not supported"); + } + + @Override + public void clearBatch() throws SQLException { + throw new SQLFeatureNotSupportedException("clearBatch is not supported"); + } + + @Override + public int[] executeBatch() throws SQLException { + throw new SQLFeatureNotSupportedException("executeBatch is not supported"); + } + + + + @Override + public boolean getMoreResults(int current) throws SQLException { + throw new SQLFeatureNotSupportedException("getMoreResults is not supported"); + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + throw new SQLFeatureNotSupportedException("getGeneratedKeys is not supported"); + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + throw new SQLFeatureNotSupportedException("executeUpdate is not supported"); + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + throw new SQLFeatureNotSupportedException("executeUpdate is not supported"); + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + throw new SQLFeatureNotSupportedException("executeUpdate is not supported"); + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + throw new SQLFeatureNotSupportedException("execute is not supported"); + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + throw new SQLFeatureNotSupportedException("execute is not supported"); + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + throw new 
SQLFeatureNotSupportedException("execute is not supported"); + } + + @Override + public int getResultSetHoldability() throws SQLException { + throw new SQLFeatureNotSupportedException("getResultSetHoldability is not supported"); + } + + @Override + public boolean isClosed() throws SQLException { + throw new SQLFeatureNotSupportedException("isClosed is not supported"); + } + + @Override + public void setPoolable(boolean poolable) throws SQLException { + throw new SQLFeatureNotSupportedException("setPoolable is not supported"); + } + + @Override + public boolean isPoolable() throws SQLException { + throw new SQLFeatureNotSupportedException("isPoolable is not supported"); + } + + @Override + public void closeOnCompletion() throws SQLException { + throw new SQLFeatureNotSupportedException("closeOnCompletion is not supported"); + } + + @Override + public boolean isCloseOnCompletion() throws SQLException { + throw new SQLFeatureNotSupportedException("isCloseOnCompletion is not supported"); + } + + @Override + public long getLargeUpdateCount() throws SQLException { + throw new SQLFeatureNotSupportedException("getLargeUpdateCount is not supported"); + } + + @Override + public void setLargeMaxRows(long max) throws SQLException { + throw new SQLFeatureNotSupportedException("setLargeMaxRows is not supported"); + } + + @Override + public long getLargeMaxRows() throws SQLException { + throw new SQLFeatureNotSupportedException("getLargeMaxRows is not supported"); + } + + @Override + public long[] executeLargeBatch() throws SQLException { + throw new SQLFeatureNotSupportedException("executeLargeBatch is not supported"); + } + + @Override + public long executeLargeUpdate(String sql) throws SQLException { + throw new SQLFeatureNotSupportedException("executeLargeUpdate is not supported"); + } + + @Override + public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + throw new SQLFeatureNotSupportedException("executeLargeUpdate is not supported"); + } + + @Override + public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException { + throw new SQLFeatureNotSupportedException("executeLargeUpdate is not supported"); + } + + @Override + public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException { + throw new SQLFeatureNotSupportedException("executeLargeUpdate is not supported"); + } + + @Override + public int executeUpdate(String sql) throws SQLException { + throw new SQLFeatureNotSupportedException("executeUpdate is not supported"); + } +} \ No newline at end of file diff --git a/bindings/java/src/main/java/com/databend/bendsql/jni_utils/AsyncExecutor.java b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/AsyncExecutor.java new file mode 100644 index 00000000..37975911 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/AsyncExecutor.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.databend.bendsql.jni_utils; + +/** + * AsyncExecutor represents an underneath executor that runs async tasks spawned in the Rust world. + * + *
+ * <p>
+ * If the executor is passed to construct clients, the executor must outlive the clients.
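+ * <p>A minimal sketch of sharing one executor with a connection; the DSN is illustrative:
+ * <pre>{@code
+ * try (AsyncExecutor executor = AsyncExecutor.createTokioExecutor(4);
+ *      NativeConnection conn = NativeConnection.of("databend://user:pass@localhost:8000", executor)) {
+ *     // issue queries through conn; it is closed before the executor, which must outlive it
+ * }
+ * }</pre>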
+ */ +public class AsyncExecutor extends NativeObject { + public static AsyncExecutor createTokioExecutor(int cores) { + return new AsyncExecutor(makeTokioExecutor(cores)); + } + + private AsyncExecutor(long nativeHandle) { + super(nativeHandle, 0); + } + + @Override + protected native void disposeInternal(long handle, long executorHandle); + + private static native long makeTokioExecutor(int cores); +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/jni_utils/Environment.java b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/Environment.java new file mode 100644 index 00000000..1489604c --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/Environment.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.databend.bendsql.jni_utils; + +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.util.Properties; + +/** + * Environment resolves environment-specific project metadata. + */ +public enum Environment { + INSTANCE; + + public static final String UNKNOWN = ""; + private String classifier = UNKNOWN; + private String projectVersion = UNKNOWN; + + static { + ClassLoader classLoader = Environment.class.getClassLoader(); + try (InputStream is = classLoader.getResourceAsStream("bindings.properties")) { + final Properties properties = new Properties(); + properties.load(is); + INSTANCE.projectVersion = properties.getProperty("project.version", UNKNOWN); + } catch (IOException e) { + throw new UncheckedIOException("cannot load environment properties file", e); + } + + final StringBuilder classifier = new StringBuilder(); + final String os = System.getProperty("os.name").toLowerCase(); + if (os.startsWith("windows")) { + classifier.append("windows"); + } else if (os.startsWith("mac")) { + classifier.append("osx"); + } else { + classifier.append("linux"); + } + classifier.append("-"); + final String arch = System.getProperty("os.arch").toLowerCase(); + if (arch.equals("aarch64")) { + classifier.append("aarch_64"); + } else { + classifier.append("x86_64"); + } + INSTANCE.classifier = classifier.toString(); + } + + /** + * Returns the classifier of the compiled environment. + * + * @return The classifier of the compiled environment. + */ + public static String getClassifier() { + return INSTANCE.classifier; + } + + /** + * Returns the version of the code as String. + * + * @return The project version string. 
+ */ + public static String getVersion() { + return INSTANCE.projectVersion; + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeLibrary.java b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeLibrary.java new file mode 100644 index 00000000..b9bc4385 --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeLibrary.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.databend.bendsql.jni_utils; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.util.concurrent.atomic.AtomicReference; + +import lombok.experimental.UtilityClass; + +/** + * Utility for loading the native library. + */ +@UtilityClass +public class NativeLibrary { + private enum LibraryState { + NOT_LOADED, + LOADING, + LOADED + } + + private static final AtomicReference libraryLoaded = new AtomicReference<>(LibraryState.NOT_LOADED); + + static { + NativeLibrary.loadLibrary(); + } + + public static void loadLibrary() { + if (libraryLoaded.get() == LibraryState.LOADED) { + return; + } + + if (libraryLoaded.compareAndSet(LibraryState.NOT_LOADED, LibraryState.LOADING)) { + try { + doLoadLibrary(); + } catch (IOException e) { + libraryLoaded.set(LibraryState.NOT_LOADED); + throw new UncheckedIOException("Unable to load the Databend shared library", e); + } + libraryLoaded.set(LibraryState.LOADED); + return; + } + + while (libraryLoaded.get() == LibraryState.LOADING) { + try { + Thread.sleep(10); + } catch (InterruptedException ignore) { + } + } + } + + private static void doLoadLibrary() throws IOException { + try { + // try dynamic library - the search path can be configured via "-Djava.library.path" + System.loadLibrary("bendsql_java"); + return; + } catch (UnsatisfiedLinkError ignore) { + // ignore - try from classpath + } + + doLoadBundledLibrary(); + } + + private static void doLoadBundledLibrary() throws IOException { + final String libraryPath = bundledLibraryPath(); + try (final InputStream is = NativeObject.class.getResourceAsStream(libraryPath)) { + if (is == null) { + throw new IOException("cannot find " + libraryPath); + } + final int dot = libraryPath.indexOf('.'); + final File tmpFile = File.createTempFile(libraryPath.substring(0, dot), libraryPath.substring(dot)); + tmpFile.deleteOnExit(); + Files.copy(is, tmpFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + System.load(tmpFile.getAbsolutePath()); + } + } + + private static String bundledLibraryPath() { + final String classifier = Environment.getClassifier(); + final String libraryName = System.mapLibraryName("bendsql_java"); + return "/native/" + 
classifier + "/" + libraryName; + } +} diff --git a/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeObject.java b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeObject.java new file mode 100644 index 00000000..c1a5bd9d --- /dev/null +++ b/bindings/java/src/main/java/com/databend/bendsql/jni_utils/NativeObject.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.databend.bendsql.jni_utils; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * NativeObject is the base-class of all BendSQL classes that have + * a pointer to a native object. + * + *
+ * <p>
+ * NativeObject has the {@link NativeObject#close()} method, which frees its associated + * native object. + * + *
+ * <p>
+ * This function should be called manually, or even better, called implicitly using a + * try-with-resources + * statement, when you are finished with the object. It is no longer called automatically + * during the regular Java GC process via {@link NativeObject#finalize()}. + * + *
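+ * <p>A minimal sketch of the recommended pattern, using a subclass from this package; the DSN is illustrative:
+ * <pre>{@code
+ * try (NativeConnection conn = NativeConnection.of("databend://user:pass@localhost:8000")) {
+ *     // work with conn; its native handle is released exactly once when the block exits
+ * }
+ * }</pre>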

+ * Explanatory note + * + *

+ * When or if the Garbage Collector calls {@link Object#finalize()} + * depends on the JVM implementation and system conditions, which the programmer + * cannot control. In addition, the GC cannot see through the native reference + * long member variable (which is the pointer value to the native object), + * and cannot know what other resources depend on it. + * + *

+ * Finalization is deprecated and subject to removal in a future release. + * The use of finalization can lead to problems with security, performance, + * and reliability. See JEP 421 + * for discussion and alternatives. + */ +public abstract class NativeObject implements AutoCloseable { + static { + NativeLibrary.loadLibrary(); + } + + private final AtomicBoolean disposed = new AtomicBoolean(false); + +/** + * An immutable reference to the value of the underneath pointer pointing + * to some underlying native Databend object. + */ + protected final long nativeHandle; + protected final long executorHandle; + + protected NativeObject(long nativeHandle, long executorHandle) { + this.nativeHandle = nativeHandle; + this.executorHandle = executorHandle; + } + + @Override + public void close() { + if (disposed.compareAndSet(false, true)) { + disposeInternal(nativeHandle, executorHandle); + } + } + + /** + * Check if the object has been disposed. Useful for defensive programming. + * + * @return if the object has been disposed. + */ + public boolean isDisposed() { + return disposed.get(); + } + + /** + * Deletes underlying native object pointer. + * + * @param handle to the native object pointer + */ + protected abstract void disposeInternal(long handle, long executorHandle); + + public long getNativeHandle() { + return nativeHandle; + } +} diff --git a/bindings/java/src/main/resources/META-INF/services/java.sql.Driver b/bindings/java/src/main/resources/META-INF/services/java.sql.Driver new file mode 100644 index 00000000..f2b3502f --- /dev/null +++ b/bindings/java/src/main/resources/META-INF/services/java.sql.Driver @@ -0,0 +1 @@ +com.databend.bendsql.DatabendDriver \ No newline at end of file diff --git a/bindings/java/src/main/resources/bindings.properties b/bindings/java/src/main/resources/bindings.properties new file mode 100644 index 00000000..ad06a8d0 --- /dev/null +++ b/bindings/java/src/main/resources/bindings.properties @@ -0,0 +1,15 @@ +# Copyright 2021 Datafuse Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project.version=${project.version} diff --git a/bindings/java/src/row_batch_iterator.rs b/bindings/java/src/row_batch_iterator.rs new file mode 100644 index 00000000..eb6bdfa3 --- /dev/null +++ b/bindings/java/src/row_batch_iterator.rs @@ -0,0 +1,88 @@ +// Copyright 2021 Datafuse Labs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
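+
+// JNI entry points backing com.databend.bendsql.NativeRowBatchIterator: the exported
+// symbol names follow the JNI Java_<package>_<Class>_<method> convention.
+// fetchNextRowBatch drives the underlying RowBatch and returns one page of rows
+// serialized as a JSON string (or null once the stream is exhausted), getSchema
+// returns the schema as JSON, and disposeInternal frees the boxed RowBatch.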
+
+use crate::jni_utils::executor::executor_or_default;
+use crate::jni_utils::executor::Executor;
+use crate::Result;
+use jni::objects::JClass;
+
+use databend_driver::rest_api::RowBatch;
+use jni::sys::jstring;
+use jni::sys::{jlong, jobject};
+
+use jni::JNIEnv;
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeRowBatchIterator_fetchNextRowBatch(
+    mut env: JNIEnv,
+    _class: JClass,
+    it: *mut RowBatch,
+    executor: *const Executor,
+) -> jstring {
+    fetch_next_row_batch(&mut env, it, executor).unwrap_or_else(|e| {
+        e.throw(&mut env);
+        std::ptr::null_mut()
+    })
+}
+
+fn fetch_next_row_batch(
+    env: &mut JNIEnv,
+    it: *mut RowBatch,
+    executor: *const Executor,
+) -> Result<jstring> {
+    let batch = unsafe { &mut *it };
+
+    let data = executor_or_default(env, executor)?
+        .block_on(async move { batch.fetch_next_page().await })?;
+
+    if !data.is_empty() {
+        let json = serde_json::to_string(&data).unwrap();
+        let jstring = env.new_string(json)?;
+        Ok(jstring.into_raw())
+    } else {
+        Ok(std::ptr::null_mut())
+    }
+}
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeRowBatchIterator_getSchema(
+    mut env: JNIEnv,
+    _class: JClass,
+    it: *mut RowBatch,
+) -> jstring {
+    get_schema(&mut env, it).unwrap_or_else(|e| {
+        e.throw(&mut env);
+        std::ptr::null_mut()
+    })
+}
+
+fn get_schema(env: &mut JNIEnv, it: *mut RowBatch) -> Result<jstring> {
+    let batch = unsafe { &mut *it };
+    let schema = batch.schema();
+    let json = serde_json::to_string(&schema).unwrap();
+    let jstring = env.new_string(json)?;
+    Ok(jstring.into_raw())
+}
+
+#[no_mangle]
+pub extern "system" fn Java_com_databend_bendsql_NativeRowBatchIterator_disposeInternal(
+    _env: JNIEnv,
+    _class: JClass,
+    handle: jlong,
+    _executor: *const Executor,
+) {
+    if handle != 0 {
+        let _ = unsafe { Box::from_raw(handle as *mut RowBatch) };
+    }
+}
diff --git a/bindings/java/src/test/java/com/databend/bendsql/DatabendConnectionTest.java b/bindings/java/src/test/java/com/databend/bendsql/DatabendConnectionTest.java
new file mode 100644
index 00000000..b24a6b29
--- /dev/null
+++ b/bindings/java/src/test/java/com/databend/bendsql/DatabendConnectionTest.java
@@ -0,0 +1,55 @@
+package com.databend.bendsql;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Properties;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+public class DatabendConnectionTest {
+
+    private DatabendConnection connection;
+    private static final String TEST_DSN = "databend://root:@localhost:8000/default?sslmode=disable";
+
+    @BeforeEach
+    void setUp() throws Exception {
+        Properties props = new Properties();
+        connection = new DatabendConnection(TEST_DSN, props);
+    }
+
+    @Test
+    void testCreateStatement() throws Exception {
+        Statement stmt = connection.createStatement();
+        assertNotNull(stmt);
+        assertTrue(stmt instanceof DatabendStatement);
+    }
+
+    @Test
+    void testSimpleQuery() throws Exception {
+        Statement stmt = connection.createStatement();
+
+        ResultSet rs = stmt.executeQuery("SELECT 1");
+
+        assertTrue(rs.next());
+        assertEquals(1, rs.getInt(1));
+        assertFalse(rs.next());
+    }
+
+    @Test
+    void testTableQuery() throws Exception {
+        Statement stmt = connection.createStatement();
+
+        stmt.execute("CREATE OR REPLACE TABLE test_table (id INT, name VARCHAR)");
+        stmt.execute("INSERT INTO test_table VALUES (1, 'test')");
+        ResultSet rs = stmt.executeQuery("SELECT * FROM test_table");
+
+        assertTrue(rs.next());
+        assertEquals(1, rs.getInt("id"));
+        assertEquals("test", rs.getString("name"));
+        assertFalse(rs.next());
+
+        stmt.execute("DROP TABLE IF EXISTS test_table");
+    }
+}
\ No newline at end of file
diff --git a/bindings/java/src/test/java/com/databend/bendsql/DatabendDriverTest.java b/bindings/java/src/test/java/com/databend/bendsql/DatabendDriverTest.java
new file mode 100644
index 00000000..df7f1496
--- /dev/null
+++ b/bindings/java/src/test/java/com/databend/bendsql/DatabendDriverTest.java
@@ -0,0 +1,64 @@
+package com.databend.bendsql;
+
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.List;
+import static com.databend.bendsql.utils.ResultSetTestUtils.assertResultSet;
+
+public class DatabendDriverTest {
+    private static final String TEST_DSN = "jdbc:databend://root:@localhost:8000/default?sslmode=disable";
+
+    @Test
+    public void testSimpleSelect() throws Exception {
+        try (Connection conn = DriverManager.getConnection(TEST_DSN, null, null);
+                Statement stmt = conn.createStatement();
+                ResultSet rs = stmt.executeQuery("SELECT 1, 'hello'")) {
+
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(1));
+            assertEquals("hello", rs.getString(2));
+
+            assertFalse(rs.next());
+        }
+    }
+
+    @Test
+    public void testBatchInsert() throws Exception {
+        try (Connection conn = DriverManager.getConnection(TEST_DSN, null, null)) {
+            try (Statement stmt = conn.createStatement()) {
+                stmt.execute("create or replace table test_prepare_statement (a int, b string)");
+            }
+
+            try (PreparedStatement ps = conn.prepareStatement("insert into test_prepare_statement values")) {
+                ps.setInt(1, 1);
+                ps.setString(2, "a");
+                ps.addBatch();
+                ps.setInt(1, 2);
+                ps.setString(2, "b");
+                ps.addBatch();
+                int[] ans = ps.executeBatch();
+                assertEquals(2, ans.length);
+                //assertEquals(ans[0], 1);
+                //assertEquals(ans[1], 1);
+                Statement statement = conn.createStatement();
+
+                boolean hasResultSet = statement.execute("SELECT * from test_prepare_statement");
+                assertTrue(hasResultSet);
+                try (ResultSet rs = statement.getResultSet()) {
+                    List<Object[]> expected = Arrays.asList(
+                        new Object[]{1, "a"},
+                        new Object[]{2, "b"}
+                    );
+                    assertResultSet(rs, expected);
+                }
+            }
+        }
+    }
+}
diff --git a/bindings/java/src/test/java/com/databend/bendsql/NativeConnectionTest.java b/bindings/java/src/test/java/com/databend/bendsql/NativeConnectionTest.java
new file mode 100644
index 00000000..77be8473
--- /dev/null
+++ b/bindings/java/src/test/java/com/databend/bendsql/NativeConnectionTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2021 Datafuse Labs
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+package com.databend.bendsql;
+
+import org.junit.jupiter.api.Test;
+
+import com.databend.bendsql.jni_utils.NativeLibrary;
+
+import org.junit.jupiter.api.BeforeEach;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import java.sql.SQLException;
+import java.util.List;
+
+class NativeConnectionTest {
+    private NativeConnection connection;
+    private static final String TEST_DSN = "databend://root:@localhost:8000/default?sslmode=disable";
+
+    static {
+        NativeLibrary.loadLibrary();
+    }
+
+
+    @BeforeEach
+    void setUp() {
+        connection = NativeConnection.of(TEST_DSN);
+    }
+
+    @Test
+    void testSimpleQuery() {
+        String sql = "SELECT 1, 2";
+
+        NativeRowBatchIterator result = connection.execute(sql);
+
+        assertNotNull(result);
+        assertTrue(result.hasNext());
+        List<List<String>> batch = result.next();
+        assertEquals(1, batch.size());
+        assertEquals(2, batch.get(0).size());
+        assertEquals("1", batch.get(0).get(0));
+        assertEquals("2", batch.get(0).get(1));
+        assertFalse(result.hasNext());
+    }
+
+    @Test
+    void testQueryInvalidQuery() {
+        String sql = "INVALID SQL QUERY";
+
+        assertThrows(SQLException.class, () -> connection.execute(sql));
+    }
+
+    @Test
+    void testQueryNullQuery() {
+        assertThrows(NullPointerException.class, () -> connection.execute(null));
+    }
+
+    @Test
+    void testQueryEmptyQuery() {
+        String sql = "";
+        assertThrows(SQLException.class, () -> connection.execute(sql));
+    }
+
+    @Test
+    void testMultipleQueriesSequentially() {
+        String sql1 = "SELECT 1";
+        String sql2 = "SELECT 2";
+
+        NativeRowBatchIterator result1 = connection.execute(sql1);
+        NativeRowBatchIterator result2 = connection.execute(sql2);
+
+        assertNotNull(result1);
+        assertNotNull(result2);
+    }
+}
\ No newline at end of file
diff --git a/bindings/java/src/test/java/com/databend/bendsql/utils/ResultSetTestUtils.java b/bindings/java/src/test/java/com/databend/bendsql/utils/ResultSetTestUtils.java
new file mode 100644
index 00000000..0a615e3b
--- /dev/null
+++ b/bindings/java/src/test/java/com/databend/bendsql/utils/ResultSetTestUtils.java
@@ -0,0 +1,51 @@
+package com.databend.bendsql.utils;
+
+import org.junit.jupiter.api.Assertions;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.List;
+
+public class ResultSetTestUtils {
+
+    /**
+     * Asserts that the contents of a ResultSet match the expected rows.
+     *
+     * @param rs the ResultSet instance
+     * @param expectedRows the expected data rows, each row given as an Object array
+     * @throws SQLException if accessing the ResultSet fails
+     */
+    public static void assertResultSet(ResultSet rs, List<Object[]> expectedRows) throws SQLException {
+        int rowNum = 0;
+        while (rs.next()) {
+            Assertions.assertTrue(rowNum < expectedRows.size(),
+                "Got more rows than expected. Expected " + expectedRows.size() + " rows");
+
+            Object[] expectedRow = expectedRows.get(rowNum);
+            for (int i = 0; i < expectedRow.length; i++) {
+                Object expected = expectedRow[i];
+                Object actual = rs.getObject(i + 1);
+                Assertions.assertEquals(expected, actual,
+                    String.format("Row %d, Column %d mismatch", rowNum + 1, i + 1));
+            }
+            rowNum++;
+        }
+        Assertions.assertEquals(expectedRows.size(), rowNum,
+            "Got fewer rows than expected. Expected " + expectedRows.size() + " rows");
+    }
+
+    /**
+     * Asserts the number of rows in a ResultSet.
+     *
+     * @param rs the ResultSet instance
+     * @param expectedCount the expected row count
+     * @throws SQLException if accessing the ResultSet fails
+     */
+    public static void assertRowCount(ResultSet rs, int expectedCount) throws SQLException {
+        int rowCount = 0;
+        while (rs.next()) {
+            rowCount++;
+        }
+        Assertions.assertEquals(expectedCount, rowCount,
+            String.format("Expected %d rows but got %d", expectedCount, rowCount));
+    }
+}
\ No newline at end of file
diff --git a/bindings/java/tools/build.py b/bindings/java/tools/build.py
new file mode 100755
index 00000000..df159e5b
--- /dev/null
+++ b/bindings/java/tools/build.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python3
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+from pathlib import Path
+import shutil
+import subprocess
+
+
+def classifier_to_target(classifier: str) -> str:
+    if classifier == "osx-aarch_64":
+        return "aarch64-apple-darwin"
+    if classifier == "osx-x86_64":
+        return "x86_64-apple-darwin"
+    if classifier == "linux-aarch_64":
+        return "aarch64-unknown-linux-gnu"
+    if classifier == "linux-x86_64":
+        return "x86_64-unknown-linux-gnu"
+    if classifier == "windows-x86_64":
+        return "x86_64-pc-windows-msvc"
+    raise Exception(f"Unsupported classifier: {classifier}")
+
+
+def get_cargo_artifact_name(classifier: str) -> str:
+    if classifier.startswith("osx"):
+        return "libbendsql_java.dylib"
+    if classifier.startswith("linux"):
+        return "libbendsql_java.so"
+    if classifier.startswith("windows"):
+        return "bendsql_java.dll"
+    raise Exception(f"Unsupported classifier: {classifier}")
+
+
+if __name__ == "__main__":
+    basedir = Path(__file__).parent.parent
+
+    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
+    parser.add_argument("--classifier", type=str, required=True)
+    parser.add_argument("--target", type=str, default="")
+    parser.add_argument("--profile", type=str, default="dev")
+    parser.add_argument("--enable-zigbuild", type=str, default="false")
+    args = parser.parse_args()
+
+    if args.target:
+        target = args.target
+    else:
+        target = classifier_to_target(args.classifier)
+
+    # Setup target.
+    command = ["rustup", "target", "add", target]
+    print("$ " + subprocess.list2cmdline(command))
+    subprocess.run(command, cwd=basedir, check=True)
+
+    # Enable zigbuild if the flag is set and we are building a Linux target.
+    enable_zigbuild = args.enable_zigbuild == "true" and "linux" in target
+
+    cmd = [
+        "cargo",
+        "zigbuild" if enable_zigbuild else "build",
+        "--color=always",
+        f"--profile={args.profile}",
+    ]
+
+
+    if enable_zigbuild:
+        # Pin glibc to 2.17 if zigbuild has been enabled.
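+        # cargo-zigbuild accepts a glibc version appended to the target triple,
+        # so the produced .so stays loadable on older distros that ship glibc 2.17.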
+        cmd += ["--target", f"{target}.2.17"]
+    else:
+        cmd += ["--target", target]
+
+    output = basedir / "target" / "bindings"
+    Path(output).mkdir(exist_ok=True, parents=True)
+    cmd += ["--target-dir", str(output)]
+
+    print("$ " + subprocess.list2cmdline(cmd))
+    try:
+        subprocess.run(cmd, cwd=basedir, check=True, capture_output=True, text=True)
+    except subprocess.CalledProcessError as e:
+        print(f"Command failed with exit code {e.returncode}")
+        print("Error output:")
+        print(e.stderr)
+        raise
+
+    # Map the cargo profile name to its output directory ("dev", "test" and "bench" are treated as "debug").
+    profile = "debug" if args.profile in ["dev", "test", "bench"] else args.profile
+    artifact = get_cargo_artifact_name(args.classifier)
+    src = output / target / profile / artifact
+    dst = basedir / "target" / "classes" / "native" / args.classifier / artifact
+    dst.parent.mkdir(exist_ok=True, parents=True)
+    shutil.copy2(src, dst)
diff --git a/sql/src/error.rs b/sql/src/error.rs
index 752a80ce..88ba25cc 100644
--- a/sql/src/error.rs
+++ b/sql/src/error.rs
@@ -48,6 +48,7 @@ pub enum Error {
     #[cfg(feature = "flight-sql")]
     Arrow(arrow_schema::ArrowError),
     Convert(ConvertError),
+    Unexpected(String),
 }
 
 impl std::fmt::Display for Error {
@@ -57,7 +58,7 @@ impl std::fmt::Display for Error {
             Error::Protocol(msg) => write!(f, "ProtocolError: {}", msg),
             Error::Transport(msg) => write!(f, "TransportError: {}", msg),
             Error::IO(msg) => write!(f, "IOError: {}", msg),
-
+            Error::Unexpected(msg) => write!(f, "Unexpected: {}", msg),
             Error::BadArgument(msg) => write!(f, "BadArgument: {}", msg),
             Error::InvalidResponse(msg) => write!(f, "ResponseError: {}", msg),
             #[cfg(feature = "flight-sql")]
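
Taken together with the META-INF/services/java.sql.Driver registration above, the tests show the intended end-to-end flow: DriverManager discovers com.databend.bendsql.DatabendDriver from the classpath, and closing the JDBC objects releases the native resources held by their NativeObject-based wrappers. A minimal usage sketch follows; the QuickStart class name is illustrative, and it assumes a local Databend reachable at the same DSN used by the tests plus the driver jar (with its bundled native library) on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class QuickStart {
        public static void main(String[] args) throws Exception {
            String dsn = "jdbc:databend://root:@localhost:8000/default?sslmode=disable";
            // try-with-resources closes the ResultSet, Statement and Connection in order,
            // which in turn disposes the underlying native handles.
            try (Connection conn = DriverManager.getConnection(dsn, null, null);
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT 1, 'hello'")) {
                while (rs.next()) {
                    System.out.println(rs.getInt(1) + ", " + rs.getString(2));
                }
            }
        }
    }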