diff --git a/.travis.yml b/.travis.yml index 94c64af1e3..24eb017189 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,7 +44,6 @@ jobs: - docker pull sqlflow/sqlflow:latest && docker build --cache-from sqlflow/sqlflow:latest -t sqlflow:latest -f Dockerfile . - docker run --rm -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_units.sh - docker run --rm -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/upload_coveralls.sh - - docker run --rm -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_couler.sh - # run more parallel tests in the same stage: env: SQLFLOW_TEST_DB=hive script: @@ -55,7 +54,7 @@ jobs: script: - docker pull sqlflow/sqlflow:latest && docker build --cache-from sqlflow/sqlflow:latest -t sqlflow:latest -f Dockerfile . - bash scripts/setup_k8s_env.sh - - docker run --rm --net=host -v /var/run/docker.sock:/var/run/docker.sock -v $HOME/.kube:/root/.kube -v /home/$USER/.minikube/:/home/$USER/.minikube/ -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_argo.sh + - docker run --rm --net=host -v /var/run/docker.sock:/var/run/docker.sock -v $HOME/.kube:/root/.kube -v /home/$USER/.minikube/:/home/$USER/.minikube/ -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_couler.sh - docker run --rm --net=host -v /var/run/docker.sock:/var/run/docker.sock -v $HOME/.kube:/root/.kube -v /home/$USER/.minikube/:/home/$USER/.minikube/ -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_ipython.sh - docker run -e MAXCOMPUTE_AK=$MAXCOMPUTE_AK -e MAXCOMPUTE_SK=$MAXCOMPUTE_SK --rm --net=host -v /var/run/docker.sock:/var/run/docker.sock -v $HOME/.kube:/root/.kube -v /home/$USER/.minikube/:/home/$USER/.minikube/ -v $GOPATH:/go -w /go/src/sqlflow.org/sqlflow sqlflow:latest bash scripts/test_maxcompute.sh - env: SQLFLOW_TEST=java diff --git a/pkg/sql/codegen/proto/.gitignore b/pkg/sql/codegen/proto/.gitignore new file mode 
100644 index 0000000000..99e502c253 --- /dev/null +++ b/pkg/sql/codegen/proto/.gitignore @@ -0,0 +1 @@ +intermediate_representation.pb.go \ No newline at end of file diff --git a/python/couler/setup.py b/python/couler/setup.py index 2d5d53c25c..ebdd1baec2 100644 --- a/python/couler/setup.py +++ b/python/couler/setup.py @@ -80,7 +80,7 @@ url=URL, packages=find_packages(exclude=('tests',)), entry_points={ - 'console_scripts': ['couler = couler.__main__:main'], + 'console_scripts': ['couler = couler.client:main'], }, install_requires=REQUIRED, setup_requires=SETUP_REQUIRED, diff --git a/scripts/test_argo.sh b/scripts/test_argo.sh deleted file mode 100644 index 17ce2964bc..0000000000 --- a/scripts/test_argo.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/bin/bash -# Copyright 2019 The SQLFlow Authors. All rights reserved. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -e - -CHECK_INTERVAL_SECS=2 -MESSAGE=$(kubectl create -f https://raw.githubusercontent.com/argoproj/argo/master/examples/hello-world.yaml) -WORKFLOW_NAME=$(echo ${MESSAGE} | cut -d ' ' -f 1 | cut -d '/' -f 2) - -echo WORKFLOW_NAME ${WORKFLOW_NAME} - -for i in {1..30}; do - WORKFLOW_STATUS=$(kubectl get wf ${WORKFLOW_NAME} -o jsonpath='{.status.phase}') - - if [[ "$WORKFLOW_STATUS" == "Succeeded" ]]; then - echo "Argo workflow succeeded." 
- kubectl delete wf ${WORKFLOW_NAME} - exit 0 - else - echo "Argo workflow ${WORKFLOW_NAME} ${WORKFLOW_STATUS}" - sleep ${CHECK_INTERVAL_SECS} - fi -done - -echo "Argo job timed out." -exit 1 diff --git a/scripts/test_couler.sh b/scripts/test_couler.sh index 37fb177e32..3519bad0a7 100644 --- a/scripts/test_couler.sh +++ b/scripts/test_couler.sh @@ -13,6 +13,46 @@ set -e +############# Run Couler unit tests ############# pip install -r python/couler/requirements.txt pytest python/couler/tests + + +############# Run Couler e2e test ############# +CHECK_INTERVAL_SECS=2 +cd python/couler/ && python setup.py install +cat <<EOF > /tmp/sqlflow_couler.py +import couler.argo as couler +couler.run_container(image="docker/whalesay", command='echo "SQLFlow bridges AI and SQL engine."') +EOF + +couler run --mode argo --file /tmp/sqlflow_couler.py > /tmp/sqlflow_argo.yaml + +MESSAGE=$(kubectl create -f /tmp/sqlflow_argo.yaml) + +WORKFLOW_NAME=$(echo ${MESSAGE} | cut -d ' ' -f 1 | cut -d '/' -f 2) + +echo WORKFLOW_NAME ${WORKFLOW_NAME} + + +for i in {1..30}; do + WORKFLOW_STATUS=$(kubectl get wf ${WORKFLOW_NAME} -o jsonpath='{.status.phase}') + +  if [[ "$WORKFLOW_STATUS" == "Succeeded" ]]; then + echo "Argo workflow succeeded." + kubectl delete wf ${WORKFLOW_NAME} + rm -rf /tmp/sqlflow* + exit 0 + else + echo "Argo workflow ${WORKFLOW_NAME} ${WORKFLOW_STATUS}" + sleep ${CHECK_INTERVAL_SECS} + fi +done + +echo "Argo job timed out." +rm -rf /tmp/sqlflow* +exit 1 + + +