# cloudbuild.yaml
#################################################################################
# Deployment Section
#
# Overview:
# 1. Test in sandbox during development
# 2. Deploy to staging on commit to master
# 3. Deploy to prod when a branch is tagged with prod-* or xxx-prod-*
#
# We want to test individual components in sandbox, and avoid stepping on each
# other, so we do NOT automate deployment to sandbox. Each person should
# use a branch name to trigger the single deployment that they are working on.
#
# We want to soak all code in staging before deploying to prod. To avoid
# incompatible components, we deploy ALL elements to staging when we merge
# to master branch.
#
# Deployments to prod are done by deliberately tagging a specific commit,
# typically in the master branch, with a tag starting with prod-*.
# DO NOT just tag the latest version in master, as someone may have
# pushed new code that hasn't had a chance to soak in staging.
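#
# For illustration only, a prod deployment of a soaked commit might look
# like this (the tag name and SHA below are hypothetical):
#
#   git fetch origin
#   git log origin/master   # pick a commit that has already soaked in staging
#   git tag prod-20240115 0a1b2c3d
#   git push origin prod-20240115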
#
# Deploy steps never trigger on a new Pull Request. They trigger only on
# specific branch name patterns, after a merge to master, or on an
# explicit tag that matches "on:" conditions.
#################################################################################
timeout: 1800s
options:
  env:
  - PROJECT_ID=$PROJECT_ID
  - GIT_COMMIT=$COMMIT_SHA
  machineType: 'N1_HIGHCPU_8'
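# The $_DOCKER_TAG, $_CLUSTER_NAME, $_CLUSTER_REGION, and
# $_SERVICE_ACCOUNT_MLAB_TESTING substitutions are assumed to be defined on
# the Cloud Build triggers. Defaults could also be declared in this file,
# e.g. (values illustrative, not the project's actual settings):
#
# substitutions:
#   _DOCKER_TAG: sandbox
#   _CLUSTER_NAME: data-processing
#   _CLUSTER_REGION: us-east1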
steps:
# Make all git tags available.
- name: gcr.io/cloud-builders/git
  id: "Unshallow git clone"
  args: ["fetch", "--unshallow"]
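# (A shallow clone omits tags, so later steps that derive a version from
#  the git history would otherwise not see them.)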
# Fetch travis submodule.
- name: gcr.io/cloud-builders/git
  id: "Update travis submodule"
  args: ["submodule", "update", "--init", "--recursive"]
# TODO: while local docker builds cache intermediate layers, CB does not.
# Combine Dockerfile.testing with the Dockerfile using --target and
# --cache-from to speed up builds. See also:
# https://andrewlock.net/caching-docker-layers-on-serverless-build-hosts-with-multi-stage-builds---target,-and---cache-from/
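# A minimal sketch of that approach, assuming the testing image is also
# pushed to gcr.io under an illustrative name:
#
# - name: gcr.io/cloud-builders/docker
#   entrypoint: bash
#   args: ['-c', 'docker pull gcr.io/$PROJECT_ID/etl-testing:latest || true']
# - name: gcr.io/cloud-builders/docker
#   args: [
#     "build", "--cache-from", "gcr.io/$PROJECT_ID/etl-testing:latest",
#     "-t", "etl-testing", "-f", "Dockerfile.testing", "."
#   ]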
- name: gcr.io/cloud-builders/docker
  id: "Build the testing docker container"
  args: [
    "build", "-t", "etl-testing", "-f", "Dockerfile.testing", "."
  ]
- name: etl-testing
  id: "Run all etl unit tests"
  args:
  - go version
  - go get -v -t ./...
  - go get -v -tags=integration -t ./...
  # Run tests.
  - go test -v -coverprofile=_unit.cov ./...
  # TODO: race detected in TestGardenerAPI_RunAll
  # - go test -v ./... -race
  # Integration testing requires additional SA credentials.
  - ./integration-testing.sh
  # Build the update-schema command, leaving its binary in the Cloud Build
  # /workspace directory for the schema-update steps below.
  - go build ./cmd/update-schema
  env:
  - SERVICE_ACCOUNT_mlab_testing=$_SERVICE_ACCOUNT_MLAB_TESTING
  - WORKSPACE_LINK=/go/src/github.com/m-lab/etl
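# (WORKSPACE_LINK is presumably consumed by the etl-testing entrypoint to
#  link /workspace into the GOPATH location the go commands above expect;
#  that behavior is assumed, not shown in this file.)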
- name: gcr.io/cloud-builders/docker
  id: "Build the etl docker container"
  args: [
    "build",
    "--build-arg", "VERSION=${TAG_NAME}${BRANCH_NAME}",
    "-t", "gcr.io/$PROJECT_ID/etl:$_DOCKER_TAG",
    "-f", "cmd/etl_worker/Dockerfile.k8s", "."
  ]
  waitFor: ['Unshallow git clone']
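# VERSION is ${TAG_NAME}${BRANCH_NAME} because Cloud Build populates only
# one of the two for a given trigger. A Dockerfile could consume the
# build-arg roughly like this hypothetical excerpt (not the actual
# contents of Dockerfile.k8s):
#
#   ARG VERSION
#   RUN go build -ldflags "-X main.Version=$VERSION" ./cmd/etl_worker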
- name: gcr.io/cloud-builders/docker
  id: "Push the docker container to gcr.io"
  args: [
    "push", "gcr.io/$PROJECT_ID/etl:$_DOCKER_TAG"
  ]
- name: etl-testing
  id: "Update table schemas before deploying parsers"
  entrypoint: bash
  args: [
    '-c', './update-schema -standard'
  ]
  env:
  - PROJECT=$PROJECT_ID
- name: etl-testing
  id: "Update wehe table schemas before deploying parsers"
  entrypoint: bash
  args: [
    '-c', './update-schema -experiment wehe -datatype scamper1 &&
          ./update-schema -experiment wehe -datatype annotation2 &&
          ./update-schema -experiment wehe -datatype hopannotation2'
  ]
  env:
  - PROJECT=$PROJECT_ID
- name: etl-testing
  id: "Update msak table schemas before deploying parsers"
  entrypoint: bash
  args: [
    '-c', './update-schema -experiment msak -datatype annotation2 &&
          ./update-schema -experiment msak -datatype scamper1 &&
          ./update-schema -experiment msak -datatype hopannotation2 &&
          ./update-schema -experiment msak -datatype tcpinfo &&
          ./update-schema -experiment msak -datatype pcap'
  ]
  env:
  - PROJECT=$PROJECT_ID
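# For reference, the same schema updates can be run by hand against a test
# project, e.g. (project name illustrative):
#
#   PROJECT=mlab-sandbox ./update-schema -experiment wehe -datatype scamper1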
# UNIVERSAL PARSER: Run apply-cluster.sh
- name: gcr.io/cloud-builders/kubectl
  id: "Deploy etl parser to $_CLUSTER_NAME cluster"
  entrypoint: /bin/bash
  args: [
    '-c', '/builder/kubectl.bash version && ./apply-cluster.sh'
  ]
  env:
  - BIGQUERY_DATASET=tmp_ndt
  - CLOUDSDK_COMPUTE_REGION=$_CLUSTER_REGION
  - CLOUDSDK_CONTAINER_CLUSTER=$_CLUSTER_NAME
  - CLOUDSDK_CORE_PROJECT=$PROJECT_ID
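# The CLOUDSDK_* variables tell the kubectl builder which cluster to target;
# the credential fetch it performs is roughly equivalent to this sketch
# (not the builder's exact behavior):
#
#   gcloud container clusters get-credentials $_CLUSTER_NAME \
#     --region $_CLUSTER_REGION --project $PROJECT_ID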