# .travis.yml: Travis CI configuration (forked from getsentry/sentry)
dist: xenial
language: python
python: 2.7

branches:
  only:
    - master

cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - "$NODE_DIR"
    - node_modules
    - "${HOME}/google-cloud-sdk"

addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libgeoip-dev
  chrome: stable

env:
  global:
    - NODE_ENV=development
    # PIP_VERSION causes issues because: https://github.com/pypa/pip/issues/4528
    # Note: this has to be synced with the pip version in the Makefile.
    - PYTHON_PIP_VERSION=19.2.3
    - PIP_USE_PEP517=off
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - PIP_QUIET=1
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - SOUTH_TESTS_MIGRATE=0
    - DJANGO_VERSION=">=1.8,<1.9"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
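    # For illustration: if .nvmrc contained "10.16.3", NODE_DIR below would resolve to
    # "${HOME}/.nvm/versions/node/v10.16.3" (the version number here is hypothetical;
    # the real value is whatever .nvmrc pins).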
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - NODE_OPTIONS=--max-old-space-size=4096

before_install:
  - pip install "pip==${PYTHON_PIP_VERSION}"

script:
  # certain commands require sentry init to be run, but this is only true for
  # running things within Travis
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
  # installing dependencies for after_* steps here ensures they get cached
  # since those steps execute after travis runs `store build cache`

after_failure:
  - dmesg | tail -n 100

after_script:
  - |
    coverage_files=$(ls .artifacts/*coverage.xml || true)
    if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
      pip install -U codecov
      codecov -e TEST_SUITE
    fi
  - ./bin/yarn global add @zeus-ci/cli
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  - $(./bin/yarn global bin)/zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  - $(./bin/yarn global bin)/zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
base_postgres: &postgres_default
  python: 2.7
  services:
    - memcached
    - redis-server
    - postgresql
  before_install:
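    # Snuba and ClickHouse share the host network, so Snuba reaches ClickHouse on
    # localhost:9000 (CLICKHOUSE_SERVER) and the test run can reach Snuba on localhost
    # as well, without any extra port mapping.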
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - python setup.py install_egg_info
    - pip install -U -e ".[dev,tests,optional]"
  before_script:
    - psql -c 'create database sentry;' -U postgres

base_acceptance: &acceptance_default
  python: 2.7
  services:
    - docker
    - memcached
    - redis-server
    - postgresql
  before_install:
    - find "$NODE_DIR" -type d -empty -delete
    - nvm install
    - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
    - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
    - docker ps -a
  install:
    - ./bin/yarn install --pure-lockfile
    - python setup.py install_egg_info
    - pip install -U -e ".[dev,tests,optional]"
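    # Install a chromedriver matching the installed Chrome: `dpkg -s` prints a line such
    # as "Version: 76.0.3809.100-1" (illustrative value), the sed call keeps only the
    # major version ("76"), and LATEST_RELEASE_<major> then resolves the matching driver build.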
    - |
      CHROME_MAJOR_VERSION="$(dpkg -s google-chrome-stable | sed -nr 's/Version: ([0-9]+).*/\1/p')"
      wget -N "https://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE_${CHROME_MAJOR_VERSION})/chromedriver_linux64.zip" -P ~/
    - unzip ~/chromedriver_linux64.zip -d ~/
    - rm ~/chromedriver_linux64.zip
    - sudo install -m755 ~/chromedriver /usr/local/bin/
  before_script:
    - psql -c 'create database sentry;' -U postgres

# each job in the matrix inherits `env/global` and uses everything above,
# but custom `services`, `before_install`, `install`, and `before_script` directives
# may be defined to set up individual job environments with more precision.
matrix:
  fast_finish: true
  include:
    # Lint python and javascript together
    - python: 2.7
      name: 'Linter'
      env: TEST_SUITE=lint
      install:
        - python setup.py install_egg_info
        - SENTRY_LIGHT_BUILD=1 pip install -U -e ".[dev,tests,optional]"
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - ./bin/yarn install --pure-lockfile
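    # TOTAL_TEST_GROUPS / TEST_GROUP in the next two jobs split the backend suite in half;
    # the actual sharding is presumably handled by the `make travis-test-postgres` target
    # and the test runner it invokes, not by anything in this file.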
    - <<: *postgres_default
      name: 'Backend [Postgres] (1/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0
    - <<: *postgres_default
      name: 'Backend [Postgres] (2/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1

    - <<: *acceptance_default
      name: 'Acceptance'
      env: TEST_SUITE=acceptance USE_SNUBA=1

    - python: 2.7
      name: 'Frontend'
      env: TEST_SUITE=js
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
      install:
        - ./bin/yarn install --pure-lockfile

    - python: 2.7
      name: 'Command Line'
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - python setup.py install_egg_info
        - pip install -U -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres

    - python: 2.7
      name: 'Distribution build'
      env: TEST_SUITE=dist
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install

    - <<: *postgres_default
      name: 'Symbolicator Integration'
      env: TEST_SUITE=symbolicator
      before_install:
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
        - docker ps -a

    # snuba in testing
    - python: 2.7
      name: 'Snuba Integration'
      env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
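        # Kafka and Zookeeper also join the host network: KAFKA_ZOOKEEPER_CONNECT points at
        # the Zookeeper container on localhost:2181, and KAFKA_ADVERTISED_LISTENERS advertises
        # localhost:9092, matching SENTRY_ZOOKEEPER_HOSTS / SENTRY_KAFKA_HOSTS in this job's env.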
        - docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
        - docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
        - docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:19.4
        - docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev,tests,optional]"
        - pip install confluent-kafka
      before_script:
        - psql -c 'create database sentry;' -U postgres

    # Deploy 'storybook' (component & style guide) - allowed to fail
    - language: node_js
      name: 'Storybook Deploy'
      env: STORYBOOK_BUILD=1
      before_install:
        # Decrypt the credentials we added to the repo using the key we added with the Travis command line tool
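        # (the .enc file plus the $encrypted_*_key / $encrypted_*_iv variables are what
        # `travis encrypt-file` generates; regenerating the credentials archive would
        # presumably mean re-running that command against a fresh storybook-credentials.tar.gz)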
        - openssl aes-256-cbc -K $encrypted_020be61ef175_key -iv $encrypted_020be61ef175_iv -in .travis/storybook-credentials.tar.gz.enc -out credentials.tar.gz -d
        # If the SDK is not already cached, download it and unpack it
        - if [ ! -d ${HOME}/google-cloud-sdk ]; then curl https://sdk.cloud.google.com | bash; fi
        - tar -xzf credentials.tar.gz
        # Use the decrypted service account credentials to authenticate the command line tool
        - gcloud auth activate-service-account --key-file client-secret.json
      install:
        - ./bin/yarn install --pure-lockfile
        - gcloud version
      script: bash .travis/deploy-storybook.sh
      after_success: skip
      after_failure: skip

  # jobs are defined in matrix/include
  # to specify which jobs are allowed to fail, match the env exactly in matrix/allow_failures
  allow_failures:
    - language: node_js
      env: STORYBOOK_BUILD=1

notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always