-
Notifications
You must be signed in to change notification settings - Fork 99
299 lines (260 loc) · 10.8 KB
/
analysis_workflow.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
name: Build with analysis tools
on:
  workflow_dispatch:
    inputs:
      run_all_benchmarks:
        description: Benchmark every commit in python/hashes_to_benchmark.txt instead of only the latest
        type: boolean
      force_publish_result:
        description: Force publish result (requires admin access)
        type: boolean
  schedule: # Schedule the job to run at 12 a.m. daily
    - cron: '0 0 * * *'
  pull_request:
    paths-ignore:
      - "**/*.md"
jobs:
cibw_docker_image:
uses: ./.github/workflows/cibw_docker_image.yml
permissions: {packages: write}
with:
cibuildwheel_ver: "2.12.1"
force_update: false
code_coverage:
needs: [cibw_docker_image]
runs-on: "ubuntu-latest"
container:
image: ${{needs.cibw_docker_image.outputs.tag}}
services:
mongodb:
image: mongo:4.4
ports:
- 27017:27017
env:
VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
VCPKG_MAN_NUGET_USER: ${{secrets.VCPKG_MAN_NUGET_USER}} # For forks to download pre-compiled dependencies from the Man repo
VCPKG_MAN_NUGET_TOKEN: ${{secrets.VCPKG_MAN_NUGET_TOKEN}}
ARCTIC_CMAKE_PRESET: linux-debug
steps:
- uses: actions/[email protected]
with:
submodules: recursive
- name: Get number of CPU cores
uses: SimenB/[email protected]
id: cpu-cores
- name: Extra envs
run: |
. build_tooling/vcpkg_caching.sh # Linux follower needs another call in CIBW
echo -e "VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES
VCPKG_ROOT=$PLATFORM_VCPKG_ROOT" | tee -a $GITHUB_ENV
cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
echo "ARCTICDB_CODE_COVERAGE_BUILD=1" | tee -a $GITHUB_ENV
env:
CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}
- name: Prepare C++ compilation env
run: . build_tooling/prep_cpp_build.sh
- name: CMake compile
# We are pinning the version to 10.6 because >= 10.7, use node20 which is not supported in the container
uses: lukka/[email protected]
with:
cmakeListsTxtPath: ${{github.workspace}}/cpp/CMakeLists.txt
configurePreset: ${{env.ARCTIC_CMAKE_PRESET}}
buildPreset: ${{env.ARCTIC_CMAKE_PRESET}}
env:
ARCTICDB_DEBUG_FIND_PYTHON: ${{vars.ARCTICDB_DEBUG_FIND_PYTHON}}
python_impl_name: 'cp311'
- name: Run C++ Tests
shell: bash -l {0}
run: |
cd cpp/out/linux-debug-build/
ls arcticdb
make -j ${{ steps.cpu-cores.outputs.count }} arcticdb_rapidcheck_tests
make -j ${{ steps.cpu-cores.outputs.count }} test_unit_arcticdb
ctest
# We are chainging the python here because we want to use the default python to build (it is devel version)
# and this python for the rest of the testing
- name: Select Python (Linux)
run: echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH
- name: Install local dependencies with pip
shell: bash
run: |
python -m pip install --upgrade pip
ARCTIC_CMAKE_PRESET=skip pip install -ve .[Testing]
# - name: Test with pytest
# uses: ./.github/actions/run_local_pytest
# with:
# build_type: debug
# threads: 1
# fast_tests_only: 0
# other_params: '-m coverage run '
- name: Get python Coverage report
shell: bash -l {0}
run: |
cd python
python -m coverage report -m | tee output.txt
python -m coverage html
zip -r python_cov.zip htmlcov/
echo "PYTHON_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV
- name: Run Gcovr manually post-pytest
shell: bash -l {0}
run: |
cd cpp/out/linux-debug-build/
python -m pip install gcovr
mkdir coverage
python -m gcovr --txt --html-details coverage/index.html -e vcpkg_installed/ -e proto/ -e ../../third_party -e ../../arcticdb/util/test/ -r ../.. --exclude-throw-branches --exclude-unreachable-branches -u --exclude-function-lines | tee output.txt
zip -r coverage.zip coverage/
echo "CPP_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV
- name: Upload Coverage
uses: actions/[email protected]
with:
name: cpp-coverage-artifact
path: cpp/out/linux-debug-build/coverage.zip
- name: Upload Python Coverage
uses: actions/[email protected]
with:
name: python-coverage-artifact
path: python/python_cov.zip
- name: Restore cached CPP Coverage Percentage from the previous run
id: cache-cov-restore
uses: actions/cache/[email protected]
with:
path: prev_coverage.txt
key: coverage
- name: Get and compare coverage if cache was restored
run: |
# if cache was restored, compare coverage
if [ -f coverage.txt ]; then
PREV_COVERAGE=$(cat prev_coverage.txt | cut -d' ' -f2)
echo "Previous coverage: $PREV_COVERAGE"
CURR_COVERAGE=${{env.CPP_COV_PERCENT}}
echo "CPP_COV_PREV_PERCENT=$PREV_COVERAGE" >> $GITHUB_ENV
echo "Current coverage: $CURR_COVERAGE"
if [ $CURR_COVERAGE -gt $PREV_COVERAGE ]; then
echo "Coverage increased"
elif [ $CURR_COVERAGE -lt $PREV_COVERAGE ]; then
echo "Coverage decreased"
else
echo "Coverage unchanged"
fi
fi
- name: Save CPP Coverage Percentage to file
run: |
echo "Coverage: ${{ env.CPP_COV_PERCENT }}" > current_coverage.txt
- name: Save the current CPP Coverage Percentage to the cache
id: cache-cov-save
uses: actions/cache/[email protected]
with:
path: current_coverage.txt
key: coverage
- name: Check percentage and send Slack notification
if: ${{ env.CPP_COV_PREV_PERCENT && env.CPP_COV_PERCENT && env.CPP_COV_PERCENT < env.CPP_COV_PREV_PERCENT }}
uses: slackapi/[email protected]
with:
# For posting a rich message using Block Kit
payload: |
{
"text": "The CPP Code Coverage has been reduced",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "The CPP Code Coverage from the current run(${{ env.CPP_COV_PERCENT }}%) is lower the previous one(${{ env.CPP_COV_PREV_PERCENT }}%)."
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.ARCTICDB_DEV_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
  # Provision the self-hosted EC2 runner that benchmark_linux_medium executes on.
  # Only runs on the nightly schedule, matching the benchmark job's own condition.
  start_ec2_runner:
    if: github.event_name == 'schedule'
    uses: ./.github/workflows/ec2_runner_jobs.yml
    secrets: inherit
    with:
      job_type: start
  # Nightly ASV benchmark run on the self-hosted EC2 runner started above.
  # Builds with sccache, runs the selected benchmark set, and (on schedule or
  # forced publish) commits results/hashes back and regenerates gh-pages.
  benchmark_linux_medium:
    timeout-minutes: 1200
    if: github.event_name == 'schedule'
    needs: [start_ec2_runner, cibw_docker_image]
    runs-on: ${{ needs.start_ec2_runner.outputs.label }}
    container:
      image: ${{needs.cibw_docker_image.outputs.tag}}
    permissions:
      contents: write # Technically not necessary since the ARCTICDB_TEST_PAT token override exists
    env:
      # this is potentially overflowing the cache, so should be looked into after we address issue #1057
      SCCACHE_GHA_VERSION: ${{vars.SCCACHE_GHA_VERSION || 1}} # Setting this env var enables the caching
      VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
      VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
      CMAKE_C_COMPILER_LAUNCHER: sccache
      CMAKE_CXX_COMPILER_LAUNCHER: sccache
    steps:
      # NOTE(review): the "[email protected]" action references below are scraper-mangled
      # version pins — restore the real tags before use.
      - uses: actions/[email protected]
        with:
          # Full history is needed so ASV can benchmark arbitrary past commits.
          fetch-depth: 0
          token: ${{ secrets.ARCTICDB_TEST_PAT }}
      - name: Set persistent storage variables
        uses: ./.github/actions/set_persistent_storage_env_vars
        with:
          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
      - name: Configure sccache
        uses: mozilla-actions/[email protected]
        with:
          version: "v0.4.0"
      # We are changing the python here because we want to use the default python to build (it is devel version)
      # and this python for the rest of the testing
      - name: Select Python (Linux)
        run: |
          ls /opt/python
          echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH
      - name: Install ASV
        shell: bash -el {0}
        run: |
          git config --global --add safe.directory .
          python -m pip install --upgrade pip
          pip install asv virtualenv
          python -m asv machine -v --yes --machine ArcticDB-Runner-Medium
      - name: Benchmark all commits
        # Only when explicitly requested via the workflow_dispatch input.
        if: inputs.run_all_benchmarks == true
        shell: bash -el {0}
        run: |
          python -m asv run -v --show-stderr HASHFILE:python/hashes_to_benchmark.txt
      - name: Benchmark only latest commit
        # Default nightly path: scheduled runs have no inputs, so run_all_benchmarks is false.
        if: github.event_name != 'pull_request' && inputs.run_all_benchmarks == false
        shell: bash -el {0}
        run: |
          python -m asv run -v --show-stderr HEAD^!
          git log HEAD^..HEAD --oneline -n1 --decorate=no | awk '{print $1;}' >> python/hashes_to_benchmark.txt
      - name: Benchmark against master
        # NOTE(review): unreachable — the job-level "if" restricts this job to
        # schedule events, so github.event_name can never be 'pull_request' here.
        if: github.event_name == 'pull_request' && inputs.run_all_benchmarks == false
        shell: bash -el {0}
        run: |
          python -m asv continuous -v --show-stderr origin/master HEAD -f 1.15
      - name: Publish results
        if: inputs.force_publish_result == true || github.event_name == 'schedule'
        shell: bash -el {0}
        run: |
          git config --global user.name "${GITHUB_ACTOR}"
          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
          git fetch --tags --all
          python -m asv publish -v
          # The ${{ }} expressions below are expanded by Actions before bash runs,
          # so each "if" tests a literal true/false.
          if ${{ github.event_name != 'pull_request' }} ; then
            git add python/.asv/results/*
            git add python/hashes_to_benchmark.txt
            git commit -m "Update ASV Results and hashes to test"
            git push origin HEAD
          fi
          if ${{ github.ref == 'refs/heads/master' }} ; then
            python -m asv gh-pages -v --rewrite
          fi
stop-ec2-runner:
needs: [start_ec2_runner, benchmark_linux_medium]
if: github.event_name == 'schedule'
uses: ./.github/workflows/ec2_runner_jobs.yml
secrets: inherit
with:
job_type: stop
label: ${{ needs.start_ec2_runner.outputs.label }}
ec2-instance-id: ${{ needs.start_ec2_runner.outputs.ec2-instance-id }}