-
Notifications
You must be signed in to change notification settings - Fork 99
273 lines (239 loc) · 10.3 KB
/
analysis_workflow.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
# Nightly / on-demand analysis build: benchmarks a set of commits with ASV and
# publishes the results to GitHub Pages (see jobs below).
name: Build with analysis tools

on:
  # Manual trigger; optionally re-run the full benchmark suite rather than only
  # the commits that still need benchmarking.
  workflow_dispatch:
    inputs:
      run_all_benchmarks:
        type: boolean
        default: false

  schedule: # Schedule the job to run at 12 a.m. daily
    - cron: '0 0 * * *'

  # NOTE(review): pull_request_target runs with repository secrets against
  # external PRs — the checkout steps below pin the PR head SHA and are already
  # flagged as dangerous in their inline comments.
  pull_request_target:
    paths-ignore:
      - "**/*.md"
jobs:
  # Compute the list of commits/tags that still need benchmarking; exported as a
  # JSON matrix consumed by benchmark_commits below.
  get_commits_to_benchmark:
    name: Get tag commits
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout code
        # NOTE(review): the action version tag was mangled to "[email protected]" in
        # the pasted source (email obfuscation). v3.3.0 restored by convention —
        # TODO confirm against repository history.
        uses: actions/checkout@v3.3.0
        with:
          fetch-depth: 0
          ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || '' }} # Note: This is dangerous if we run automatic CI on external PRs
      - name: Get tags
        id: get_tags
        run: |
          python3 build_tooling/get_commits_for_benchmark.py ${{ inputs.run_all_benchmarks == true && '--run_all_benchmarks' || ''}}
    outputs:
      matrix: ${{ steps.get_tags.outputs.commits }}

  # Fan out: one reusable-workflow invocation per commit from the matrix above.
  benchmark_commits:
    needs: [get_commits_to_benchmark]
    strategy:
      fail-fast: false # keep benchmarking the remaining commits if one fails
      matrix:
        commits: ${{ fromJson(needs.get_commits_to_benchmark.outputs.matrix)}}
    name: Benchmark commit ${{ matrix.commits }}
    uses: ./.github/workflows/benchmark_commits.yml
    secrets: inherit
    with:
      commit: ${{ matrix.commits }}
      run_all_benchmarks: ${{ inputs.run_all_benchmarks || false }}
      run_on_pr_head: ${{ github.event_name == 'pull_request_target' }}

  # Publish the aggregated ASV results to the gh-pages branch (master only).
  publish_benchmark_results_to_gh_pages:
    name: Publish benchmark results to gh-pages
    if: github.ref == 'refs/heads/master'
    needs: [benchmark_commits]
    runs-on: ubuntu-22.04
    container:
      image: ubuntu:22.04 # Native runner doesn't allow setting up the ca softlinks required below
    permissions:
      contents: write # required to push to gh-pages
    steps:
      - name: Select Python
        # NOTE(review): version tag mangled in the pasted source — v4.7.1
        # restored by convention; TODO confirm against repository history.
        uses: actions/setup-python@v4.7.1
        with:
          python-version: "3.10"
      - name: Prepare environment
        shell: bash -el {0}
        run: |
          apt update
          apt install -y git
          python -m pip install arcticdb[Testing] "protobuf<5"
      - name: Setup softlink for SSL
        shell: bash -el {0}
        run: |
          mkdir -p /etc/pki/tls
          ln -s /usr/lib/ssl/certs /etc/pki/tls/certs
          ln -s /etc/ssl/certs/ca-certificates.crt /etc/pki/tls/certs/ca-bundle.crt
      # NOTE(review): version tag mangled in the pasted source — v3.3.0 restored
      # by convention; TODO confirm against repository history.
      - uses: actions/checkout@v3.3.0
        with:
          fetch-depth: 0
          token: ${{ secrets.ARCTICDB_TEST_PAT }}
          ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || '' }} # Note: This is dangerous if we run automatic CI on external PRs
      - name: Set persistent storage variables
        uses: ./.github/actions/set_persistent_storage_env_vars
        with:
          bucket: "arcticdb-ci-benchmark-results"
          aws_access_key: "${{ secrets.AWS_S3_ACCESS_KEY }}"
          aws_secret_key: "${{ secrets.AWS_S3_SECRET_KEY }}"
      - name: Publish results to Github Pages
        shell: bash -el {0}
        run: |
          git config --global --add safe.directory /__w/ArcticDB/ArcticDB
          git config --global user.name "${GITHUB_ACTOR}"
          git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com"
          python build_tooling/transform_asv_results.py --mode extract
          python -m asv publish -v
          python -m asv gh-pages -v --rewrite
# code_coverage:
# runs-on: "ubuntu-22.04"
# container:
# image: quay.io/pypa/manylinux_2_28_x86_64:latest
# services:
# mongodb:
# image: mongo:4.4
# ports:
# - 27017:27017
# env:
# VCPKG_NUGET_USER: ${{secrets.VCPKG_NUGET_USER || github.repository_owner}}
# VCPKG_NUGET_TOKEN: ${{secrets.VCPKG_NUGET_TOKEN || secrets.GITHUB_TOKEN}}
# VCPKG_MAN_NUGET_USER: ${{secrets.VCPKG_MAN_NUGET_USER}} # For forks to download pre-compiled dependencies from the Man repo
# VCPKG_MAN_NUGET_TOKEN: ${{secrets.VCPKG_MAN_NUGET_TOKEN}}
# ARCTIC_CMAKE_PRESET: linux-debug
# ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
# steps:
# - uses: actions/[email protected]
# with:
# submodules: recursive
# ref: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || '' }} # Note: This is dangerous if we run automatic CI on external PRs
# - name: Get number of CPU cores
# uses: SimenB/[email protected]
# id: cpu-cores
# - name: Install deps
# uses: ./.github/actions/setup_deps
# - name: Extra envs
# shell: bash -l {0}
# run: |
# . build_tooling/vcpkg_caching.sh # Linux follower needs another call in CIBW
# echo -e "VCPKG_BINARY_SOURCES=$VCPKG_BINARY_SOURCES
# VCPKG_ROOT=$PLATFORM_VCPKG_ROOT" | tee -a $GITHUB_ENV
# cmake -P cpp/CMake/CpuCount.cmake | sed 's/^-- //' | tee -a $GITHUB_ENV
# echo "ARCTICDB_CODE_COVERAGE_BUILD=1" | tee -a $GITHUB_ENV
# env:
# CMAKE_BUILD_PARALLEL_LEVEL: ${{vars.CMAKE_BUILD_PARALLEL_LEVEL}}
# - name: Prepare C++ compilation env
# run: . build_tooling/prep_cpp_build.sh
# - name: CMake compile
# # We are pinning the version to 10.6 because >= 10.7 uses node20, which is not supported in the container
# uses: lukka/[email protected]
# with:
# cmakeListsTxtPath: ${{github.workspace}}/cpp/CMakeLists.txt
# configurePreset: ${{env.ARCTIC_CMAKE_PRESET}}
# buildPreset: ${{env.ARCTIC_CMAKE_PRESET}}
# env:
# ARCTICDB_DEBUG_FIND_PYTHON: ${{vars.ARCTICDB_DEBUG_FIND_PYTHON}}
# python_impl_name: 'cp311'
# - name: Run C++ Tests
# shell: bash -l {0}
# run: |
# cd cpp/out/linux-debug-build/
# ls arcticdb
# make -j ${{ steps.cpu-cores.outputs.count }} arcticdb_rapidcheck_tests
# make -j ${{ steps.cpu-cores.outputs.count }} test_unit_arcticdb
# ctest
# # We are changing the python here because we want to use the default python to build (it is a devel version)
# # and this python for the rest of the testing
# - name: Select Python (Linux)
# run: echo /opt/python/cp36-cp36m/bin >> $GITHUB_PATH
# - name: Install local dependencies with pip
# shell: bash
# run: |
# python -m pip install --upgrade pip
# ARCTIC_CMAKE_PRESET=skip pip install -ve .[Testing]
# # - name: Test with pytest
# # uses: ./.github/actions/run_local_pytest
# # with:
# # build_type: debug
# # threads: 1
# # fast_tests_only: 0
# # other_params: '-m coverage run '
# - name: Get python Coverage report
# shell: bash -l {0}
# run: |
# cd python
# python -m coverage report -m | tee output.txt
# python -m coverage html
# zip -r python_cov.zip htmlcov/
# echo "PYTHON_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV
# - name: Run Gcovr manually post-pytest
# shell: bash -l {0}
# run: |
# cd cpp/out/linux-debug-build/
# python -m pip install gcovr
# mkdir coverage
# python -m gcovr --txt --html-details coverage/index.html -e vcpkg_installed/ -e proto/ -e ../../third_party -e ../../arcticdb/util/test/ -r ../.. --exclude-throw-branches --exclude-unreachable-branches -u --exclude-function-lines | tee output.txt
# zip -r coverage.zip coverage/
# echo "CPP_COV_PERCENT=$(cat output.txt | grep 'TOTAL' | awk '{print $NF}' | tr -d '%')" >> $GITHUB_ENV
# - name: Upload Coverage
# uses: actions/[email protected]
# with:
# name: cpp-coverage-artifact
# path: cpp/out/linux-debug-build/coverage.zip
# - name: Upload Python Coverage
# uses: actions/[email protected]
# with:
# name: python-coverage-artifact
# path: python/python_cov.zip
# - name: Restore cached CPP Coverage Percentage from the previous run
# id: cache-cov-restore
# uses: actions/cache/[email protected]
# with:
# path: prev_coverage.txt
# key: coverage
# - name: Get and compare coverage if cache was restored
# run: |
# # if cache was restored, compare coverage
# if [ -f coverage.txt ]; then
# PREV_COVERAGE=$(cat prev_coverage.txt | cut -d' ' -f2)
# echo "Previous coverage: $PREV_COVERAGE"
# CURR_COVERAGE=${{env.CPP_COV_PERCENT}}
# echo "CPP_COV_PREV_PERCENT=$PREV_COVERAGE" >> $GITHUB_ENV
# echo "Current coverage: $CURR_COVERAGE"
# if [ $CURR_COVERAGE -gt $PREV_COVERAGE ]; then
# echo "Coverage increased"
# elif [ $CURR_COVERAGE -lt $PREV_COVERAGE ]; then
# echo "Coverage decreased"
# else
# echo "Coverage unchanged"
# fi
# fi
# - name: Save CPP Coverage Percentage to file
# run: |
# echo "Coverage: ${{ env.CPP_COV_PERCENT }}" > current_coverage.txt
# - name: Save the current CPP Coverage Percentage to the cache
# id: cache-cov-save
# uses: actions/cache/[email protected]
# with:
# path: current_coverage.txt
# key: coverage
# - name: Check percentage and send Slack notification
# if: ${{ env.CPP_COV_PREV_PERCENT && env.CPP_COV_PERCENT && env.CPP_COV_PERCENT < env.CPP_COV_PREV_PERCENT }}
# uses: slackapi/[email protected]
# with:
# # For posting a rich message using Block Kit
# payload: |
# {
# "text": "The CPP Code Coverage has been reduced",
# "blocks": [
# {
# "type": "section",
# "text": {
# "type": "mrkdwn",
# "text": "The CPP Code Coverage from the current run(${{ env.CPP_COV_PERCENT }}%) is lower than the previous one(${{ env.CPP_COV_PREV_PERCENT }}%)."
# }
# }
# ]
# }
# env:
# SLACK_WEBHOOK_URL: ${{ secrets.ARCTICDB_DEV_WEBHOOK_URL }}
# SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK