-
Notifications
You must be signed in to change notification settings - Fork 22
/
openqa-trigger-bisect-jobs
executable file
·322 lines (273 loc) · 9.44 KB
/
openqa-trigger-bisect-jobs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
#!/usr/bin/env python3
import argparse
from functools import total_ordering
import hashlib
import json
import logging
import os
import re
import subprocess
import sys
from urllib.parse import urlparse, urlunparse
import requests
USER_AGENT = 'openqa-trigger-bisect-jobs (https://github.com/os-autoinst/scripts)'
logging.basicConfig()
log = logging.getLogger(sys.argv[0] if __name__ == "__main__" else __name__)
GOOD = "-"
BAD = "+"
class CustomFormatter(
    argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter
):
    """Help formatter keeping the line breaks of ``__doc__`` intact while
    also appending each argument's default value to its help string."""
@total_ordering
class Incident:
    """A maintenance incident referenced by an openQA job setting.

    Wraps either a full incident repository URL or a bare incident id and
    exposes the numeric id for equality/ordering, so incident sets can be
    diffed and sorted.
    """

    def __init__(self, inc: str) -> None:
        self.incident = inc
        self._incident_id = None  # lazy cache for the extracted id

    @property
    def incident_id(self):
        """The incident id: 7th "/"-separated field of a repo URL, else the raw value."""
        # use "is None" so a (theoretical) falsy id is still cached
        if self._incident_id is None:
            try:
                # e.g. "http://host/ibs/SUSE:/Maintenance:/<id>/..." -> "<id>"
                self._incident_id = self.incident.split("/")[6]
            except IndexError:
                # not a URL — assume the value already is the id itself
                self._incident_id = self.incident
        return self._incident_id

    def __str__(self):
        return self.incident

    def __eq__(self, __o) -> bool:
        return self.incident_id == __o.incident_id

    def __gt__(self, __o) -> bool:
        # numeric comparison; total_ordering derives the remaining operators
        return int(self.incident_id) > int(__o.incident_id)

    def __hash__(self) -> int:
        # md5-based so the hash is stable across processes (unlike id-based)
        return int(hashlib.md5(self.incident.encode()).hexdigest(), base=16)

    def __repr__(self) -> str:
        # fixed: the closing ">" was missing
        return f"<Incident -> {self.incident}>"
def parse_args(args=None):
    """Parse command line options and configure the log level.

    :param args: optional explicit argument list; defaults to ``sys.argv[1:]``
                 (added for testability, backward compatible)
    :return: the parsed :class:`argparse.Namespace`
    """
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=CustomFormatter
    )
    parser.add_argument(
        "-v",
        "--verbose",
        help="Increase verbosity level, specify multiple times to increase verbosity",
        action="count",
        default=1,
    )
    parser.add_argument(
        "--url",
        required=True,
        help="The openQA test URL for which to trigger bisection investigation jobs",
    )
    parser.add_argument(
        "--priority-add",
        # fixed: without type=int a user-supplied value stayed a string and
        # broke the integer addition to the job priority later on
        type=int,
        default=100,
        help="Adds the specified value to the cloned job's priority value",
    )
    parser.add_argument(
        "--dry-run", action="store_true", help="Do not do any action on openQA"
    )
    parsed = parser.parse_args(args)
    # map -v count to log levels; anything above 4 is full debug
    verbose_to_log = {
        0: logging.CRITICAL,
        1: logging.ERROR,
        2: logging.WARN,
        3: logging.INFO,
        4: logging.DEBUG,
    }
    logging_level = logging.DEBUG if parsed.verbose > 4 else verbose_to_log[parsed.verbose]
    log.setLevel(logging_level)
    return parsed
# Base arguments shared by every openqa-cli invocation; the host, HTTP
# method and route are appended per call by the openqa_* helpers below.
client_args = [
    "openqa-cli",
    "api",
    "--header",
    f"User-Agent: {USER_AGENT}",
]
def call(cmds, dry_run=False):
    """Run an external command and return its stdout decoded as UTF-8.

    When *dry_run* is set the command is not executed but echoed with a
    "Simulating: " prefix instead.
    """
    log.debug("call: %s" % cmds)
    prefix = ["echo", "Simulating: "] if dry_run else []
    output = subprocess.check_output(prefix + cmds)
    return output.decode("utf-8")
def openqa_comment(job, host, comment, dry_run):
    """Post *comment* on openQA job *job* at *host* via openqa-cli."""
    route = "jobs/" + str(job) + "/comments"
    cli_args = list(client_args)
    cli_args += ["--host", host, "-X", "POST", route, "text=" + comment]
    return call(cli_args, dry_run)
def openqa_set_job_prio(job_id, host, prio, dry_run):
    """Set the priority of openQA job *job_id* at *host* via openqa-cli."""
    payload = json.dumps({"priority": prio})
    cli_args = list(client_args) + [
        "--host",
        host,
        "--json",
        "--data",
        payload,
        "-X",
        "PUT",
        "jobs/" + str(job_id),
    ]
    return call(cli_args, dry_run)
def openqa_clone(
    cmds,
    dry_run,
    default_opts=("--skip-chained-deps", "--json-output", "--within-instance"),
    default_cmds=("_GROUP=0",),
):
    """Clone an openQA job via openqa-clone-job and return its output.

    :param cmds: extra options and settings placed between the defaults
    :param dry_run: forwarded to call(); only echo the command when true
    :param default_opts: options always passed to openqa-clone-job first
    :param default_cmds: settings always appended last (detach from any group)
    """
    # fixed: tuples instead of mutable default arguments (shared-state
    # pitfall); list() keeps callers passing lists working unchanged
    return call(
        ["openqa-clone-job"] + list(default_opts) + list(cmds) + list(default_cmds),
        dry_run,
    )
def fetch_url(url, request_type="text"):
    """GET *url* and return the response, decoded as JSON when requested.

    Raises the underlying requests/JSON exception after logging it.
    """
    try:
        response = requests.get(url, headers={'User-Agent': USER_AGENT})
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        log.error("Error while fetching %s: %s" % (url, str(e)))
        raise (e)
    if request_type != "json":
        return response
    raw = response.content
    try:
        return response.json()
    except json.decoder.JSONDecodeError as e:
        log.error(
            "Error while decoding JSON from %s -> >>%s<<: %s"
            % (url, raw, str(e))
        )
        raise (e)
def find_changed_issues(investigation):
    """Extract the changed incident sets from a diff_to_last_good text.

    Scans diff lines for *_TEST_ISSUES / *_TEST_REPOS settings and returns a
    mapping of setting name to {"+": bad incidents, "-": good incidents},
    keeping only settings where both sides are present and more than one bad
    incident exists. *_REPOS entries take precedence when available.
    """
    pattern = re.compile(
        r"(?P<diff>[+-])\s+\"(?P<key>[A-Z]+_TEST_(?:ISSUES|REPOS))\"\s*:\s*\"(?P<var>[^\"]*)\","
    )
    changes = {}
    for line in investigation.splitlines():
        found = pattern.match(line)
        if not found:
            continue
        incidents = {Incident(i) for i in found.group("var").split(",")}
        changes.setdefault(found.group("key"), {})[found.group("diff")] = incidents
    for key in list(changes):
        if not changes[key].get(BAD) or not changes[key].get(GOOD):
            # need both sides of the diff to compare anything
            del changes[key]
        elif len(changes[key][BAD]) <= 1:
            # no value in triggering single-incident bisections
            del changes[key]
    changes_repos = {k: v for k, v in changes.items() if k.endswith("REPOS")}
    return changes_repos if changes_repos else changes
def main(args):
    """Trigger one bisection clone per incident added since the last good job.

    Fetches the job and its investigation diff from openQA, determines which
    incident sets changed, clones the job once per added incident with that
    incident removed (lowered priority, detached from groups) and finally
    posts a comment linking the created jobs.
    """
    parsed_url = urlparse(args.url)
    base_url = urlunparse((parsed_url.scheme, parsed_url.netloc, "", "", "", ""))
    # fixed: lstrip("/tests/") strips a character *set* from the left rather
    # than the literal prefix; take the last path component instead
    job_id = parsed_url.path.rstrip("/").split("/")[-1]
    test_url = f"{base_url}/api/v1/jobs/{job_id}"
    log.debug("Retrieving job data from %s" % test_url)
    test_data = fetch_url(test_url, request_type="json")
    job = test_data["job"]
    if job['result'] == 'passed':
        log.info(
            "Job %d (%s) is passed, skipping bisection"
            % (job["id"], job["test"])
        )
        return
    if re.search(":investigate:", job["test"]):
        log.info(
            "Job %d (%s) is already an investigation, skipping bisection"
            % (job["id"], job["test"])
        )
        return
    if job.get("clone_id") is not None:
        log.info("Job %d already has a clone, skipping bisection" % job["id"])
        return

    def _has(relation, kind):
        # true when the dependency mapping lists at least one job of *kind*
        return kind in relation and len(relation[kind])

    # skip jobs entangled in parallel or directly chained clusters — cloning
    # them in isolation would not yield a comparable result
    children = job["children"] if "children" in job else []
    parents = job["parents"] if "parents" in job else []
    if (
        _has(children, "Parallel")
        or _has(children, "Directly chained")
        or _has(parents, "Parallel")
        or _has(parents, "Directly chained")
    ):
        return
    investigation_url = f"{base_url}/tests/{job_id}/investigation_ajax"
    log.debug("Retrieving investigation info from %s" % investigation_url)
    investigation = fetch_url(investigation_url, request_type="json")
    log.debug("Received investigation info: %s" % investigation)
    if "diff_to_last_good" not in investigation:
        return
    all_changes = find_changed_issues(investigation["diff_to_last_good"])
    if not all_changes:
        return
    # optional opt-out by (parent group / group) display name
    exclude_group_regex = os.environ.get("exclude_group_regex", "")
    if exclude_group_regex:
        full_group = job.get("group", "")
        if "parent_group" in job:
            full_group = "%s / %s" % (job["parent_group"], full_group)
        if re.search(exclude_group_regex, full_group):
            return
    log.debug("Received job data: %s" % test_data)
    test = job["settings"]["TEST"]
    # int() also tolerates a string --priority-add value
    prio = int(job["priority"]) + int(args.priority_add)
    log.debug("Found test name '%s'" % test)
    created = ""
    added = []
    # collect every incident that is present in BAD but not in GOOD
    for key, changes in all_changes.items():
        removed_key = list(changes[GOOD] - changes[BAD])
        added_key = list(changes[BAD] - changes[GOOD])
        log.debug("[%s] removed: %s, added: %s" % (key, removed_key, added_key))
        added += added_key
    # whole sort is to simplify testability of code
    for issue in sorted({i.incident_id for i in added}, key=int):
        line = {}
        log.info("Triggering one bisection job without issue '%s'" % issue)
        for key in all_changes:
            # use only VARS where the incident is present in BAD
            if any(i.incident_id == issue for i in all_changes[key][BAD]):
                line[key] = ",".join(
                    str(i)
                    for i in sorted(all_changes[key][BAD])
                    if i.incident_id != issue
                )
                log.debug("New set of %s='%s'" % (key, line[key]))
        test_name = test + ":investigate:bisect_without_%s" % issue
        params = (
            [args.url]
            + [k + "=" + v for k, v in line.items()]
            + [
                "TEST=" + test_name,
                "OPENQA_INVESTIGATE_ORIGIN=" + args.url,
                "MAINT_TEST_REPO=",
            ]
        )
        out = openqa_clone(params, args.dry_run)
        created_job_ids = []
        try:
            created_job_ids = json.loads(out).values()
        # narrowed from bare Exception: json.loads signals bad input via
        # ValueError (JSONDecodeError); dry runs do not emit JSON
        except ValueError:
            log.error("openqa-clone-job returned non-JSON output: " + out)
        # fixed: loop variable renamed — it used to shadow the outer job_id
        for created_id in sorted(created_job_ids):
            log.info(f"Created {created_id}")
            created += f"* **{test_name}**: {base_url}/t{created_id}\n"
            # NOTE(review): passes the full test URL as --host while the
            # comment below uses base_url — confirm openqa-cli accepts both
            openqa_set_job_prio(created_id, args.url, prio, args.dry_run)
    if created:
        comment = "Automatic bisect jobs:\n\n" + created
        openqa_comment(job["id"], base_url, comment, args.dry_run)
# Script entry point: parse CLI options, then trigger the bisection jobs.
if __name__ == "__main__":
    main(parse_args())