-
Notifications
You must be signed in to change notification settings - Fork 0
/
fetch-ptest-logs
executable file
·103 lines (86 loc) · 3.56 KB
/
fetch-ptest-logs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
#! /usr/bin/env python3
# Copyright (C) 2021 Ross Burton <[email protected]>
# MIT licensed
"""
Given an autobuilder URL for a build step, download the failing ptest logs that
are referenced. For example:
$ fetch-ptest-logs https://autobuilder.yoctoproject.org/typhoon/#/builders/82/builds/3853/steps/12/logs/stdio
Will download the glib-2.0 ptest log file.
"""
import argparse
import ast
import re
import requests
import sys
import logging
# Shared HTTP session so repeated requests to the autobuilder reuse connections.
http = requests.Session()
def buildbot(server, method, **args):
    """Call a Buildbot REST API v2 endpoint and return the parsed JSON.

    server: base URL of the buildbot instance (trailing "/" optional).
    method: API path under /api/v2/ (leading "/" optional).
    args:   extra query parameters passed straight to the API.
    Raises requests.HTTPError on a non-2xx response.
    """
    # Normalise slashes: the URL regex in __main__ captures the server with a
    # trailing "/", and some callers pass methods with a leading "/", which
    # the original concatenation turned into "//" in the request URL.
    url = server.rstrip("/") + "/api/v2/" + method.lstrip("/")
    r = http.get(url, params=args)
    r.raise_for_status()
    return r.json()
def get_destination(server, builder, build):
    """Return the externally reachable publish URL for one build.

    Fetches the build's "publish_destination" property from the buildbot API
    and rewrites the local filesystem path into a public https:// URL.
    """
    response = buildbot(
        server,
        f"builders/{builder}/builds/{build}",
        property="publish_destination",
    )
    local_path = response["builds"][0]["properties"]["publish_destination"][0]
    # The property holds a local path, assumed to look like
    # /srv/autobuilder/[hostname]/..., so map it onto the external URL.
    return local_path.replace("/srv/autobuilder/", "https://")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("url", metavar="URL")
    parser.add_argument("recipes", nargs="*", metavar="RECIPE")
    parser.add_argument("--debug", action="store_true")
    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)

    # Pull the server root and the builder/build/step numbers out of an
    # autobuilder web-UI URL such as
    # https://host/typhoon/#/builders/82/builds/3853/steps/12/logs/stdio
    url_re = r"(?P<server>https?://.+/)#/?builders/(?P<builder>\d+)/builds/(?P<build>\d+)/steps/(?P<step>\d+)/.*"
    match = re.match(url_re, args.url)
    if not match:
        print("Cannot parse URL")
        sys.exit(1)
    logging.debug(f"URL matches: {match.groupdict()}")

    server = match.group("server")
    builder = int(match.group("builder"))
    build = int(match.group("build"))
    step = int(match.group("step"))

    builder_name = buildbot(server, f"/builders/{builder}")["builders"][0]["name"]
    destination = get_destination(server, builder, build)

    if not args.recipes:
        # No recipes named on the command line: scrape the step's stdio log
        # for the "Failed ptests:" summary and fetch those recipes instead.
        logs = buildbot(server, f"/builders/{builder}/builds/{build}/steps/{step}/logs/stdio/contents")
        # TODO handle more than one chunk
        assert len(logs["logchunks"]) == 1
        logchunk = logs["logchunks"][0]

        error_lines = []
        accumulating = False
        for line in logchunk["content"].splitlines():
            # Buildbot log chunks prefix each stdout line with "o";
            # ignore header/stderr lines.
            if not line.startswith("o"):
                continue
            line = line[1:]
            if accumulating and line == "":
                break
            if "Failed ptests:" in line:
                accumulating = True
                continue
            if accumulating:
                error_lines.append(line)

        # The summary is a Python dict literal printed across several lines;
        # literal_eval is safe here (no code execution).
        errors = ast.literal_eval("".join(error_lines))
        for recipe, fails in errors.items():
            args.recipes.append(recipe)
            print(f"Recipe {recipe} failures:")
            # If the failure data is just a string then it's the unparsed output,
            # don't show it. Although in some cases this would mean we don't need
            # to download it in the next step...
            if isinstance(fails, str):
                print("Failed to parse log output")
            else:
                for fail in fails:
                    print(f"- {fail}")
            print()

    for recipe in args.recipes:
        url = destination + f"/testresults/{builder_name}/{recipe}.log"
        filename = f"build-{build}-{recipe}-ptest.log"
        # BUG FIX: the original printed a literal "(unknown)" inside an
        # f-string with no placeholder (almost certainly a corrupted
        # "{filename}"); report the file actually being written.
        print(f"Fetching {filename}")
        logging.debug(f"Fetching {url}")
        r = http.get(url)
        r.raise_for_status()
        with open(filename, "wt") as f:
            f.write(r.text)