#!/usr/bin/env python3
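"""
Discover VyOS package repositories on GitHub and derive package build metadata
from their rebuild-trigger workflows.

Commands:
    vyos-repos             list all repositories of the "vyos" GitHub organization
    vyos-analyze BRANCH    build the package metadata for the given branch
"""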
import logging
import os
import sys
from pprint import pprint

import requests
import yaml
from requests import HTTPError

# Make the repository root importable so the lib.* helpers resolve.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from lib.helpers import setup_logging, refuse_root, data_dir
from lib.objectstorage import ObjectStorage
class GitHub:
"""
    This class is responsible for:
    1) finding all repositories of the organization
    2) filtering out repositories that do not define the expected build workflow
    3) constructing package metadata from the workflow definition
"""
def __init__(self, vyos_stream_mode=False):
        # Some repositories define the workflow, but we do not want to build them
        # because, for example, they are obsolete and replaced by another package.
self.blacklist = {
"current": [
"gh-action-test-vyos-1x",
],
}
self.extra_packages = {}
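        # Extra packages that cannot be discovered from repository workflows can be
        # listed here, keyed by branch. Illustrative shape only (no real entries):
        #
        # self.extra_packages = {
        #     "current": {
        #         "some-package": {
        #             "repo_name": "some-repo",
        #             "branch": "current",
        #             "package_name": "some-package",
        #             "build_type": "dpkg-buildpackage",
        #             "path": "",
        #             "change_patterns": ["*"],
        #             "git_url": "https://github.com/vyos/some-repo.git",
        #         },
        #     },
        # }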
def analyze_repositories_workflow(self, org_name, repositories, branch):
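        """
        Download the trigger workflow of every repository and build a mapping of
        package entries keyed by repository name (or by "<repo>-<package>" for
        multi-package repositories). Each entry carries the repo name, branch,
        package name, build type, source path, change patterns and git URL.

        Two workflow shapes are recognized; the snippets below are illustrative
        sketches (only the fields this parser reads are shown):

        1) Single-package repositories with a "trigger-build" job:

               jobs:
                 trigger-build:
                   with:
                     branch: ${{ github.ref_name }}
                     package_name: ${{ vars.PACKAGE_NAME }}

        2) The vyos-build repository with a "changes" job whose paths-filter step
           lists one filter per package; each filter becomes its own entry built
           from scripts/package-build/<package_name>:

               jobs:
                 changes:
                   steps:
                     - uses: dorny/paths-filter@v3  # any action whose name contains "paths-filter"
                       with:
                         filters: |
                           some-package:
                             - 'scripts/package-build/some-package/**'
        """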
        my_blacklist = self.blacklist.get(branch, [])
packages = {}
unique_package_names = []
for repo_name, git_url in repositories.items():
expected_workflow = "trigger-rebuild-repo-package.yml"
if repo_name == "vyos-build":
expected_workflow = "trigger_rebuild_packages.yml"
if repo_name in my_blacklist:
continue
url = "https://raw.githubusercontent.com/%s/%s/refs/heads/%s/.github/workflows/%s" % (
org_name, repo_name, branch, expected_workflow
)
try:
                response = requests.get(url, timeout=30)
                response.raise_for_status()
                contents = response.text
                # The workflow file comes from the network, so use the safe YAML loader.
                workflow = yaml.safe_load(contents)
if "jobs" not in workflow:
continue
if "trigger-build" in workflow["jobs"]:
definition = workflow["jobs"]["trigger-build"]["with"]
if "ref_name" not in definition["branch"]:
raise Exception("%s: unknown branch: %s" % (repo_name, definition))
if "PACKAGE_NAME" not in definition["package_name"]:
raise Exception("%s: unknown package_name: %s" % (repo_name, definition))
if repo_name in unique_package_names:
raise Exception("Packages with name '%s' was already defined: %s, others: %s" % (
repo_name, definition, packages,
))
unique_package_names.append(repo_name)
packages[repo_name] = {
"repo_name": repo_name,
"branch": branch,
"package_name": repo_name,
"build_type": "dpkg-buildpackage",
"path": "",
"change_patterns": ["*"],
"git_url": git_url,
}
if "changes" in workflow["jobs"]:
for item in workflow["jobs"]["changes"]["steps"]:
if "uses" in item and "paths-filter" in item["uses"]:
                            filters = yaml.safe_load(item["with"]["filters"])
for package_name, patterns in filters.items():
pseudo_repo_name = "%s-%s" % (repo_name, package_name)
if package_name in my_blacklist:
continue
if package_name in unique_package_names:
raise Exception("Packages with name '%s' was already defined: %s, others: %s" % (
repo_name, filters, packages,
))
unique_package_names.append(package_name)
packages[pseudo_repo_name] = {
"repo_name": repo_name,
"branch": branch,
"package_name": package_name,
"build_type": "build.py",
"path": "scripts/package-build/%s" % package_name,
"change_patterns": patterns,
"git_url": git_url,
}
except HTTPError as e:
if e.response.status_code == 404:
                    continue  # A repository without the expected workflow is assumed to be unused, legacy, or deprecated
raise
        if branch in self.extra_packages:
            packages.update(self.extra_packages[branch])
return packages
def find_org_repositories(self, name):
return self.find_repositories("org", name)
def find_repositories(self, kind, name):
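        """
        Return a {repository_name: clone_url} mapping for the given owner. `kind`
        selects the GitHub REST API list endpoint: "org" -> /orgs/<name>/repos,
        "user" -> /users/<name>/repos.
        """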
url = "https://api.github.com/%ss/%s/repos" % (kind, name)
items = self.fetch_all_pages(url)
repositories = {}
for item in items:
repositories[item["name"]] = item["clone_url"]
return repositories
def fetch_all_pages(self, base_url, give_up=1000):
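        """
        Fetch every page of a GitHub REST API list endpoint, 50 items per page,
        until an empty page is returned. Raises once `give_up` pages have been
        fetched to guard against endless pagination. Requests are unauthenticated,
        so GitHub's rate limits apply.
        """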
page = 1
items = []
while True:
            response = requests.get(base_url, params={
                "page": page,
                "per_page": 50,
            }, timeout=30)
response.raise_for_status()
payload = response.json()
if len(payload) == 0:
break
items.extend(payload)
if page >= give_up:
raise Exception("%s: something is wrong, reached page %s and no end in sight" % (base_url, page))
page += 1
return items
if __name__ == "__main__":
setup_logging()
try:
refuse_root()
command = sys.argv[1] if len(sys.argv) > 1 else None
if command is None:
print("What do you want?")
elif command == "vyos-repos":
pprint(GitHub().find_org_repositories("vyos"))
elif command == "vyos-analyze":
branch = sys.argv[2] if len(sys.argv) > 2 else None
if branch is None:
print("ERROR: missing branch, provide branch as second argument", file=sys.stderr)
exit(1)
github = GitHub()
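            # Cache the repository listing on disk so repeated runs do not hit the
            # GitHub API every time (assumption: ObjectStorage.callback returns the
            # stored value when present and only then invokes the callback).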
cache = ObjectStorage(os.path.join(data_dir, "github-vyos-cache.json"), dict, {})
repositories = cache.callback("repos", callback=lambda: github.find_org_repositories("vyos"))
            pprint(github.analyze_repositories_workflow("vyos", repositories, branch))
else:
print("ERROR: unknown command: %s" % command, file=sys.stderr)
exit(1)
except KeyboardInterrupt:
exit(1)
except Exception as e:
logging.exception(e)
exit(1)