1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
|
#
# Copyright (C) 2020 Agilent Technologies, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Output file names; ??= defaults so a distro/local.conf can override them.
REVRECORD_INCFILE_NAME ??= "revs.inc"
REVRECORD_JSON_FILE_NAME ??= "revs.json"
# All rev-record output lives under ${TOPDIR}/revs, one timestamped dir per parse session.
REVRECORD_TOPDIR = "${TOPDIR}/revs"
# Symlink kept pointing at the most recent session directory.
REVRECORD_LATEST_DIR = "${REVRECORD_TOPDIR}/latest"
# A per-multiconfig directory (or just the REVRECORD_LATEST_DIR if multiconfig isn't enabled) for output
REVRECORD_MC_DIR = "${REVRECORD_LATEST_DIR}${@"/${BB_CURRENT_MC}" if d.getVar("BBMULTICONFIG") else ""}"
def get_effective_srcrev_var(d, name):
    """Return the name of the SRCREV variable that takes effect for the
    SRC_URI entry called |name|, or None if no candidate is set in |d|.

    The lookup order (most specific first) mirrors the logic in bitbake's
    fetch2/__init__.py: SRCREV_<name>_pn-<pn>, SRCREV_<name>,
    SRCREV_pn-<pn>, SRCREV.
    """
    pn = d.getVar("PN")
    candidates = []
    if name != '':
        if pn:
            candidates.append("SRCREV_{0}_pn-{1}".format(name, pn))
        candidates.append("SRCREV_{0}".format(name))
    if pn:
        candidates.append("SRCREV_pn-{0}".format(pn))
    candidates.append("SRCREV")
    # First candidate actually present in the datastore wins.
    for candidate in candidates:
        if candidate in d:
            return candidate
    return None
def get_unexpanded_src_uris_by_name(d, only_scm=True):
    """ Return dict of SRC_URI entry name => unexpanded SRC_URI entry.

    Walks the *unexpanded* SRC_URI value so the caller can later inspect
    which variables (e.g. ${BRANCH}) each entry references. Entries that are
    themselves a single whole-variable deref (e.g. "${EXTRA_URIS}") are
    expanded one level and their contents re-queued for processing.

    :param d: the recipe datastore
    :param only_scm: if True, skip entries whose fetcher does not support
                     SRCREV (e.g. plain file:// or http:// URIs)
    :raises NotImplementedError: for an entry that mixes a variable deref
                                 with other text (cannot be partially expanded)
    """
    import collections
    import oe.recipeutils
    ret = {}
    # Work queue of unexpanded SRC_URI fragments still to be classified.
    src_uris = collections.deque()
    def enqueue_var_contents(v):
        # Split the raw (unexpanded) value of variable |v| into entries and queue them.
        src_uris.extend(oe.recipeutils.split_var_value(d.getVar(v, False) or ""))
    enqueue_var_contents("SRC_URI")
    while src_uris:
        unexpanded_entry = src_uris.popleft()
        expanded = d.expand(unexpanded_entry)
        if not expanded:
            continue
        try:
            # Let the fetcher parse the entry so we can read its scheme/params.
            fetcher = bb.fetch.Fetch([expanded], d)
            ud = fetcher.ud[fetcher.urls[0]]
            if only_scm and not ud.method.supports_srcrev():
                continue
            # "default" matches the name bitbake assigns to unnamed entries.
            uri_name = ud.parm.get("name") or "default"
            ret[uri_name] = unexpanded_entry
        except bb.fetch2.NoMethodError:
            expanded = expanded.strip()
            # If the expanded "entry" is actually more than one entry, then maybe the
            # unexpanded entry is a variable deref.
            if len(expanded.split()) == 1:
                raise
            if unexpanded_entry[0:2] == "${" and unexpanded_entry[-1] == "}" and unexpanded_entry[2:-1] in d:
                # Add the contents of the variable to the processing queue
                enqueue_var_contents(unexpanded_entry[2:-1])
            else:
                # There's only so much we can do. We won't be able to handle something weird like:
                # SRC_URI = "${A}${C}"
                # A = "git://"
                # C = "git@host/path;branch=${BRANCH}"
                # since there's no (easy) way to partially expand SRC_URI just up to the leaf variables.
                raise NotImplementedError("Unable to process SRC_URI entry: {0}".format(unexpanded_entry))
    return ret
def get_srcrev_values(d):
    """Collect revision info for every SCM entry in the recipe's SRC_URI.

    Returns a pair (revs, uds) where revs maps SRC_URI entry name to its
    sortable revision string and uds maps the same names to the fetcher's
    URL-data object for that entry.
    """
    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d)
    revs = {}
    uds = {}
    for url in fetcher.ud:
        ud = fetcher.ud[url]
        # Only SCM fetchers (git, svn, ...) carry a meaningful SRCREV.
        if not ud.method.supports_srcrev():
            continue
        for name in ud.names:
            # sortable_revision() returns a tuple; element [1] is the revision string.
            revs[name] = ud.method.sortable_revision(ud, d, name)[1]
            uds[name] = ud
    return revs, uds
def extract_src_uri_param_var(src_uri, param):
    """
    Given an unexpanded SRC_URI entry |src_uri| and parameter name |param|, searches
    for an assignment to that parameter. If found, and the assignment consists of
    dereferencing a single variable, returns name of the variable. Otherwise returns None.

    For example, given src_uri="git://repo;branch=${BRANCH}" and param="branch",
    this method would return "BRANCH".
    """
    import re
    # re.escape() keeps any regex metacharacters in |param| literal, and the
    # negative lookbehind prevents matching a longer parameter whose name
    # merely ends with |param| (e.g. "nobranch=${X}" when param="branch").
    regex = re.compile(r"(?<![a-zA-Z0-9\-_])" + re.escape(param) +
                       r"=\${(?P<var>[a-zA-Z0-9\-_+./~]+)}")
    m = regex.search(src_uri)
    if m:
        return m.group("var")
    return None
def handle_recipe_parsed(d):
    """Record floating (AUTOREV) SRCREVs for one parsed recipe.

    For each SRC_URI entry whose effective SRCREV is AUTOREV, writes a
    pinned SRCREV (and, for git, a pinned branch variable if the branch
    came from a single variable deref) into a per-PN .inc file, plus a JSON
    record, under ${REVRECORD_MC_DIR}/${PN}.
    """
    # externalsrc recipes build from a local tree; their SRCREVs are not meaningful.
    if bb.data.inherits_class("externalsrc", d):
        return
    # Skip recipe if there are no srcrev-supporting SRC_URIs
    revs, uds = get_srcrev_values(d)
    if not revs:
        return
    import json
    unexpanded_by_name = get_unexpanded_src_uris_by_name(d)
    inclines = []
    json_entries = []
    pn = d.getVar("PN")
    for name in sorted(revs):
        jsondata = {}
        var = get_effective_srcrev_var(d, name)
        # We only care about SRCREV that is set to AUTOREV (which will cause it to report as AUTOINC).
        if d.getVar(var) != "AUTOINC":
            continue
        # Add _pn- override if not already present
        if "_pn-" not in var:
            var = "{0}_pn-{1}".format(var, pn)
        inclines.append('{0} = "{1}"'.format(var, revs[name]))
        ud = uds[name]
        jsondata["rev"] = revs[name]
        jsondata["uri_name"] = name
        jsondata["path"] = ud.path
        jsondata["host"] = ud.host
        jsondata["pn"] = pn
        # Depending on the fetcher scheme, there may be other parameters we want to capture
        raw_src_uri = unexpanded_by_name[name]
        if ud.type == "git":
            # Also want to capture branch; TODO what about tags?
            ref = ud.unresolvedrev[name]
            jsondata["branch"] = ref
            # If the branch param was "branch=${SOMEVAR}", also pin SOMEVAR itself.
            branch_var = extract_src_uri_param_var(raw_src_uri, "branch")
            if branch_var:
                inclines.append('{0}_pn-{1} = "{2}"'.format(branch_var, pn, ref))
        json_entries.append(jsondata)
    pndir = d.expand("${REVRECORD_MC_DIR}/${PN}")
    # Only create output if at least one AUTOREV entry was found.
    if inclines:
        bb.utils.mkdirhier(pndir)
        incfile = os.path.join(pndir, d.getVar("REVRECORD_INCFILE_NAME"))
        with open(incfile, "w") as f:
            f.write("\n".join(inclines))
        jsonfile = os.path.join(pndir, d.getVar("REVRECORD_JSON_FILE_NAME"))
        with open(jsonfile, "w") as f:
            json.dump(json_entries, f, indent=4, sort_keys=True)
def handle_parse_completed(d):
    """Merge every per-PN revs file into unified .inc and .json files at the
    top of the (per-multiconfig) output directory."""
    import json
    base_dir = d.getVar("REVRECORD_MC_DIR")
    bb.utils.mkdirhier(base_dir)
    # Aggregate all the per-PN revs files into a unified one at the base of the outdir
    inc_name = d.getVar("REVRECORD_INCFILE_NAME")
    json_name = d.getVar("REVRECORD_JSON_FILE_NAME")
    combined_json = []
    with open(os.path.join(base_dir, inc_name), "w") as merged_inc:
        for cur_dir, subdirs, _files in os.walk(base_dir):
            # In-place sort so os.walk visits per-PN directories in PN order.
            subdirs.sort()
            if cur_dir == base_dir:
                # The base dir holds the merged output, not per-PN input.
                continue
            recipe = os.path.basename(cur_dir)
            with open(os.path.join(cur_dir, inc_name), "r") as part:
                merged_inc.write("# {0}\n".format(recipe))
                merged_inc.write(part.read())
            merged_inc.write("\n\n")
            with open(os.path.join(cur_dir, json_name), "r") as part:
                combined_json.extend(json.load(part))
    with open(os.path.join(base_dir, json_name), "w") as merged_json:
        json.dump(combined_json, merged_json, indent=4, sort_keys=True)
python revrecord_handler() {
    # |e| (the event) and |d| (the datastore) are injected by bitbake.
    if isinstance(e, bb.event.ParseStarted):
        # Start a fresh timestamped session directory for this parse.
        session_dir = d.expand("${REVRECORD_TOPDIR}/${DATETIME}")
        bb.utils.mkdirhier(session_dir)
        # Set up a convenience symlink pointing at the newest session.
        # Use lexists(), not exists(): exists() follows symlinks, so a
        # stale link whose target session dir was deleted would report
        # False, skip the unlink, and make os.symlink() below fail with
        # FileExistsError.
        latest_dir = d.getVar("REVRECORD_LATEST_DIR")
        if os.path.lexists(latest_dir):
            os.unlink(latest_dir)
        os.symlink(session_dir, latest_dir)
    elif isinstance(e, bb.event.RecipeParsed):
        handle_recipe_parsed(d)
    elif isinstance(e, bb.event.ParseCompleted):
        handle_parse_completed(d)
        # Also need to do it once for each multiconfig
        for mc in (d.getVar("BBMULTICONFIG") or "").split():
            if mc:
                mcdata = bb.data.createCopy(d)
                # BB_CURRENT_MC steers REVRECORD_MC_DIR to the per-mc subdir.
                mcdata.setVar("BB_CURRENT_MC", mc)
                handle_parse_completed(mcdata)
        # TODO: merge all multiconfig-level revs.inc into a global revs.inc; but how to handle conflicts?
}
addhandler revrecord_handler
revrecord_handler[eventmask] = " \
    bb.event.ParseCompleted \
    bb.event.ParseStarted \
    bb.event.RecipeParsed \
"
|