1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import textwrap
from twisted.internet import defer, reactor
from buildbot import util
from buildbot.process import buildstep
from buildbot.steps.source.base import Source
from buildbot.interfaces import IRenderable
from zope.interface import implements
class RepoDownloadsFromProperties(util.ComparableMixin, object):
    implements(IRenderable)
    # Accepted textual formats, tried in this order:
    #   "repo download <project> <change>/<patchset>"  (direct gerrit copy/paste)
    #   "<project> <change>/<patchset>"                (simpler form)
    #   "<project>/<change>/<patchset>"
    parse_download_re = (re.compile(r"repo download ([^ ]+) ([0-9]+/[0-9]+)"),
                         re.compile(r"([^ ]+) ([0-9]+/[0-9]+)"),
                         re.compile(r"([^ ]+)/([0-9]+/[0-9]+)"),
                         )
    compare_attrs = ('names',)

    def __init__(self, names):
        # names: list of build property names whose values hold download specs
        self.names = names

    def getRenderingFor(self, props):
        """Render to a flat list of "project change/patchset" strings by
        parsing every named property that is set."""
        downloads = []
        for prop_name in self.names:
            value = props.getProperty(prop_name)
            if value is None:
                continue
            downloads.extend(self.parseDownloadProperty(value))
        return downloads

    def parseDownloadProperty(self, s):
        """
        lets try to be nice in the format we want
        can support several instances of "repo download proj number/patch" (direct copy paste from gerrit web site)
        or several instances of "proj number/patch" (simpler version)
        This feature allows integrator to build with several pending interdependant changes.
        returns list of repo downloads sent to the buildslave
        """
        if s is None:
            return []
        parsed = []
        for pattern in self.parse_download_re:
            match = pattern.search(s)
            while match is not None:
                parsed.append("%s %s" % (match.group(1), match.group(2)))
                # cut the consumed span out of the string, then look for
                # further occurrences of the same pattern in what remains
                s = s[:match.start(0)] + s[match.end(0):]
                match = pattern.search(s)
        return parsed
class RepoDownloadsFromChangeSource(util.ComparableMixin, object):
    implements(IRenderable)
    compare_attrs = ('codebase',)

    def __init__(self, codebase=None):
        # codebase: restrict to changes of one source stamp; None means
        # every change of the build
        self.codebase = codebase

    def getRenderingFor(self, props):
        """Render gerrit "patchset-created" changes into a list of
        "project change/patchset" download strings."""
        if self.codebase is None:
            changes = props.getBuild().allChanges()
        else:
            changes = props.getBuild().getSourceStamp(self.codebase).changes
        downloads = []
        for change in changes:
            properties = change.properties
            if "event.type" not in properties:
                continue
            if properties["event.type"] != "patchset-created":
                continue
            downloads.append("%s %s/%s" % (properties["event.change.project"],
                                           properties["event.change.number"],
                                           properties["event.patchSet.number"]))
        return downloads
class Repo(Source):

    """ Class for Repo with all the smarts """
    name = 'repo'
    renderables = ["manifestURL", "manifestFile", "tarball", "jobs",
                   "syncAllBranches", "updateTarballAge", "manifestOverrideUrl",
                   "repoDownloads"]
    ref_not_found_re = re.compile(r"fatal: Couldn't find remote ref")
    # NOTE(fix): the original literal list was missing commas after
    # r"error: " and r"fatal: ", so Python's implicit string concatenation
    # collapsed three intended alternatives into the single, never-matching
    # string "error: fatal: possibly due to conflict resolution.".
    cherry_pick_error_re = re.compile(r"|".join([r"Automatic cherry-pick failed",
                                                 r"error: ",
                                                 r"fatal: ",
                                                 r"possibly due to conflict resolution."]))
    re_change = re.compile(r".* refs/changes/\d\d/(\d+)/(\d+) -> FETCH_HEAD$")
    re_head = re.compile(r"^HEAD is now at ([0-9a-f]+)...")
    mirror_sync_retry = 10  # number of retries, if we detect mirror desynchronization
    mirror_sync_sleep = 60  # wait 1min between retries (thus default total retry time is 10min)

    def __init__(self,
                 manifestURL=None,
                 manifestBranch="master",
                 manifestFile="default.xml",
                 tarball=None,
                 jobs=None,
                 syncAllBranches=False,
                 updateTarballAge=7*24.0*3600.0,
                 manifestOverrideUrl=None,
                 repoDownloads=None,
                 **kwargs):
        """
        @type  manifestURL: string
        @param manifestURL: The URL which points at the repo manifests repository.

        @type  manifestBranch: string
        @param manifestBranch: The manifest branch to check out by default.

        @type  manifestFile: string
        @param manifestFile: The manifest to use for sync.

        @type  tarball: string
        @param tarball: optional path of a tarball used to seed (and
                        periodically refresh) the .repo directory.

        @type  jobs: integer
        @param jobs: optional number of parallel jobs passed to "repo sync -jN".

        @type  syncAllBranches: bool.
        @param syncAllBranches: true, then we must slowly synchronize all branches.

        @type  updateTarballAge: float
        @param updateTarballAge: renderable to determine the update tarball policy,
                                 given properties
                                 Returns: max age of tarball in seconds, or None, if we
                                 want to skip tarball update

        @type  manifestOverrideUrl: string
        @param manifestOverrideUrl: optional http URL for overriding the manifest
                                    usually coming from Property setup by a ForceScheduler

        @type  repoDownloads: list of strings
        @param repoDownloads: optional repo download to perform after the repo sync
        """
        self.manifestURL = manifestURL
        self.manifestBranch = manifestBranch
        self.manifestFile = manifestFile
        self.tarball = tarball
        self.jobs = jobs
        self.syncAllBranches = syncAllBranches
        self.updateTarballAge = updateTarballAge
        self.manifestOverrideUrl = manifestOverrideUrl
        if repoDownloads is None:
            repoDownloads = []
        self.repoDownloads = repoDownloads
        Source.__init__(self, **kwargs)
        assert self.manifestURL is not None

    def computeSourceRevision(self, changes):
        """Use the revision of the most recent change, if any."""
        if not changes:
            return None
        return changes[-1].revision

    def filterManifestPatches(self):
        """
        Patches to manifest projects are a bit special.
        repo does not support a way to download them automatically,
        so we need to implement the boilerplate manually.
        This code separates the manifest patches from the other patches,
        and generates commands to import those manifest patches.
        """
        manifest_unrelated_downloads = []
        manifest_related_downloads = []
        for download in self.repoDownloads:
            project, ch_ps = download.split(" ")[-2:]
            if (self.manifestURL.endswith("/"+project) or
                    self.manifestURL.endswith("/"+project+".git")):
                # gerrit refs are sharded by the last two digits of the
                # change number: refs/changes/CC/NNNN/PS
                ch, ps = map(int, ch_ps.split("/"))
                branch = "refs/changes/%02d/%d/%d" % (ch % 100, ch, ps)
                manifest_related_downloads.append(
                    ["git", "fetch", self.manifestURL, branch])
                manifest_related_downloads.append(
                    ["git", "cherry-pick", "FETCH_HEAD"])
            else:
                manifest_unrelated_downloads.append(download)
        self.repoDownloads = manifest_unrelated_downloads
        self.manifestDownloads = manifest_related_downloads

    def _repoCmd(self, command, abandonOnFailure=True, **kwargs):
        """Run a "repo <command>" on the slave; see _Cmd."""
        return self._Cmd(["repo"]+command, abandonOnFailure=abandonOnFailure, **kwargs)

    def _Cmd(self, command, abandonOnFailure=True, workdir=None, **kwargs):
        """Run an arbitrary command on the slave in workdir (defaults to
        self.workdir), logging to the shared stdio log.

        Returns a Deferred firing with the command's rc; if abandonOnFailure
        is set and the command fails, raises BuildStepFailed instead.
        """
        if workdir is None:
            workdir = self.workdir
        self.cmd = cmd = buildstep.RemoteShellCommand(workdir, command,
                                                      env=self.env,
                                                      logEnviron=self.logEnviron,
                                                      timeout=self.timeout, **kwargs)
        # does not make sense to logEnviron for each command (just for first)
        self.logEnviron = False
        cmd.useLog(self.stdio_log, False)
        self.stdio_log.addHeader("Starting command: %s\n" % (" ".join(command), ))
        self.step_status.setText(["%s" % (" ".join(command[:2]))])
        d = self.runCommand(cmd)

        def evaluateCommand(cmd):
            if abandonOnFailure and cmd.didFail():
                self.step_status.setText(["repo failed at: %s" % (" ".join(command[:2]))])
                self.stdio_log.addStderr("Source step failed while running command %s\n" % cmd)
                raise buildstep.BuildStepFailed()
            return cmd.rc
        d.addCallback(lambda _: evaluateCommand(cmd))
        return d

    def repoDir(self):
        """Slave-side path of the .repo metadata directory."""
        return self.build.path_module.join(self.workdir, ".repo")

    def sourcedirIsUpdateable(self):
        # an existing .repo directory means we can try an incremental sync
        return self.pathExists(self.repoDir())

    def startVC(self, branch, revision, patch):
        d = self.doStartVC()
        d.addErrback(self.failed)

    @defer.inlineCallbacks
    def doStartVC(self):
        """Main entry point: sync, maybe refresh the tarball, then perform
        the pending repo downloads."""
        self.stdio_log = self.addLogForRemoteCommands("stdio")

        self.filterManifestPatches()

        if self.repoDownloads:
            self.stdio_log.addHeader("will download:\n" + "repo download " + "\nrepo download ".join(self.repoDownloads) + "\n")

        self.willRetryInCaseOfFailure = True
        d = self.doRepoSync()

        def maybeRetry(why):
            # in case the tree was corrupted somehow because of previous build
            # we clobber one time, and retry everything
            if why.check(buildstep.BuildStepFailed) and self.willRetryInCaseOfFailure:
                self.stdio_log.addStderr("got issue at first try:\n" + str(why) +
                                         "\nRetry after clobber...")
                return self.doRepoSync(forceClobber=True)
            return why  # propagate to self.failed
        d.addErrback(maybeRetry)
        yield d
        yield self.maybeUpdateTarball()

        # starting from here, clobbering will not help
        yield self.doRepoDownloads()
        self.setStatus(self.cmd, 0)
        yield self.finished(0)

    @defer.inlineCallbacks
    def doClobberStart(self):
        """Wipe the workdir and re-seed it from the tarball, if any."""
        yield self.runRmdir(self.workdir)
        yield self.runMkdir(self.workdir)
        yield self.maybeExtractTarball()

    @defer.inlineCallbacks
    def doRepoSync(self, forceClobber=False):
        """repo init + optional manifest override + repo sync.

        Also snapshots the synced state into manifest-original.xml.
        """
        updatable = yield self.sourcedirIsUpdateable()
        if not updatable or forceClobber:
            # no need to re-clobber in case of failure
            self.willRetryInCaseOfFailure = False
            yield self.doClobberStart()

        yield self.doCleanup()
        yield self._repoCmd(['init',
                             '-u', self.manifestURL,
                             '-b', self.manifestBranch,
                             '-m', self.manifestFile])

        if self.manifestOverrideUrl:
            self.stdio_log.addHeader("overriding manifest with %s\n" % (self.manifestOverrideUrl))
            # the "URL" may actually be a slave-local path; prefer a copy
            local_file = yield self.pathExists(self.build.path_module.join(self.workdir,
                                                                           self.manifestOverrideUrl))
            if local_file:
                yield self._Cmd(["cp", "-f", self.manifestOverrideUrl, "manifest_override.xml"])
            else:
                yield self._Cmd(["wget", self.manifestOverrideUrl, "-O", "manifest_override.xml"])
            yield self._Cmd(["ln", "-sf", "../manifest_override.xml", "manifest.xml"],
                            workdir=self.build.path_module.join(self.workdir, ".repo"))

        # apply the manifest patches separated out by filterManifestPatches()
        for command in self.manifestDownloads:
            yield self._Cmd(command, workdir=self.build.path_module.join(self.workdir, ".repo", "manifests"))

        command = ['sync']
        if self.jobs:
            command.append('-j' + str(self.jobs))
        if not self.syncAllBranches:
            command.append('-c')
        self.step_status.setText(["repo sync"])
        self.stdio_log.addHeader("synching manifest %s from branch %s from %s\n"
                                 % (self.manifestFile, self.manifestBranch, self.manifestURL))
        yield self._repoCmd(command)

        command = ['manifest', '-r', '-o', 'manifest-original.xml']
        yield self._repoCmd(command)

    # check whether msg matches one of the
    # compiled regexps in self.re_error_messages
    def _findErrorMessages(self, error_re):
        for logname in ['stderr', 'stdout']:
            if not hasattr(self.cmd, logname):
                continue
            msg = getattr(self.cmd, logname)
            if re.search(error_re, msg) is not None:
                return True
        return False

    def _sleep(self, delay):
        """Return a Deferred that fires after `delay` seconds."""
        d = defer.Deferred()
        reactor.callLater(delay, d.callback, 1)
        return d

    @defer.inlineCallbacks
    def doRepoDownloads(self):
        """Run "repo download" for each pending spec, retrying on mirror
        desynchronization, and record results in the repo_downloaded
        property."""
        self.repo_downloaded = ""
        for download in self.repoDownloads:
            command = ['download'] + download.split(' ')
            self.stdio_log.addHeader("downloading changeset %s\n"
                                     % (download))

            retry = self.mirror_sync_retry + 1
            while retry > 0:
                yield self._repoCmd(command, abandonOnFailure=False,
                                    collectStdout=True, collectStderr=True)
                if not self._findErrorMessages(self.ref_not_found_re):
                    break
                # the mirror may lag behind gerrit: wait and retry
                retry -= 1
                self.stdio_log.addStderr("failed downloading changeset %s\n" % (download))
                self.stdio_log.addHeader("wait one minute for mirror sync\n")
                yield self._sleep(self.mirror_sync_sleep)

            if retry == 0:
                self.step_status.setText(["repo: change %s does not exist" % download])
                self.step_status.setText2(["repo: change %s does not exist" % download])
                raise buildstep.BuildStepFailed()

            if self.cmd.didFail() or self._findErrorMessages(self.cherry_pick_error_re):
                # cherry pick error! We create a diff with status current workdir
                # in stdout, which reveals the merge errors and exit
                command = ['forall', '-c', 'git', 'diff', 'HEAD']
                yield self._repoCmd(command, abandonOnFailure=False)
                self.step_status.setText(["download failed: %s" % download])
                raise buildstep.BuildStepFailed()

            if hasattr(self.cmd, 'stderr'):
                lines = self.cmd.stderr.split("\n")
                match1 = match2 = False
                for line in lines:
                    if not match1:
                        match1 = self.re_change.match(line)
                    if not match2:
                        match2 = self.re_head.match(line)
                if match1 and match2:
                    self.repo_downloaded += "%s/%s %s " % (match1.group(1),
                                                           match1.group(2),
                                                           match2.group(1))
        self.setProperty("repo_downloaded", self.repo_downloaded, "Source")

    def computeTarballOptions(self):
        # Keep in mind that the compression part of tarball generation
        # can be non negligible
        tar = ['tar']
        if self.tarball.endswith("gz"):
            tar.append('-z')
        if self.tarball.endswith("bz2") or self.tarball.endswith("bz"):
            tar.append('-j')
        if self.tarball.endswith("lzma"):
            tar.append('--lzma')
        if self.tarball.endswith("lzop"):
            tar.append('--lzop')
        return tar

    @defer.inlineCallbacks
    def maybeExtractTarball(self):
        """Seed the workdir from the tarball; on corruption, discard both
        the tarball and the .repo dir so a fresh sync happens."""
        if self.tarball:
            tar = self.computeTarballOptions() + ['-xvf', self.tarball]
            res = yield self._Cmd(tar, abandonOnFailure=False)
            if res:  # error with tarball.. erase repo dir and tarball
                yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False)
                yield self.runRmdir(self.repoDir(), abandonOnFailure=False)

    @defer.inlineCallbacks
    def maybeUpdateTarball(self):
        """Re-create the tarball when it is missing or older than
        updateTarballAge seconds (None disables updates)."""
        if not self.tarball or self.updateTarballAge is None:
            return
        # tarball path is absolute, so we cannot use slave's stat command
        # stat -c%Y gives mtime in second since epoch
        res = yield self._Cmd(["stat", "-c%Y", self.tarball], collectStdout=True, abandonOnFailure=False)
        if not res:
            tarball_mtime = int(self.cmd.stdout)
            yield self._Cmd(["stat", "-c%Y", "."], collectStdout=True)
            now_mtime = int(self.cmd.stdout)
            age = now_mtime - tarball_mtime
        # NB: when res is non-zero (stat failed), short-circuit keeps the
        # undefined `age` from being evaluated
        if res or age > self.updateTarballAge:
            tar = self.computeTarballOptions() + ['-cvf', self.tarball, ".repo"]
            res = yield self._Cmd(tar, abandonOnFailure=False)
            if res:  # error with tarball.. erase tarball, but dont fail
                yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False)

    # a simple shell script to gather all cleanup tweaks...
    # doing them one by one just complicate the stuff
    # and messup the stdio log
    def _getCleanupCommand(self):
        """also used by tests for expectations"""
        return textwrap.dedent("""\
            set -v
            if [ -d .repo/manifests ]
            then
                # repo just refuse to run if manifest is messed up
                # so ensure we are in a known state
                cd .repo/manifests
                rm -f .git/index.lock
                git fetch origin
                git reset --hard remotes/origin/%(manifestBranch)s
                git config branch.default.merge %(manifestBranch)s
                cd ..
                ln -sf manifests/%(manifestFile)s manifest.xml
                cd ..
            fi
            repo forall -c rm -f .git/index.lock
            repo forall -c git clean -f -d -x 2>/dev/null
            repo forall -c git reset --hard HEAD 2>/dev/null
            rm -f %(workdir)s/.repo/project.list
            """) % self.__dict__

    def doCleanup(self):
        """Best-effort repair of a possibly-broken tree before repo init."""
        command = self._getCleanupCommand()
        return self._Cmd(["bash", "-c", command], abandonOnFailure=False)
|