blob: 59a2ee8f8082f4512a21099bf836f453a8a05d38 [file] [log] [blame]
rjw1f884582022-01-06 17:20:42 +08001# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
The git fetcher supports SRC_URI entries of the form:
7SRC_URI = "git://some.host/somepath;OptionA=xxx;OptionB=xxx;..."
8
9Supported SRC_URI options are:
10
11- branch
12 The git branch to retrieve from. The default is "master"
13
14 This option also supports multiple branch fetching, with branches
15 separated by commas. In multiple branches case, the name option
16 must have the same number of names to match the branches, which is
17 used to specify the SRC_REV for the branch
18 e.g:
19 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
20 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
21 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
22
23- tag
24 The git tag to retrieve. The default is "master"
25
26- protocol
27 The method to use to access the repository. Common options are "git",
28 "http", "https", "file", "ssh" and "rsync". The default is "git".
29
30- rebaseable
31 rebaseable indicates that the upstream git repo may rebase in the future,
32 and current revision may disappear from upstream repo. This option will
33 remind fetcher to preserve local cache carefully for future use.
34 The default value is "0", set rebaseable=1 for rebaseable git repo.
35
36- nocheckout
37 Don't checkout source code when unpacking. set this option for the recipe
38 who has its own routine to checkout code.
39 The default is "0", set nocheckout=1 if needed.
40
41- bareclone
42 Create a bare clone of the source code and don't checkout the source code
43 when unpacking. Set this option for the recipe who has its own routine to
44 checkout code and tracking branch requirements.
45 The default is "0", set bareclone=1 if needed.
46
47- nobranch
 Don't check that the SHA is contained in the branch. Set this option for
 a recipe that refers to a commit that is reachable from a tag rather than
 from a branch.
50 The default is "0", set nobranch=1 if needed.
51
52- usehead
53 For local git:// urls to use the current branch HEAD as the revision for use with
54 AUTOREV. Implies nobranch.
55
56"""
57
58#Copyright (C) 2005 Richard Purdie
59#
60# This program is free software; you can redistribute it and/or modify
61# it under the terms of the GNU General Public License version 2 as
62# published by the Free Software Foundation.
63#
64# This program is distributed in the hope that it will be useful,
65# but WITHOUT ANY WARRANTY; without even the implied warranty of
66# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
67# GNU General Public License for more details.
68#
69# You should have received a copy of the GNU General Public License along
70# with this program; if not, write to the Free Software Foundation, Inc.,
71# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
72
73import collections
74import errno
75import fnmatch
76import os
77import re
78import subprocess
79import tempfile
80import bb
81import bb.progress
82from bb.fetch2 import FetchMethod
83from bb.fetch2 import runfetchcmd
84from bb.fetch2 import logger
85
86
class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Extract progress information from git's transfer output."""

    def __init__(self, d):
        self._buffer = ''
        self._count = 0
        super(GitProgressHandler, self).__init__(d)
        # Emit an initial event so the progress bar is shown immediately.
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        phases = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
        weights = [0.2, 0.05, 0.5, 0.25]
        current = 0
        # Walk the phases from last to first so that the furthest phase
        # mentioned in the buffered output is the one reported.
        for idx in range(len(phases) - 1, -1, -1):
            if phases[idx] in self._buffer:
                current = idx
                self._buffer = ''
                break
        self._status = phases[current]
        percentages = re.findall(r'(\d+)%', string)
        if percentages:
            # Overall progress = the completed phases' weights plus the
            # weighted share of the phase currently in progress.
            done = sum(weights[:current]) * 100
            overall = int(round(int(percentages[-1]) * weights[current] + done))
            speeds = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            self.update(overall, speeds[-1] if speeds else None)
        elif current == 0:
            # Before any percentages appear, report the raw object count
            # (a negative value signals indeterminate progress).
            counts = re.findall(r': (\d+)', string)
            if counts:
                value = int(counts[-1])
                if value > self._count:
                    self._count = value
                    self._fire_progress(-value)
        super(GitProgressHandler, self).write(string)
125
126
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""
    # NOTE: the docstring above must be the FIRST statement in the class
    # body to become Git.__doc__; it previously followed the attributes
    # below and was silently discarded.

    # Root of the bitbake checkout, three directories above this file
    # (lib/bb/fetch2/git.py -> bitbake/).
    bitbake_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
    # Helper script shipped with bitbake that prunes a clone's history.
    make_shallow_path = os.path.join(bitbake_dir, 'bin', 'git-make-shallow')

    def init(self, d):
        pass
134
def supports(self, ud, d):
    """Return True when *ud* describes a url this fetcher can handle."""
    return ud.type in ['git']
140
def supports_checksum(self, urldata):
    """Git urls are pinned by revision, not by checksum."""
    return False
143
def urldata_init(self, ud, d):
    """
    Initialize git-specific fields on the url data (ud) so that the
    generic fetch machinery (e.g. latest_revision()) can operate.

    Raises bb.fetch2.ParameterError / FetchError for invalid SRC_URI
    options or invalid shallow-clone configuration.
    """
    # Protocol: an explicit 'protocol' parameter wins; a url without a
    # host is a local file repository; anything else speaks git.
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif not ud.host:
        ud.proto = 'file'
    else:
        ud.proto = "git"

    if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
        raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

    ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"
    ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"
    ud.nobranch = ud.parm.get("nobranch", "0") == "1"

    # usehead implies nobranch
    ud.usehead = ud.parm.get("usehead", "0") == "1"
    if ud.usehead:
        if ud.proto != "file":
            raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
        # Use a bool for consistency with the flag parsing above
        # (previously set to the int 1).
        ud.nobranch = True

    # bareclone implies nocheckout
    ud.bareclone = ud.parm.get("bareclone", "0") == "1"
    if ud.bareclone:
        ud.nocheckout = True

    ud.unresolvedrev = {}
    branches = ud.parm.get("branch", "master").split(',')
    if len(branches) != len(ud.names):
        raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)

    ud.cloneflags = "-s -n"
    if ud.bareclone:
        ud.cloneflags += " --mirror"

    ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
    ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()

    def parse_depth(value, varname):
        # Validate a BB_GIT_SHALLOW_DEPTH* value: a non-negative integer
        # (0 disables shallow processing for that name).
        try:
            depth = int(value or 0)
        except ValueError:
            raise bb.fetch2.FetchError("Invalid depth for %s: %s" % (varname, value))
        if depth < 0:
            raise bb.fetch2.FetchError("Invalid depth for %s: %s" % (varname, value))
        return depth

    depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
    if depth_default is not None:
        depth_default = parse_depth(depth_default, "BB_GIT_SHALLOW_DEPTH")
    else:
        depth_default = 1
    ud.shallow_depths = collections.defaultdict(lambda: depth_default)

    revs_default = d.getVar("BB_GIT_SHALLOW_REVS", True)
    ud.shallow_revs = []
    ud.branches = {}
    for pos, name in enumerate(ud.names):
        branch = branches[pos]
        ud.branches[name] = branch
        ud.unresolvedrev[name] = branch

        shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name)
        if shallow_depth is not None:
            ud.shallow_depths[name] = parse_depth(shallow_depth, "BB_GIT_SHALLOW_DEPTH_%s" % name)

        revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name)
        if revs is not None:
            ud.shallow_revs.extend(revs.split())
        elif revs_default is not None:
            ud.shallow_revs.extend(revs_default.split())

    # Shallow is effectively disabled when no extra revs are listed and
    # every name's depth is 0.
    if (ud.shallow and
            not ud.shallow_revs and
            all(ud.shallow_depths[n] == 0 for n in ud.names)):
        # Shallow disabled for this URL
        ud.shallow = False

    if ud.usehead:
        ud.unresolvedrev['default'] = 'HEAD'

    ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"

    write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
    ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
    ud.write_shallow_tarballs = (d.getVar("BB_GENERATE_SHALLOW_TARBALLS") or write_tarballs) != "0"

    ud.setup_revisions(d)

    for name in ud.names:
        # Anything that doesn't look like a full SHA-1 revision (40 hex
        # digits) must be resolved against the remote.
        if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
            if ud.revisions[name]:
                ud.unresolvedrev[name] = ud.revisions[name]
            ud.revisions[name] = self.latest_revision(ud, d, name)

    gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
    if gitsrcname.startswith('.'):
        gitsrcname = gitsrcname[1:]

    # For a rebaseable git repo the mirror tarball must be kept per
    # revision, so that even if the revision later disappears from the
    # upstream repo, the mirror remains intact and still contains it.
    if ud.rebaseable:
        for name in ud.names:
            gitsrcname = gitsrcname + '_' + ud.revisions[name]

    dl_dir = d.getVar("DL_DIR")
    gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
    ud.clonedir = os.path.join(gitdir, gitsrcname)
    ud.localfile = ud.clonedir

    mirrortarball = 'git2_%s.tar.gz' % gitsrcname
    ud.fullmirror = os.path.join(dl_dir, mirrortarball)
    ud.mirrortarballs = [mirrortarball]
    if ud.shallow:
        # The shallow tarball name encodes everything that affects its
        # contents: bareness, extra revs, per-name revision/depth, refs.
        tarballname = gitsrcname
        if ud.bareclone:
            tarballname = "%s_bare" % tarballname

        if ud.shallow_revs:
            tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))

        for name, revision in sorted(ud.revisions.items()):
            tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7])
            depth = ud.shallow_depths[name]
            if depth:
                tarballname = "%s-%s" % (tarballname, depth)

        shallow_refs = []
        if not ud.nobranch:
            shallow_refs.extend(ud.branches.values())
        if ud.shallow_extra_refs:
            shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
        if shallow_refs:
            tarballname = "%s_%s" % (tarballname, "_".join(sorted(shallow_refs)).replace('/', '.'))

        fetcher = self.__class__.__name__.lower()
        ud.shallowtarball = '%sshallow_%s.tar.gz' % (fetcher, tarballname)
        ud.fullshallow = os.path.join(dl_dir, ud.shallowtarball)
        ud.mirrortarballs.insert(0, ud.shallowtarball)
297
def localpath(self, ud, d):
    """The local clone directory is the download artefact itself."""
    return ud.clonedir
300
def need_update(self, ud, d):
    """True when the clone or either mirror tarball needs refreshing."""
    return (self.clonedir_need_update(ud, d)
            or self.shallow_tarball_need_update(ud)
            or self.tarball_need_update(ud))
303
def clonedir_need_update(self, ud, d):
    """True when the clone dir is missing or lacks a wanted revision."""
    if not os.path.exists(ud.clonedir):
        return True
    # Every configured name's revision must already be present.
    return any(not self._contains_ref(ud, d, name, ud.clonedir)
               for name in ud.names)
311
def shallow_tarball_need_update(self, ud):
    """True when a shallow mirror tarball is wanted but absent."""
    return ud.shallow and ud.write_shallow_tarballs and not os.path.exists(ud.fullshallow)
314
def tarball_need_update(self, ud):
    """True when a mirror tarball is wanted but absent."""
    return ud.write_tarballs and not os.path.exists(ud.fullmirror)
317
def try_premirror(self, ud, d):
    """Decide whether premirrors should be attempted for this url."""
    # Without this, updating an existing checkout with only premirrors
    # would be impossible.
    if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
        return True
    # An existing clone is preferred over any premirror.
    return not os.path.exists(ud.clonedir)
326
def download(self, ud, d):
    """Fetch url"""

    # Preference order: an up-to-date clone beats either tarball, a
    # shallow tarball beats an out-of-date clone, and a missing clone
    # falls back to whichever tarball exists.
    if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
        ud.localpath = ud.fullshallow
        return

    if os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
        bb.utils.mkdirhier(ud.clonedir)
        runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

    repourl = self._get_repo_url(ud)

    # Still no local repository? Clone it from scratch.
    if not os.path.exists(ud.clonedir):
        # Strip the scheme so git applies its "-l" local optimisation
        # where possible.
        if repourl.startswith("file://"):
            repourl = repourl[7:]
        clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, clone_cmd, ud.url)
        progresshandler = GitProgressHandler(d)
        runfetchcmd(clone_cmd, d, log=progresshandler)

    # Refresh the clone when any wanted revision is missing.
    needs_fetch = any(not self._contains_ref(ud, d, name, ud.clonedir)
                      for name in ud.names)
    if needs_fetch:
        output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
        if "origin" in output:
            runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)

        runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
        fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
        progresshandler = GitProgressHandler(d)
        runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
        runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
        runfetchcmd("%s pack-refs --all" % ud.basecmd, d, workdir=ud.clonedir)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
        # The mirror tarball no longer matches the clone; drop it so it
        # is regenerated.
        try:
            os.unlink(ud.fullmirror)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

    for name in ud.names:
        if not self._contains_ref(ud, d, name, ud.clonedir):
            raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
383
def build_mirror_data(self, ud, d):
    """Create the mirror tarball(s) used to seed future fetches."""
    if ud.shallow and ud.write_shallow_tarballs:
        if os.path.exists(ud.fullshallow):
            return
        # Replace a dangling symlink rather than writing through it.
        if os.path.islink(ud.fullshallow):
            os.unlink(ud.fullshallow)
        # Build the shallow clone in a scratch directory, tar it up,
        # then throw the scratch copy away.
        tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
        shallowclone = os.path.join(tempdir, 'git')
        try:
            self.clone_shallow_local(ud, shallowclone, d)

            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
            runfetchcmd("touch %s.done" % ud.fullshallow, d)
        finally:
            bb.utils.remove(tempdir, recurse=True)
    elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
        runfetchcmd("touch %s.done" % ud.fullmirror, d)
406
def clone_shallow_local(self, ud, dest, d):
    """Clone the repo into *dest* and prune it down to a shallow copy.

    The upstream url of the new clone isn't set at this time, as it'll be
    set correctly when unpacked."""
    runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

    depth_specs = []
    keep_refs = []
    for name in ud.names:
        revision = ud.revisions[name]
        depth = ud.shallow_depths[name]
        if depth:
            depth_specs.append('%s~%d^{}' % (revision, depth - 1))

        # For nobranch, we need a ref, otherwise the commits will be
        # removed, and for non-nobranch, we truncate the branch to our
        # srcrev, to avoid keeping unnecessary history beyond that.
        branch = ud.branches[name]
        if ud.nobranch:
            ref = "refs/shallow/%s" % name
        elif ud.bareclone:
            ref = "refs/heads/%s" % branch
        else:
            ref = "refs/remotes/origin/%s" % branch

        keep_refs.append(ref)
        runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

    # Map srcrev+depth boundary specs to concrete commit hashes
    parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(depth_specs)), d, workdir=dest)

    # Resolve the explicitly requested shallow revisions
    parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
    shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

    # Expand any wildcards in the extra refs against the refs that
    # actually exist in the clone
    all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                           d, workdir=dest).splitlines()
    for r in ud.shallow_extra_refs:
        if not ud.bareclone:
            r = r.replace('refs/heads/', 'refs/remotes/origin/')

        if '*' in r:
            keep_refs.extend(ref for ref in all_refs if fnmatch.fnmatchcase(ref, r))
        else:
            keep_refs.append(r)

    # Hand the refs to keep and the boundary revisions to git-make-shallow
    shallow_cmd = [self.make_shallow_path, '-s']
    for ref in keep_refs:
        shallow_cmd.append('-r')
        shallow_cmd.append(ref)
    shallow_cmd.extend(shallow_revisions)
    runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
462
def unpack(self, ud, destdir, d):
    """Unpack the downloaded source into *destdir*."""

    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        readpathspec = ":%s" % subdir
        def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
    else:
        readpathspec = ""
        def_destsuffix = "git/"

    destsuffix = ud.parm.get("destsuffix", def_destsuffix)
    destdir = ud.destdir = os.path.join(destdir, destsuffix)
    # Always start from a clean destination
    if os.path.exists(destdir):
        bb.utils.prunedir(destdir)

    source_found = False
    source_error = []

    # First choice: clone from an up-to-date clone directory.
    clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
    if clonedir_is_up_to_date:
        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
        source_found = True
    else:
        source_error.append("clone directory not available or not up to date: " + ud.clonedir)

    # Second choice: extract the shallow tarball, if enabled.
    if not source_found:
        if ud.shallow:
            if os.path.exists(ud.fullshallow):
                bb.utils.mkdirhier(destdir)
                runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
                source_found = True
            else:
                source_error.append("shallow clone not available: " + ud.fullshallow)
        else:
            source_error.append("shallow clone not enabled")

    if not source_found:
        raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)

    repourl = self._get_repo_url(ud)
    runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
    if not ud.nocheckout:
        if subdir != "":
            # Check out only the requested subpath
            runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                        workdir=destdir)
            runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
        elif not ud.nobranch:
            branchname = ud.branches[ud.names[0]]
            runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname,
                        ud.revisions[ud.names[0]]), d, workdir=destdir)
            runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname,
                        branchname), d, workdir=destdir)
        else:
            runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)

    return True
521
def clean(self, ud, d):
    """Remove the local clone and its mirror tarball (plus stamp)."""
    bb.utils.remove(ud.localpath, True)
    bb.utils.remove(ud.fullmirror)
    bb.utils.remove(ud.fullmirror + ".done")
528
def supports_srcrev(self):
    """Git sources are revisioned; SRCREV is supported."""
    return True
531
def _contains_ref(self, ud, d, name, wd):
    """Return True when the repository in *wd* already contains the
    wanted revision for *name* (reachable from the configured branch,
    unless nobranch is set)."""
    if ud.nobranch:
        # Only check that the object itself exists.
        cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
            ud.basecmd, ud.revisions[name])
    else:
        # The revision must also be contained in the configured branch.
        cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
            ud.basecmd, ud.revisions[name], ud.branches[name])
    try:
        output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
    except bb.fetch2.FetchError:
        return False
    # wc -l must produce exactly one line ("more than" fixes the old
    # "more then" typo in this error message).
    if len(output.split()) > 1:
        raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
    return output.split()[0] != "0"
547
548 def _get_repo_url(self, ud):
549 """
550 Return the repository URL
551 """
552 if ud.user:
553 username = ud.user + '@'
554 else:
555 username = ""
556 return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
557
558 def _revision_key(self, ud, d, name):
559 """
560 Return a unique key for the url
561 """
562 return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
563
564 def _lsremote(self, ud, d, search):
565 """
566 Run git ls-remote with the specified search string
567 """
568 # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
569 # and WORKDIR is in PATH (as a result of RSS), our call to
570 # runfetchcmd() exports PATH so this function will get called again (!)
571 # In this scenario the return call of the function isn't actually
572 # important - WORKDIR isn't needed in PATH to call git ls-remote
573 # anyway.
574 if d.getVar('_BB_GIT_IN_LSREMOTE', False):
575 return ''
576 d.setVar('_BB_GIT_IN_LSREMOTE', '1')
577 try:
578 repourl = self._get_repo_url(ud)
579 cmd = "%s ls-remote %s %s" % \
580 (ud.basecmd, repourl, search)
581 if ud.proto.lower() != 'file':
582 bb.fetch2.check_network_access(d, cmd, repourl)
583 output = runfetchcmd(cmd, d, True)
584 if not output:
585 raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
586 finally:
587 d.delVar('_BB_GIT_IN_LSREMOTE')
588 return output
589
def _latest_revision(self, ud, d, name):
    """Resolve the unresolved revision for *name* to a commit hash via
    ls-remote against the upstream repository."""
    output = self._lsremote(ud, d, "")
    unresolved = ud.unresolvedrev[name]
    # An explicit ref (or usehead) is matched verbatim; otherwise try
    # the branch head first, then the tag.
    if unresolved[:5] == "refs/" or ud.usehead:
        head = unresolved
        tag = unresolved
    else:
        head = "refs/heads/%s" % unresolved
        tag = "refs/tags/%s" % unresolved
    # Tags of the form ^{} may not work, need to fallback to other form
    for candidate in [head, tag + "^{}", tag]:
        for line in output.strip().split('\n'):
            sha1, ref = line.split()
            if candidate == ref:
                return sha1
    raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" %
                               (unresolved, ud.host + ud.path))
609
def latest_versionstring(self, ud, d):
    """
    Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
    by searching through the tags output of ls-remote, comparing
    versions and returning the highest match.
    """
    pupver = ('', '')

    tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
    try:
        output = self._lsremote(ud, d, "refs/tags/*")
    except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
        bb.note("Could not list remote: %s" % str(e))
        return pupver

    verstring = ""
    revision = ""
    for line in output.split("\n"):
        if not line:
            break

        tag_head = line.split("/")[-1]
        # Skip pre-release / non-final tags
        if re.search(r"(alpha|beta|rc|final)+", tag_head):
            continue

        # Pull a version number out of the tag name
        match = tagregex.search(tag_head)
        if match is None:
            continue

        candidate = match.group('pver').replace("_", ".")

        # Keep only the highest version seen so far
        if verstring and bb.utils.vercmp(("0", candidate, ""), ("0", verstring, "")) < 0:
            continue

        verstring = candidate
        revision = line.split()[0]
        pupver = (verstring, revision)

    return pupver
653
654 def _build_revision(self, ud, d, name):
655 return ud.revisions[name]
656
def gitpkgv_revision(self, ud, d, name):
    """
    Return a sortable revision number by counting commits in the history
    Based on gitpkgv.bbclass in meta-openembedded
    """
    rev = self._build_revision(ud, d, name)
    localpath = ud.localpath
    # The commit count is cached per revision next to the clone so it is
    # only computed once.
    rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
    if not os.path.exists(localpath):
        commits = None
    else:
        if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
            # shlex.quote is the same function pipes.quote aliased;
            # pipes is deprecated and removed in Python 3.13.
            from shlex import quote
            commits = bb.fetch2.runfetchcmd(
                    "git rev-list %s -- | wc -l" % quote(rev),
                    d, quiet=True).strip().lstrip('0')
            if commits:
                # Use context managers so the cache file is always closed.
                with open(rev_file, "w") as f:
                    f.write("%d\n" % int(commits))
        else:
            with open(rev_file, "r") as f:
                commits = f.readline(128).strip()
    if commits:
        return False, "%s+%s" % (commits, rev[:7])
    else:
        return True, str(rev)
681
def checkstatus(self, fetch, ud, d):
    """Return True when the upstream repository is reachable."""
    try:
        self._lsremote(ud, d, "")
    except bb.fetch2.FetchError:
        return False
    return True