blob: f5bd816cedbe76cce7a5b9d1db31cd036d827528 [file] [log] [blame]
rjw1f884582022-01-06 17:20:42 +08001# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import imp
31import itertools
32import subprocess
33import glob
34import fnmatch
35import traceback
36import errno
37import signal
38import ast
39import collections
40import copy
41from subprocess import getstatusoutput
42from contextlib import contextmanager
43from ctypes import cdll
44
# Module-level logger shared by the helpers in this file.
logger = logging.getLogger("BitBake.Util")
# Recognised python source/bytecode/extension suffixes (e.g. '.py', '.so').
# NOTE(review): the 'imp' module is deprecated in Python 3; importlib.machinery
# provides equivalent suffix lists if this is ever modernised.
python_extensions = [e for e, _, _ in imp.get_suffixes()]
47
48
def clean_context():
    """Return a fresh default globals dict for executing python functions.

    Rebuilt on every call, so callers may mutate the returned dict freely
    without affecting other users.
    """
    ctx = {}
    ctx["os"] = os
    ctx["bb"] = bb
    ctx["time"] = time
    return ctx
55
def get_context():
    """Return the shared globals dict used by better_exec/better_eval."""
    return _context


def set_context(ctx):
    """Replace the shared execution context.

    Bug fix: the original assigned to a *local* variable named _context,
    so the module-level context was never actually replaced and
    get_context() kept returning the old dict. Declare the name global so
    the assignment is visible to get_context() and the exec helpers.
    """
    global _context
    _context = ctx
62
# Shared global namespace used when executing/evaluating python code via
# better_exec()/better_eval(); read through get_context(), replaced via
# set_context().
_context = clean_context()
65
class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found
    (e.g. an unsupported or missing comparison operator in a dependency
    string)."""
68
def explode_version(s):
    """Split a version string into a list of typed components.

    Each element is a (weight, value) tuple: (0, int) for a digit run,
    (1, str) for a letter run, (-1, '~') for a tilde (sorts before
    anything) and (2, ch) for any other character. The weights give the
    ordering used by vercmp_part().

    Fix: use raw strings for the regexps — '\\d' in a non-raw literal is an
    invalid escape sequence (DeprecationWarning since Python 3.6).
    """
    r = []
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            # '~' sorts before everything else (Debian-style pre-release marker).
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r
90
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    e = 0
    if ':' in s:
        # Epoch precedes the first colon.
        e = int(s.split(":")[0])
        s = s.split(":")[1]
    r = ""
    if '-' in s:
        # Revision follows the last dash.
        s, _sep, r = s.rpartition("-")
    v = s
    return (e, v, r)
104
def vercmp_part(a, b):
    """Compare two version-string fragments component-wise.

    Returns -1, 0 or 1 depending on whether a sorts before, equal to or
    after b under the explode_version() component ordering.
    """
    seq_a = explode_version(a)
    seq_b = explode_version(b)
    while True:
        head_a = seq_a.pop(0) if seq_a else (0, None)
        head_b = seq_b.pop(0) if seq_b else (0, None)
        if head_a == (0, None) and head_b == (0, None):
            # Both fragments exhausted without finding a difference.
            return 0
        (oa, ca) = head_a
        (ob, cb) = head_b
        if oa != ob:
            # Different component kinds: order by kind weight.
            return -1 if oa < ob else 1
        if ca is None:
            # a ran out first; shorter sorts earlier.
            return -1
        if cb is None:
            return 1
        if ca < cb:
            return -1
        if ca > cb:
            return 1
131
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) triples; returns <0, 0 or >0."""
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epoch dominates; empty/None epochs count as 0.
    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
142
def vercmp_string(a, b):
    """Compare two full version strings; returns <0, 0 or >0."""
    return vercmp(split_version(a), split_version(b))
147
def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    # Map every accepted operator spelling onto the sign of the comparison.
    outcomes = {
        '=': res == 0,
        '==': res == 0,
        '<=': res <= 0,
        '>=': res >= 0,
        '>': res > 0,
        '>>': res > 0,
        '<': res < 0,
        '<<': res < 0,
        '!=': res != 0,
    }
    try:
        return outcomes[op]
    except KeyError:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)
169
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Entering a parenthesised version constraint; skip its tokens.
            in_version = True
        if not in_version:
            deps.append(token)
        if in_version and token.endswith(')'):
            in_version = False
    return deps
193
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each key maps to a list of "OP VERSION" constraint strings (empty list
    when no constraint was given). With sort=True the result is ordered by
    dependency name.
    """
    r = collections.OrderedDict()
    # Commas are treated as plain whitespace separators.
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False      # just saw '(' — expecting a comparison operator next
    inversion = False  # inside '(...)' — accumulating the version text
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                # NOTE(review): the two lines below are unreachable after the
                # raise above; kept as-is pending an upstream decision.
                lastcmp = (i or "")
                i = ""
            # NOTE(review): str.strip() returns a new string — this call
            # discards its result, so it is a no-op; presumably 'i = i.strip()'
            # was intended. Left unchanged to preserve behaviour.
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
260
def explode_dep_versions(s):
    """Like explode_dep_versions2(), but each dependency maps to a single
    constraint string or None instead of a list. Warns (via bb.warn) and
    keeps only the first constraint when a dependency has several."""
    r = explode_dep_versions2(s)
    for dep in r:
        if not r[dep]:
            r[dep] = None
            continue
        if len(r[dep]) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values.  explode_dep_versions cannot cope with this." % (dep, s))
        r[dep] = r[dep][0]
    return r
271
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    result = []
    for dep, constraints in deps.items():
        if not constraints:
            result.append(dep)
        elif isinstance(constraints, list):
            for constraint in constraints:
                result.append(dep + " (" + constraint + ")")
        else:
            result.append(dep + " (" + constraints + ")")
    separator = ", " if commasep else " "
    return separator.join(result)
290
291def _print_trace(body, line):
292 """
293 Print the Environment of a Text Body
294 """
295 error = []
296 # print the environment of the method
297 min_line = max(1, line-4)
298 max_line = min(line + 4, len(body))
299 for i in range(min_line, max_line + 1):
300 if line == i:
301 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
302 else:
303 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
304 return error
305
def better_compile(text, file, realfile, mode = "exec", lineno = 0):
    """
    A better compile method. This method
    will print the offending lines.

    Results are memoised through bb.methodpool's compile cache. On failure
    the offending source is logged and bb.BBHandledException is raised so
    callers know the error was already reported.
    NOTE(review): the 'file' parameter is unused; 'realfile' is what gets
    attached to the code object.
    """
    try:
        cache = bb.methodpool.compile_cache(text)
        if cache:
            return cache
        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        text2 = "\n" * int(lineno) + text
        code = compile(text2, realfile, mode)
        bb.methodpool.compile_cache_add(text, code)
        return code
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s, line %s:\n" % (realfile, lineno))
        if hasattr(e, "lineno"):
            # Syntax errors carry a line number; show the surrounding code.
            error.append("The code lines resulting in this error were:")
            error.extend(_print_trace(body, e.lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        # Wrap so upstream handlers know the error was already displayed.
        e = bb.BBHandledException(e)
        raise e
338
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised from code executed by
    better_exec(): exception text, stack trace, and source context pulled
    from the compiled text (or from files on disk where possible)."""
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                    error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # Bug fix: advance along the traceback chain. The original
            # re-assigned 'tb.tb_next' here every iteration, so nexttb never
            # progressed and the loop only terminated via the level bound.
            nexttb = nexttb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))

        # If the exception is from spawning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Always emit whatever we managed to collect, even on internal failure.
        logger.error("\n".join(error))
391
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    'code' may be source text or an already-compiled code object. With
    pythonexception=True the raw exception propagates; otherwise errors are
    reported via _print_exception() and re-raised as bb.BBHandledException.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        # Not yet compiled — compile (with caching and error reporting).
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            # Never let the error reporter itself take the process down.
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e
419
def simple_exec(code, context):
    """Execute *code* with the shared context (see get_context()) as globals
    and *context* as locals — no error-reporting wrapper (cf. better_exec)."""
    exec(code, get_context(), context)
422
def better_eval(source, locals, extraglobals = None):
    """Evaluate *source* against the shared execution context.

    extraglobals, if given, are layered on top of a shallow copy of the
    context so the shared dict is never polluted.
    """
    ctx = get_context()
    if extraglobals:
        ctx = dict(ctx)
        ctx.update(extraglobals)
    return eval(source, ctx, locals)
430
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks.

    Acquires a lock (via bb.utils.lockfile) for every path in *files*.
    Bug fix: the original released the locks only on a clean exit — an
    exception inside the 'with' body leaked every lock. Release now happens
    in a finally block.
    """
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
443
@contextmanager
def timeout(seconds):
    """Arm SIGALRM for *seconds* around the managed block.

    The handler is a no-op, so the alarm merely interrupts a blocking
    syscall (presumably surfacing as EINTR to the caller) rather than
    killing the process. The previous handler is always restored.
    """
    def noop_handler(signum, frame):
        pass

    previous = signal.signal(signal.SIGALRM, noop_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)
457
def lockfile(name, shared=False, retry=True, block=False):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except OSError as e:
            if e.errno == errno.EACCES:
                # Bug fix: the format is "lock '%s', %s" (name first, then the
                # reason) — the original passed (e.strerror, name) swapped.
                logger.error("Unable to acquire lock '%s', %s",
                             name, e.strerror)
                sys.exit(1)
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
516
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    try:
        # If we had a shared lock, promote it to exclusive (non-blocking)
        # before unlinking so we never delete a lockfile another process
        # still holds. Any failure here is deliberately ignored.
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
530
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib, mmap

    digest = hashlib.md5()
    with open(filename, "rb") as f:
        try:
            # Memory-map the file and feed it to the digest in 8K chunks.
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                for chunk in iter(lambda: mm.read(8192), b''):
                    digest.update(chunk)
        except ValueError:
            # You can't mmap() an empty file; the digest of no data is the
            # correct answer there, so just fall through.
            pass
    return digest.hexdigest()
547
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib

    digest = hashlib.sha256()
    with open(filename, "rb") as f:
        # Iterating the file yields newline-delimited chunks; fine for hashing.
        for block in f:
            digest.update(block)
    return digest.hexdigest()
560
def sha1_file(filename):
    """
    Return the hex string representation of the SHA1 checksum of the filename
    """
    import hashlib

    digest = hashlib.sha1()
    with open(filename, "rb") as f:
        for block in f:
            digest.update(block)
    return digest.hexdigest()
572
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    exported = [
        'BB_TASKHASH',
        'HOME',
        'LOGNAME',
        'PATH',
        'PWD',
        'SHELL',
        'TERM',
        'USER',
        'LC_ALL',
        'BBSERVER',
    ]
    return exported
588
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    # BitBake's own control variables, plus everything that is also exported.
    return [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ] + preserved_envvars_exported()
598
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict of the variables that were removed (name -> old value) so
    callers could restore them if needed.
    """

    removed_vars = {}
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        del os.environ[key]

    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"

    if removed_vars:
        # NOTE: bb.msg-style logger — the first argument is a debug level.
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
623
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # Preserve everything as-is.
        return os.environ.keys()
    if 'BB_ENV_WHITELIST' in os.environ:
        # Explicit whitelist replaces the defaults entirely.
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.append('BB_ENV_WHITELIST')
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.append('BB_ENV_EXTRAWHITE')
    return approved
642
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        # User asked us to leave the environment alone.
        return {}
    return filter_environment(approved_variables())
653
def empty_environment():
    """
    Remove all variables from the environment.
    """
    for key in list(os.environ.keys()):
        # Clear both the C-level environment and Python's copy.
        os.unsetenv(key)
        del os.environ[key]
661
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every variable in the datastore 'd' flagged with 'export' is copied into
    os.environ (None values become the empty string).
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""
671
672def _check_unsafe_delete_path(path):
673 """
674 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
675 the caller should raise an exception with an appropriate message.
676 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
677 with potentially disastrous results.
678 """
679 extra = ''
680 # HOME might not be /home/something, so in case we can get it, check against it
681 homedir = os.environ.get('HOME', '')
682 if homedir:
683 extra = '|%s' % homedir
684 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
685 return True
686 return False
687
def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf

    'path' may be a glob pattern; matching nothing is not an error.
    """
    if not path:
        return
    if recurse:
        for name in glob.glob(path):
            # Safety net: refuse to recursively delete /, /home, $HOME etc.
            # NOTE(review): checks the pattern 'path', not each match 'name';
            # presumably intentional upstream — confirm before changing.
            if _check_unsafe_delete_path(path):
                raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but its too slow
        subprocess.check_call(['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # Already gone is fine ('rm -f' semantics).
            if exc.errno != errno.ENOENT:
                raise
705
def prunedir(topdir):
    """Delete everything reachable from the directory named in 'topdir',
    then the directory itself.

    CAUTION: This is dangerous! Obviously-unsafe paths (/, /home, $HOME ...)
    are rejected via _check_unsafe_delete_path().
    """
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    # Walk bottom-up so each directory is empty by the time it is rmdir'd.
    for root, dirs, files in os.walk(topdir, topdown = False):
        for name in files:
            os.remove(os.path.join(root, name))
        for name in dirs:
            # Do not follow directory symlinks: remove the link itself.
            if os.path.islink(os.path.join(root, name)):
                os.remove(os.path.join(root, name))
            else:
                os.rmdir(os.path.join(root, name))
    os.rmdir(topdir)
720
721#
722# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
723# but thats possibly insane and suffixes is probably going to be small
724#
def prune_suffix(var, suffixes, d):
    """Remove the first matching suffix in 'suffixes' from the end of 'var'.

    Bug fix: the original used var.replace(suffix, ""), which removed
    *every* occurrence of the suffix anywhere in the string (e.g.
    "a-native-b-native" lost both "-native" substrings), not just the
    trailing one. Slice off only the trailing match instead.
    'd' (the datastore) is unused but kept for interface compatibility.
    """
    for suffix in suffixes:
        # Guard against the empty suffix: endswith("") is always True and
        # var[:-0] would wrongly yield "".
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
732
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """
    try:
        os.makedirs(directory)
    except FileExistsError:
        # EEXIST is fine; any other OSError propagates as before.
        pass
743
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    Returns the resulting mtime (or a stat result for symlinks) on success,
    None on failure. 'sstat' may be passed to avoid re-statting src.
    NOTE(review): errors go to stdout via print() rather than the module
    logger, and several handlers use bare/broad excepts — kept as-is.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; stat its directory for the device check below.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink at dest rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest, then remove src.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest

    # Same device: a rename is atomic and preserves everything.
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        # Cross-device: copy to a temp name, rename into place, remove src.
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
836
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems.  Returns true on success and false on failure.

    On success returns the resulting mtime (or a stat result for symlinks);
    on failure returns False. 'sstat' may be passed to avoid re-statting src.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist yet.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink at dest rather than writing through it.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest (src is kept).
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the source's original mode/timestamps if we widened them.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
916
def break_hardlinks(src, sstat = None):
    """
    Ensures src is the only hardlink to this file.  Other hardlinks,
    if any, are not affected (other than in their st_nlink value, of
    course).  Returns true on success and false on failure.

    """
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
        return False
    # Already the sole link: nothing to do.
    if sstat[stat.ST_NLINK] == 1:
        return True
    # Copying the file onto itself rewrites it under a fresh inode,
    # detaching it from the other hardlinks.
    return copyfile(src, src, sstat=sstat)
933
def which(path, item, direction = 0, history = False, executable=False):
    """
    Locate 'item' in the list of paths 'path' (colon separated string like $PATH).
    If 'direction' is non-zero then the list is reversed.
    If 'history' is True then the list of candidates also returned as result,history.
    If 'executable' is True then the candidate has to be an executable file,
    otherwise the candidate simply has to exist.
    """

    if executable:
        def is_candidate(p):
            return os.path.isfile(p) and os.access(p, os.X_OK)
    else:
        def is_candidate(p):
            return os.path.exists(p)

    hist = []
    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    for directory in entries:
        candidate = os.path.join(directory, item)
        hist.append(candidate)
        if is_candidate(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, hist) if history else candidate

    return ("", hist) if history else ""
966
def to_boolean(string, default=None):
    """Convert a common true/false spelling to a bool.

    Empty/None input yields 'default'; unrecognised spellings raise
    ValueError.
    """
    if not string:
        return default

    value = string.lower()
    if value in ("y", "yes", "1", "true"):
        return True
    if value in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
978
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Check if a variable contains all the values specified.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    truevalue -- the value to return if checkvalues is a subset of variable.

    falsevalue -- the value to return if variable is empty or if checkvalues is
    not a subset of variable.

    d -- the data store.
    """

    val = d.getVar(variable)
    if not val:
        return falsevalue
    have = set(val.split())
    if isinstance(checkvalues, str):
        want = set(checkvalues.split())
    else:
        want = set(checkvalues)
    return truevalue if want.issubset(have) else falsevalue
1009
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Like contains(), but returns truevalue if *any* of checkvalues is
    present in the (whitespace-split) value of 'variable'."""
    val = d.getVar(variable)
    if not val:
        return falsevalue
    have = set(val.split())
    want = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    return truevalue if have.intersection(want) else falsevalue
1022
def filter(variable, checkvalues, d):
    """Return all words in the variable that are present in the checkvalues.

    Arguments:

    variable -- the variable name. This will be fetched and expanded (using
    d.getVar(variable)) and then split into a set().

    checkvalues -- if this is a string it is split on whitespace into a set(),
    otherwise coerced directly into a set().

    d -- the data store.
    """

    val = d.getVar(variable)
    if not val:
        return ''
    words = set(val.split())
    wanted = set(checkvalues.split()) if isinstance(checkvalues, str) else set(checkvalues)
    # Sorted for a deterministic result regardless of set iteration order.
    return ' '.join(sorted(wanted & words))
1046
def cpu_count():
    """Return the number of CPUs in the system, as reported by
    multiprocessing.cpu_count()."""
    return multiprocessing.cpu_count()
1049
def nonblockingfd(fd):
    """Switch the given file descriptor to non-blocking mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
1052
def process_profilelog(fn, pout = None):
    """Render cProfile dump(s) into a readable text report.

    Either call with a list of filenames and set pout, or with a single
    filename and an optional pout (defaults to '<fn>.processed').
    """
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')
   
    import pstats
    if isinstance(fn, list):
        p = pstats.Stats(*fn, stream=pout)
    else:
        p = pstats.Stats(fn, stream=pout)
    # Two views: hottest functions by own time, then by cumulative time.
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    p.sort_stats('cumulative')
    p.print_stats()

    pout.flush()
    pout.close()
1072
1073#
1074# Was present to work around multiprocessing pool bugs in python < 2.7.3
1075#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose result iterators remain
    interruptible by signals (SIGINT/SIGTERM); see comment below."""

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            # An effectively-infinite timeout keeps the wait interruptible
            # without changing observable behaviour.
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # NOTE: patches IMapIterator.next process-wide, not just for this pool.
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
1090
def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Build a namespace that holds the argument values under generated
    # names, plus a call expression referring to those names, then run the
    # call through better_exec so errors are reported the bitbake way.
    context = {}
    funcargs = []
    # Positional arguments become arg_1, arg_2, ...
    for aidx, argvalue in enumerate(args, start=1):
        argname = 'arg_%s' % aidx
        context[argname] = argvalue
        funcargs.append(argname)
    # Keyword arguments keep their own names
    context.update(kwargs)
    funcargs.extend('%s=%s' % (kw, kw) for kw in kwargs)
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
1113
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    # Build one compiled regex per requested name: one form matches
    # shell-style function definitions ("name() {"), the other matches
    # variable assignments with any bitbake operator and captures the
    # opening quote character.
    var_res = {}
    if match_overrides:
        override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    # Invoked once the full value of a matched variable/function has been
    # accumulated; calls varfunc and emits the (possibly modified) setting.
    # Returns True if the emitted output differs from the original lines.
    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, int):
                if indent == -1:
                    # Indent to the column just past the opening quote
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Accumulating a (possibly multi-line) value for in_var
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Drop the trailing line-continuation backslash
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # A '}' only ends the function body once braces balance
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # var_end is the quote character that opened the value
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line value: handle it immediately
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
1300
1301
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as mf:
        updated, newlines = edit_metadata(mf, variables, varfunc)
    # Avoid rewriting (and changing the mtime of) an unchanged file
    if updated:
        with open(meta_file, 'w') as mf:
            mf.writelines(newlines)
    return updated
1316
1317
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    # Normalise a path for comparison; only expand '~' when HOME is an
    # approved variable
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    # Coerce None / a single path / a list of paths into a list
    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    # First pass: only record the operators used and the current layer set
    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    # Second pass: apply the removals/additions and the edit callback
    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    # removelayer may contain fnmatch wildcards
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Nothing left in this += assignment; drop the line entirely
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
1443
1444
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    # Map each declared collection to its BBFILE_PATTERN regex
    patterns = {coll: d.getVar('BBFILE_PATTERN_%s' % coll) or ''
                for coll in (d.getVar('BBFILE_COLLECTIONS') or '').split()}

    def find_collection(path):
        # Use longest path so we handle nested layers
        best = None
        bestlen = 0
        for coll, regex in patterns.items():
            if len(regex) > bestlen and re.match(regex, path):
                bestlen = len(regex)
                best = coll
        return best

    result = None
    matched = False
    for entry in (d.getVar('BBFILES') or '').split():
        if fnmatch.fnmatch(filename, entry):
            matched = True
            result = find_collection(entry)

    if not matched:
        # Probably a bbclass
        result = find_collection(filename)

    return result
1475
1476
# Constant taken from http://linux.die.net/include/linux/prctl.h
# prctl(2) option: ask the kernel to deliver a signal to this process
# when its parent dies.
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    """Raised when the libc prctl() call returns a non-zero result."""
    pass
1482
def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    # http://linux.die.net/man/2/prctl
    signum = getattr(signal, signame)
    ret = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if ret:
        raise PrCtlError('prctl failed with error code %s' % ret)
1492
1493#
1494# Manually call the ioprio syscall. We could depend on other libs like psutil
1495# however this gets us enough of what we need to bitbake for now without the
1496# dependency
1497#
1498_unamearch = os.uname()[4]
1499IOPRIO_WHO_PROCESS = 1
1500IOPRIO_CLASS_SHIFT = 13
1501
def ioprio_set(who, cls, value):
    """Set the IO priority of a process via a raw ioprio_set syscall.

    Only x86 and x86_64 syscall numbers are known; on other architectures
    a warning is emitted and nothing is changed.
    Raises ValueError if the syscall fails.
    """
    # Pick the per-architecture syscall number
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289
    else:
        NR_ioprio_set = None

    if not NR_ioprio_set:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
        return

    ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
    rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
    if rc != 0:
        raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
1516
def set_process_name(name):
    """Best-effort: set the process name (as shown by e.g. ps) via prctl.

    Any failure (missing libc, unsupported platform) is silently ignored -
    this is a debugging nicety, not essential.
    """
    from ctypes import cdll, byref, create_string_buffer
    # This is nice to have for debugging, not essential
    try:
        libc = cdll.LoadLibrary('libc.so.6')
        buf = create_string_buffer(bytes(name, 'utf-8'))
        # 15 == PR_SET_NAME from linux/prctl.h
        libc.prctl(15, byref(buf), 0, 0, 0)
    except Exception:
        # Narrowed from a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit; best-effort only covers real errors.
        pass
1526
# export common proxies variables from datastore to environment
def export_proxies(d):
    """Copy the common proxy variables from the datastore into os.environ.

    Variables already present in the environment are left alone.
    Returns True if any proxy variable is set (either pre-existing in the
    environment or exported from the datastore), False otherwise.
    """
    import os

    variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                    'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
                'GIT_PROXY_COMMAND']
    exported = False

    for var in variables:
        if var in os.environ:
            exported = True
            continue
        value = d.getVar(var)
        if value is not None:
            os.environ[var] = value
            exported = True

    return exported
1546
1547
def load_plugins(logger, plugins, pluginpath):
    """Load every plugin module found in pluginpath and append it to plugins.

    If a module defines plugin_init(plugins), it is called and its return
    value (or the module itself when it returns None) is appended instead
    of the bare module.
    """
    def load_plugin(name):
        logger.debug(1, 'Loading plugin %s' % name)
        fp, pathname, description = imp.find_module(name, [pluginpath])
        try:
            return imp.load_module(name, fp, pathname, description)
        finally:
            # find_module may return fp=None for packages
            if fp:
                fp.close()

    logger.debug(1, 'Loading plugins from %s...' % pluginpath)

    # Collect candidate files for every recognised python extension and
    # dedupe on the module name
    candidates = itertools.chain.from_iterable(
        glob.glob(os.path.join(pluginpath, '*' + ext))
        for ext in python_extensions)
    modnames = {os.path.splitext(os.path.basename(path))[0] for path in candidates}
    for modname in modnames:
        if modname == '__init__':
            continue
        plugin = load_plugin(modname)
        if hasattr(plugin, 'plugin_init'):
            obj = plugin.plugin_init(plugins)
            plugins.append(obj or plugin)
        else:
            plugins.append(plugin)
1572
1573
class LogCatcher(logging.Handler):
    """Logging handler for collecting logged messages so you can check them later"""

    def __init__(self):
        # Collected (already formatted) message strings
        self.messages = []
        super().__init__(logging.WARNING)

    def emit(self, record):
        self.messages.append(bb.build.logformatter.format(record))

    def contains(self, message):
        """Return whether an exactly-matching message has been collected."""
        return message in self.messages