blob: 78658a1683e19accfa714c51995103e1f1446c61 [file] [log] [blame]
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004  Chris Larson
# Copyright (C) 2003, 2004  Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005        Holger Hans Peter Freyther
# Copyright (C) 2005        ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

25
26import sys, os, glob, os.path, re, time
27import atexit
28import itertools
29import logging
30import multiprocessing
31import sre_constants
32import threading
33from io import StringIO, UnsupportedOperation
34from contextlib import closing
35from functools import wraps
36from collections import defaultdict, namedtuple
37import bb, bb.exceptions, bb.command
38from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
39import queue
40import signal
41import subprocess
42import errno
43import prserv.serv
44import pyinotify
45import json
46import pickle
47import codecs
48
49logger = logging.getLogger("BitBake")
50collectlog = logging.getLogger("BitBake.Collection")
51buildlog = logging.getLogger("BitBake.Build")
52parselog = logging.getLogger("BitBake.Parsing")
53providerlog = logging.getLogger("BitBake.Provider")
54
55class NoSpecificMatch(bb.BBHandledException):
56 """
57 Exception raised when no or multiple file matches are found
58 """
59
class NothingToBuild(Exception):
    """
    Raised when the requested targets leave nothing for the cooker to build.
    """
64
class CollectionError(bb.BBHandledException):
    """
    Raised when the layer (collection) configuration is invalid.
    """
69
class state:
    """Enumeration of cooker lifecycle states."""
    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))

    @classmethod
    def get_name(cls, code):
        """Return the symbolic name of a numeric state code, or raise ValueError."""
        # dir() is sorted, so the first hit matches the original scan order;
        # only the seven state constants are int-valued attributes.
        matches = [name for name in dir(cls)
                   if type(getattr(cls, name)) is type(cls.initial) and getattr(cls, name) == code]
        if matches:
            return matches[0]
        raise ValueError("Invalid status code: %s" % code)
80
81
class SkippedPackage:
    """Lightweight record of a recipe that was skipped during parsing."""

    def __init__(self, info=None, reason=None):
        self.pn = None
        self.skipreason = None
        self.provides = None
        self.rprovides = None

        if info:
            # Copy the salient fields across from the recipe info object.
            for attr in ("pn", "skipreason", "provides", "rprovides"):
                setattr(self, attr, getattr(info, attr))
        elif reason:
            # No info object: record just the free-form reason string.
            self.skipreason = reason
96
97
class CookerFeatures(object):
    """Set of optional cooker capabilities a UI can request."""

    # The known feature identifiers; anything else is silently ignored.
    _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))

    def __init__(self):
        self._features = set()

    def setFeature(self, f):
        """Enable feature f; requests for unknown features are ignored."""
        if f in CookerFeatures._feature_list:
            self._features.add(f)

    def __contains__(self, f):
        return f in self._features

    def __iter__(self):
        return iter(self._features)

    def __next__(self):
        # NOTE(review): self._features is a set, not an iterator, so calling
        # next() directly on a CookerFeatures instance raises TypeError.
        # Iteration works through __iter__ above; behaviour preserved as-is.
        return next(self._features)
118
119
120class EventWriter:
121 def __init__(self, cooker, eventfile):
122 self.file_inited = None
123 self.cooker = cooker
124 self.eventfile = eventfile
125 self.event_queue = []
126
127 def write_event(self, event):
128 with open(self.eventfile, "a") as f:
129 try:
130 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
131 f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
132 "vars": str_event}))
133 except Exception as err:
134 import traceback
135 print(err, traceback.format_exc())
136
137 def send(self, event):
138 if self.file_inited:
139 # we have the file, just write the event
140 self.write_event(event)
141 else:
142 # init on bb.event.BuildStarted
143 name = "%s.%s" % (event.__module__, event.__class__.__name__)
144 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
145 with open(self.eventfile, "w") as f:
146 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
147
148 self.file_inited = True
149
150 # write pending events
151 for evt in self.event_queue:
152 self.write_event(evt)
153
154 # also write the current event
155 self.write_event(event)
156 else:
157 # queue all events until the file is inited
158 self.event_queue.append(event)
159
160#============================================================================#
161# BBCooker
162#============================================================================#
163class BBCooker:
164 """
165 Manages one bitbake build run
166 """
167
168 def __init__(self, configuration, featureSet=None):
169 self.recipecaches = None
170 self.skiplist = {}
171 self.featureset = CookerFeatures()
172 if featureSet:
173 for f in featureSet:
174 self.featureset.setFeature(f)
175
176 self.configuration = configuration
177
178 bb.debug(1, "BBCooker starting %s" % time.time())
179 sys.stdout.flush()
180
181 self.configwatcher = pyinotify.WatchManager()
182 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
183 sys.stdout.flush()
184
185 self.configwatcher.bbseen = []
186 self.configwatcher.bbwatchedfiles = []
187 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
188 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
189 sys.stdout.flush()
190 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
191 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
192 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
193 self.watcher = pyinotify.WatchManager()
194 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
195 sys.stdout.flush()
196 self.watcher.bbseen = []
197 self.watcher.bbwatchedfiles = []
198 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
199
200 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
201 sys.stdout.flush()
202
203 # If being called by something like tinfoil, we need to clean cached data
204 # which may now be invalid
205 bb.parse.clear_cache()
206 bb.parse.BBHandler.cached_statements = {}
207
208 self.ui_cmdline = None
209
210 self.initConfigurationData()
211
212 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
213 sys.stdout.flush()
214
215 # we log all events to a file if so directed
216 if self.configuration.writeeventlog:
217 # register the log file writer as UI Handler
218 writer = EventWriter(self, self.configuration.writeeventlog)
219 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
220 bb.event.register_UIHhandler(EventLogWriteHandler(writer))
221
222 self.inotify_modified_files = []
223
224 def _process_inotify_updates(server, cooker, abort):
225 cooker.process_inotify_updates()
226 return 1.0
227
228 self.configuration.server_register_idlecallback(_process_inotify_updates, self)
229
230 # TOSTOP must not be set or our children will hang when they output
231 try:
232 fd = sys.stdout.fileno()
233 if os.isatty(fd):
234 import termios
235 tcattr = termios.tcgetattr(fd)
236 if tcattr[3] & termios.TOSTOP:
237 buildlog.info("The terminal had the TOSTOP bit set, clearing...")
238 tcattr[3] = tcattr[3] & ~termios.TOSTOP
239 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
240 except UnsupportedOperation:
241 pass
242
243 self.command = bb.command.Command(self)
244 self.state = state.initial
245
246 self.parser = None
247
248 signal.signal(signal.SIGTERM, self.sigterm_exception)
249 # Let SIGHUP exit as SIGTERM
250 signal.signal(signal.SIGHUP, self.sigterm_exception)
251
252 bb.debug(1, "BBCooker startup complete %s" % time.time())
253 sys.stdout.flush()
254
255 def process_inotify_updates(self):
256 for n in [self.confignotifier, self.notifier]:
257 if n.check_events(timeout=0):
258 # read notified events and enqeue them
259 n.read_events()
260 n.process_events()
261
262 def config_notifications(self, event):
263 if event.maskname == "IN_Q_OVERFLOW":
264 bb.warn("inotify event queue overflowed, invalidating caches.")
265 self.parsecache_valid = False
266 self.baseconfig_valid = False
267 bb.parse.clear_cache()
268 return
269 if not event.pathname in self.configwatcher.bbwatchedfiles:
270 return
271 if not event.pathname in self.inotify_modified_files:
272 self.inotify_modified_files.append(event.pathname)
273 self.baseconfig_valid = False
274
275 def notifications(self, event):
276 if event.maskname == "IN_Q_OVERFLOW":
277 bb.warn("inotify event queue overflowed, invalidating caches.")
278 self.parsecache_valid = False
279 bb.parse.clear_cache()
280 return
281 if event.pathname.endswith("bitbake-cookerdaemon.log") \
282 or event.pathname.endswith("bitbake.lock"):
283 return
284 if not event.pathname in self.inotify_modified_files:
285 self.inotify_modified_files.append(event.pathname)
286 self.parsecache_valid = False
287
288 def add_filewatch(self, deps, watcher=None, dirs=False):
289 if not watcher:
290 watcher = self.watcher
291 for i in deps:
292 watcher.bbwatchedfiles.append(i[0])
293 if dirs:
294 f = i[0]
295 else:
296 f = os.path.dirname(i[0])
297 if f in watcher.bbseen:
298 continue
299 watcher.bbseen.append(f)
300 watchtarget = None
301 while True:
302 # We try and add watches for files that don't exist but if they did, would influence
303 # the parser. The parent directory of these files may not exist, in which case we need
304 # to watch any parent that does exist for changes.
305 try:
306 watcher.add_watch(f, self.watchmask, quiet=False)
307 if watchtarget:
308 watcher.bbwatchedfiles.append(watchtarget)
309 break
310 except pyinotify.WatchManagerError as e:
311 if 'ENOENT' in str(e):
312 watchtarget = f
313 f = os.path.dirname(f)
314 if f in watcher.bbseen:
315 break
316 watcher.bbseen.append(f)
317 continue
318 if 'ENOSPC' in str(e):
319 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
320 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
321 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
322 providerlog.error("Root privilege is required to modify max_user_watches.")
323 raise
324
325 def sigterm_exception(self, signum, stackframe):
326 if signum == signal.SIGTERM:
327 bb.warn("Cooker received SIGTERM, shutting down...")
328 elif signum == signal.SIGHUP:
329 bb.warn("Cooker received SIGHUP, shutting down...")
330 self.state = state.forceshutdown
331
332 def setFeatures(self, features):
333 # we only accept a new feature set if we're in state initial, so we can reset without problems
334 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
335 raise Exception("Illegal state for feature set change")
336 original_featureset = list(self.featureset)
337 for feature in features:
338 self.featureset.setFeature(feature)
339 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
340 if (original_featureset != list(self.featureset)) and self.state != state.error:
341 self.reset()
342
343 def initConfigurationData(self):
344
345 self.state = state.initial
346 self.caches_array = []
347
348 # Need to preserve BB_CONSOLELOG over resets
349 consolelog = None
350 if hasattr(self, "data"):
351 consolelog = self.data.getVar("BB_CONSOLELOG")
352
353 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
354 self.enableDataTracking()
355
356 all_extra_cache_names = []
357 # We hardcode all known cache types in a single place, here.
358 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
359 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo")
360
361 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names
362
363 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
364 # This is the entry point, no further check needed!
365 for var in caches_name_array:
366 try:
367 module_name, cache_name = var.split(':')
368 module = __import__(module_name, fromlist=(cache_name,))
369 self.caches_array.append(getattr(module, cache_name))
370 except ImportError as exc:
371 logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
372 sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)
373
374 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
375 self.databuilder.parseBaseConfiguration()
376 self.data = self.databuilder.data
377 self.data_hash = self.databuilder.data_hash
378 self.extraconfigdata = {}
379
380 if consolelog:
381 self.data.setVar("BB_CONSOLELOG", consolelog)
382
383 self.data.setVar('BB_CMDLINE', self.ui_cmdline)
384
385 #
386 # Copy of the data store which has been expanded.
387 # Used for firing events and accessing variables where expansion needs to be accounted for
388 #
389 bb.parse.init_parser(self.data)
390
391 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
392 self.disableDataTracking()
393
394 for mc in self.databuilder.mcdata.values():
395 mc.renameVar("__depends", "__base_depends")
396 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
397
398 self.baseconfig_valid = True
399 self.parsecache_valid = False
400
401 def handlePRServ(self):
402 # Setup a PR Server based on the new configuration
403 try:
404 self.prhost = prserv.serv.auto_start(self.data)
405 except prserv.serv.PRServiceConfigError as e:
406 bb.fatal("Unable to start PR Server, exitting")
407
408 def enableDataTracking(self):
409 self.configuration.tracking = True
410 if hasattr(self, "data"):
411 self.data.enableTracking()
412
413 def disableDataTracking(self):
414 self.configuration.tracking = False
415 if hasattr(self, "data"):
416 self.data.disableTracking()
417
418 def parseConfiguration(self):
419 # Set log file verbosity
420 verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
421 if verboselogs:
422 bb.msg.loggerVerboseLogs = True
423
424 # Change nice level if we're asked to
425 nice = self.data.getVar("BB_NICE_LEVEL")
426 if nice:
427 curnice = os.nice(0)
428 nice = int(nice) - curnice
429 buildlog.verbose("Renice to %s " % os.nice(nice))
430
431 if self.recipecaches:
432 del self.recipecaches
433 self.multiconfigs = self.databuilder.mcdata.keys()
434 self.recipecaches = {}
435 for mc in self.multiconfigs:
436 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
437
438 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
439
440 self.parsecache_valid = False
441
442 def updateConfigOpts(self, options, environment, cmdline):
443 self.ui_cmdline = cmdline
444 clean = True
445 for o in options:
446 if o in ['prefile', 'postfile']:
447 # Only these options may require a reparse
448 try:
449 if getattr(self.configuration, o) == options[o]:
450 # Value is the same, no need to mark dirty
451 continue
452 except AttributeError:
453 pass
454 logger.debug(1, "Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
455 print("Marking as dirty due to '%s' option change to '%s'" % (o, options[o]))
456 clean = False
457 setattr(self.configuration, o, options[o])
458 for k in bb.utils.approved_variables():
459 if k in environment and k not in self.configuration.env:
460 logger.debug(1, "Updating new environment variable %s to %s" % (k, environment[k]))
461 self.configuration.env[k] = environment[k]
462 clean = False
463 if k in self.configuration.env and k not in environment:
464 logger.debug(1, "Updating environment variable %s (deleted)" % (k))
465 del self.configuration.env[k]
466 clean = False
467 if k not in self.configuration.env and k not in environment:
468 continue
469 if environment[k] != self.configuration.env[k]:
470 logger.debug(1, "Updating environment variable %s from %s to %s" % (k, self.configuration.env[k], environment[k]))
471 self.configuration.env[k] = environment[k]
472 clean = False
473 if not clean:
474 logger.debug(1, "Base environment change, triggering reparse")
475 self.reset()
476
477 def runCommands(self, server, data, abort):
478 """
479 Run any queued asynchronous command
480 This is done by the idle handler so it runs in true context rather than
481 tied to any UI.
482 """
483
484 return self.command.runAsyncCommand()
485
486 def showVersions(self):
487
488 (latest_versions, preferred_versions) = self.findProviders()
489
490 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
491 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
492
493 for p in sorted(self.recipecaches[''].pkg_pn):
494 pref = preferred_versions[p]
495 latest = latest_versions[p]
496
497 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
498 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
499
500 if pref == latest:
501 prefstr = ""
502
503 logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
504
505 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
506 """
507 Show the outer or per-recipe environment
508 """
509 fn = None
510 envdata = None
511 if not pkgs_to_build:
512 pkgs_to_build = []
513
514 orig_tracking = self.configuration.tracking
515 if not orig_tracking:
516 self.enableDataTracking()
517 self.reset()
518
519
520 if buildfile:
521 # Parse the configuration here. We need to do it explicitly here since
522 # this showEnvironment() code path doesn't use the cache
523 self.parseConfiguration()
524
525 fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
526 fn = self.matchFile(fn)
527 fn = bb.cache.realfn2virtual(fn, cls, mc)
528 elif len(pkgs_to_build) == 1:
529 ignore = self.data.getVar("ASSUME_PROVIDED") or ""
530 if pkgs_to_build[0] in set(ignore.split()):
531 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
532
533 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
534
535 mc = runlist[0][0]
536 fn = runlist[0][3]
537 else:
538 envdata = self.data
539 data.expandKeys(envdata)
540 parse.ast.runAnonFuncs(envdata)
541
542 if fn:
543 try:
544 bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)
545 envdata = bb_cache.loadDataFull(fn, self.collection.get_file_appends(fn))
546 except Exception as e:
547 parselog.exception("Unable to read %s", fn)
548 raise
549
550 # Display history
551 with closing(StringIO()) as env:
552 self.data.inchistory.emit(env)
553 logger.plain(env.getvalue())
554
555 # emit variables and shell functions
556 with closing(StringIO()) as env:
557 data.emit_env(env, envdata, True)
558 logger.plain(env.getvalue())
559
560 # emit the metadata which isnt valid shell
561 for e in sorted(envdata.keys()):
562 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
563 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
564
565 if not orig_tracking:
566 self.disableDataTracking()
567 self.reset()
568
569 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
570 """
571 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
572 """
573 bb.event.fire(bb.event.TreeDataPreparationStarted(), self.data)
574
575 # A task of None means use the default task
576 if task is None:
577 task = self.configuration.cmd
578 if not task.startswith("do_"):
579 task = "do_%s" % task
580
581 targetlist = self.checkPackages(pkgs_to_build, task)
582 fulltargetlist = []
583 defaulttask_implicit = ''
584 defaulttask_explicit = False
585 wildcard = False
586
587 # Wild card expansion:
588 # Replace string such as "multiconfig:*:bash"
589 # into "multiconfig:A:bash multiconfig:B:bash bash"
590 for k in targetlist:
591 if k.startswith("multiconfig:"):
592 if wildcard:
593 bb.fatal('multiconfig conflict')
594 if k.split(":")[1] == "*":
595 wildcard = True
596 for mc in self.multiconfigs:
597 if mc:
598 fulltargetlist.append(k.replace('*', mc))
599 # implicit default task
600 else:
601 defaulttask_implicit = k.split(":")[2]
602 else:
603 fulltargetlist.append(k)
604 else:
605 defaulttask_explicit = True
606 fulltargetlist.append(k)
607
608 if not defaulttask_explicit and defaulttask_implicit != '':
609 fulltargetlist.append(defaulttask_implicit)
610
611 bb.debug(1,"Target list: %s" % (str(fulltargetlist)))
612 taskdata = {}
613 localdata = {}
614
615 for mc in self.multiconfigs:
616 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
617 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
618 bb.data.expandKeys(localdata[mc])
619
620 current = 0
621 runlist = []
622 for k in fulltargetlist:
623 mc = ""
624 if k.startswith("multiconfig:"):
625 mc = k.split(":")[1]
626 k = ":".join(k.split(":")[2:])
627 ktask = task
628 if ":do_" in k:
629 k2 = k.split(":do_")
630 k = k2[0]
631 ktask = k2[1]
632 taskdata[mc].add_provider(localdata[mc], self.recipecaches[mc], k)
633 current += 1
634 if not ktask.startswith("do_"):
635 ktask = "do_%s" % ktask
636 if k not in taskdata[mc].build_targets or not taskdata[mc].build_targets[k]:
637 # e.g. in ASSUME_PROVIDED
638 continue
639 fn = taskdata[mc].build_targets[k][0]
640 runlist.append([mc, k, ktask, fn])
641 bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
642
643
644 # No need to do check providers if there are no mcdeps or not an mc build
645 if len(self.multiconfigs) > 1:
646 seen = set()
647 new = True
648 # Make sure we can provide the multiconfig dependency
649 while new:
650 mcdeps = set()
651 # Add unresolved first, so we can get multiconfig indirect dependencies on time
652 for mc in self.multiconfigs:
653 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
654 mcdeps |= set(taskdata[mc].get_mcdepends())
655 new = False
656 for mc in self.multiconfigs:
657 for k in mcdeps:
658 if k in seen:
659 continue
660 l = k.split(':')
661 depmc = l[2]
662 if depmc not in self.multiconfigs:
663 bb.fatal("Multiconfig dependency %s depends on nonexistent mc configuration %s" % (k,depmc))
664 else:
665 logger.debug(1, "Adding providers for multiconfig dependency %s" % l[3])
666 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
667 seen.add(k)
668 new = True
669
670 for mc in self.multiconfigs:
671 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
672
673 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
674 return taskdata, runlist
675
676 def prepareTreeData(self, pkgs_to_build, task):
677 """
678 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
679 """
680
681 # We set abort to False here to prevent unbuildable targets raising
682 # an exception when we're just generating data
683 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
684
685 return runlist, taskdata
686
687 ######## WARNING : this function requires cache_extra to be enabled ########
688
689 def generateTaskDepTreeData(self, pkgs_to_build, task):
690 """
691 Create a dependency graph of pkgs_to_build including reverse dependency
692 information.
693 """
694 if not task.startswith("do_"):
695 task = "do_%s" % task
696
697 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
698 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
699 rq.rqdata.prepare()
700 return self.buildDependTree(rq, taskdata)
701
702 @staticmethod
703 def add_mc_prefix(mc, pn):
704 if mc:
705 return "multiconfig:%s:%s" % (mc, pn)
706 return pn
707
708 def buildDependTree(self, rq, taskdata):
709 seen_fns = []
710 depend_tree = {}
711 depend_tree["depends"] = {}
712 depend_tree["tdepends"] = {}
713 depend_tree["pn"] = {}
714 depend_tree["rdepends-pn"] = {}
715 depend_tree["packages"] = {}
716 depend_tree["rdepends-pkg"] = {}
717 depend_tree["rrecs-pkg"] = {}
718 depend_tree['providermap'] = {}
719 depend_tree["layer-priorities"] = self.bbfile_config_priorities
720
721 for mc in taskdata:
722 for name, fn in list(taskdata[mc].get_providermap().items()):
723 pn = self.recipecaches[mc].pkg_fn[fn]
724 pn = self.add_mc_prefix(mc, pn)
725 if name != pn:
726 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[fn]
727 depend_tree['providermap'][name] = (pn, version)
728
729 for tid in rq.rqdata.runtaskentries:
730 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
731 pn = self.recipecaches[mc].pkg_fn[taskfn]
732 pn = self.add_mc_prefix(mc, pn)
733 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
734 if pn not in depend_tree["pn"]:
735 depend_tree["pn"][pn] = {}
736 depend_tree["pn"][pn]["filename"] = taskfn
737 depend_tree["pn"][pn]["version"] = version
738 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
739
740 # if we have extra caches, list all attributes they bring in
741 extra_info = []
742 for cache_class in self.caches_array:
743 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
744 cachefields = getattr(cache_class, 'cachefields', [])
745 extra_info = extra_info + cachefields
746
747 # for all attributes stored, add them to the dependency tree
748 for ei in extra_info:
749 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
750
751
752 dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(tid))
753 if not dotname in depend_tree["tdepends"]:
754 depend_tree["tdepends"][dotname] = []
755 for dep in rq.rqdata.runtaskentries[tid].depends:
756 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
757 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
758 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
759 if taskfn not in seen_fns:
760 seen_fns.append(taskfn)
761 packages = []
762
763 depend_tree["depends"][pn] = []
764 for dep in taskdata[mc].depids[taskfn]:
765 depend_tree["depends"][pn].append(dep)
766
767 depend_tree["rdepends-pn"][pn] = []
768 for rdep in taskdata[mc].rdepids[taskfn]:
769 depend_tree["rdepends-pn"][pn].append(rdep)
770
771 rdepends = self.recipecaches[mc].rundeps[taskfn]
772 for package in rdepends:
773 depend_tree["rdepends-pkg"][package] = []
774 for rdepend in rdepends[package]:
775 depend_tree["rdepends-pkg"][package].append(rdepend)
776 packages.append(package)
777
778 rrecs = self.recipecaches[mc].runrecs[taskfn]
779 for package in rrecs:
780 depend_tree["rrecs-pkg"][package] = []
781 for rdepend in rrecs[package]:
782 depend_tree["rrecs-pkg"][package].append(rdepend)
783 if not package in packages:
784 packages.append(package)
785
786 for package in packages:
787 if package not in depend_tree["packages"]:
788 depend_tree["packages"][package] = {}
789 depend_tree["packages"][package]["pn"] = pn
790 depend_tree["packages"][package]["filename"] = taskfn
791 depend_tree["packages"][package]["version"] = version
792
793 return depend_tree
794
795 ######## WARNING : this function requires cache_extra to be enabled ########
796 def generatePkgDepTreeData(self, pkgs_to_build, task):
797 """
798 Create a dependency tree of pkgs_to_build, returning the data.
799 """
800 if not task.startswith("do_"):
801 task = "do_%s" % task
802
803 _, taskdata = self.prepareTreeData(pkgs_to_build, task)
804
805 seen_fns = []
806 depend_tree = {}
807 depend_tree["depends"] = {}
808 depend_tree["pn"] = {}
809 depend_tree["rdepends-pn"] = {}
810 depend_tree["rdepends-pkg"] = {}
811 depend_tree["rrecs-pkg"] = {}
812
813 # if we have extra caches, list all attributes they bring in
814 extra_info = []
815 for cache_class in self.caches_array:
816 if type(cache_class) is type and issubclass(cache_class, bb.cache.RecipeInfoCommon) and hasattr(cache_class, 'cachefields'):
817 cachefields = getattr(cache_class, 'cachefields', [])
818 extra_info = extra_info + cachefields
819
820 tids = []
821 for mc in taskdata:
822 for tid in taskdata[mc].taskentries:
823 tids.append(tid)
824
825 for tid in tids:
826 (mc, fn, taskname, taskfn) = bb.runqueue.split_tid_mcfn(tid)
827
828 pn = self.recipecaches[mc].pkg_fn[taskfn]
829 pn = self.add_mc_prefix(mc, pn)
830
831 if pn not in depend_tree["pn"]:
832 depend_tree["pn"][pn] = {}
833 depend_tree["pn"][pn]["filename"] = taskfn
834 version = "%s:%s-%s" % self.recipecaches[mc].pkg_pepvpr[taskfn]
835 depend_tree["pn"][pn]["version"] = version
836 rdepends = self.recipecaches[mc].rundeps[taskfn]
837 rrecs = self.recipecaches[mc].runrecs[taskfn]
838 depend_tree["pn"][pn]["inherits"] = self.recipecaches[mc].inherits.get(taskfn, None)
839
840 # for all extra attributes stored, add them to the dependency tree
841 for ei in extra_info:
842 depend_tree["pn"][pn][ei] = vars(self.recipecaches[mc])[ei][taskfn]
843
844 if taskfn not in seen_fns:
845 seen_fns.append(taskfn)
846
847 depend_tree["depends"][pn] = []
848 for dep in taskdata[mc].depids[taskfn]:
849 pn_provider = ""
850 if dep in taskdata[mc].build_targets and taskdata[mc].build_targets[dep]:
851 fn_provider = taskdata[mc].build_targets[dep][0]
852 pn_provider = self.recipecaches[mc].pkg_fn[fn_provider]
853 else:
854 pn_provider = dep
855 pn_provider = self.add_mc_prefix(mc, pn_provider)
856 depend_tree["depends"][pn].append(pn_provider)
857
858 depend_tree["rdepends-pn"][pn] = []
859 for rdep in taskdata[mc].rdepids[taskfn]:
860 pn_rprovider = ""
861 if rdep in taskdata[mc].run_targets and taskdata[mc].run_targets[rdep]:
862 fn_rprovider = taskdata[mc].run_targets[rdep][0]
863 pn_rprovider = self.recipecaches[mc].pkg_fn[fn_rprovider]
864 else:
865 pn_rprovider = rdep
866 pn_rprovider = self.add_mc_prefix(mc, pn_rprovider)
867 depend_tree["rdepends-pn"][pn].append(pn_rprovider)
868
869 depend_tree["rdepends-pkg"].update(rdepends)
870 depend_tree["rrecs-pkg"].update(rrecs)
871
872 return depend_tree
873
874 def generateDepTreeEvent(self, pkgs_to_build, task):
875 """
876 Create a task dependency graph of pkgs_to_build.
877 Generate an event with the result
878 """
879 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
880 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.data)
881
882 def generateDotGraphFiles(self, pkgs_to_build, task):
883 """
884 Create a task dependency graph of pkgs_to_build.
885 Save the result to a set of .dot files.
886 """
887
888 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
889
890 with open('pn-buildlist', 'w') as f:
891 for pn in depgraph["pn"]:
892 f.write(pn + "\n")
893 logger.info("PN build list saved to 'pn-buildlist'")
894
895 # Remove old format output files to ensure no confusion with stale data
896 try:
897 os.unlink('pn-depends.dot')
898 except FileNotFoundError:
899 pass
900 try:
901 os.unlink('package-depends.dot')
902 except FileNotFoundError:
903 pass
904
905 with open('task-depends.dot', 'w') as f:
906 f.write("digraph depends {\n")
907 for task in sorted(depgraph["tdepends"]):
908 (pn, taskname) = task.rsplit(".", 1)
909 fn = depgraph["pn"][pn]["filename"]
910 version = depgraph["pn"][pn]["version"]
911 f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn))
912 for dep in sorted(depgraph["tdepends"][task]):
913 f.write('"%s" -> "%s"\n' % (task, dep))
914 f.write("}\n")
915 logger.info("Task dependencies saved to 'task-depends.dot'")
916
917 with open('recipe-depends.dot', 'w') as f:
918 f.write("digraph depends {\n")
919 pndeps = {}
920 for task in sorted(depgraph["tdepends"]):
921 (pn, taskname) = task.rsplit(".", 1)
922 if pn not in pndeps:
923 pndeps[pn] = set()
924 for dep in sorted(depgraph["tdepends"][task]):
925 (deppn, deptaskname) = dep.rsplit(".", 1)
926 pndeps[pn].add(deppn)
927 for pn in sorted(pndeps):
928 fn = depgraph["pn"][pn]["filename"]
929 version = depgraph["pn"][pn]["version"]
930 f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn))
931 for dep in sorted(pndeps[pn]):
932 if dep == pn:
933 continue
934 f.write('"%s" -> "%s"\n' % (pn, dep))
935 f.write("}\n")
936 logger.info("Flattened recipe dependencies saved to 'recipe-depends.dot'")
937
938 def show_appends_with_no_recipes(self):
939 # Determine which bbappends haven't been applied
940
941 # First get list of recipes, including skipped
942 recipefns = list(self.recipecaches[''].pkg_fn.keys())
943 recipefns.extend(self.skiplist.keys())
944
945 # Work out list of bbappends that have been applied
946 applied_appends = []
947 for fn in recipefns:
948 applied_appends.extend(self.collection.get_file_appends(fn))
949
950 appends_without_recipes = []
951 for _, appendfn in self.collection.bbappends:
952 if not appendfn in applied_appends:
953 appends_without_recipes.append(appendfn)
954
955 if appends_without_recipes:
956 msg = 'No recipes available for:\n %s' % '\n '.join(appends_without_recipes)
957 warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
958 False) or "no"
959 if warn_only.lower() in ("1", "yes", "true"):
960 bb.warn(msg)
961 else:
962 bb.fatal(msg)
963
964 def handlePrefProviders(self):
965
966 for mc in self.multiconfigs:
967 localdata = data.createCopy(self.databuilder.mcdata[mc])
968 bb.data.expandKeys(localdata)
969
970 # Handle PREFERRED_PROVIDERS
971 for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split():
972 try:
973 (providee, provider) = p.split(':')
974 except:
975 providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
976 continue
977 if providee in self.recipecaches[mc].preferred and self.recipecaches[mc].preferred[providee] != provider:
978 providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.recipecaches[mc].preferred[providee])
979 self.recipecaches[mc].preferred[providee] = provider
980
981 def findConfigFilePath(self, configfile):
982 """
983 Find the location on disk of configfile and if it exists and was parsed by BitBake
984 emit the ConfigFilePathFound event with the path to the file.
985 """
986 path = bb.cookerdata.findConfigFile(configfile, self.data)
987 if not path:
988 return
989
990 # Generate a list of parsed configuration files by searching the files
991 # listed in the __depends and __base_depends variables with a .conf suffix.
992 conffiles = []
993 dep_files = self.data.getVar('__base_depends', False) or []
994 dep_files = dep_files + (self.data.getVar('__depends', False) or [])
995
996 for f in dep_files:
997 if f[0].endswith(".conf"):
998 conffiles.append(f[0])
999
1000 _, conf, conffile = path.rpartition("conf/")
1001 match = os.path.join(conf, conffile)
1002 # Try and find matches for conf/conffilename.conf as we don't always
1003 # have the full path to the file.
1004 for cfg in conffiles:
1005 if cfg.endswith(match):
1006 bb.event.fire(bb.event.ConfigFilePathFound(path),
1007 self.data)
1008 break
1009
1010 def findFilesMatchingInDir(self, filepattern, directory):
1011 """
1012 Searches for files containing the substring 'filepattern' which are children of
1013 'directory' in each BBPATH. i.e. to find all rootfs package classes available
1014 to BitBake one could call findFilesMatchingInDir(self, 'rootfs_', 'classes')
1015 or to find all machine configuration files one could call:
1016 findFilesMatchingInDir(self, '.conf', 'conf/machine')
1017 """
1018
1019 matches = []
1020 bbpaths = self.data.getVar('BBPATH').split(':')
1021 for path in bbpaths:
1022 dirpath = os.path.join(path, directory)
1023 if os.path.exists(dirpath):
1024 for root, dirs, files in os.walk(dirpath):
1025 for f in files:
1026 if filepattern in f:
1027 matches.append(f)
1028
1029 if matches:
1030 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1031
1032 def findProviders(self, mc=''):
1033 return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1034
1035 def findBestProvider(self, pn, mc=''):
1036 if pn in self.recipecaches[mc].providers:
1037 filenames = self.recipecaches[mc].providers[pn]
1038 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc])
1039 filename = eligible[0]
1040 return None, None, None, filename
1041 elif pn in self.recipecaches[mc].pkg_pn:
1042 return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1043 else:
1044 return None, None, None, None
1045
1046 def findConfigFiles(self, varname):
1047 """
1048 Find config files which are appropriate values for varname.
1049 i.e. MACHINE, DISTRO
1050 """
1051 possible = []
1052 var = varname.lower()
1053
1054 data = self.data
1055 # iterate configs
1056 bbpaths = data.getVar('BBPATH').split(':')
1057 for path in bbpaths:
1058 confpath = os.path.join(path, "conf", var)
1059 if os.path.exists(confpath):
1060 for root, dirs, files in os.walk(confpath):
1061 # get all child files, these are appropriate values
1062 for f in files:
1063 val, sep, end = f.rpartition('.')
1064 if end == 'conf':
1065 possible.append(val)
1066
1067 if possible:
1068 bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.data)
1069
1070 def findInheritsClass(self, klass):
1071 """
1072 Find all recipes which inherit the specified class
1073 """
1074 pkg_list = []
1075
1076 for pfn in self.recipecaches[''].pkg_fn:
1077 inherits = self.recipecaches[''].inherits.get(pfn, None)
1078 if inherits and klass in inherits:
1079 pkg_list.append(self.recipecaches[''].pkg_fn[pfn])
1080
1081 return pkg_list
1082
1083 def generateTargetsTree(self, klass=None, pkgs=None):
1084 """
1085 Generate a dependency tree of buildable targets
1086 Generate an event with the result
1087 """
1088 # if the caller hasn't specified a pkgs list default to universe
1089 if not pkgs:
1090 pkgs = ['universe']
1091 # if inherited_class passed ensure all recipes which inherit the
1092 # specified class are included in pkgs
1093 if klass:
1094 extra_pkgs = self.findInheritsClass(klass)
1095 pkgs = pkgs + extra_pkgs
1096
1097 # generate a dependency tree for all our packages
1098 tree = self.generatePkgDepTreeData(pkgs, 'build')
1099 bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.data)
1100
1101 def interactiveMode( self ):
1102 """Drop off into a shell"""
1103 try:
1104 from bb import shell
1105 except ImportError:
1106 parselog.exception("Interactive mode not available")
1107 sys.exit(1)
1108 else:
1109 shell.start( self )
1110
1111
    def handleCollections(self, collections):
        """
        Process the layer collection list ('collections', a space-separated
        list of layer names): validate each layer's BBFILE_PRIORITY,
        LAYERDEPENDS and LAYERRECOMMENDS settings, compute effective layer
        priorities (deriving unset ones from the dependency graph), and
        populate self.bbfile_config_priorities with
        (collection, pattern, compiled_regex, priority) tuples.

        Raises CollectionError if any layer configuration is invalid.
        """
        errors = False
        self.bbfile_config_priorities = []
        if collections:
            collection_priorities = {}
            collection_depends = {}
            collection_list = collections.split()
            # Lowest explicit priority seen; used as the floor when deriving
            # priorities for layers that didn't set one.
            min_prio = 0
            for c in collection_list:
                bb.debug(1,'Processing %s in collection list' % (c))

                # Get collection priority if defined explicitly
                priority = self.data.getVar("BBFILE_PRIORITY_%s" % c)
                if priority:
                    try:
                        prio = int(priority)
                    except ValueError:
                        # NOTE(review): on conversion failure 'prio' keeps the
                        # value from a previous iteration (or is unbound on the
                        # first one) yet is still used below — confirm intended.
                        parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
                        errors = True
                    if min_prio == 0 or prio < min_prio:
                        min_prio = prio
                    collection_priorities[c] = prio
                else:
                    # No explicit priority; derived from dependencies below
                    collection_priorities[c] = None

                # Check dependencies and store information for priority calculation
                deps = self.data.getVar("LAYERDEPENDS_%s" % c)
                if deps:
                    try:
                        depDict = bb.utils.explode_dep_versions2(deps)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                    for dep, oplist in list(depDict.items()):
                        if dep in collection_list:
                            # Dependency is enabled; verify any version constraint
                            for opstr in oplist:
                                layerver = self.data.getVar("LAYERVERSION_%s" % dep)
                                (op, depver) = opstr.split()
                                if layerver:
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, depver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                        errors = True
                                else:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                    errors = True
                        else:
                            parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                            errors = True
                    collection_depends[c] = list(depDict.keys())
                else:
                    collection_depends[c] = []

                # Check recommends and store information for priority calculation
                recs = self.data.getVar("LAYERRECOMMENDS_%s" % c)
                if recs:
                    try:
                        recDict = bb.utils.explode_dep_versions2(recs)
                    except bb.utils.VersionStringException as vse:
                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                    for rec, oplist in list(recDict.items()):
                        if rec in collection_list:
                            if oplist:
                                # Only the first version constraint is honoured
                                opstr = oplist[0]
                                layerver = self.data.getVar("LAYERVERSION_%s" % rec)
                                if layerver:
                                    (op, recver) = opstr.split()
                                    try:
                                        res = bb.utils.vercmp_string_op(layerver, recver, op)
                                    except bb.utils.VersionStringException as vse:
                                        bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
                                    if not res:
                                        parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
                                        continue
                                else:
                                    parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
                                    continue
                            # Recommendation satisfied; treat it like a dependency
                            # for the purposes of priority calculation
                            parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec)
                            collection_depends[c].append(rec)
                        else:
                            parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)

            # Recursively work out collection priorities based on dependencies
            def calc_layer_priority(collection):
                # An unset priority becomes (highest dependency priority) + 1
                if not collection_priorities[collection]:
                    max_depprio = min_prio
                    for dep in collection_depends[collection]:
                        calc_layer_priority(dep)
                        depprio = collection_priorities[dep]
                        if depprio > max_depprio:
                            max_depprio = depprio
                    max_depprio += 1
                    parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio)
                    collection_priorities[collection] = max_depprio

            # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
            for c in collection_list:
                calc_layer_priority(c)
                regex = self.data.getVar("BBFILE_PATTERN_%s" % c)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    errors = True
                    continue
                elif regex == "":
                    # An empty pattern is tolerated (the layer matches nothing)
                    parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c)
                    # NOTE(review): this clears the 'errors' flag set by earlier
                    # collections — looks unintentional; confirm before relying on it.
                    errors = False
                    continue
                else:
                    try:
                        cre = re.compile(regex)
                    except re.error:
                        parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                        errors = True
                        continue
                    self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c]))
        if errors:
            # We've already printed the actual error(s)
            raise CollectionError("Errors during parsing layer configuration")
1233
1234 def buildSetVars(self):
1235 """
1236 Setup any variables needed before starting a build
1237 """
1238 t = time.gmtime()
1239 for mc in self.databuilder.mcdata:
1240 ds = self.databuilder.mcdata[mc]
1241 if not ds.getVar("BUILDNAME", False):
1242 ds.setVar("BUILDNAME", "${DATE}${TIME}")
1243 ds.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
1244 ds.setVar("DATE", time.strftime('%Y%m%d', t))
1245 ds.setVar("TIME", time.strftime('%H%M%S', t))
1246
1247 def reset_mtime_caches(self):
1248 """
1249 Reset mtime caches - this is particularly important when memory resident as something
1250 which is cached is not unlikely to have changed since the last invocation (e.g. a
1251 file associated with a recipe might have been modified by the user).
1252 """
1253 build.reset_cache()
1254 bb.fetch._checksum_cache.mtime_cache.clear()
1255 siggen_cache = getattr(bb.parse.siggen, 'checksum_cache', None)
1256 if siggen_cache:
1257 bb.parse.siggen.checksum_cache.mtime_cache.clear()
1258
1259 def matchFiles(self, bf):
1260 """
1261 Find the .bb files which match the expression in 'buildfile'.
1262 """
1263 if bf.startswith("/") or bf.startswith("../"):
1264 bf = os.path.abspath(bf)
1265
1266 self.collection = CookerCollectFiles(self.bbfile_config_priorities)
1267 filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data)
1268 try:
1269 os.stat(bf)
1270 bf = os.path.abspath(bf)
1271 return [bf]
1272 except OSError:
1273 regexp = re.compile(bf)
1274 matches = []
1275 for f in filelist:
1276 if regexp.search(f) and os.path.isfile(f):
1277 matches.append(f)
1278 return matches
1279
1280 def matchFile(self, buildfile):
1281 """
1282 Find the .bb file which matches the expression in 'buildfile'.
1283 Raise an error if multiple files
1284 """
1285 matches = self.matchFiles(buildfile)
1286 if len(matches) != 1:
1287 if matches:
1288 msg = "Unable to match '%s' to a specific recipe file - %s matches found:" % (buildfile, len(matches))
1289 if matches:
1290 for f in matches:
1291 msg += "\n %s" % f
1292 parselog.error(msg)
1293 else:
1294 parselog.error("Unable to find any recipe file matching '%s'" % buildfile)
1295 raise NoSpecificMatch
1296 return matches[0]
1297
    def buildFile(self, buildfile, task):
        """
        Build the recipe file matching the expression 'buildfile', running
        'task' (defaulted by buildFileInternal when None). Dependencies are
        deliberately not processed - see buildFileInternal().
        """
        bb.event.fire(bb.event.BuildInit(), self.data)

        # Too many people use -b because they think it's how you normally
        # specify a target to be built, so show a warning
        bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.")

        self.buildFileInternal(buildfile, task)
1309
    def buildFileInternal(self, buildfile, task, fireevents=True, quietlog=False):
        """
        Build the file matching regexp buildfile.

        The recipe is parsed standalone and its dependency information is
        stripped, so only the recipe itself is built. 'fireevents' controls
        whether BuildStarted/BuildCompleted events are fired; 'quietlog'
        temporarily raises the runqueue log level to WARNING for the
        duration of the build.
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd
        if not task.startswith("do_"):
            task = "do_%s" % task

        # Split any multiconfig/class-extension prefix off the target file
        fn, cls, mc = bb.cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()
        self.reset_mtime_caches()

        bb_cache = bb.cache.Cache(self.databuilder, self.data_hash, self.caches_array)

        infos = bb_cache.parse(fn, self.collection.get_file_appends(fn))
        infos = dict(infos)

        # Look the parsed info back up under the virtual filename
        fn = bb.cache.realfn2virtual(fn, cls, mc)
        try:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.recipecaches[mc].add_from_recipeinfo(fn, info_array)

        # Tweak some variables
        item = info_array[0].pn
        self.recipecaches[mc].ignored_dependencies = set()
        self.recipecaches[mc].bbfile_priority[fn] = 1
        self.configuration.limited_deps = True

        # Remove external dependencies
        self.recipecaches[mc].task_deps[fn]['depends'] = {}
        self.recipecaches[mc].deps[fn] = []
        self.recipecaches[mc].rundeps[fn] = defaultdict(list)
        self.recipecaches[mc].runrecs[fn] = defaultdict(list)

        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn)

        # Setup taskdata structure
        taskdata = {}
        taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort)
        taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)

        # Remember the previous log level so it can be restored afterwards
        if quietlog:
            rqloglevel = bb.runqueue.logger.getEffectiveLevel()
            bb.runqueue.logger.setLevel(logging.WARNING)

        buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
        if fireevents:
            bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])

        # Execute the runqueue
        runlist = [[mc, item, task, fn]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)

        # Idle callback that drives the runqueue: returns True while work
        # remains, False once the build is finished or aborted.
        def buildFileIdle(server, rq, abort):

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False

            if not retval:
                # Build finished: fire completion, restore state and logging
                if fireevents:
                    bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
                self.command.finishAsyncCommand(msg)
                # We trashed self.recipecaches above
                self.parsecache_valid = False
                self.configuration.limited_deps = False
                bb.parse.siggen.reset(self.data)
                if quietlog:
                    bb.runqueue.logger.setLevel(rqloglevel)
                return False
            if retval is True:
                return True
            return retval

        self.configuration.server_register_idlecallback(buildFileIdle, rq)
1422
    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified, running 'task' (defaulting to
        self.configuration.cmd when None). The actual execution happens
        asynchronously via the registered idle callback.
        """

        # Idle callback that drives the runqueue: returns True while work
        # remains, False once the build is finished or aborted.
        def buildTargetsIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                failures += len(exc.args)
                retval = False
            except SystemExit as exc:
                self.command.finishAsyncCommand(str(exc))
                return False

            if not retval:
                # Build done: always finish the command, even if firing the
                # completion events raises.
                try:
                    for mc in self.multiconfigs:
                        bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
                finally:
                    self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        self.reset_mtime_caches()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets]

        bb.event.fire(bb.event.BuildInit(packages), self.data)

        taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort)

        # NOTE: 'buildname' is captured by the closure above; it is bound here,
        # before the idle callback can ever run.
        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in runlist:
            if target[0]:
                ntargets.append("multiconfig:%s:%s:%s" % (target[0], target[1], target[2]))
            ntargets.append("%s:%s" % (target[1], target[2]))

        for mc in self.multiconfigs:
            bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
        # 'universe' intentionally builds everything, so multiple providers
        # are expected; downgrade the related runqueue message to a warning
        if 'universe' in targets:
            rq.rqdata.warn_multi_bb = True

        self.configuration.server_register_idlecallback(buildTargetsIdle, rq)
1494
1495 def getAllKeysWithFlags(self, flaglist):
1496 dump = {}
1497 for k in self.data.keys():
1498 try:
1499 expand = True
1500 flags = self.data.getVarFlags(k)
1501 if flags and "func" in flags and "python" in flags:
1502 expand = False
1503 v = self.data.getVar(k, expand)
1504 if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart):
1505 dump[k] = {
1506 'v' : str(v) ,
1507 'history' : self.data.varhistory.variable(k),
1508 }
1509 for d in flaglist:
1510 if flags and d in flags:
1511 dump[k][d] = flags[d]
1512 else:
1513 dump[k][d] = None
1514 except Exception as e:
1515 print(e)
1516 return dump
1517
1518
    def updateCacheSync(self):
        """
        Synchronous part of updateCache(): drop cached parse data for files
        that inotify reported as modified, then re-parse the base
        configuration if it has been invalidated. No-op during a build.
        """
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
            if p in bb.parse.BBHandler.cached_statements:
                del bb.parse.BBHandler.cached_statements[p]
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.handlePRServ()
1534
    # This is called for all async commands when self.state != running
    def updateCache(self):
        """
        Drive the (re)parse of all recipe files, one step per call.

        Returns True while parsing is still in progress and None once the
        cooker has reached the running state; raises bb.BBHandledException
        on shutdown or on parse errors.
        """
        if self.state == state.running:
            return

        # Shutting down: stop the parser (without cleanup) and bail out
        if self.state in (state.shutdown, state.forceshutdown, state.error):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        # First call with a stale parse cache: set up the parser and collect
        # the recipe files to be parsed
        if self.state != state.parsing and not self.parsecache_valid:
            bb.parse.siggen.reset(self.data)
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                for mc in self.multiconfigs:
                    bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc])

            # Seed per-multiconfig ignored dependencies from ASSUME_PROVIDED
            for mc in self.multiconfigs:
                ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or ""
                self.recipecaches[mc].ignored_dependencies = set(ignore.split())

                for dep in self.configuration.extra_assume_provided:
                    self.recipecaches[mc].ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.bbfile_config_priorities)
            (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data)

            # Add inotify watches for directories searched for bb/bbappend files
            for dirent in searchdirs:
                self.add_filewatch([[dirent]], dirs=True)

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

        self.state = state.parsing

        # Parse one batch of files; parse_next() returns falsy when finished
        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            for mc in self.multiconfigs:
                self.recipecaches[mc].bbfile_priority = self.collection.collection_priorities(self.recipecaches[mc].pkg_fn, self.data)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            for mc in self.multiconfigs:
                event = bb.event.ReachableStamps(self.recipecaches[mc].stamp)
                bb.event.fire(event, self.databuilder.mcdata[mc])
            return None

        return True
1592
    def checkPackages(self, pkgs_to_build, task=None):
        """
        Sanity-check and expand the requested target list.

        Expands the pseudo-targets 'world' and 'universe' into concrete
        (possibly multiconfig-prefixed) targets and warns about targets
        listed in ASSUME_PROVIDED. Returns a new list; the input list is
        not modified. Raises NothingToBuild when no targets were given.
        """

        # Return a copy, don't modify the original
        pkgs_to_build = pkgs_to_build[:]

        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
        for pkg in pkgs_to_build:
            if pkg in ignore:
                parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)

        # Replace 'world' with every world target of every multiconfig
        if 'world' in pkgs_to_build:
            pkgs_to_build.remove('world')
            for mc in self.multiconfigs:
                bb.providers.buildWorldTargetList(self.recipecaches[mc], task)
                for t in self.recipecaches[mc].world_target:
                    if mc:
                        t = "multiconfig:" + mc + ":" + t
                    pkgs_to_build.append(t)

        # Replace 'universe' with every universe target of every multiconfig
        if 'universe' in pkgs_to_build:
            parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for mc in self.multiconfigs:
                for t in self.recipecaches[mc].universe_target:
                    if task:
                        # Skip targets whose providers don't implement the task
                        foundtask = False
                        for provider_fn in self.recipecaches[mc].providers[t]:
                            if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']:
                                foundtask = True
                                break
                        if not foundtask:
                            bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task))
                            continue
                    if mc:
                        t = "multiconfig:" + mc + ":" + t
                    pkgs_to_build.append(t)

        return pkgs_to_build
1635
    def pre_serve(self):
        """Called in the server process just before it starts serving."""
        # We now are in our own process so we can call this here.
        # PRServ exits if its parent process exits
        self.handlePRServ()
        return
1641
    def post_serve(self):
        """Called once serving has finished: stop the PR server and notify clients."""
        prserv.serv.auto_shutdown()
        bb.event.fire(CookerExit(), self.data)
1645
1646
1647 def shutdown(self, force = False):
1648 if force:
1649 self.state = state.forceshutdown
1650 else:
1651 self.state = state.shutdown
1652
1653 if self.parser:
1654 self.parser.shutdown(clean=not force, force=force)
1655
    def finishcommand(self):
        # A command has completed; return the cooker to its idle state
        self.state = state.initial
1658
    def reset(self):
        # Rebuild the configuration data from scratch
        self.initConfigurationData()
1661
    def clientComplete(self):
        """Called when the client is done using the server"""
        self.finishcommand()
        # Drop per-client state and restore the pristine configuration data
        self.extraconfigdata = {}
        self.command.reset()
        self.databuilder.reset()
        self.data = self.databuilder.data
1669
1670
class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        super().__init__()
1678
1679
class CookerCollectFiles(object):
    """
    Collect the set of .bb and .bbappend files to parse, honouring BBFILES,
    BBMASK and the per-layer BBFILE_PATTERN/BBFILE_PRIORITY configuration.
    """
    def __init__(self, priorities):
        # (recipe basename, bbappend path) pairs recorded by collect_bbfiles()
        self.bbappends = []
        # Priorities is a list of tuples, with the second element as the pattern.
        # We need to sort the list with the longest pattern first, and so on to
        # the shortest. This allows nested layers to be properly evaluated.
        self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)

    def calc_bbfile_priority( self, filename, matched = None ):
        """
        Return the priority of the layer whose BBFILE_PATTERN matches
        filename, or 0 when no pattern matches. When 'matched' is a set,
        each regex that matches is recorded in it so callers can warn
        about patterns which never match anything.
        """
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex.match(filename):
                if matched is not None:
                    # set.add is idempotent; no membership pre-check needed
                    matched.add(regex)
                return pri
        return 0

    def get_bbfiles(self):
        """Get list of default .bb files by reading out the current directory"""
        path = os.getcwd()
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            if f.endswith(".bb"):
                bbfiles.append(os.path.abspath(os.path.join(path, f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        found = []
        for dir, dirs, files in os.walk(path):
            # Don't descend into version control metadata directories
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            # Bug fix: str.endswith() accepts a str or a *tuple* of suffixes;
            # the previous list argument raised TypeError at runtime.
            found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]

        return found

    def collect_bbfiles(self, config, eventdata):
        """
        Collect all available .bb build files.

        Returns a (bbfiles, masked, searchdirs) tuple: the recipe files to
        parse, the number of files skipped due to BBMASK, and the list of
        directories searched (for the caller to add inotify watches on).
        """
        masked = 0

        collectlog.debug(1, "collecting .bb files")

        files = (config.getVar( "BBFILES") or "").split()
        config.setVar("BBFILES", " ".join(files))

        # Sort files by priority
        files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem) )

        # Fall back to .bb files in the current directory when BBFILES is empty
        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), eventdata)

        # We need to track where we look so that we can add inotify watches. There
        # is no nice way to do this, this is horrid. We intercept the os.listdir()
        # (or os.scandir() for python 3.6+) calls while we run glob().
        origlistdir = os.listdir
        if hasattr(os, 'scandir'):
            origscandir = os.scandir
        searchdirs = []

        def ourlistdir(d):
            searchdirs.append(d)
            return origlistdir(d)

        def ourscandir(d):
            searchdirs.append(d)
            return origscandir(d)

        os.listdir = ourlistdir
        if hasattr(os, 'scandir'):
            os.scandir = ourscandir
        try:
            # Can't use set here as order is important
            newfiles = []
            for f in files:
                if os.path.isdir(f):
                    dirfiles = self.find_bbfiles(f)
                    for g in dirfiles:
                        if g not in newfiles:
                            newfiles.append(g)
                else:
                    globbed = glob.glob(f)
                    if not globbed and os.path.exists(f):
                        globbed = [f]
                    # glob gives files in order on disk. Sort to be deterministic.
                    for g in sorted(globbed):
                        if g not in newfiles:
                            newfiles.append(g)
        finally:
            # Always restore the real os.listdir/os.scandir
            os.listdir = origlistdir
            if hasattr(os, 'scandir'):
                os.scandir = origscandir

        bbmask = config.getVar('BBMASK')

        if bbmask:
            # First validate the individual regular expressions and ignore any
            # that do not compile
            bbmasks = []
            for mask in bbmask.split():
                # When constructing an older style single regex, it's possible for BBMASK
                # to end up beginning with '|', which matches and masks _everything_.
                if mask.startswith("|"):
                    collectlog.warning("BBMASK contains regular expression beginning with '|', fixing: %s" % mask)
                    mask = mask[1:]
                try:
                    re.compile(mask)
                    bbmasks.append(mask)
                except sre_constants.error:
                    collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)

            # Then validate the combined regular expressions. This should never
            # fail, but better safe than sorry...
            bbmask = "|".join(bbmasks)
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
                bbmask = None

        # Split the collected files into recipes and appends, applying BBMASK
        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
        bbfile_seen = dict()
        self.overlayed = defaultdict(list)
        for f in reversed(bbfiles):
            base = os.path.basename(f)
            if base not in bbfile_seen:
                bbfile_seen[base] = f
            else:
                topfile = bbfile_seen[base]
                self.overlayed[topfile].append(f)

        return (bbfiles, masked, searchdirs)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        """
        filelist = []
        f = os.path.basename(fn)
        for b in self.bbappends:
            (bbappend, filename) = b
            # A '%' in the append name acts as a wildcard suffix match
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):
        """
        Calculate the layer priority of each recipe filename in pkgfns,
        warning about BBFILE_PATTERN values that matched no file at all
        (unless suppressed via BBFILE_PATTERN_IGNORE_EMPTY_<collection>).
        """
        priorities = {}

        # Calculate priorities for each file
        matched = set()
        for p in pkgfns:
            realfn, cls, mc = bb.cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
            if regex not in matched:
                unmatched.add(regex)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        def find_bbappend_match(regex):
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    # If the bbappend is matched by already "matched set", return False
                    for matched_regex in matched:
                        if matched_regex.match(append):
                            return False
                    return True
            return False

        for unmatch in unmatched.copy():
            if find_bbappend_match(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1':
                    collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities
1886 return priorities
1887
class ParsingFailure(Exception):
    """
    Wraps a BaseException raised while parsing a recipe, together with the
    path of the recipe that was being parsed when it occurred, so the
    failure can be shipped back to the parent process and reported.
    """

    def __init__(self, realexception, recipe):
        super().__init__(realexception, recipe)
        self.realexception = realexception
        self.recipe = recipe
1893
class Feeder(multiprocessing.Process):
    """
    Process that drains a plain list of parse jobs into the parsers'
    shared job queue, stopping early when told to cancel via the quit
    queue.
    """

    def __init__(self, jobs, to_parsers, quit):
        multiprocessing.Process.__init__(self)
        self.jobs = jobs
        self.to_parsers = to_parsers
        self.quit = quit

    def run(self):
        while True:
            # A 'cancel' message aborts feeding immediately; any other quit
            # message is ignored and feeding continues until the list empties.
            try:
                if self.quit.get_nowait() == 'cancel':
                    self.to_parsers.cancel_join_thread()
                    break
            except queue.Empty:
                pass

            # Take the next job off the end of the list; exhaustion means
            # everything has been handed over.
            try:
                job = self.jobs.pop()
            except IndexError:
                break

            # Offer the job to the parsers; if their queue stays full, put
            # it back at the front so it is retried last.
            try:
                self.to_parsers.put(job, timeout=0.5)
            except queue.Full:
                self.jobs.insert(0, job)
1922
class Parser(multiprocessing.Process):
    """
    Worker process that parses recipe files.

    Jobs arrive as (filename, appends) tuples on the 'jobs' queue; each is
    parsed via the shared bb_cache (attached as a class attribute by the
    init hook run in the child) and the result is pushed onto the 'results'
    queue as a (parsed, data-or-exception) tuple.  A message on the 'quit'
    queue stops the worker.
    """

    def __init__(self, jobs, results, quit, init, profile):
        self.jobs = jobs
        self.results = results
        self.quit = quit
        self.init = init
        multiprocessing.Process.__init__(self)
        # Snapshot the parse context and event handlers in the parent so
        # every parse in the child can be reset to this pristine state.
        self.context = bb.utils.get_context().copy()
        self.handlers = bb.event.get_class_handlers().copy()
        self.profile = profile

    def run(self):
        """Process entry point; optionally wraps realrun() in a profiler."""
        if not self.profile:
            self.realrun()
            return

        try:
            import cProfile as profile
        except ImportError:
            # Was a bare 'except:', which would also swallow SystemExit /
            # KeyboardInterrupt; only a missing cProfile (cut-down Python
            # builds) should trigger the pure-Python fallback.
            import profile
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, self.realrun)
        finally:
            # Dump statistics even when parsing fails so partial profiles
            # remain available for inspection.
            logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
            prof.dump_stats(logfile)

    def realrun(self):
        """Main loop: pull jobs, parse them, push results until told to quit."""
        if self.init:
            self.init()

        # Results that could not be delivered because the results queue was
        # full; retried before taking any new job.
        pending = []
        while True:
            try:
                self.quit.get_nowait()
            except queue.Empty:
                pass
            else:
                # Any quit message ends the worker; don't block on the
                # results queue during interpreter shutdown.
                self.results.cancel_join_thread()
                break

            if pending:
                result = pending.pop()
            else:
                try:
                    job = self.jobs.get(timeout=0.25)
                except queue.Empty:
                    continue

                if job is None:
                    # Sentinel sent by the clean-shutdown path.
                    break
                result = self.parse(*job)

            try:
                self.results.put(result, timeout=0.25)
            except queue.Full:
                pending.append(result)

    def parse(self, filename, appends):
        """
        Parse one recipe (with its bbappends) and return (True, infos).

        On failure returns (True, exception) instead; the exception is
        annotated with the recipe name and an extracted traceback so the
        parent process can report it usefully.
        """
        try:
            # Record the filename we're parsing into any events generated
            def parse_filter(self, record):
                record.taskpid = bb.event.worker_pid
                record.fn = filename
                return True

            # Reset our environment and handlers to the original settings
            bb.utils.set_context(self.context.copy())
            bb.event.set_class_handlers(self.handlers.copy())
            bb.event.LogHandler.filter = parse_filter

            return True, self.bb_cache.parse(filename, appends)
        except Exception as exc:
            tb = sys.exc_info()[2]
            exc.recipe = filename
            exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
            return True, exc
        # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
        # and for example a worker thread doesn't just exit on its own in response to
        # a SystemExit event for example.
        except BaseException as exc:
            return True, ParsingFailure(exc, filename)
2006
class CookerParser(object):
    """
    Drives parsing of the full recipe list: loads what it can from the
    cache and farms the rest out to a Feeder plus a pool of Parser worker
    processes, feeding results back one at a time via parse_next().
    """
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.data
        self.cfghash = cooker.data_hash
        self.cfgbuilder = cooker.databuilder

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgbuilder, self.cfghash, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        # Split the file list into entries satisfiable from the cache and
        # entries that need a real parse.
        for filename in self.filelist:
            appends = self.cooker.collection.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename, appends):
                self.willparse.append((filename, appends))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        # Fire a ParseProgress event roughly every 1% of the work.
        self.progress_chunk = int(max(self.toparse / 100, 1))

        # Worker count: user override, else one per CPU, but never more
        # workers than there are files to parse.
        self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                 multiprocessing.cpu_count()), len(self.willparse))

        self.start()
        self.haveshutdown = False
2045
    def start(self):
        """
        Begin parsing: chain the cached loads with results coming back from
        a freshly started Feeder and pool of Parser worker processes.
        """
        self.results = self.load_cached()
        self.processes = []
        if self.toparse:
            bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
            # Runs inside each child parser process before it starts parsing:
            # attaches the shared cache and registers exit-time cache saves.
            def init():
                Parser.bb_cache = self.bb_cache
                bb.utils.set_process_name(multiprocessing.current_process().name)
                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)

            self.feeder_quit = multiprocessing.Queue(maxsize=1)
            self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
            self.jobs = multiprocessing.Queue(maxsize=self.num_processes)
            self.result_queue = multiprocessing.Queue()
            self.feeder = Feeder(self.willparse, self.jobs, self.feeder_quit)
            self.feeder.start()
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            # Consumers see cached results first, then live parse results.
            self.results = itertools.chain(self.results, self.parse_generator())
2070
    def shutdown(self, clean=True, force=False):
        """
        Stop the feeder and parser processes.

        With clean=True a ParseCompleted event is fired and workers drain
        normally; otherwise the queues are cancelled for a fast abort.
        force=True terminates workers that do not exit promptly.  Safe to
        call more than once (subsequent calls are no-ops).
        """
        if not self.toparse:
            return
        if self.haveshutdown:
            return
        self.haveshutdown = True

        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            # One quit sentinel per parser process.
            for process in self.processes:
                self.parser_quit.put(None)
        else:
            self.feeder_quit.put('cancel')

            self.parser_quit.cancel_join_thread()
            for process in self.processes:
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
        self.feeder.join()

        # Sync the recipe cache to disk in the background; the Finalize
        # ensures we wait for it before the interpreter exits.
        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge()
        bb.fetch.fetcher_parse_done()
        if self.cooker.configuration.profile:
            # Merge the per-worker profile logs into one processed report.
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))
2120
2121 def load_cached(self):
2122 for filename, appends in self.fromcache:
2123 cached, infos = self.bb_cache.load(filename, appends)
2124 yield not cached, infos
2125
2126 def parse_generator(self):
2127 while True:
2128 if self.parsed >= self.toparse:
2129 break
2130
2131 try:
2132 result = self.result_queue.get(timeout=0.25)
2133 except queue.Empty:
2134 pass
2135 else:
2136 value = result[1]
2137 if isinstance(value, BaseException):
2138 raise value
2139 else:
2140 yield result
2141
    def parse_next(self):
        """
        Consume one result from self.results, update the accounting
        counters and feed the parsed info into the recipe caches.

        Returns True while more results may follow; False once parsing is
        complete or an error forced a shutdown.  All parse-time exceptions
        are logged here and converted into a clean (non-raising) stop.
        """
        result = []
        parsed = None
        try:
            parsed, result = next(self.results)
        except StopIteration:
            # Everything has been consumed; normal clean completion.
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            # Exception already reported to the user; just note the recipe.
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                     (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            # Trim BitBake-internal frames from the front of the traceback
            # so the report starts at the recipe's own code.
            bbdir = os.path.dirname(__file__) + os.sep
            etype, value, _ = sys.exc_info()
            tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
            logger.error('ExpansionError during parsing %s', value.recipe,
                         exc_info=(etype, value, tb))
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s' % value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            # Only fire progress events every progress_chunk files (~1%).
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed, self.toparse),
                              self.cfgdata)
        else:
            self.cached += 1

        # Record each virtual recipe's info in the right multiconfig cache,
        # tracking skipped recipes separately.
        for virtualfn, info_array in result:
            if info_array[0].skipped:
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
                                   parsed=parsed, watcher = self.cooker.add_filewatch)
        return True
2206
    def reparse(self, filename):
        """
        Re-parse a single recipe file (plus its bbappends) synchronously and
        refresh the per-multiconfig recipe caches with the result.
        """
        infos = self.bb_cache.parse(filename, self.cooker.collection.get_file_appends(filename))
        for vfn, info_array in infos:
            (fn, cls, mc) = bb.cache.virtualfn2realfn(vfn)
            self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)