blob: 3164171eb2832084ba2f8f9a1357f6f05c33bde1 [file] [log] [blame]
# Script utility functions
#
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import glob
import importlib
import importlib.util
import logging
import os
import random
import shlex
import shutil
import string
import subprocess
import sys
import tempfile
import threading
from importlib import machinery
22
class KeepAliveStreamHandler(logging.StreamHandler):
    """Stream handler that emits periodic "keepalive" log records.

    A background thread watches for log activity; if no record is emitted
    within the keepalive timeout, it injects an INFO-level "Keepalive
    message" record.  Useful for environments (e.g. CI systems) that
    terminate jobs which produce no output for too long.
    """

    def __init__(self, keepalive=True, **kwargs):
        """Set up the handler and start the keepalive thread.

        keepalive: True to use the default timeout of 5000 (passed to
            threading.Condition.wait, so interpreted as seconds), or a
            number to use a custom timeout.  Extra keyword arguments are
            forwarded to logging.StreamHandler.
        """
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000 # default timeout
        # Condition used both as the wait timer and as the wake-up signal;
        # emit() notifies it to reset the timer, close() notifies it to
        # let the thread observe _stop and exit.
        self._timeout = threading.Condition()
        self._stop = False

        # background thread waits on condition, if the condition does not
        # happen emit a keep alive message
        def thread():
            while not self._stop:
                with self._timeout:
                    # wait() returns False on timeout, i.e. no record was
                    # emitted within the keepalive interval
                    if not self._timeout.wait(keepalive):
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                    None, None, "Keepalive message", None, None))

        # daemon thread so a forgotten close() cannot block interpreter exit
        self._thread = threading.Thread(target = thread, daemon = True)
        self._thread.start()

    def close(self):
        """Stop the keepalive thread and close the underlying stream handler."""
        # mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # wait for it to join
        self._thread.join()
        super().close()

    def emit(self, record):
        """Emit a record and reset the keepalive timer."""
        super().emit(record)
        # trigger timer reset
        with self._timeout:
            self._timeout.notify()
57
def logger_create(name, stream=None, keepalive=None):
    """Create a logger with a simple "LEVEL: message" stream handler.

    name: logger name (passed to logging.getLogger)
    stream: stream for the handler (default: the handler's own default)
    keepalive: if not None, use a KeepAliveStreamHandler with this
        timeout instead of a plain StreamHandler
    Returns the configured logger, set to INFO level.
    """
    log = logging.getLogger(name)
    if keepalive is None:
        handler = logging.StreamHandler(stream=stream)
    else:
        handler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log
68
def logger_setup_color(logger, color='auto'):
    """Enable colored output on the logger's BBLogFormatter handlers.

    color: 'always' to force color on, 'auto' to enable it only when the
        handler's stream is a tty; any other value leaves color disabled.
    Only stream handlers whose formatter is a BBLogFormatter are touched.
    """
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if not isinstance(handler, logging.StreamHandler):
            continue
        if not isinstance(handler.formatter, BBLogFormatter):
            continue
        if color == 'always' or (color == 'auto' and handler.stream.isatty()):
            handler.formatter.enable_color()
77
78
def load_plugins(logger, plugins, pluginpath):
    """Load plugin modules from pluginpath into the plugins list.

    Every *.py file in pluginpath (except __init__.py) whose base name is
    not already represented in plugins is imported.  If a loaded module
    defines a plugin_init function, it is called with the plugins list
    before the module is appended.

    logger: logging.Logger used for debug output
    plugins: list of already-loaded plugin modules; extended in place
    pluginpath: directory to scan for plugin files
    """

    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            # module_from_spec()/exec_module() replace spec.loader.load_module(),
            # which was deprecated since Python 3.4 and removed in Python 3.12
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    def plugin_name(filename):
        # module/base name without extension, e.g. '/p/foo.py' -> 'foo'
        return os.path.splitext(os.path.basename(filename))[0]

    known_plugins = [plugin_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
        name = plugin_name(fn)
        if name != '__init__' and name not in known_plugins:
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                plugin.plugin_init(plugins)
            plugins.append(plugin)
99
100
def git_convert_standalone_clone(repodir):
    """If specified directory is a git repository, ensure it's a standalone clone"""
    import bb.process
    gitdir = os.path.join(repodir, '.git')
    if os.path.exists(gitdir):
        alternatesfile = os.path.join(gitdir, 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # An alternates file means the repository was cloned with -s and
            # shares objects with another repository; repack everything into
            # this repository so nothing is shared, then drop the file
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)
111
112def _get_temp_recipe_dir(d):
113 # This is a little bit hacky but we need to find a place where we can put
114 # the recipe so that bitbake can find it. We're going to delete it at the
115 # end so it doesn't really matter where we put it.
116 bbfiles = d.getVar('BBFILES').split()
117 fetchrecipedir = None
118 for pth in bbfiles:
119 if pth.endswith('.bb'):
120 pthdir = os.path.dirname(pth)
121 if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
122 fetchrecipedir = pthdir.replace('*', 'recipetool')
123 if pthdir.endswith('workspace/recipes/*'):
124 # Prefer the workspace
125 break
126 return fetchrecipedir
127
class FetchUrlFailure(Exception):
    """Raised by fetch_url() when the requested URL cannot be fetched."""

    def __init__(self, url):
        # remember which URL failed so the message can report it
        self.url = url

    def __str__(self):
        return "Failed to fetch URL %s" % self.url
133
def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    Arguments:
    tinfoil -- bitbake Tinfoil instance with config data available
    srcuri -- SRC_URI value to fetch
    srcrev -- SRCREV value to write into the temporary recipe
    destdir -- directory to move the unpacked work directory contents into
    logger -- logging object for progress/error reporting
    preserve_tmp -- if True, keep the temporary directory and return it
    mirrors -- if True, leave PREMIRRORS/MIRRORS enabled during the fetch

    Returns a (checksums, tmpdir) tuple: checksums is a dict collected from
    any bb.fetch2.MissingChecksumEvent raised during the fetch, and tmpdir
    is the preserved temporary directory (None unless preserve_tmp is set).

    Raises FetchUrlFailure if the fetch/unpack build fails; calls
    sys.exit(1) if no writeable location for the temporary recipe is found.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            # PN is derived from the generated file name (basename minus .bb)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            # Collect checksums reported missing by the fetcher so the caller
            # can write them into the real recipe afterwards
            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            # Always clean up the temporary recipe; the directory is only
            # removed if nothing else was already in it
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

        # Hand the unpacked contents over to the caller's destination
        bb.utils.mkdirhier(destdir)
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir
243
244
def run_editor(fn, logger=None):
    """Open the specified file(s) in the user's preferred editor.

    fn: a single file name (str) or an iterable of file names
    logger: optional logging object for error reporting; if None, errors
        are written to stderr (previously this path crashed with an
        AttributeError because logger.error() was called unconditionally)

    Returns the editor's exit status (0 on success), or 1 if the editor
    exited with a non-zero status.
    """
    if isinstance(fn, str):
        files = [fn]
    else:
        # copy to a real list so concatenation below works for any iterable
        files = list(fn)

    # VISUAL takes precedence over EDITOR, falling back to vi
    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        message = "Execution of '%s' failed: %s" % (editor, exc)
        if logger:
            logger.error(message)
        else:
            sys.stderr.write(message + '\n')
        return 1
258
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    if not param:
        return False
    if '://' in param:
        return True
    if param.startswith('git@'):
        return True
    # scp-style remote, e.g. user@host:path/repo.git
    return '@' in param and param.endswith('.git')
271
def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    # fetcher/unpack bookkeeping entries that are not real source
    ignored = ('git.indirectionsymlink', 'source-date-epoch')
    return [entry for entry in os.listdir(pth) if entry not in ignored]