blob: 2325ee2747bd03cae635ddc62d4a19f9d2763ee7 [file] [log] [blame]
#
# Sanity check the users setup for common misconfigurations
#

# Host command-line tools that must be present for builds to work; each is
# verified with check_app_exists() in check_sanity_version_change() below.
SANITY_REQUIRED_UTILITIES ?= "patch diffstat git bzip2 tar \
    gzip gawk chrpath wget cpio perl file which"
7
def bblayers_conf_file(d):
    """Return the absolute path of this build's conf/bblayers.conf."""
    topdir = d.getVar('TOPDIR')
    return os.path.join(topdir, 'conf/bblayers.conf')
10
def sanity_conf_read(fn):
    """Read configuration file fn and return its content as a list of lines."""
    with open(fn, 'r') as conf:
        return conf.readlines()
15
def sanity_conf_find_line(pattern, lines):
    """Return (index, line) of the first line matching regex pattern.

    Returns (None, None) when no line matches.
    """
    import re
    for idx, text in enumerate(lines):
        if re.search(pattern, text):
            return (idx, text)
    return (None, None)
21
def sanity_conf_update(fn, lines, version_var_name, new_version):
    """Set version_var_name to integer new_version in lines and rewrite fn.

    Assumes a line starting with version_var_name exists in lines (callers
    only invoke this after locating the version variable).
    """
    index, _ = sanity_conf_find_line(r"^%s" % version_var_name, lines)
    lines[index] = '%s = "%d"\n' % (version_var_name, new_version)
    with open(fn, "w") as conf:
        conf.write(''.join(lines))
27
# Functions added to this variable MUST throw a NotImplementedError exception unless
# they successfully changed the config version in the config file. Exceptions
# are used since exec_func doesn't handle return values.
# Format of each entry: conffile:current-version-var:required-version-var:update-func
# (consumed by sanity_check_conffiles() below).
BBLAYERS_CONF_UPDATE_FUNCS += " \
    conf/bblayers.conf:LCONF_VERSION:LAYER_CONF_VERSION:oecore_update_bblayers \
    conf/local.conf:CONF_VERSION:LOCALCONF_VERSION:oecore_update_localconf \
    conf/site.conf:SCONF_VERSION:SITE_CONF_VERSION:oecore_update_siteconf \
"
36
# Tool suggested to the user for comparing their conf files against the samples
SANITY_DIFF_TOOL ?= "meld"

# Sample file local.conf is compared against in the failure message below
SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample"
python oecore_update_localconf() {
    # Called via BBLAYERS_CONF_UPDATE_FUNCS when CONF_VERSION in the user's
    # local.conf does not match LOCALCONF_VERSION.  local.conf cannot be
    # migrated automatically, so this always raises NotImplementedError
    # (the protocol exec_func expects for "config not updated").
    # Check we are using a valid local.conf
    current_conf = d.getVar('CONF_VERSION')
    conf_version = d.getVar('LOCALCONF_VERSION')

    failmsg = """Your version of local.conf was generated from an older/newer version of
local.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/local.conf ${SANITY_LOCALCONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}
58
# Sample file site.conf is compared against in the failure message below
SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample"
python oecore_update_siteconf() {
    # Called via BBLAYERS_CONF_UPDATE_FUNCS when SCONF_VERSION in the user's
    # site.conf does not match SITE_CONF_VERSION.  site.conf cannot be
    # migrated automatically, so this always raises NotImplementedError
    # (the protocol exec_func expects for "config not updated").
    # If we have a site.conf, check it's valid
    current_sconf = d.getVar('SCONF_VERSION')
    sconf_version = d.getVar('SITE_CONF_VERSION')

    failmsg = """Your version of site.conf was generated from an older version of
site.conf.sample and there have been updates made to this file. Please compare the two
files and merge any changes before continuing.

Matching the version numbers will remove this message.

\"${SANITY_DIFF_TOOL} conf/site.conf ${SANITY_SITECONF_SAMPLE}\"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    raise NotImplementedError(failmsg)
}
78
# Sample file bblayers.conf is compared against in the failure message below
SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample"
python oecore_update_bblayers() {
    # bblayers.conf is out of date, so see if we can resolve that
    #
    # Called via BBLAYERS_CONF_UPDATE_FUNCS when LCONF_VERSION in the user's
    # bblayers.conf differs from LAYER_CONF_VERSION.  Recognised version steps
    # are migrated one increment at a time; anything unrecognised raises
    # NotImplementedError with manual-merge instructions (the protocol
    # exec_func expects).
    #
    # BUGFIX: removed an unreachable 'status.addresult()' statement that sat
    # after an unconditional return; 'status' is not defined in this function
    # and the call would have raised NameError had it ever been reached.

    current_lconf = int(d.getVar('LCONF_VERSION'))
    lconf_version = int(d.getVar('LAYER_CONF_VERSION'))

    failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}).
Please compare your file against bblayers.conf.sample and merge any changes before continuing.
"${SANITY_DIFF_TOOL} conf/bblayers.conf ${SANITY_BBLAYERCONF_SAMPLE}"

is a good way to visualise the changes."""
    failmsg = d.expand(failmsg)

    if not current_lconf:
        raise NotImplementedError(failmsg)

    lines = []

    # Versions older than 4 are too old to migrate automatically
    if current_lconf < 4:
        raise NotImplementedError(failmsg)

    bblayers_fn = bblayers_conf_file(d)
    lines = sanity_conf_read(bblayers_fn)

    if current_lconf == 4 and lconf_version > 4:
        # v4 -> v5: BBPATH must contain ${TOPDIR}
        topdir_var = '$' + '{TOPDIR}'
        index, bbpath_line = sanity_conf_find_line('BBPATH', lines)
        if bbpath_line:
            start = bbpath_line.find('"')
            if start != -1 and (len(bbpath_line) != (start + 1)):
                if bbpath_line[start + 1] == '"':
                    # Empty BBPATH value: insert ${TOPDIR} as the sole entry
                    lines[index] = (bbpath_line[:start + 1] +
                                    topdir_var + bbpath_line[start + 1:])
                else:
                    # Prepend ${TOPDIR} unless it is already present
                    if not topdir_var in bbpath_line:
                        lines[index] = (bbpath_line[:start + 1] +
                                        topdir_var + ':' + bbpath_line[start + 1:])
            else:
                raise NotImplementedError(failmsg)
        else:
            # No BBPATH assignment at all; add one just before BBFILES
            index, bbfiles_line = sanity_conf_find_line('BBFILES', lines)
            if bbfiles_line:
                lines.insert(index, 'BBPATH = "' + topdir_var + '"\n')
            else:
                raise NotImplementedError(failmsg)

        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 5 and lconf_version > 5:
        # Null update, to avoid issues with people switching between poky and other distros
        current_lconf = 6
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    elif current_lconf == 6 and lconf_version > 6:
        # Handle rename of meta-yocto -> meta-poky
        # This marks the start of separate version numbers but code is needed in OE-Core
        # for the migration, one last time.
        layers = d.getVar('BBLAYERS').split()
        layers = [ os.path.basename(path) for path in layers ]
        if 'meta-yocto' in layers:
            found = False
            while True:
                index, meta_yocto_line = sanity_conf_find_line(r'.*meta-yocto[\'"\s\n]', lines)
                if meta_yocto_line:
                    lines[index] = meta_yocto_line.replace('meta-yocto', 'meta-poky')
                    found = True
                else:
                    break
            if not found:
                raise NotImplementedError(failmsg)
            # Switch to the poky-specific version variable from here on
            index, meta_yocto_line = sanity_conf_find_line('LCONF_VERSION.*\n', lines)
            if meta_yocto_line:
                lines[index] = 'POKY_BBLAYERS_CONF_VERSION = "1"\n'
            else:
                raise NotImplementedError(failmsg)
            with open(bblayers_fn, "w") as f:
                f.write(''.join(lines))
            bb.note("Your conf/bblayers.conf has been automatically updated.")
            return
        current_lconf += 1
        sanity_conf_update(bblayers_fn, lines, 'LCONF_VERSION', current_lconf)
        bb.note("Your conf/bblayers.conf has been automatically updated.")
        return

    raise NotImplementedError(failmsg)
}
173
def raise_sanity_error(msg, d, network_error=False):
    # Report a fatal sanity failure.  When SANITY_USE_EVENTS is "1" the
    # failure is delivered as a SanityCheckFailed event so UIs can present
    # it; otherwise bb.fatal() aborts the build immediately.
    if d.getVar("SANITY_USE_EVENTS") == "1":
        try:
            bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d)
        except TypeError:
            # Older bitbake: SanityCheckFailed lacks the network_error argument
            bb.event.fire(bb.event.SanityCheckFailed(msg), d)
        return

    bb.fatal(""" OE-core's config sanity checker detected a potential misconfiguration.
    Either fix the cause of this error or at your own risk disable the checker (see sanity.conf).
    Following is the list of potential problems / advisories:

    %s""" % msg)
187
# Check flags associated with a tuning.
def check_toolchain_tune_args(data, tune, multilib, errs):
    """Return True if any required TUNE_{CC,AS,LD}ARGS flags are missing.

    All three argument sets are always checked so that every problem is
    appended to errs, not just the first one found.
    """
    found_errors = False
    for which in ('CCARGS', 'ASARGS', 'LDARGS'):
        if check_toolchain_args_present(data, tune, multilib, errs, which):
            found_errors = True
    return found_errors
199
def check_toolchain_args_present(data, tune, multilib, tune_errors, which):
    """Check that TUNE_<which> contains every flag the tune's ABI requires.

    Appends a message to tune_errors and returns True when flags are
    missing; returns None when nothing is required, False when all present.
    """
    args_set = (data.getVar("TUNE_%s" % which) or "").split()
    args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune)) or "").split()

    # If no args are listed/required, we are done.
    if not args_wanted:
        return

    args_missing = [arg for arg in args_wanted if arg not in args_set]
    if not args_missing:
        return False

    tune_errors.append("TUNEABI for %s requires '%s' in TUNE_%s (%s)." %
        (tune, ' '.join(args_missing), which, ' '.join(args_set)))
    return True
218
# Check a single tune for validity.
def check_toolchain_tune(data, tune, multilib):
    # Returns an error string describing problems with the tune, or None if OK.
    tune_errors = []
    if not tune:
        return "No tuning found for %s multilib." % multilib
    localdata = bb.data.createCopy(data)
    if multilib != "default":
        # Apply the overrides so we can look at the details.
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib
        localdata.setVar("OVERRIDES", overrides)
    bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib))
    features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split()
    if not features:
        return "Tuning '%s' has no defined features, and cannot be used." % tune
    valid_tunes = localdata.getVarFlags('TUNEVALID') or {}
    conflicts = localdata.getVarFlags('TUNECONFLICTS') or {}
    # [doc] is the documentation for the variable, not a real feature
    if 'doc' in valid_tunes:
        del valid_tunes['doc']
    if 'doc' in conflicts:
        del conflicts['doc']
    # Every enabled feature must be declared in TUNEVALID and must not be
    # listed in TUNECONFLICTS against another enabled feature.
    for feature in features:
        if feature in conflicts:
            for conflict in conflicts[feature].split():
                if conflict in features:
                    tune_errors.append("Feature '%s' conflicts with '%s'." %
                        (feature, conflict))
        if feature in valid_tunes:
            bb.debug(2, "  %s: %s" % (feature, valid_tunes[feature]))
        else:
            tune_errors.append("Feature '%s' is not defined." % feature)
    whitelist = localdata.getVar("TUNEABI_WHITELIST")
    if whitelist:
        # When a whitelist is set, the tune's ABI (TUNEABI, defaulting to the
        # tune name itself) must appear in it, and the required toolchain
        # arguments for that ABI must be present.
        tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune)
        if not tuneabi:
            tuneabi = tune
        if True not in [x in whitelist.split() for x in tuneabi.split()]:
            tune_errors.append("Tuning '%s' (%s) cannot be used with any supported tuning/ABI." %
                (tune, tuneabi))
        else:
            if not check_toolchain_tune_args(localdata, tuneabi, multilib, tune_errors):
                bb.debug(2, "Sanity check: Compiler args OK for %s." % tune)
    if tune_errors:
        return "Tuning '%s' has the following errors:\n" % tune + '\n'.join(tune_errors)
263
def check_toolchain(data):
    """Check the default tune and every multilib tuning; return error text or "".

    Validates DEFAULTTUNE, then each MULTILIB_VARIANTS entry: no duplicate
    multilibs, each present in MULTILIB_GLOBAL_VARIANTS, no tuning reused
    across multilibs or equal to the default tuning.
    """
    tune_error_set = []
    deftune = data.getVar("DEFAULTTUNE")
    tune_errors = check_toolchain_tune(data, deftune, 'default')
    if tune_errors:
        tune_error_set.append(tune_errors)

    multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split()
    global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split()

    if multilibs:
        seen_libs = []
        seen_tunes = []
        for lib in multilibs:
            if lib in seen_libs:
                tune_error_set.append("The multilib '%s' appears more than once." % lib)
            else:
                seen_libs.append(lib)
            if not lib in global_multilibs:
                tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib)
            tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib)
            if tune in seen_tunes:
                tune_error_set.append("The tuning '%s' appears in more than one multilib." % tune)
            else:
                # BUGFIX: previously appended to seen_libs, so seen_tunes was
                # never populated and duplicate tunings were never detected.
                seen_tunes.append(tune)
            if tune == deftune:
                tune_error_set.append("Multilib '%s' (%s) is also the default tuning." % (lib, deftune))
            else:
                tune_errors = check_toolchain_tune(data, tune, lib)
                if tune_errors:
                    tune_error_set.append(tune_errors)
    if tune_error_set:
        return "Toolchain tunings invalid:\n" + '\n'.join(tune_error_set) + "\n"

    return ""
299
def check_conf_exists(fn, data):
    """Return True when the expanded config file fn is readable in BBPATH."""
    fn = data.expand(fn)
    vbbpath = data.getVar("BBPATH", False)
    search_dirs = vbbpath.split(":") if vbbpath else []
    for entry in search_dirs:
        candidate = os.path.join(data.expand(entry), fn)
        if os.access(candidate, os.R_OK):
            return True
    return False
311
def check_create_long_filename(filepath, pathname):
    """Check the filesystem at filepath supports ~200-character filenames.

    Returns an error string describing the failure, or "" on success.
    pathname is the human-readable variable name (e.g. "TMPDIR") used in
    messages.

    BUGFIX: in Python 3 IOError is an alias of OSError, so the original
    second 'except OSError' handler was unreachable and directory-creation
    failures were misreported as file-creation failures.  The two failure
    modes are now handled separately, using e.errno/e.strerror rather than
    fragile tuple-unpacking of e.args.
    """
    import string, random
    testfile = os.path.join(filepath, ''.join(random.choice(string.ascii_letters) for x in range(200)))
    try:
        if not os.path.exists(filepath):
            bb.utils.mkdirhier(filepath)
    except OSError as e:
        # Could not even create the directory to run the check in
        return "Failed to create %s directory in which to run long name sanity check: %s.\n" % (pathname, e.strerror)
    try:
        with open(testfile, "w"):
            pass
        os.remove(testfile)
    except OSError as e:
        import errno
        if e.errno == errno.ENAMETOOLONG:
            return "Failed to create a file with a long name in %s. Please use a filesystem that does not unreasonably limit filename length.\n" % pathname
        return "Failed to create a file in %s: %s.\n" % (pathname, e.strerror)
    return ""
332
def check_path_length(filepath, pathname, limit):
    """Return an error message when len(filepath) exceeds limit, else ""."""
    if len(filepath) <= limit:
        return ""
    return "The length of %s is longer than %s, this would cause unexpected errors, please use a shorter path.\n" % (pathname, limit)
337
def get_filesystem_id(path):
    """Return the filesystem type id for path ('stat -f -c %t'), or None on failure."""
    import subprocess
    cmd = ["stat", "-f", "-c", "%t", path]
    try:
        output = subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
        bb.warn("Can't get filesystem id of: %s" % path)
        return None
    return output.decode('utf-8').strip()
345
# Check that the path isn't located on nfs.
def check_not_nfs(path, name):
    """Return an error message when path lives on NFS, else ""."""
    # The nfs' filesystem id is 6969
    if get_filesystem_id(path) != "6969":
        return ""
    return "The %s: %s can't be located on nfs.\n" % (name, path)
352
# Check that the path is on a case-sensitive file system
def check_case_sensitive(path, name):
    """Return an error message when path is on a case-insensitive filesystem, else "".

    Creates a mixed-case temp file ('TmP...') in path and checks whether its
    lowercased name resolves to the same file.
    """
    import tempfile
    with tempfile.NamedTemporaryFile(prefix='TmP', dir=path) as probe:
        if os.path.exists(probe.name.lower()):
            return "The %s (%s) can't be on a case-insensitive file system.\n" % (name, path)
    return ""
360
# Check that path isn't a broken symlink
def check_symlink(lnk, data):
    """Raise a sanity error when lnk is a symlink whose target is missing."""
    is_broken = os.path.islink(lnk) and not os.path.exists(lnk)
    if is_broken:
        raise_sanity_error("%s is a broken symlink." % lnk, data)
365
def check_connectivity(d):
    """Verify network access by fetching CONNECTIVITY_CHECK_URIS.

    Returns "" on success (or when the check is skipped), otherwise an
    error message suitable for display to the user.

    BUGFIX: the CONNECTIVITY_CHECK_URIS example line was missing a trailing
    newline, so the following sentence ran onto the same output line.
    """
    # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable
    # using the same syntax as for SRC_URI. If the variable is not set
    # the check is skipped
    test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split()
    retval = ""

    bbn = d.getVar('BB_NO_NETWORK')
    if bbn not in (None, '0', '1'):
        return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn

    # Only check connectivity if network enabled and the
    # CONNECTIVITY_CHECK_URIS are set
    network_enabled = not (bbn == '1')
    check_enabled = len(test_uris)
    if check_enabled and network_enabled:
        # Take a copy of the data store and unset MIRRORS and PREMIRRORS
        # so the check exercises the network, not a local mirror
        data = bb.data.createCopy(d)
        data.delVar('PREMIRRORS')
        data.delVar('MIRRORS')
        try:
            fetcher = bb.fetch2.Fetch(test_uris, data)
            fetcher.checkstatus()
        except Exception as err:
            # Allow the message to be configured so that users can be
            # pointed to a support mechanism.
            msg = data.getVar('CONNECTIVITY_CHECK_MSG') or ""
            if len(msg) == 0:
                msg = "%s.\n" % err
                msg += "    Please ensure your host's network is configured correctly.\n"
                msg += "    If your ISP or network is blocking the above URL,\n"
                msg += "    try with another domain name, for example by setting:\n"
                msg += "    CONNECTIVITY_CHECK_URIS = \"https://www.yoctoproject.org/\"\n"
                msg += "    You could also set BB_NO_NETWORK = \"1\" to disable network\n"
                msg += "    access if all required sources are on local disk.\n"
            retval = msg

    return retval
404
def check_supported_distro(sanity_data):
    """Warn when the host distribution is not in SANITY_TESTED_DISTROS.

    Skips silently when no tested-distro list is configured; only warns
    (never errors) because untested hosts may still work.

    BUGFIX: when the distro identifier could not be determined the original
    code fell through into the fnmatch loop with distro=None, raising
    TypeError; now it returns right after warning.
    """
    from fnmatch import fnmatch

    tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS')
    if not tested_distros:
        return

    try:
        distro = oe.lsb.distro_identifier()
    except Exception:
        distro = None

    if not distro:
        bb.warn('Host distribution could not be determined; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.')
        # Cannot match an unknown distro against the tested list
        return

    # SANITY_TESTED_DISTROS entries are separated by literal "\n" sequences
    for supported in [x.strip() for x in tested_distros.split('\\n')]:
        if fnmatch(distro, supported):
            return

    bb.warn('Host distribution "%s" has not been validated with this version of the build system; you may possibly experience unexpected failures. It is recommended that you use a tested distribution.' % distro)
425
# Checks we should only make if MACHINE is set correctly
def check_sanity_validmachine(sanity_data):
    """Validate tune/arch settings for the selected MACHINE.

    Returns accumulated error text, or "" when everything checks out.
    """
    messages = ""

    # TUNE_ARCH must have been set by the machine's tune configuration
    if sanity_data.getVar('TUNE_ARCH') == 'INVALID':
        messages += 'TUNE_ARCH is unset. Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n'

    # TARGET_OS must have been set by the machine or distro
    if sanity_data.getVar('TARGET_OS') == 'INVALID':
        messages += 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n'

    # PACKAGE_ARCHS must contain no duplicates and must include TUNE_PKGARCH
    pkgarchs = sanity_data.getVar('PACKAGE_ARCHS')
    tunepkg = sanity_data.getVar('TUNE_PKGARCH')
    defaulttune = sanity_data.getVar('DEFAULTTUNE')

    seen = set()
    dups = []
    tunefound = False
    for arch in pkgarchs.split():
        if arch in seen:
            dups.append(arch)
        else:
            seen.add(arch)
        if arch == tunepkg:
            tunefound = True

    if dups:
        messages += "Error, the PACKAGE_ARCHS variable contains duplicates. The following archs are listed more than once: %s" % " ".join(dups)

    if not tunefound:
        messages += "Error, the PACKAGE_ARCHS variable (%s) for DEFAULTTUNE (%s) does not contain TUNE_PKGARCH (%s)." % (pkgarchs, defaulttune, tunepkg)

    return messages
461
# Patch before 2.7 can't handle all the features in git-style diffs. Some
# patches may incorrectly apply, and others won't apply at all.
def check_patch_version(sanity_data):
    """Return an error when the host 'patch' is older than 2.7, else None.

    NOTE(review): distutils.version is deprecated and removed in Python
    3.12; this matches the rest of the file, which still relies on it.
    """
    from distutils.version import LooseVersion
    import re, subprocess

    try:
        output = subprocess.check_output(["patch", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute patch --version, exit code %d:\n%s\n" % (e.returncode, e.output)
    # First line is e.g. "GNU patch 2.7.6"; take the first numeric token
    version = re.search(r"[0-9.]+", output.splitlines()[0]).group()
    if LooseVersion(version) < LooseVersion("2.7"):
        return "Your version of patch is older than 2.7 and has bugs which will break builds. Please install a newer version of patch.\n"
    return None
477
# Unpatched versions of make 3.82 are known to be broken. See GNU Savannah Bug 30612.
# Use a modified reproducer from http://savannah.gnu.org/bugs/?30612 to validate.
def check_make_version(sanity_data):
    """Return an error string when the host 'make' is a broken 3.82, else None."""
    from distutils.version import LooseVersion
    import subprocess

    try:
        result = subprocess.check_output(['make', '--version'], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute make --version, exit code %d\n%s\n" % (e.returncode, e.output)
    version = result.split()[2]
    if LooseVersion(version) == LooseVersion("3.82"):
        # Construct a test file exercising the archive-member prerequisite
        # handling that unpatched make 3.82 mishandles (Savannah bug 30612)
        f = open("makefile_test", "w")
        f.write("makefile_test.a: makefile_test_a.c makefile_test_b.c makefile_test.a( makefile_test_a.c makefile_test_b.c)\n")
        f.write("\n")
        f.write("makefile_test_a.c:\n")
        f.write("	touch $@\n")
        f.write("\n")
        f.write("makefile_test_b.c:\n")
        f.write("	touch $@\n")
        f.close()

        # Check if make 3.82 has been patched
        try:
            subprocess.check_call(['make', '-f', 'makefile_test'])
        except subprocess.CalledProcessError as e:
            return "Your version of make 3.82 is broken. Please revert to 3.81 or install a patched version.\n"
        finally:
            # Always clean up the reproducer files, pass or fail
            os.remove("makefile_test")
            if os.path.exists("makefile_test_a.c"):
                os.remove("makefile_test_a.c")
            if os.path.exists("makefile_test_b.c"):
                os.remove("makefile_test_b.c")
            if os.path.exists("makefile_test.a"):
                os.remove("makefile_test.a")
    return None
515
516
# Check if we're running on WSL (Windows Subsystem for Linux).
# WSLv1 is known not to work but WSLv2 should work properly as
# long as the VHDX file is optimized often, let the user know
# upfront.
# More information on installing WSLv2 at:
# https://docs.microsoft.com/en-us/windows/wsl/wsl2-install
def check_wsl(d):
    """Return an error on WSLv1, warn on WSLv2, else None.

    WSLv1 kernels report "Microsoft" in /proc/version; WSLv2 reports
    "microsoft" (lower case).
    """
    with open("/proc/version", "r") as f:
        for line in f.readlines():
            if "Microsoft" in line:
                return "OpenEmbedded doesn't work under WSLv1, please upgrade to WSLv2 if you want to run builds on Windows"
            elif "microsoft" in line:
                bb.warn("You are running bitbake under WSLv2, this works properly but you should optimize your VHDX file eventually to avoid running out of storage space")
    return None
532
# Require at least gcc version 5.0.
#
# This can be fixed on CentOS-7 with devtoolset-6+
# https://www.softwarecollections.org/en/scls/rhscl/devtoolset-6/
#
# A less invasive fix is with scripts/install-buildtools (or with user
# built buildtools-extended-tarball)
#
def check_gcc_version(sanity_data):
    """Return an error when the host compiler is gcc older than 5.0, else None."""
    from distutils.version import LooseVersion
    import subprocess

    build_cc, version = oe.utils.get_host_compiler_version(sanity_data)
    # Only gcc is version-checked; clang and others are left alone
    if build_cc.strip() != "gcc":
        return None
    if LooseVersion(version) < LooseVersion("5.0"):
        return "Your version of gcc is older than 5.0 and will break builds. Please install a newer version of gcc (you could use the project's buildtools-extended-tarball or use scripts/install-buildtools).\n"
    return None
550
# Tar version 1.24 and onwards handle overwriting symlinks correctly
# but earlier versions do not; this needs to work properly for sstate
# Version 1.28 is needed so opkg-build works correctly when reproducibile builds are enabled
def check_tar_version(sanity_data):
    """Return an error when the host tar is older than 1.28, else None."""
    from distutils.version import LooseVersion
    import subprocess
    try:
        output = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output)
    # First line is e.g. "tar (GNU tar) 1.30"; the version is the 4th token
    found = output.split()[3]
    if LooseVersion(found) < LooseVersion("1.28"):
        return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n"
    return None
565
# We use git parameters and functionality only found in 1.7.8 or later
# The kernel tools assume git >= 1.8.3.1 (verified needed > 1.7.9.5) see #6162
# The git fetcher also had workarounds for git < 1.7.9.2 which we've dropped
def check_git_version(sanity_data):
    """Return an error when the host git is older than 1.8.3.1, else None."""
    from distutils.version import LooseVersion
    import subprocess
    try:
        output = subprocess.check_output(["git", "--version"], stderr=subprocess.DEVNULL).decode('utf-8')
    except subprocess.CalledProcessError as e:
        return "Unable to execute git --version, exit code %d\n%s\n" % (e.returncode, e.output)
    # Output is "git version X.Y.Z"; the version is the 3rd token
    found = output.split()[2]
    if LooseVersion(found) < LooseVersion("1.8.3.1"):
        return "Your version of git is older than 1.8.3.1 and has bugs which will break builds. Please install a newer version of git.\n"
    return None
580
# Check the required perl modules which may not be installed by default
def check_perl_modules(sanity_data):
    """Return an error listing missing perl modules needed by the build, or None."""
    import subprocess
    modules = ("Text::ParseWords", "Thread::Queue", "Data::Dumper")
    missing = ""
    errresult = ''
    for module in modules:
        try:
            # 'perl -e "use Module"' exits non-zero when the module is absent
            subprocess.check_output(["perl", "-e", "use %s" % module])
        except subprocess.CalledProcessError as e:
            errresult += bytes.decode(e.output)
            missing += "%s " % module
    if missing:
        return "Required perl module(s) not found: %s\n\n%s\n" % (missing, errresult)
    return None
596
def sanity_check_conffiles(d):
    """Run registered conf-file migration functions when versions mismatch.

    Each BBLAYERS_CONF_UPDATE_FUNCS entry is conffile:current:required:func;
    func is executed when the conf file exists and its version variable is
    set but differs from the required version.  A NotImplementedError from
    func means the file could not be migrated automatically and is fatal.
    """
    for entry in d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split():
        conffile, current_version, required_version, func = entry.split(":")
        if not check_conf_exists(conffile, d):
            continue
        if d.getVar(current_version) is None:
            continue
        if d.getVar(current_version) == d.getVar(required_version):
            continue
        try:
            bb.build.exec_func(func, d)
        except NotImplementedError as e:
            bb.fatal(str(e))
        # Flag that configuration changed mid-parse so the caller can re-parse
        d.setVar("BB_INVALIDCONF", True)
608
def sanity_handle_abichanges(status, d):
    #
    # Check the 'ABI' of TMPDIR
    #
    # SANITY_ABIFILE records the TMPDIR layout version (OELAYOUT_ABI) it was
    # created with.  If the running code expects a different layout, report
    # an error via status since TMPDIR cannot be migrated automatically.
    import subprocess

    current_abi = d.getVar('OELAYOUT_ABI')
    abifile = d.getVar('SANITY_ABIFILE')
    if os.path.exists(abifile):
        with open(abifile, "r") as f:
            abi = f.read().strip()
        if not abi.isdigit():
            # Corrupt/legacy contents: just re-stamp with the current ABI
            with open(abifile, "w") as f:
                f.write(current_abi)
        elif int(abi) <= 11 and current_abi == "12":
            status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR"))
        elif int(abi) <= 13 and current_abi == "14":
            status.addresult("TMPDIR changed to include path filtering from the pseudo database.\nIt is recommended to use a clean TMPDIR with the new pseudo path filtering so TMPDIR (%s) would need to be removed to continue.\n" % d.getVar("TMPDIR"))

        elif (abi != current_abi):
            # Code to convert from one ABI to another could go here if possible.
            status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
    else:
        # First build in this TMPDIR: record the current ABI
        with open(abifile, "w") as f:
            f.write(current_abi)
634
def check_sanity_sstate_dir_change(sstate_dir, data):
    """Sanity checks run when the value of SSTATE_DIR changes.

    Returns error text ("" when everything is fine).
    """
    # Check that SSTATE_DIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    testmsg = ""
    if sstate_dir != "":
        testmsg = check_create_long_filename(sstate_dir, "SSTATE_DIR")
        # If we don't have permissions to SSTATE_DIR, suggest the user set it as an SSTATE_MIRRORS
        try:
            reason = testmsg.split(': ')[1].strip()
            if reason == "Permission denied.":
                testmsg += "You could try using %s in SSTATE_MIRRORS rather than as an SSTATE_CACHE.\n" % (sstate_dir)
        except IndexError:
            pass
    return testmsg
650
def check_sanity_version_change(status, d):
    # Sanity checks to be done when SANITY_VERSION or NATIVELSBSTRING changes
    # In other words, these tests run once in a given build directory and then
    # never again until the sanity version or host distribution id/version changes.
    # Each failed check appends an error to status via status.addresult().

    # Check the python install is complete. Examples that are often removed in
    # minimal installations: glib-2.0-native requires xml.parsers.expat and icu
    # requires distutils.sysconfig.
    try:
        import xml.parsers.expat
        import distutils.sysconfig
    except ImportError as e:
        status.addresult('Your Python 3 is not a full install. Please install the module %s (see the Getting Started guide for further information).\n' % e.name)

    # Host tool version checks; each returns an error string or None
    status.addresult(check_gcc_version(d))
    status.addresult(check_make_version(d))
    status.addresult(check_patch_version(d))
    status.addresult(check_tar_version(d))
    status.addresult(check_git_version(d))
    status.addresult(check_perl_modules(d))
    status.addresult(check_wsl(d))

    missing = ""

    if not check_app_exists("${MAKE}", d):
        missing = missing + "GNU make,"

    if not check_app_exists('${BUILD_CC}', d):
        missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC")

    if not check_app_exists('${BUILD_CXX}', d):
        missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX")

    required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES')

    for util in required_utilities.split():
        if not check_app_exists(util, d):
            missing = missing + "%s," % util

    if missing:
        missing = missing.rstrip(',')
        status.addresult("Please install the following missing utilities: %s\n" % missing)

    assume_provided = d.getVar('ASSUME_PROVIDED').split()
    # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf
    if "diffstat-native" not in assume_provided:
        status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n')

    # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS)
    import stat
    tmpdir = d.getVar('TMPDIR')
    status.addresult(check_create_long_filename(tmpdir, "TMPDIR"))
    # setgid/setuid on TMPDIR breaks permission handling during builds
    tmpdirmode = os.stat(tmpdir).st_mode
    if (tmpdirmode & stat.S_ISGID):
        status.addresult("TMPDIR is setgid, please don't build in a setgid directory")
    if (tmpdirmode & stat.S_ISUID):
        status.addresult("TMPDIR is setuid, please don't build in a setuid directory")

    # Check that a user isn't building in a path in PSEUDO_IGNORE_PATHS
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    workdir = d.getVar('WORKDIR', expand=True)
    for i in pseudoignorepaths:
        if i and workdir.startswith(i):
            status.addresult("You are building in a path included in PSEUDO_IGNORE_PATHS " + str(i) + " please locate the build outside this path.\n")

    # Check if PSEUDO_IGNORE_PATHS and paths under pseudo control overlap
    pseudoignorepaths = d.getVar('PSEUDO_IGNORE_PATHS', expand=True).split(",")
    pseudo_control_dir = "${D},${PKGD},${PKGDEST},${IMAGEROOTFS},${SDK_OUTPUT}"
    pseudocontroldir = d.expand(pseudo_control_dir).split(",")
    for i in pseudoignorepaths:
        for j in pseudocontroldir:
            if i and j:
                if j.startswith(i):
                    status.addresult("A path included in PSEUDO_IGNORE_PATHS " + str(i) + " and the path " + str(j) + " overlap and this will break pseudo permission and ownership tracking. Please set the path " + str(j) + " to a different directory which does not overlap with pseudo controlled directories. \n")

    # Some third-party software apparently relies on chmod etc. being suid root (!!)
    import stat
    suid_check_bins = "chown chmod mknod".split()
    for bin_cmd in suid_check_bins:
        bin_path = bb.utils.which(os.environ["PATH"], bin_cmd)
        if bin_path:
            bin_stat = os.stat(bin_path)
            if bin_stat.st_uid == 0 and bin_stat.st_mode & stat.S_ISUID:
                status.addresult('%s has the setuid bit set. This interferes with pseudo and may cause other issues that break the build process.\n' % bin_path)

    # Check that we can fetch from various network transports
    netcheck = check_connectivity(d)
    status.addresult(netcheck)
    if netcheck:
        status.network_error = True

    # On 64-bit hosts with 32-bit libc but no 32-bit headers, some -native
    # builds fail; NO32LIBS suppresses this check
    nolibs = d.getVar('NO32LIBS')
    if not nolibs:
        lib32path = '/lib'
        if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ):
            lib32path = '/lib32'

        if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'):
            status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n")

    bbpaths = d.getVar('BBPATH').split(":")
    if ("." in bbpaths or "./" in bbpaths or "" in bbpaths):
        status.addresult("BBPATH references the current directory, either through "    \
                "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\
                "layer configuration is adding empty elements to BBPATH.\n\t "\
                "Please check your layer.conf files and other BBPATH "        \
                "settings to remove the current working directory "           \
                "references.\n" \
                "Parsed BBPATH is" + str(bbpaths));

    oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF')
    if not oes_bb_conf:
        status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. This means your environment is misconfigured, in particular check BBPATH.\n')

    # The length of TMPDIR can't be longer than 410
    status.addresult(check_path_length(tmpdir, "TMPDIR", 410))

    # Check that TMPDIR isn't located on nfs
    status.addresult(check_not_nfs(tmpdir, "TMPDIR"))

    # Check for case-insensitive file systems (such as Linux in Docker on
    # macOS with default HFS+ file system)
    status.addresult(check_case_sensitive(tmpdir, "TMPDIR"))
774
def sanity_check_locale(d):
    """
    Verify that the host can provide the en_US.UTF-8 locale, which
    bitbake switches to at startup; abort the build with a sanity
    error if it is missing.
    """
    import locale
    wanted = "en_US.UTF-8"
    try:
        locale.setlocale(locale.LC_ALL, wanted)
    except locale.Error:
        raise_sanity_error("Your system needs to support the en_US.UTF-8 locale.", d)
784
def check_sanity_everybuild(status, d):
    """
    Sanity tests which examine the user's environment and so need to run at
    the start of every build (or are cheap enough that always running them
    makes sense). Failures are accumulated on 'status' (a SanityStatus);
    only running as root aborts immediately.
    """
    import os, stat

    if 0 == os.getuid():
        raise_sanity_error("Do not use Bitbake as root.", d)

    # Check the Python version; we now have a minimum of Python 3.5
    import sys
    if sys.hexversion < 0x030500F0:
        status.addresult('The system requires at least Python 3.5 to run. Please update your Python interpreter.\n')

    # Check the bitbake version meets minimum requirements
    from distutils.version import LooseVersion
    minversion = d.getVar('BB_MIN_VERSION')
    if (LooseVersion(bb.__version__) < LooseVersion(minversion)):
        status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__))

    sanity_check_locale(d)

    # An empty, '.' or './' element in PATH would make the build pick up
    # files from the current working directory.
    paths = d.getVar('PATH').split(":")
    if "." in paths or "./" in paths or "" in paths:
        status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n")

    # Check if bitbake is present in the PATH environment variable
    bb_check = bb.utils.which(d.getVar('PATH'), 'bitbake')
    if not bb_check:
        bb.warn("bitbake binary is not found in PATH, did you source the script?")

    # Check whether the 'inherit' directive is found (used for a class to inherit);
    # in conf files it's supposed to be uppercase INHERIT
    inherit = d.getVar('inherit')
    if inherit:
        status.addresult("Please don't use inherit directive in your local.conf. The directive is supposed to be used in classes and recipes only to inherit of bbclasses. Here INHERIT should be used.\n")

    # Check that the DISTRO is valid, if set
    # need to take into account DISTRO renaming DISTRO
    distro = d.getVar('DISTRO')
    if distro and distro != "nodistro":
        if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ):
            status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO"))

    # Check that these variables don't use tilde-expansion as we don't do that.
    # Guard against the variable being unset so the check itself cannot crash.
    for v in ("TMPDIR", "DL_DIR", "SSTATE_DIR"):
        if (d.getVar(v) or '').startswith("~"):
            status.addresult("%s uses ~ but Bitbake will not expand this, use an absolute path or variables." % v)

    # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't
    # set, since so much relies on it being set. Skip the path checks entirely
    # when it is unset rather than passing None to os.path/check_symlink.
    dldir = d.getVar('DL_DIR')
    if not dldir:
        status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n")
    elif os.path.exists(dldir) and not os.access(dldir, os.W_OK):
        status.addresult("DL_DIR: %s exists but you do not appear to have write access to it. \n" % dldir)
    if dldir:
        check_symlink(dldir, d)

    # Check that the MACHINE is valid, if it is set
    machinevalid = True
    if d.getVar('MACHINE'):
        if not check_conf_exists("conf/machine/${MACHINE}.conf", d):
            status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE')))
            machinevalid = False
        else:
            status.addresult(check_sanity_validmachine(d))
    else:
        status.addresult('Please set a MACHINE in your local.conf or environment\n')
        machinevalid = False
    # Only bother validating the toolchain for a usable MACHINE.
    if machinevalid:
        status.addresult(check_toolchain(d))

    # Check that the SDKMACHINE is valid, if it is set
    if d.getVar('SDKMACHINE'):
        if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d):
            status.addresult('Specified SDKMACHINE value is not valid\n')
        elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}":
            status.addresult('SDKMACHINE is set, but SDK_ARCH has not been changed as a result - SDKMACHINE may have been set too late (e.g. in the distro configuration)\n')

    # If SDK_VENDOR looks like "-my-sdk" then the triples are badly formed so fail early
    sdkvendor = d.getVar("SDK_VENDOR")
    if not (sdkvendor and sdkvendor.startswith("-") and sdkvendor.count("-") == 1):
        status.addresult("SDK_VENDOR should be of the form '-foosdk' with a single dash; found '%s'\n" % sdkvendor)

    check_supported_distro(d)

    # A restrictive umask would create unreadable/unenterable output files
    # and directories; read the current value without changing it.
    omask = os.umask(0o022)
    if omask & 0o755:
        status.addresult("Please use a umask which allows a+rx and u+rwx\n")
    os.umask(omask)

    if d.getVar('TARGET_ARCH') == "arm":
        # This path is no longer user-readable in modern (very recent) Linux,
        # so treat any failure to read it as "check not possible" rather
        # than an error.
        try:
            if os.path.exists("/proc/sys/vm/mmap_min_addr"):
                f = open("/proc/sys/vm/mmap_min_addr", "r")
                try:
                    if (int(f.read().strip()) > 65536):
                        status.addresult("/proc/sys/vm/mmap_min_addr is not <= 65536. This will cause problems with qemu so please fix the value (as root).\n\nTo fix this in later reboots, set vm.mmap_min_addr = 65536 in /etc/sysctl.conf.\n")
                finally:
                    f.close()
        except Exception:
            # Best-effort check; unreadable proc entries are ignored.
            pass

    # Characters in these paths that are known to break recipe builds.
    for checkdir in ['COREBASE', 'TMPDIR']:
        val = d.getVar(checkdir)
        if val.find('..') != -1:
            status.addresult("Error, you have '..' in your %s directory path. Please ensure the variable contains an absolute path as this can break some recipe builds in obtuse ways." % checkdir)
        if val.find('+') != -1:
            status.addresult("Error, you have an invalid character (+) in your %s directory path. Please move the installation to a directory which doesn't include any + characters." % checkdir)
        if val.find('@') != -1:
            status.addresult("Error, you have an invalid character (@) in your %s directory path. Please move the installation to a directory which doesn't include any @ characters." % checkdir)
        if val.find(' ') != -1:
            status.addresult("Error, you have a space in your %s directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this." % checkdir)
        if val.find('%') != -1:
            status.addresult("Error, you have an invalid character (%) in your %s directory path which causes problems with python string formatting. Please move the installation to a directory which doesn't include any % characters." % checkdir)

    # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
    import re
    mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
    protocols = ['http', 'ftp', 'file', 'https', \
                 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \
                 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3' ]
    for mirror_var in mirror_vars:
        mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split()

        # Split into (pattern, mirror) pairs
        if len(mirrors) % 2 != 0:
            bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, str(mirrors)))
            continue
        mirrors = list(zip(*[iter(mirrors)]*2))

        for mirror_entry in mirrors:
            pattern, mirror = mirror_entry

            decoded = bb.fetch2.decodeurl(pattern)
            try:
                # The pattern's scheme is itself a regex matched against
                # the known protocols below.
                pattern_scheme = re.compile(decoded[0])
            except re.error as exc:
                bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
                continue

            if not any(pattern_scheme.match(protocol) for protocol in protocols):
                bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
                continue

            if not any(mirror.startswith(protocol + '://') for protocol in protocols):
                bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
                continue

            if mirror.startswith('file://'):
                import urllib
                check_symlink(urllib.parse.urlparse(mirror).path, d)
                # SSTATE_MIRROR ends with a /PATH string
                if mirror.endswith('/PATH'):
                    # remove /PATH$ from SSTATE_MIRROR to get a working
                    # base directory path
                    mirror_base = urllib.parse.urlparse(mirror[:-1*len('/PATH')]).path
                    check_symlink(mirror_base, d)

    # Check that TMPDIR hasn't changed location since the last time we were run
    tmpdir = d.getVar('TMPDIR')
    checkfile = os.path.join(tmpdir, "saved_tmpdir")
    if os.path.exists(checkfile):
        with open(checkfile, "r") as f:
            saved_tmpdir = f.read().strip()
        if (saved_tmpdir != tmpdir):
            status.addresult("Error, TMPDIR has changed location. You need to either move it back to %s or delete it and rebuild\n" % saved_tmpdir)
    else:
        bb.utils.mkdirhier(tmpdir)
        # Remove setuid, setgid and sticky bits from TMPDIR
        try:
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISUID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISGID)
            os.chmod(tmpdir, os.stat(tmpdir).st_mode & ~ stat.S_ISVTX)
        except OSError as exc:
            bb.warn("Unable to chmod TMPDIR: %s" % exc)
        with open(checkfile, "w") as f:
            f.write(tmpdir)

    # If /bin/sh is a symlink, check that it points to dash or bash
    if os.path.islink('/bin/sh'):
        real_sh = os.path.realpath('/bin/sh')
        # Due to update-alternatives, the shell name may take various
        # forms, such as /bin/dash, bin/bash, /bin/bash.bash ...
        if '/dash' not in real_sh and '/bash' not in real_sh:
            status.addresult("Error, /bin/sh links to %s, must be dash or bash\n" % real_sh)
971
def check_sanity(sanity_data):
    """
    Top-level sanity check driver, run from the SanityCheck event handler.

    Always runs the per-build checks; additionally runs the more expensive
    version-change checks when SANITY_VERSION or the host distribution
    changed since the last successful run (state is cached in
    ${TOPDIR}/cache/sanity_info). All collected failure messages are
    reported at the end via raise_sanity_error().
    """
    class SanityStatus(object):
        # Accumulator for check results; checks append messages via
        # addresult() and may set network_error directly.
        def __init__(self):
            self.messages = ""
            self.network_error = False

        def addresult(self, message):
            # Checks return None/"" on success, so ignore empty results.
            if message:
                self.messages = self.messages + message

    status = SanityStatus()

    tmpdir = sanity_data.getVar('TMPDIR')
    sstate_dir = sanity_data.getVar('SSTATE_DIR')

    check_symlink(sstate_dir, sanity_data)

    # Read the state recorded by the previous sanity run (if any) so we can
    # detect SANITY_VERSION, host distribution and SSTATE_DIR changes.
    # (TMPDIR relocation is detected separately via TMPDIR/saved_tmpdir in
    # check_sanity_everybuild, so the cached TMPDIR line is not parsed.)
    last_sanity_version = 0
    last_sstate_dir = ""
    last_nativelsbstr = ""
    sanityverfile = sanity_data.expand("${TOPDIR}/cache/sanity_info")
    if os.path.exists(sanityverfile):
        with open(sanityverfile, 'r') as f:
            for line in f:
                if line.startswith('SANITY_VERSION'):
                    last_sanity_version = int(line.split()[1])
                if line.startswith('SSTATE_DIR'):
                    last_sstate_dir = line.split()[1]
                if line.startswith('NATIVELSBSTRING'):
                    last_nativelsbstr = line.split()[1]

    check_sanity_everybuild(status, sanity_data)

    sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1)
    # NATIVELSBSTRING var may have been overridden with "universal", so
    # get actual host distribution id and version
    nativelsbstr = lsb_distro_identifier(sanity_data)
    if last_sanity_version < sanity_version or last_nativelsbstr != nativelsbstr:
        check_sanity_version_change(status, sanity_data)
        status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))
    else:
        if last_sstate_dir != sstate_dir:
            status.addresult(check_sanity_sstate_dir_change(sstate_dir, sanity_data))

    # Only record the new state when everything passed, so a failed run is
    # re-checked in full next time.
    if os.path.exists(os.path.dirname(sanityverfile)) and not status.messages:
        with open(sanityverfile, 'w') as f:
            f.write("SANITY_VERSION %s\n" % sanity_version)
            f.write("TMPDIR %s\n" % tmpdir)
            f.write("SSTATE_DIR %s\n" % sstate_dir)
            f.write("NATIVELSBSTRING %s\n" % nativelsbstr)

    sanity_handle_abichanges(status, sanity_data)

    if status.messages != "":
        raise_sanity_error(sanity_data.expand(status.messages), sanity_data, status.network_error)
1032
1033# Create a copy of the datastore and finalise it to ensure appends and
1034# overrides are set - the datastore has yet to be finalised at ConfigParsed
def copy_data(e):
    """
    Return a finalised copy of the event's datastore.

    The datastore has yet to be finalised at ConfigParsed time, so copy it
    and finalise the copy to ensure appends and overrides are applied.
    """
    snapshot = bb.data.createCopy(e.data)
    snapshot.finalize()
    return snapshot
1039
# Re-validate the user's configuration files every time the configuration
# is (re)parsed.
addhandler config_reparse_eventhandler
config_reparse_eventhandler[eventmask] = "bb.event.ConfigParsed"
python config_reparse_eventhandler() {
    # 'e' is the ConfigParsed event supplied by bitbake to handlers.
    sanity_check_conffiles(e.data)
}
1045
# Dispatch SanityCheck and NetworkTest events: SanityCheck runs the full
# sanity suite; NetworkTest only probes connectivity.
addhandler check_sanity_eventhandler
check_sanity_eventhandler[eventmask] = "bb.event.SanityCheck bb.event.NetworkTest"
python check_sanity_eventhandler() {
    # 'e' is the triggering event supplied by bitbake to handlers.
    if bb.event.getName(e) == "SanityCheck":
        # Work on a finalised copy of the datastore (see copy_data).
        sanity_data = copy_data(e)
        check_sanity(sanity_data)
        # Fire a Passed event only when the caller asked for events;
        # check_sanity() raises on failure, so this is the success path.
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
            bb.event.fire(bb.event.SanityCheckPassed(), e.data)
    elif bb.event.getName(e) == "NetworkTest":
        sanity_data = copy_data(e)
        if e.generateevents:
            sanity_data.setVar("SANITY_USE_EVENTS", "1")
        # check_connectivity() returns a non-empty message on failure.
        bb.event.fire(bb.event.NetworkTestFailed() if check_connectivity(sanity_data) else bb.event.NetworkTestPassed(), e.data)

    return
}