lh | e6a93fb | 2023-04-10 22:53:27 -0700 | [diff] [blame] | 1 | # Copyright (C) 2017 Mediatek |
| 2 | # Author: Richard Sun |
| 3 | # Some code and influence taken from externalsrc.bbclass: |
| 4 | # Copyright (C) 2012 Linux Foundation |
| 5 | # Author: Richard Purdie |
| 6 | # Some code and influence taken from srctree.bbclass: |
| 7 | # Copyright (C) 2009 Chris Larson <clarson@kergoth.com> |
| 8 | # Released under the MIT license (see COPYING.MIT for the terms) |
| 9 | # |
# workonsrc.bbclass enables use of an existing source tree, usually external to
# the build system, to build a piece of software rather than the usual
# fetch/unpack/patch process.
| 13 | # |
| 14 | # To use, add workonsrc to the global inherit and set WORKONSRC to point at the |
| 15 | # directory you want to use containing the sources e.g. from local.conf for a recipe |
| 16 | # called "myrecipe" you would do: |
| 17 | # |
| 18 | # INHERIT += "workonsrc" |
| 19 | # WORKONSRC_pn-myrecipe = "/path/to/my/source/tree" |
| 20 | # |
| 21 | # In order to make this class work for both target and native versions (or with |
| 22 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate |
| 23 | # directory under the work directory (split source and build directories). This is |
| 24 | # the default, but the build directory can be set to the source directory if |
| 25 | # circumstances dictate by setting WORKONSRC_BUILD to the same value, e.g.: |
| 26 | # |
| 27 | # WORKONSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree" |
| 28 | # |
| 29 | |
# Tasks that are made redundant by building straight from the workon source
# tree; they are deltask'd below (fetch/unpack survive if local file:// SRC_URI
# entries remain).
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
# Convenience symlinks created in ${S} by workonsrc_configure_prefunc,
# formatted as <linkname>:<target>.
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
| 32 | |
python () {
    import subprocess, os.path

    # srctree_rsync_files() copies the workon tree into ${B} with rsync, so
    # every recipe using this class needs rsync-native at build time.
    depends = d.getVar("DEPENDS")
    depends = "%s rsync-native" % depends
    d.setVar("DEPENDS", depends)

    pn = d.getVar('PN')
    # License files are read out of ${S}, which is only populated once
    # do_configure has synced the sources in.
    d.appendVarFlag('do_populate_lic', 'depends', ' %s:do_configure' % pn)
    workonsrc = d.getVar('WORKONSRC')
    workonsrcbuild = d.getVar('WORKONSRC_BUILD')
    if workonsrc and not workonsrc.startswith("/"):
        bb.error("WORKONSRC must be an absolute path")
    if workonsrcbuild and not workonsrcbuild.startswith("/"):
        bb.error("WORKONSRC_BUILD must be an absolute path")

    # Fall back to a sibling "prebuilt" tree when the source tree is absent.
    # Guarded by 'if workonsrc' so recipes that do not set WORKONSRC (this
    # class is intended for global INHERIT) do not crash on None.replace().
    if workonsrc:
        workonprebuilt = workonsrc.replace("build/../src/", "build/../prebuilt/")
        if not os.path.exists(workonsrc):
            if os.path.exists(workonprebuilt):
                workonsrc = workonprebuilt
            else:
                bb.warn("Both %s and %s aren't existed" % (workonsrc, workonprebuilt) )

    # If this is the base recipe and WORKONSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (workonsrc or
                ('native' in classextend and
                 d.getVar('WORKONSRC_pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('WORKONSRC_pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('WORKONSRC_pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if workonsrc:
        import oe.recipeutils
        import oe.path

        d.setVar('S', workonsrc)
        if workonsrcbuild:
            d.setVar('B', workonsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
            workonsrcbuild = d.getVar('B')

        # With a separate build dir the sources are rsync'd into ${B} by the
        # prefuncs, so the build actually happens out of ${B} (S points there).
        if workonsrc != workonsrcbuild:
            d.setVar('S', workonsrcbuild)

        # Keep only file:// (and kmeta) SRC_URI entries; anything else would
        # require do_fetch, which is removed below.
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if (url_data.type == 'file' or
                    'type' in parm and parm['type'] == 'kmeta'):
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        if '{SRCPV}' in d.getVar('PV', False):
            # Dummy value because the default function can't be called with blank SRC_URI
            d.setVar('SRCPV', '999')

        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if task.endswith("_setscene"):
                # sstate is never going to work for workon source trees, disable it
                bb.build.deltask(task, d)
            else:
                # Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

            # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
            cleandirs = oe.recipeutils.split_var_value(d.getVarFlag(task, 'cleandirs', False) or '')
            setvalue = False
            for cleandir in cleandirs[:]:
                if oe.path.is_path_parent(workonsrc, d.expand(cleandir)):
                    cleandirs.remove(cleandir)
                    setvalue = True
            if setvalue:
                d.setVarFlag(task, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)

        d.prependVarFlag('do_compile', 'prefuncs', "workonsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "workonsrc_configure_prefunc ")

        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # change of do_configure task hash is correctly detected and stamps are
        # invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}
| 158 | |
python workonsrc_configure_prefunc() {
    # Sync the workon source tree into ${B} before configuring.
    srctree_rsync_files(d)
    s_dir = d.getVar('S')
    # Create the desired convenience symlinks (e.g. oe-workdir, oe-logs).
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        # Only well-formed "<link>:<target>" entries; indexing symsplit[1]
        # before this check used to raise IndexError on colon-less entries.
        if len(symsplit) > 1:
            lnkfile = os.path.join(s_dir, symsplit[0])
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists, leave it if it points to the right location already
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
            newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                # Strip newlines before comparing; readlines() keeps them, so
                # the duplicate check previously never matched and the exclude
                # file grew on every do_configure.
                elines = [line.rstrip('\n') for line in efile.readlines()]
                for link in newlinks:
                    if link in elines or '/' + link in elines:
                        continue
                    efile.write('/' + link + '\n')
    except IOError:
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
}
| 193 | |
python workonsrc_compile_prefunc() {
    # Keep ${B} in sync with the workon tree before every compile.
    srctree_rsync_files(d)
    # Be loud about it, since forgetting the tree is external causes confusion.
    bb.plain('NOTE: %s: compiling from workon source tree %s'
             % (d.getVar('PN'), d.getVar('WORKONSRC')))
}
| 199 | |
# Run with both ${S} and ${B} created/current so the makefile check below works
do_buildclean[dirs] = "${S} ${B}"
# Always re-run when invoked; there is nothing meaningful to stamp
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
workonsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		# Remove the EXTERNALSRC_SYMLINKS link names (the part before ':')
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}
| 213 | |
def srctree_rsync_files(d):
    """
    Point S (and B) at the workon source tree and, when a separate build
    directory is used, rsync the sources into it so the build runs out of
    ${B} rather than modifying the original tree in place.
    """
    import subprocess, os.path

    workonsrc = d.getVar('WORKONSRC')
    # Fall back to the sibling "prebuilt" tree if the source tree is absent.
    # Guarded so an unset WORKONSRC does not crash on None.replace().
    if workonsrc:
        workonprebuilt = workonsrc.replace("build/../src/", "build/../prebuilt/")
        if not os.path.exists(workonsrc):
            if os.path.exists(workonprebuilt):
                workonsrc = workonprebuilt
            else:
                bb.warn("Both %s and %s aren't existed" % (workonsrc, workonprebuilt) )

    if workonsrc:
        d.setVar('S', workonsrc)
        workonsrcbuild = d.getVar('WORKONSRC_BUILD')
        if workonsrcbuild:
            d.setVar('B', workonsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
            workonsrcbuild = d.getVar('B')

        if workonsrc != workonsrcbuild:
            # Separate build dir: mirror the sources into it.
            # (os.makedirs replaces the previous shell "mkdir -p" subprocess.)
            os.makedirs(workonsrcbuild, exist_ok=True)

            if os.path.exists(workonsrc):
                rsync_flags = d.getVar('WORKONSRC_RSYNC_APPENDED_FLAG') or ""
                # NOTE(review): the '%s/*' glob skips top-level dotfiles --
                # presumably intentional (keeps .git out of ${B}); confirm
                # before switching to the trailing-slash rsync form.
                cmd = "rsync -aL %s %s/* %s" % (rsync_flags, workonsrc, workonsrcbuild)
                if subprocess.call(cmd, shell=True) != 0:
                    bb.fatal("rsync -aL %s %s/* %s failed." % (rsync_flags, workonsrc, workonsrcbuild))
                # Build from the synced copy from now on.
                d.setVar('S', workonsrcbuild)
| 247 | |
def srctree_hash_files(d, srcdir=None):
    """
    Return a file-checksums spec for do_compile covering the workon tree.

    If the tree is a git checkout, write-tree a throwaway index (so untracked
    files count too) and checksum the resulting sha1 file; otherwise fall back
    to checksumming everything under the tree.
    """
    import shutil
    import subprocess
    import tempfile

    s_dir = srcdir or d.getVar('WORKONSRC')
    git_dir = None

    try:
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'],
                                    stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Not a git checkout, or git itself is not installed
        # (FileNotFoundError was previously uncaught and crashed parsing).
        pass

    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone the real index so the user's staging area is untouched
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            with open(oe_hash_file, 'w') as fobj:
                fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        ret = s_dir + '/*:True'
    return ret
| 279 | |
def srctree_configure_hash_files(d):
    """
    Build the file-checksums list that makes do_configure re-execute,
    driven by the CONFIGURE_FILES variable.
    """
    configure_files = (d.getVar('CONFIGURE_FILES') or '').split()
    checksum_items = []
    relative_names = []
    for name in configure_files:
        if name.startswith('/'):
            # Absolute entries are recorded directly, whether they exist or not
            checksum_items.append('%s:%s' % (name, os.path.exists(name)))
        else:
            relative_names.append(name)
    if relative_names:
        # Relative entries are searched for throughout the workon source tree
        src_root = d.getVar('WORKONSRC')
        for dirpath, _, filenames in os.walk(src_root):
            checksum_items.extend('%s:True' % os.path.join(dirpath, fname)
                                  for fname in filenames if fname in relative_names)
    return ' '.join(checksum_items)
| 300 | |
| 301 | EXPORT_FUNCTIONS do_buildclean |