Complete Yocto mirror with license table for TQMa6UL (2038-compliance)
- 264 license table entries with exact download URLs (224/264 resolved) - Complete sources/ directory with all BitBake recipes - Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl) - Full traceability for Softwarefreigabeantrag - GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4 - License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
This commit is contained in:
404
sources/poky/scripts/lib/devtool/__init__.py
Normal file
404
sources/poky/scripts/lib/devtool/__init__.py
Normal file
@@ -0,0 +1,404 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Development tool - utility functions for plugins
|
||||
#
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugins module"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import logging
|
||||
import re
|
||||
import codecs
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class DevtoolError(Exception):
    """Error raised by devtool operations.

    Carries an ``exitcode`` so the top-level driver can exit with a
    meaningful process status after printing the message.
    """

    def __init__(self, message, exitcode=1):
        # Let Exception store the message; remember the requested status too
        super().__init__(message)
        self.exitcode = exitcode
|
||||
|
||||
|
||||
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context

    init_path: path to the OE init script (falsy to skip sourcing it)
    builddir: build directory; used as default cwd and as the init
        script's argument
    cmd: shell command string to execute
    watch: if True, stream output live to sys.stdout via exec_watch()
    options: passed through to exec_watch() / bb.process.run()
    Returns whatever the underlying runner returns (an (output, error)
    tuple for bb.process.run / exec_watch).
    """
    import bb
    if not 'cwd' in options:
        options["cwd"] = builddir
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        if not 'executable' in options:
            options['executable'] = 'bash'
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        # Source the init script first (silently) so the bitbake
        # environment is set up before cmd runs
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if watch:
        if sys.stdout.isatty():
            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
            cmd = 'script -e -q -c "%s" /dev/null' % cmd
        return exec_watch('%s%s' % (init_prefix, cmd), **options)
    else:
        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
|
||||
|
||||
def exec_watch(cmd, **options):
    """Run a program with its output echoed to sys.stdout as it runs.

    cmd: command to run; a plain string is executed through the shell
    options: passed through to subprocess.Popen()
    Returns an (output, None) tuple mirroring bb.process.run().
    Raises bb.process.ExecutionError on a non-zero exit code.
    """
    if isinstance(cmd, str) and not "shell" in options:
        options["shell"] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    # Decode incrementally so partial UTF-8 sequences across reads are
    # handled correctly
    reader = codecs.getreader('utf-8')(process.stdout)
    buf = ''
    while True:
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            buf += out
        elif out == '' and process.poll() is not None:
            # EOF reached and the process has terminated
            break

    if process.returncode != 0:
        # Imported lazily so the success path does not require bitbake's
        # libraries to be importable
        import bb
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
|
||||
|
||||
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
    # Grab the pseudo command/environment from the datastore
    fakerootcmd = d.getVar('FAKEROOTCMD')
    fakerootenv = d.getVar('FAKEROOTENV')
    # BUGFIX: kwargs must be expanded with ** (it was previously passed as a
    # single positional dict), and the subprocess exit code must be returned
    # to the caller rather than discarded.
    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
|
||||
|
||||
def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
    """Run cmd under the pseudo fakeroot wrapper without needing a datastore.

    fakerootcmd: path to the pseudo executable
    fakerootenv: space-separated VAR=value pairs added to the environment
    kwargs: passed through to subprocess.call() (callers typically pass
        shell=True since the command is a single string)
    Returns the subprocess exit code, or 2 if pseudo could not be found.
    """
    if not os.path.exists(fakerootcmd):
        # BUGFIX: the original message contained a %s placeholder that was
        # never filled in; supply fakerootcmd (lazily, via logging's %-args).
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
|
||||
|
||||
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake

    config_only: only parse configuration, not recipes (faster startup)
    basepath: directory to chdir into while initializing (e.g. build dir)
    tracking: enable variable history tracking in the datastore
    Returns a prepared bb.tinfoil.Tinfoil instance; the caller is
    responsible for calling shutdown() on it when done.
    """
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        # Deferred until bitbake's lib directory has been added to sys.path
        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            # UI startup failure: shut down cleanly and report a
            # friendlier devtool-level error
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Any other failure: make sure the server is stopped, then re-raise
            tinfoil.shutdown()
            raise
    finally:
        # Always restore the original working directory
        os.chdir(orig_cwd)
    return tinfoil
|
||||
|
||||
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe; returns the datastore or None on error."""
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None

    append_files = None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Drop any bbappends that live in the devtool workspace itself
            workspace_prefix = config.workspace_path
            append_files = [p for p in append_files
                            if not p.startswith(workspace_prefix)]

    try:
        return tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
|
||||
|
||||
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.

    Returns the name of the matching workspace recipe; raises
    DevtoolError if it is not found or its source tree is missing/empty.
    """

    workspacepn = pn
    found = False
    for recipe, details in workspace.items():
        if recipe == pn:
            found = True
            break
        if bbclassextend:
            # pn may be a BBCLASSEXTEND variant (e.g. foo-native) of a
            # workspace recipe
            recipefile = details['recipefile']
            if recipefile and pn in get_bbclassextend_targets(recipefile, recipe):
                workspacepn = recipe
                found = True
                break
    if not found:
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
|
||||
|
||||
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not

    Returns True when the source tree should double as the build
    directory (B == S), False when a separate build directory is wanted.
    """
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        return False
    if same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
        return True
    if bb.data.inherits_class('autotools-brokensep', d):
        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
        return True
    if os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
        logger.info('Using source tree as build directory since that would be the default for this recipe')
        return True
    return False
|
||||
|
||||
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree

    repodir: directory containing the (possibly not-yet-versioned) source
    version: upstream version string used in the initial commit message
    devbranch: branch name to create and check out for development
    basetag: tag marking the pristine upstream state
    d: optional datastore, used for git user options and ignored-commit helpers
    """
    import bb.process
    import oe.patch
    if not os.path.exists(os.path.join(repodir, '.git')):
        # Fresh tree: initialize the repo and make an initial commit of
        # everything (forced add so ignore rules don't hide sources)
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
        bb.process.run('git add -f -A .', cwd=repodir)
        commit_cmd = ['git']
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged - still create a commit so the base tag has
            # something to point at
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
    gitinfodir = os.path.join(repodir, '.git', 'info')
    try:
        os.mkdir(gitinfodir)
    except FileExistsError:
        pass
    excludes = []
    excludefile = os.path.join(gitinfodir, 'exclude')
    try:
        with open(excludefile, 'r') as f:
            excludes = f.readlines()
    except FileNotFoundError:
        pass
    if 'singletask.lock\n' not in excludes:
        excludes.append('singletask.lock\n')
    with open(excludefile, 'w') as f:
        for line in excludes:
            f.write(line)

    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    bb.process.run('git tag -f %s' % basetag, cwd=repodir)

    # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
    found = False
    for line in stdout.splitlines():
        if line.endswith("/"):
            # An untracked directory - walk it looking for nested git repos
            new_dir = line.split()[1]
            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
                if ".git" in dirs + files:
                    (stdout, _) = bb.process.run('git remote', cwd=root)
                    remote = stdout.splitlines()[0]
                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
                    remote_url = stdout.splitlines()[0]
                    # NOTE(review): logging the relative path at *error* level
                    # looks like leftover debug output - confirm intent
                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
                    found = True
                if found:
                    # Commit the submodule registration without polluting
                    # the user-visible history
                    oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
                    found = False
    if os.path.exists(os.path.join(repodir, '.gitmodules')):
        # Tag the base state inside every submodule too
        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
|
||||
|
||||
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.
    """
    stem = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Swap the version suffix for '%' so the append matches any version
        stem = re.sub(r'_.*', '_%', stem)
    return os.path.join(config.workspace_path, 'appends', stem + '.bbappend')
|
||||
|
||||
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.
    """
    import bb.utils

    values = {}

    def record_varfunc(varname, origvalue, op, newlines):
        # Just capture the value; leave the metadata untouched
        values[varname] = origvalue
        return origvalue, None, 0, True

    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], record_varfunc)

    targets = []
    for variant in values.get('BBCLASSEXTEND', '').split():
        # nativesdk targets are prefixed; native/cross/crosssdk are suffixed
        if variant == 'nativesdk':
            targets.append('%s-%s' % (variant, pn))
        elif variant in ('native', 'cross', 'crosssdk'):
            targets.append('%s-%s' % (pn, variant))
    return targets
|
||||
|
||||
def replace_from_file(path, old, new):
    """Replace occurrences of *old* with *new* in the file at *path*.

    A no-op if old is None or the file does not exist. On success the
    file is rewritten with trailing whitespace stripped and a final
    newline appended.
    """

    def read_file(path):
        with open(path) as f:
            return f.read()

    def write_file(path, data):
        if data is None:
            return
        wdata = data.rstrip() + "\n"
        with open(path, "w") as f:
            f.write(wdata)

    # In case old is None, return immediately
    if old is None:
        return
    try:
        rdata = read_file(path)
    except FileNotFoundError:
        # BUGFIX: the original compared e.errno against errno.ENOENT but the
        # errno module was never imported, raising NameError for a missing
        # file. A missing file is a silent no-op; any other OSError still
        # propagates.
        return

    # str.replace cannot fail for string inputs, so no per-line except needed
    new_contents = [line.replace(old, new) for line in rdata.splitlines()]
    write_file(path, "\n".join(new_contents))
|
||||
|
||||
|
||||
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
        workspace plus any extras we want unlocked.

    basepath: build directory containing the conf/ directory
    workspace: mapping of workspace recipe names (keys are recipe names)
    fixed_setup: True when running inside the extensible SDK
    extra: additional recipe names to unlock
    """

    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    # NOTE(review): bb.utils is referenced below but 'bb' is not imported at
    # module scope in this file - presumably importable via the caller's
    # sys.path setup; confirm.
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        # Capture the existing value without modifying the file
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
|
||||
|
||||
def check_prerelease_version(ver, operation):
    """Warn if *ver* looks like a pre-release version string.

    ver: version string being set/upgraded to
    operation: name of the devtool operation, interpolated into the warning
    """
    if 'pre' in ver or 'rc' in ver:
        # BUGFIX: corrected "recommmended" typo in the user-facing warning
        logger.warning('Version "%s" looks like a pre-release version. '
                       'If that is the case, in order to ensure that the '
                       'version doesn\'t appear to go backwards when you '
                       'later upgrade to the final release version, it is '
                       'recommended that instead you use '
                       '<current version>+<pre-release version> e.g. if '
                       'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
                       'If you prefer not to reset and re-try, you can change '
                       'the version after %s succeeds using "devtool rename" '
                       'with -V/--version.' % (ver, operation))
|
||||
|
||||
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not

    Returns the 'git status --porcelain' output: an empty string means
    the repository is clean.
    """
    output, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return output
|
||||
|
||||
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase

    Raises DevtoolError if the repository containing srctree is mid
    rebase or mid 'git am'/'git apply'; repositories whose toplevel is
    listed in ignoredirs are skipped.
    """
    stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    toplevel = stdout.strip()
    if ignoredirs and toplevel in ignoredirs:
        return
    gitdir = os.path.join(toplevel, '.git')
    # Both operations leave marker directories inside .git while in progress
    checks = (
        ('rebase-merge', "Source tree %s appears to be in the middle of a rebase - please resolve this first"),
        ('rebase-apply', "Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first"),
    )
    for marker, message in checks:
        if os.path.exists(os.path.join(gitdir, marker)):
            raise DevtoolError(message % srctree)
|
||||
92
sources/poky/scripts/lib/devtool/build.py
Normal file
92
sources/poky/scripts/lib/devtool/build.py
Normal file
@@ -0,0 +1,92 @@
|
||||
# Development tool - build command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool build plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import tempfile
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
from devtool import parse_recipe
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def _set_file_values(fn, values):
    """Set variable assignments in file fn according to the values dict.

    Existing assignments are rewritten in place; variables not present
    in the file are appended. Returns True if the file was modified.
    """
    unseen = list(values.keys())

    def edit_varfunc(varname, origvalue, op, newlines):
        # Substitute our value for any variable we were asked to set
        replacement = values.get(varname, origvalue)
        unseen.remove(varname)
        return (replacement, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, edit_varfunc)

    # Append assignments for anything edit_metadata didn't find
    for varname in unseen:
        updated = True
        newlines.append('%s = "%s"' % (varname, values[varname]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)
    return updated
|
||||
|
||||
def _get_build_tasks(config):
|
||||
tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
|
||||
return ['do_%s' % task.strip() for task in tasks]
|
||||
|
||||
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand

    Builds args.recipename through the configured build tasks (plus
    do_deploy if the recipe defines it), or cleans it if args.clean.
    Returns 0 on success, a non-zero exit code on failure.
    """
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Does the recipe define a do_deploy task?
        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # Packaging tasks don't apply to -native recipes
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        # Restore the original PARALLEL_MAKE setting in the bbappend
        if args.disable_parallel_make:
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    build_parser = subparsers.add_parser(
        'build',
        help='Build a recipe',
        description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
        group='working', order=50)
    build_parser.add_argument('recipename', help='Recipe to build')
    build_parser.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
    build_parser.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
    build_parser.set_defaults(func=build)
|
||||
164
sources/poky/scripts/lib/devtool/build_image.py
Normal file
164
sources/poky/scripts/lib/devtool/build_image.py
Normal file
@@ -0,0 +1,164 @@
|
||||
# Development tool - build-image plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool plugin containing the build-image subcommand."""
|
||||
|
||||
import os
|
||||
import errno
|
||||
import logging
|
||||
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class TargetNotImageError(Exception):
    """Raised when the requested build target is not an image recipe."""
|
||||
|
||||
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    Only target-class recipes that produce a package with the same name
    as the recipe are included.
    """
    packages = []
    for recipe in workspace:
        rd = parse_recipe(config, tinfoil, recipe, True)
        if 'class-target' not in rd.getVar('OVERRIDES').split(':'):
            # Skip native/cross/etc. variants - they provide no image packages
            continue
        if recipe in rd.getVar('PACKAGES').split():
            packages.append(recipe)
        else:
            logger.warning("Skipping recipe %s as it doesn't produce a "
                           "package with the same name", recipe)
    return packages
|
||||
|
||||
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand.

    Determines the image to build (explicit argument, falling back to
    the first configured SDK target), then delegates to
    build_image_task(). Returns the build result code.
    """

    image = args.imagename
    auto_image = False
    if not image:
        # No image given - fall back to the first configured SDK target
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    try:
        if args.add_packages:
            add_packages = args.add_packages.split(',')
        else:
            add_packages = None
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        # Report differently depending on whether the user chose the target
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
|
||||
|
||||
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or run a specific image task), temporarily
    extending it with packages from the workspace via a generated
    bbappend.

    add_packages: explicit package list instead of the whole workspace
    task: optional task name (e.g. populate_sdk_ext) instead of a full build
    extra_append: extra lines to write into the temporary bbappend
    Returns a (result, outputdir) tuple; outputdir is None on failure.
    Raises TargetNotImageError if the target does not inherit image.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Already gone is fine; anything else is a real error
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            if packages or extra_append:
                # Generate the temporary bbappend that injects workspace
                # packages and/or extra configuration into the image
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut tinfoil down before invoking bitbake (only one server
            # can own the build directory at a time); clear the local so
            # the outer finally doesn't shut it down twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always remove the temporary bbappend again
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin"""
    image_parser = subparsers.add_parser(
        'build-image',
        help='Build image including workspace recipe packages',
        description='Builds an image, extending it to include '
                    'packages from recipes in the workspace',
        group='testbuild', order=-10)
    image_parser.add_argument('imagename', help='Image recipe to build', nargs='?')
    image_parser.add_argument('-p', '--add-packages',
                              help='Instead of adding packages for the '
                                   'entire workspace, specify packages to be added to the image '
                                   '(separate multiple packages by commas)',
                              metavar='PACKAGES')
    image_parser.set_defaults(func=build_image)
|
||||
55
sources/poky/scripts/lib/devtool/build_sdk.py
Normal file
55
sources/poky/scripts/lib/devtool/build_sdk.py
Normal file
@@ -0,0 +1,55 @@
|
||||
# Development tool - build-sdk command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
from devtool import DevtoolError
|
||||
from devtool import build_image
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command"""

    sdk_targets = config.get('SDK', 'sdk_targets', '').split()
    if not sdk_targets:
        raise DevtoolError('Unable to determine image to build SDK for')
    image = sdk_targets[0]

    # Mark the result as a derivative SDK
    extra_append = ['SDK_DERIVATIVE = "1"']
    try:
        result, outputdir = build_image.build_image_task(config,
                                                         basepath,
                                                         workspace,
                                                         image,
                                                         task='populate_sdk_ext',
                                                         extra_append=extra_append)
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands"""
    # build-sdk is only meaningful inside the extensible SDK
    if not context.fixed_setup:
        return
    sdk_parser = subparsers.add_parser(
        'build-sdk',
        help='Build a derivative SDK of this one',
        description='Builds an extensible SDK based upon this one and the items in your workspace',
        group='advanced')
    sdk_parser.set_defaults(func=build_sdk)
|
||||
378
sources/poky/scripts/lib/devtool/deploy.py
Normal file
378
sources/poky/scripts/lib/devtool/deploy.py
Normal file
@@ -0,0 +1,378 @@
|
||||
# Development tool - deploy/undeploy command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugin containing the deploy subcommands"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
import bb.utils
|
||||
import argparse_oe
|
||||
import oe.types
|
||||
|
||||
from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
deploylist_path = '/.devtool'
|
||||
|
||||
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    deploy: True to generate a deploy script, False for undeploy
    verbose: echo each extracted file during deployment
    dryrun: only print what would happen, change nothing
    undeployall: loop over every recorded deployment manifest
    nopreserve: don't preserve pre-existing files that would be overwritten
    nocheckspace: skip the free-space check before deploying
    Returns the script as a single string; the script expects $1 = recipe
    name, $2 = destination dir, $3 = file-list (with sizes) and deletes
    itself when done.
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append(' do')
    if dryrun:
        lines.append(' if [ ! -d $file ] ; then')
        lines.append(' echo $file')
        lines.append(' fi')
    else:
        lines.append(' if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append(' if [ ! -d $preservedir/$file ] ; then')
        lines.append(' rmdir $file > /dev/null 2>&1 || true')
        lines.append(' fi')
        lines.append(' else')
        lines.append(' rm -f $file')
        lines.append(' fi')
    lines.append(' done')
    if not dryrun:
        lines.append(' rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append(' checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append(' exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append(' if [ -e $file ] ; then')
            lines.append(' dest="$preservedir/$file"')
            lines.append(' mkdir -p `dirname $dest`')
            lines.append(' mv $file $dest')
            lines.append(' fi')
            lines.append('done')
        lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The deployed content arrives on stdin as a tar stream; record the
        # extracted paths as the new manifest
        if verbose:
            lines.append(' tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append(' tar xv -C $2 -f - > $manifest')
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append(' cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append(' find . -type f | while read file')
        lines.append(' do')
        lines.append(' mv $file /$file')
        lines.append(' done')
        lines.append(' cd /')
        lines.append(' rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
|
||||
|
||||
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand.

    Resolves the recipe through tinfoil, captures the datastore variables
    that the deployment needs, then delegates the actual transfer to
    deploy_no_d() which runs without a bitbake datastore.
    """
    import oe.utils

    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            rd = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))

        # Snapshot everything we need before tinfoil is shut down; the
        # datastore must not be touched after the finally block runs.
        getvar = rd.getVar
        srcdir = getvar('D')
        workdir = getvar('WORKDIR')
        path = getvar('PATH')
        strip_cmd = getvar('STRIP')
        libdir = getvar('libdir')
        base_libdir = getvar('base_libdir')
        max_process = oe.utils.get_bb_number_threads(rd)
        fakerootcmd = getvar('FAKEROOTCMD')
        fakerootenv = getvar('FAKEROOTENV')
    finally:
        tinfoil.shutdown()

    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir,
                       max_process, fakerootcmd, fakerootenv, args)
def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
    """Deploy the recipe's installed files (srcdir, i.e. ${D}) to a live target.

    Runs without a bitbake datastore: all needed variable values are passed
    in explicitly. Copies files over ssh by piping a tar stream into a
    helper shell script generated by _prepare_remote_script().
    Returns 0 on success; raises DevtoolError on failure.
    """
    import math
    import oe.package

    # args.target may be 'user@host' or 'user@host:destdir'
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    recipe_outdir = srcdir
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        raise DevtoolError('No files to deploy - have you built the %s '
                           'recipe? If so, the install step has not installed '
                           'any files.' % args.recipename)

    if args.strip and not args.dry_run:
        # Fakeroot copy to new destination, so stripping never mutates ${D}
        srcdir = recipe_outdir
        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
        if os.path.isdir(recipe_outdir):
            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
        # strip_execs needs the cross 'strip' tool on PATH
        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)

    # Build the (target-path, size-in-kiB) manifest used by the remote script
    # for its free-space check and preserved-file handling.
    filelist = []
    inodes = set({})
    ftotalsize = 0
    for root, _, files in os.walk(recipe_outdir):
        for fn in files:
            fstat = os.lstat(os.path.join(root, fn))
            # Get the size in kiB (since we'll be comparing it to the output of du -k)
            # MUST use lstat() here not stat() or getfilesize() since we don't want to
            # dereference symlinks
            if fstat.st_ino in inodes:
                # hardlink to an already-counted file: don't count it twice
                fsize = 0
            else:
                fsize = int(math.ceil(float(fstat.st_size)/1024))
            inodes.add(fstat.st_ino)
            ftotalsize += fsize
            # The path as it would appear on the target
            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
            filelist.append((fpath, fsize))

    if args.dry_run:
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for item, _ in filelist:
            print('  %s' % item)
        return 0

    # Assemble common ssh/scp options from the command-line flags
    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    if args.key:
        extraoptions += ' -i %s' % args.key

    # In order to delete previously deployed files and have the manifest file on
    # the target, we write out a shell script and then copy it to the target
    # so we can then run it (piping tar output to it).
    # (We cannot use scp here, because it doesn't preserve symlinks.)
    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_deploy.sh'
        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
        shellscript = _prepare_remote_script(deploy=True,
                                             verbose=args.show_status,
                                             nopreserve=args.no_preserve,
                                             nocheckspace=args.no_check_space)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Write out the file list (first line is the total size in kiB)
        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
            f.write('%d\n' % ftotalsize)
            for fpath, fsize in filelist:
                f.write('%s %d\n' % (fpath, fsize))
        # Copy them to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script: tar up the output directory locally and pipe the
    # stream over ssh into the helper script on the target.
    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
    if ret != 0:
        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                           'error message')

    logger.info('Successfully deployed %s' % recipe_outdir)

    # NOTE(review): files_list is built but never used or returned here -
    # looks vestigial; confirm against callers before removing.
    files_list = []
    for root, _, files in os.walk(recipe_outdir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
            files_list.append(os.path.join(destdir, filename))

    return 0
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand.

    Generates the remote undeploy script, copies it to the target via scp
    and executes it over ssh. Returns 0 on success; raises DevtoolError or
    argparse_oe.ArgumentUsageError on failure.
    """
    # Exactly one of <recipename> or -a/--all must be supplied.
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    elif not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extra_opts = ''
    if args.no_host_check:
        extra_opts += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extra_opts += ' -q'

    scp_exec_opt = "-S %s" % args.ssh_exec if args.ssh_exec else ''
    ssh_cmd = args.ssh_exec if args.ssh_exec else 'ssh'
    scp_port_opt = "-P %s" % args.port if args.port else ''
    ssh_port_opt = "-p %s" % args.port if args.port else ''

    # Drop any :destdir suffix - undeploying only needs the host part.
    args.target = args.target.split(':')[0]

    tmp_dir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        script_body = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Write out the script to a file
        with open(os.path.join(tmp_dir, os.path.basename(tmpscript)), 'w') as f:
            f.write(script_body)
        # Copy it to the target
        status = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_exec_opt, scp_port_opt, extra_opts, tmp_dir, args.target, os.path.dirname(tmpscript)), shell=True)
        if status != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmp_dir)

    # Now run the script on the target
    status = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_cmd, ssh_port_opt, extra_opts, args.target, tmpscript, args.recipename), shell=True)
    if status != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin.

    Adds the 'deploy-target' and 'undeploy-target' subcommands. Argument
    order is preserved deliberately: it determines --help output.
    """
    # --- deploy-target ---------------------------------------------------
    deploy_parser = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    deploy_parser.add_argument('recipename', help='Recipe to deploy')
    deploy_parser.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    deploy_parser.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    deploy_parser.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    deploy_parser.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    deploy_parser.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    deploy_parser.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    deploy_parser.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    deploy_parser.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    deploy_parser.add_argument('-I', '--key',
                               help='Specify ssh private key for connection to the target')

    # -S/--strip and --no-strip are mutually exclusive; the default comes
    # from the [Deploy] section of the devtool configuration file.
    strip_group = deploy_parser.add_mutually_exclusive_group(required=False)
    strip_group.add_argument('-S', '--strip',
                             help='Strip executables prior to deploying (default: %(default)s). '
                                  'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                             default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                             action='store_true')
    strip_group.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')

    deploy_parser.set_defaults(func=deploy)

    # --- undeploy-target -------------------------------------------------
    undeploy_parser = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    undeploy_parser.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    undeploy_parser.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    undeploy_parser.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    undeploy_parser.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    undeploy_parser.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    undeploy_parser.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    undeploy_parser.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    undeploy_parser.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    undeploy_parser.add_argument('-I', '--key',
                                 help='Specify ssh private key for connection to the target')

    undeploy_parser.set_defaults(func=undeploy)
109
sources/poky/scripts/lib/devtool/export.py
Normal file
109
sources/poky/scripts/lib/devtool/export.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# Development tool - export command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool export plugin"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import tarfile
|
||||
import logging
|
||||
import datetime
|
||||
import json
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
# output files
|
||||
default_arcname_prefix = "workspace-export"
|
||||
metadata = '.export_metadata'
|
||||
|
||||
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand.

    Archives the selected workspace recipes (sources plus workspace
    metadata) into a gzipped tar file. Returns 0 on success (including the
    nothing-to-export cases) and 1 on user error.
    """

    def add_metadata(tar):
        """Archive the workspace object"""
        # finally store the workspace metadata
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive recipe with proper arcname"""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where are located, goes into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and possible listed include/excluded recipes are in workspace
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    else:
        for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
            for recipe in recipes:
                if recipe not in workspace:
                    logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                    return 1

    name = args.file

    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    else:
        # if name is a directory, append the default name
        if os.path.isdir(name):
            name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # Fix: the format argument was missing, so the message printed a
        # literal '%s' instead of the archive path.
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
        return 1

    # if all workspace is excluded, quit
    if not len(set(workspace.keys()).difference(set(args.exclude))):
        logger.warning('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
def register_commands(subparsers, context):
    """Register devtool export subcommands"""
    # Single 'export' subcommand; --include/--exclude are mutually exclusive.
    export_parser = subparsers.add_parser('export',
                                          help='Export workspace into a tar archive',
                                          description='Export one or more recipes from current workspace into a tar archive',
                                          group='advanced')

    export_parser.add_argument('--file', '-f', help='Output archive file name')
    export_parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    selection = export_parser.add_mutually_exclusive_group()
    selection.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    selection.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes into the tar archive')
    export_parser.set_defaults(func=export)
282
sources/poky/scripts/lib/devtool/ide_plugins/__init__.py
Normal file
282
sources/poky/scripts/lib/devtool/ide_plugins/__init__.py
Normal file
@@ -0,0 +1,282 @@
|
||||
#
|
||||
# Copyright (C) 2023-2024 Siemens AG
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
|
||||
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
from enum import Enum, auto
|
||||
from devtool import DevtoolError
|
||||
from bb.utils import mkdirhier
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
class BuildTool(Enum):
    """Build system detected for a recipe by devtool ide-sdk."""
    UNDEFINED = auto()
    CMAKE = auto()
    MESON = auto()

    @property
    def is_c_ccp(self):
        """True for build tools that drive C/C++ builds (cmake, meson)."""
        return self in (BuildTool.CMAKE, BuildTool.MESON)
class GdbCrossConfig:
    """Base class defining the GDB configuration generator interface

    Generate a GDB configuration for a binary on the target device.
    Only one instance per binary is allowed. This allows to assign unique port
    numbers for all gdbserver instances.
    """
    # Next TCP port handed out to a gdbserver instance (class-wide counter).
    _gdbserver_port_next = 1234
    # Class-level registry of binaries a configuration was already generated for.
    _binaries = []

    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
        self.image_recipe = image_recipe
        self.modified_recipe = modified_recipe
        self.gdb_cross = modified_recipe.gdb_cross
        self.binary = binary
        if binary in GdbCrossConfig._binaries:
            raise DevtoolError(
                "gdbserver config for binary %s is already generated" % binary)
        GdbCrossConfig._binaries.append(binary)
        self.script_dir = modified_recipe.ide_sdk_scripts_dir
        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
        self.gdbserver_multi = gdbserver_multi
        # e.g. '/usr/bin/foo' -> 'usr-bin-foo', usable inside file names
        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
        GdbCrossConfig._gdbserver_port_next += 1
        # Unique id combining port and binary, e.g. '1234_usr-bin-foo'
        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
        # gdbserver start script ('_m' suffix marks multi/extended mode)
        gdbserver_script_file = 'gdbserver_' + self.id_pretty
        if self.gdbserver_multi:
            gdbserver_script_file += "_m"
        self.gdbserver_script = os.path.join(
            self.script_dir, gdbserver_script_file)
        # gdbinit file
        self.gdbinit = os.path.join(
            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
        # gdb start script
        self.gdb_script = os.path.join(
            self.script_dir, 'gdb_' + self.id_pretty)

    def _gen_gdbserver_start_script(self):
        """Generate a shell command starting the gdbserver on the remote device via ssh

        GDB supports two modes:
        multi: gdbserver remains running over several debug sessions
        once: gdbserver terminates after the debugged process terminates
        """
        cmd_lines = ['#!/bin/sh']
        if self.gdbserver_multi:
            # The '\\$' escapes survive the local shell so that $TEMP_DIR is
            # expanded by the remote shell, not by the shell running ssh.
            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
            gdbserver_cmd_start = temp_dir
            # A pid file acts as a lock: do nothing if gdbserver already runs.
            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port)
            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"

            gdbserver_cmd_stop = temp_dir
            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "

            # 'script stop' kills the running gdbserver; any other invocation
            # starts it (idempotently, thanks to the pid-file check above).
            gdbserver_cmd_l = []
            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
            gdbserver_cmd_l.append('   shift')
            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
            gdbserver_cmd_l.append('else')
            gdbserver_cmd_l.append("   %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
            gdbserver_cmd_l.append('fi')
            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
        else:
            # 'once' mode: gdbserver exits with the debugged process.
            gdbserver_cmd_start = "%s --once :%s %s" % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
        cmd_lines.append(gdbserver_cmd)
        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)

    def _gen_gdbinit_config(self):
        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
        if self.gdbserver_multi:
            target_help = '#   gdbserver --multi :%d' % self.gdbserver_port
            remote_cmd = 'target extended-remote'
        else:
            target_help = '#   gdbserver :%d %s' % (
                self.gdbserver_port, self.binary)
            remote_cmd = 'target remote'
        gdbinit_lines.append('# On the remote target:')
        gdbinit_lines.append(target_help)
        gdbinit_lines.append('# On the build machine:')
        gdbinit_lines.append('#   cd ' + self.modified_recipe.real_srctree)
        gdbinit_lines.append(
            '#   ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)

        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
        gdbinit_lines.append('set substitute-path "/usr/include" "' +
                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
        gdbinit_lines.append('set debuginfod enabled off')
        if self.image_recipe.rootfs_dbg:
            gdbinit_lines.append(
                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
            # First: Search for sources of this recipe in the workspace folder
            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
                gdbinit_lines.append('set substitute-path "%s" "%s"' %
                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
        gdbinit_lines.append(
            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
        gdbinit_lines.append('set remote exec-file ' + self.binary)
        gdbinit_lines.append(
            'run ' + os.path.join(self.modified_recipe.d, self.binary))

        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)

    def _gen_gdb_start_script(self):
        """Generate a script starting GDB with the corresponding gdbinit configuration."""
        cmd_lines = ['#!/bin/sh']
        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
                         self.gdbinit + ' "$@"')
        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)

    def initialize(self):
        """Generate all three artifacts: gdbserver script, gdbinit, gdb script."""
        self._gen_gdbserver_start_script()
        self._gen_gdbinit_config()
        self._gen_gdb_start_script()

    @staticmethod
    def write_file(script_file, cmd_lines, executable=False):
        """Write cmd_lines to script_file, optionally marking it executable."""
        script_dir = os.path.dirname(script_file)
        mkdirhier(script_dir)
        with open(script_file, 'w') as script_f:
            script_f.write(os.linesep.join(cmd_lines))
            script_f.write(os.linesep)
        if executable:
            st = os.stat(script_file)
            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
        logger.info("Created: %s" % script_file)
class IdeBase:
    """Base class defining the interface for IDE plugins.

    Concrete plugins override setup_shared_sysroots() and/or
    setup_modified_recipe() and may raise their ide_plugin_priority().
    """

    def __init__(self):
        # Human-readable plugin name; overridden by subclasses.
        self.ide_name = 'undefined'
        # GdbCrossConfig objects created by initialize_gdb_cross_configs().
        self.gdb_cross_configs = []

    @classmethod
    def ide_plugin_priority(cls):
        """Used to find the default ide handler if --ide is not passed"""
        return 10

    def setup_shared_sysroots(self, shared_env):
        # Fixed: logger.warn is a deprecated alias of logger.warning.
        logger.warning("Shared sysroot mode is not supported for IDE %s" %
                       self.ide_name)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        # Fixed: logger.warn is a deprecated alias of logger.warning.
        logger.warning("Modified recipe mode is not supported for IDE %s" %
                       self.ide_name)

    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
        """Create one GdbCrossConfig per installed binary of the recipe."""
        binaries = modified_recipe.find_installed_binaries()
        for binary in binaries:
            gdb_cross_config = gdb_cross_config_class(
                image_recipe, modified_recipe, binary)
            gdb_cross_config.initialize()
            self.gdb_cross_configs.append(gdb_cross_config)

    @staticmethod
    def gen_oe_scrtips_sym_link(modified_recipe):
        # NOTE: the typo in the method name ("scrtips") is kept for API
        # compatibility - callers elsewhere use this exact name.
        # create a sym-link from sources to the scripts directory
        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))

    @staticmethod
    def update_json_file(json_dir, json_file, update_dict):
        """Merge update_dict into the JSON file json_dir/json_file.

        Existing keys are overwritten via dict.update(). A file that cannot
        be parsed (e.g. JSON-with-comments) or does not exist is treated as
        empty and rewritten from update_dict alone.
        """
        json_path = os.path.join(json_dir, json_file)
        logger.info("Updating IDE config file: %s (%s)" %
                    (json_file, json_path))
        if not os.path.exists(json_dir):
            os.makedirs(json_dir)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}
        orig_dict.update(update_dict)
        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)

    @staticmethod
    def symlink_force(tgt, dst):
        """Create symlink dst -> tgt, replacing a stale symlink if present."""
        try:
            os.symlink(tgt, dst)
        except OSError as err:
            if err.errno == errno.EEXIST:
                # Only replace if the existing link points somewhere else.
                if os.readlink(dst) != tgt:
                    os.remove(dst)
                    os.symlink(tgt, dst)
            else:
                raise err
def get_devtool_deploy_opts(args):
    """Filter args for devtool deploy-target args.

    Translates the parsed ide-sdk arguments into the equivalent
    'devtool deploy-target' command-line option list, or returns None when
    no target was given (nothing to deploy to).
    """
    if not args.target:
        return None
    devtool_deploy_opts = [args.target]
    if args.no_host_check:
        devtool_deploy_opts += ["-c"]
    if args.show_status:
        devtool_deploy_opts += ["-s"]
    if args.no_preserve:
        devtool_deploy_opts += ["-p"]
    if args.no_check_space:
        devtool_deploy_opts += ["--no-check-space"]
    if args.ssh_exec:
        # Fixed: was 'args.ssh.exec', which raised AttributeError whenever
        # --ssh-exec was passed (argparse stores it as ssh_exec).
        devtool_deploy_opts += ["-e", args.ssh_exec]
    if args.port:
        devtool_deploy_opts += ["-P", args.port]
    if args.key:
        devtool_deploy_opts += ["-I", args.key]
    if args.strip is False:
        devtool_deploy_opts += ["--no-strip"]
    return devtool_deploy_opts
463
sources/poky/scripts/lib/devtool/ide_plugins/ide_code.py
Normal file
463
sources/poky/scripts/lib/devtool/ide_plugins/ide_code.py
Normal file
@@ -0,0 +1,463 @@
|
||||
#
|
||||
# Copyright (C) 2023-2024 Siemens AG
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
class GdbCrossConfigVSCode(GdbCrossConfig):
    """GDB configuration generator specialized for VSCode.

    Forces gdbserver into 'once' mode (gdbserver_multi=False) and only
    generates the gdbserver start script - VSCode's launch configuration
    replaces the gdbinit file and gdb start script of the base class.
    """

    def __init__(self, image_recipe, modified_recipe, binary):
        # gdbserver_multi=False: VSCode restarts gdbserver per debug session.
        super().__init__(image_recipe, modified_recipe, binary, False)

    def initialize(self):
        # Intentionally narrower than the base class: no gdbinit, no gdb
        # wrapper script - only the remote gdbserver launcher is needed.
        self._gen_gdbserver_start_script()
|
||||
class IdeVSCode(IdeBase):
|
||||
"""Manage IDE configurations for VSCode
|
||||
|
||||
Modified recipe mode:
|
||||
- cmake: use the cmake-preset generated by devtool ide-sdk
|
||||
- meson: meson is called via a wrapper script generated by devtool ide-sdk
|
||||
|
||||
Shared sysroot mode:
|
||||
In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
|
||||
A workspace cannot be created because there is no recipe that defines how a workspace could
|
||||
be set up.
|
||||
- cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
|
||||
The cmake-kit uses the environment script and the tool-chain file
|
||||
generated by meta-ide-support.
|
||||
- meson: Meson needs manual workspace configuration.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def ide_plugin_priority(cls):
|
||||
"""If --ide is not passed this is the default plugin"""
|
||||
if shutil.which('code'):
|
||||
return 100
|
||||
return 0
|
||||
|
||||
def setup_shared_sysroots(self, shared_env):
|
||||
"""Expose the toolchain of the shared sysroots SDK"""
|
||||
datadir = shared_env.ide_support.datadir
|
||||
deploy_dir_image = shared_env.ide_support.deploy_dir_image
|
||||
real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
|
||||
standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
|
||||
vscode_ws_path = os.path.join(
|
||||
os.environ['HOME'], '.local', 'share', 'CMakeTools')
|
||||
cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
|
||||
oecmake_generator = "Ninja"
|
||||
env_script = os.path.join(
|
||||
deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
|
||||
|
||||
if not os.path.isdir(vscode_ws_path):
|
||||
os.makedirs(vscode_ws_path)
|
||||
cmake_kits_old = []
|
||||
if os.path.exists(cmake_kits_path):
|
||||
with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
|
||||
cmake_kits_old = json.load(cmake_kits_file)
|
||||
cmake_kits = cmake_kits_old.copy()
|
||||
|
||||
cmake_kit_new = {
|
||||
"name": "OE " + real_multimach_target_sys,
|
||||
"environmentSetupScript": env_script,
|
||||
"toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
|
||||
"preferredGenerator": {
|
||||
"name": oecmake_generator
|
||||
}
|
||||
}
|
||||
|
||||
def merge_kit(cmake_kits, cmake_kit_new):
|
||||
i = 0
|
||||
while i < len(cmake_kits):
|
||||
if 'environmentSetupScript' in cmake_kits[i] and \
|
||||
cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
|
||||
cmake_kits[i] = cmake_kit_new
|
||||
return
|
||||
i += 1
|
||||
cmake_kits.append(cmake_kit_new)
|
||||
merge_kit(cmake_kits, cmake_kit_new)
|
||||
|
||||
if cmake_kits != cmake_kits_old:
|
||||
logger.info("Updating: %s" % cmake_kits_path)
|
||||
with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
|
||||
json.dump(cmake_kits, cmake_kits_file, indent=4)
|
||||
else:
|
||||
logger.info("Already up to date: %s" % cmake_kits_path)
|
||||
|
||||
cmake_native = os.path.join(
|
||||
shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
|
||||
if os.path.isfile(cmake_native):
|
||||
logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
|
||||
else:
|
||||
logger.error("Cannot find cmake native at: %s" % cmake_native)
|
||||
|
||||
def dot_code_dir(self, modified_recipe):
    """Return the path of the .vscode directory inside the recipe's source tree."""
    vscode_dir = os.path.join(modified_recipe.srctree, '.vscode')
    return vscode_dir
|
||||
|
||||
def __vscode_settings_meson(self, settings_dict, modified_recipe):
    """Add Meson specific entries to settings.json (no-op for other build tools)."""
    if modified_recipe.build_tool is not BuildTool.MESON:
        return
    # Point the vscode-meson extension at the meson wrapper of this recipe
    settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper

    # Collect all configure options: recipe options, cross file, extra options
    configure_options = []
    for opts in (modified_recipe.mesonopts,
                 modified_recipe.meson_cross_file,
                 modified_recipe.extra_oemeson):
        configure_options.extend(opts.split())
    settings_dict["mesonbuild.configureOptions"] = configure_options
    settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
|
||||
|
||||
def __vscode_settings_cmake(self, settings_dict, modified_recipe):
    """Add CMake specific settings to settings.json.

    Note: most settings are passed to the cmake preset.
    """
    if modified_recipe.build_tool is BuildTool.CMAKE:
        settings_dict.update({
            "cmake.configureOnOpen": True,
            "cmake.sourceDirectory": modified_recipe.real_srctree,
        })
|
||||
|
||||
def vscode_settings(self, modified_recipe, image_recipe):
    """Generate (or update) .vscode/settings.json for the modified recipe."""
    # Generated/build directories which VSCode should neither watch, display
    # nor analyse
    excluded_dirs = [
        "**/.git/**",
        "**/oe-logs/**",
        "**/oe-workdir/**",
        "**/source-date-epoch/**"
    ]
    files_excludes = {pattern: True for pattern in excluded_dirs}

    # The sysroots (and the debug rootfs, if available) must not be modified
    files_readonly = {
        modified_recipe.recipe_sysroot + '/**': True,
        modified_recipe.recipe_sysroot_native + '/**': True,
    }
    if image_recipe.rootfs_dbg is not None:
        files_readonly[image_recipe.rootfs_dbg + '/**'] = True

    settings_dict = {
        "files.watcherExclude": files_excludes,
        "files.exclude": files_excludes,
        "files.readonlyInclude": files_readonly,
        "python.analysis.exclude": excluded_dirs
    }
    self.__vscode_settings_cmake(settings_dict, modified_recipe)
    self.__vscode_settings_meson(settings_dict, modified_recipe)

    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), 'settings.json', settings_dict)
|
||||
|
||||
def __vscode_extensions_cmake(self, modified_recipe, recommendations):
    """Recommend the CMake and C/C++ extensions for CMake based recipes."""
    if modified_recipe.build_tool is not BuildTool.CMAKE:
        return
    recommendations.extend([
        "twxs.cmake",
        "ms-vscode.cmake-tools",
        "ms-vscode.cpptools",
        "ms-vscode.cpptools-extension-pack",
        "ms-vscode.cpptools-themes"
    ])
|
||||
|
||||
def __vscode_extensions_meson(self, modified_recipe, recommendations):
    """Recommend the Meson and C/C++ extensions for Meson based recipes."""
    if modified_recipe.build_tool is not BuildTool.MESON:
        return
    recommendations.extend([
        'mesonbuild.mesonbuild',
        "ms-vscode.cpptools",
        "ms-vscode.cpptools-extension-pack",
        "ms-vscode.cpptools-themes"
    ])
|
||||
|
||||
def vscode_extensions(self, modified_recipe):
    """Generate (or update) .vscode/extensions.json with recommended extensions."""
    recommendations = []
    self.__vscode_extensions_cmake(modified_recipe, recommendations)
    self.__vscode_extensions_meson(modified_recipe, recommendations)
    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), 'extensions.json',
        {"recommendations": recommendations})
|
||||
|
||||
def vscode_c_cpp_properties(self, modified_recipe):
    """Generate (or update) .vscode/c_cpp_properties.json.

    The build-tool specific extension acts as configuration provider for
    the C/C++ extension; recipes without a C/C++ build are skipped.
    """
    configuration = {
        "name": modified_recipe.recipe_id_pretty,
    }
    if modified_recipe.build_tool is BuildTool.CMAKE:
        configuration["configurationProvider"] = "ms-vscode.cmake-tools"
    elif modified_recipe.build_tool is BuildTool.MESON:
        configuration["configurationProvider"] = "mesonbuild.mesonbuild"
        # First word of CXX is the compiler executable inside the toolchain dir
        configuration["compilerPath"] = os.path.join(
            modified_recipe.staging_bindir_toolchain,
            modified_recipe.cxx.split()[0])
    else:  # no C/C++ build
        return

    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), 'c_cpp_properties.json',
        {
            "configurations": [
                configuration
            ],
            "version": 4
        })
|
||||
|
||||
def vscode_launch_bin_dbg(self, gdb_cross_config):
    """Return one cppdbg launch configuration for a single binary.

    The configuration connects VSCode's C/C++ debugger to a remote
    gdbserver (started by the matching preLaunchTask) and maps target
    source paths back to the workspace / rootfs-dbg.
    """
    modified_recipe = gdb_cross_config.modified_recipe

    launch_config = {
        "name": gdb_cross_config.id_pretty,
        "type": "cppdbg",
        "request": "launch",
        # Debug the not-stripped copy of the binary in the recipe's image dir
        "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
        "stopAtEntry": True,
        "cwd": "${workspaceFolder}",
        "environment": [],
        "externalConsole": False,
        "MIMode": "gdb",
        # Task that deploys the binary and starts gdbserver on the target
        "preLaunchTask": gdb_cross_config.id_pretty,
        "miDebuggerPath": modified_recipe.gdb_cross.gdb,
        "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
    }

    # Search for header files in recipe-sysroot.
    src_file_map = {
        "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
    }
    # First of all search for not stripped binaries in the image folder.
    # These binaries are copied (and optionally stripped) by deploy-target
    setup_commands = [
        {
            "description": "sysroot",
            "text": "set sysroot " + modified_recipe.d
        }
    ]

    if gdb_cross_config.image_recipe.rootfs_dbg:
        launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
            gdb_cross_config.image_recipe)
        # First: Search for sources of this recipe in the workspace folder
        if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
            src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
        else:
            logger.error(
                "TARGET_DBGSRC_DIR must contain the recipe name PN.")
        # Second: Search for sources of other recipes in the rootfs-dbg
        if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
            src_file_map["/usr/src/debug"] = os.path.join(
                gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
        else:
            logger.error(
                "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
    else:
        logger.warning(
            "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")

    launch_config['sourceFileMap'] = src_file_map
    launch_config['setupCommands'] = setup_commands
    return launch_config
|
||||
|
||||
def vscode_launch(self, modified_recipe):
    """GDB Launch configuration for binaries (elf files)"""
    # One launch configuration per debuggable binary of this recipe
    configurations = [
        self.vscode_launch_bin_dbg(cfg)
        for cfg in self.gdb_cross_configs
        if cfg.modified_recipe is modified_recipe
    ]
    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), 'launch.json',
        {
            "version": "0.2.0",
            "configurations": configurations
        })
|
||||
|
||||
def vscode_tasks_cpp(self, args, modified_recipe):
    """Generate .vscode/tasks.json for C/C++ recipes.

    Provides an "install && deploy-target" task plus one background
    gdbserver task per debuggable binary; the latter are referenced as
    preLaunchTask by the launch configurations.
    """
    run_install_deploy = modified_recipe.gen_install_deploy_script(args)
    install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
    tasks_dict = {
        "version": "2.0.0",
        "tasks": [
            {
                "label": install_task_name,
                "type": "shell",
                "command": run_install_deploy,
                "problemMatcher": []
            }
        ]
    }
    # Add a background gdbserver task for every binary of this recipe
    for gdb_cross_config in self.gdb_cross_configs:
        if gdb_cross_config.modified_recipe is not modified_recipe:
            continue
        tasks_dict['tasks'].append(
            {
                "label": gdb_cross_config.id_pretty,
                "type": "shell",
                "isBackground": True,
                "dependsOn": [
                    install_task_name
                ],
                "command": gdb_cross_config.gdbserver_script,
                # Dummy problemMatcher so VSCode treats the gdbserver task
                # as a running background task and launches the debugger
                "problemMatcher": [
                    {
                        "pattern": [
                            {
                                "regexp": ".",
                                "file": 1,
                                "location": 2,
                                "message": 3
                            }
                        ],
                        "background": {
                            "activeOnStart": True,
                            "beginsPattern": ".",
                            "endsPattern": ".",
                        }
                    }
                ]
            })
    tasks_file = 'tasks.json'
    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
|
||||
|
||||
def vscode_tasks_fallback(self, args, modified_recipe):
    """Generate .vscode/tasks.json for recipes without native IDE build support.

    Wraps "devtool build", "devtool deploy-target" and "devtool build --clean"
    in shell tasks that source the OE init script first, plus optional
    background gdbserver tasks if a cross GDB is available.
    """
    oe_init_dir = modified_recipe.oe_init_dir
    # Prefix that sources the build environment before running devtool
    oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
    dt_build = "devtool build "
    dt_build_label = dt_build + modified_recipe.recipe_id_pretty
    dt_build_cmd = dt_build + modified_recipe.bpn
    clean_opt = " --clean"
    dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
    dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
    dt_deploy = "devtool deploy-target "
    dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
    dt_deploy_cmd = dt_deploy + modified_recipe.bpn
    dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
    deploy_opts = ' '.join(get_devtool_deploy_opts(args))
    tasks_dict = {
        "version": "2.0.0",
        "tasks": [
            {
                "label": dt_build_label,
                "type": "shell",
                "command": "bash",
                "linux": {
                    "options": {
                        "cwd": oe_init_dir
                    }
                },
                "args": [
                    "--login",
                    "-c",
                    "%s%s" % (oe_init, dt_build_cmd)
                ],
                "problemMatcher": []
            },
            {
                "label": dt_deploy_label,
                "type": "shell",
                "command": "bash",
                "linux": {
                    "options": {
                        "cwd": oe_init_dir
                    }
                },
                "args": [
                    "--login",
                    "-c",
                    "%s%s %s" % (
                        oe_init, dt_deploy_cmd, deploy_opts)
                ],
                "problemMatcher": []
            },
            {
                # Combined build + deploy, registered as the default build task
                "label": dt_build_deploy_label,
                "dependsOrder": "sequence",
                "dependsOn": [
                    dt_build_label,
                    dt_deploy_label
                ],
                "problemMatcher": [],
                "group": {
                    "kind": "build",
                    "isDefault": True
                }
            },
            {
                "label": dt_build_clean_label,
                "type": "shell",
                "command": "bash",
                "linux": {
                    "options": {
                        "cwd": oe_init_dir
                    }
                },
                "args": [
                    "--login",
                    "-c",
                    "%s%s" % (oe_init, dt_build_clean_cmd)
                ],
                "problemMatcher": []
            }
        ]
    }
    if modified_recipe.gdb_cross:
        # Add a background gdbserver task for every binary of this recipe
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is not modified_recipe:
                continue
            tasks_dict['tasks'].append(
                {
                    "label": gdb_cross_config.id_pretty,
                    "type": "shell",
                    "isBackground": True,
                    "dependsOn": [
                        dt_build_deploy_label
                    ],
                    "command": gdb_cross_config.gdbserver_script,
                    # Dummy problemMatcher so VSCode treats the gdbserver
                    # task as a running background task
                    "problemMatcher": [
                        {
                            "pattern": [
                                {
                                    "regexp": ".",
                                    "file": 1,
                                    "location": 2,
                                    "message": 3
                                }
                            ],
                            "background": {
                                "activeOnStart": True,
                                "beginsPattern": ".",
                                "endsPattern": ".",
                            }
                        }
                    ]
                })
    tasks_file = 'tasks.json'
    IdeBase.update_json_file(
        self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
|
||||
|
||||
def vscode_tasks(self, args, modified_recipe):
    """Generate .vscode/tasks.json, depending on the recipe's build tool."""
    if not modified_recipe.build_tool.is_c_ccp:
        self.vscode_tasks_fallback(args, modified_recipe)
        return
    self.vscode_tasks_cpp(args, modified_recipe)
|
||||
|
||||
def setup_modified_recipe(self, args, image_recipe, modified_recipe):
    """Generate all VSCode configuration files for a recipe in modified mode."""
    self.vscode_settings(modified_recipe, image_recipe)
    self.vscode_extensions(modified_recipe)
    self.vscode_c_cpp_properties(modified_recipe)
    if args.target:
        # Remote debugging is only possible with a target device; the launch
        # configurations depend on the GDB cross configs initialized here.
        self.initialize_gdb_cross_configs(
            image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
        self.vscode_launch(modified_recipe)
    self.vscode_tasks(args, modified_recipe)
|
||||
|
||||
|
||||
def register_ide_plugin(ide_plugins):
    """Register the VSCode IDE plugin under the IDE name 'code'."""
    ide_plugins['code'] = IdeVSCode
|
||||
53
sources/poky/scripts/lib/devtool/ide_plugins/ide_none.py
Normal file
53
sources/poky/scripts/lib/devtool/ide_plugins/ide_none.py
Normal file
@@ -0,0 +1,53 @@
|
||||
#
|
||||
# Copyright (C) 2023-2024 Siemens AG
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool ide-sdk generic IDE plugin"""
|
||||
|
||||
import os
|
||||
import logging
|
||||
from devtool.ide_plugins import IdeBase, GdbCrossConfig
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
class IdeNone(IdeBase):
    """Generate some generic helpers for other IDEs

    Modified recipe mode:
    Generate some helper scripts for remote debugging with GDB

    Shared sysroot mode:
    A wrapper for bitbake meta-ide-support and bitbake build-sysroots
    """

    def __init__(self):
        super().__init__()

    def setup_shared_sysroots(self, shared_env):
        """Point the user at the generated environment-setup script."""
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        # meta-ide-support deploys the setup script next to the images
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
        logger.info(
            "To use this SDK please source this: %s" % env_script)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """generate some helper scripts and config files

        - Execute the do_install task
        - Execute devtool deploy-target
        - Generate a gdbinit file per executable
        - Generate the oe-scripts sym-link
        """
        script_path = modified_recipe.gen_install_deploy_script(args)
        logger.info("Created: %s" % script_path)

        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)

        # NOTE(review): 'scrtips' typo originates in IdeBase; the call must
        # stay in sync with the base class method name.
        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
|
||||
|
||||
|
||||
def register_ide_plugin(ide_plugins):
    """Register the generic IDE plugin under the IDE name 'none'."""
    ide_plugins['none'] = IdeNone
|
||||
1047
sources/poky/scripts/lib/devtool/ide_sdk.py
Executable file
1047
sources/poky/scripts/lib/devtool/ide_sdk.py
Executable file
File diff suppressed because it is too large
Load Diff
134
sources/poky/scripts/lib/devtool/import.py
Normal file
134
sources/poky/scripts/lib/devtool/import.py
Normal file
@@ -0,0 +1,134 @@
|
||||
# Development tool - import command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool import plugin"""
|
||||
|
||||
import os
|
||||
import tarfile
|
||||
import logging
|
||||
import collections
|
||||
import json
|
||||
import fnmatch
|
||||
|
||||
from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
|
||||
from devtool import export
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand.

    Imports a tar archive previously created by 'devtool export' into the
    current workspace, skipping appends that have no matching recipe in the
    current metadata and (by default) files that already exist.

    Returns 0 on success; raises DevtoolError on invalid input archives.
    """

    def get_pn(name):
        """ Returns the filename of a workspace recipe/append"""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    with tarfile.open(args.file) as tar:
        # Get exported metadata
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError as ke:
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".') from ke

        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # Fixed typo in the warning message ('bbapppend' -> 'bbappend')
                logger.warning('No recipe to append %s.bbappend, skipping' % fn)

        # Extract
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                    member.name.startswith('recipes/%s' % nonimp) or \
                    member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warning(pe)
                    else:
                        logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                        # appends created by 'devtool add' need replacement of exported source tree
                        pn = get_pn(member.name).split('_')[0]
                        exported_srctree = export_workspace[pn]['srctree']
                        if exported_srctree:
                            replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    # NOTE(review): if export_workspace_path is falsy, 'pn' here
                    # is whatever the non-importables loop last assigned - confirm
                    # that this path is unreachable for such archives.
                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warning('No recipes imported into the workspace')

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool import subcommands"""
    parser = subparsers.add_parser(
        'import',
        help='Import exported tar archive into workspace',
        description='Import tar archive previously created by "devtool export" into workspace',
        group='advanced')
    parser.add_argument('file', metavar='FILE',
                        help='Name of the tar archive to import')
    parser.add_argument('--overwrite', '-o', action="store_true",
                        help='Overwrite files when extracting')
    parser.set_defaults(func=devimport)
|
||||
81
sources/poky/scripts/lib/devtool/menuconfig.py
Normal file
81
sources/poky/scripts/lib/devtool/menuconfig.py
Normal file
@@ -0,0 +1,81 @@
|
||||
# OpenEmbedded Development tool - menuconfig command plugin
|
||||
#
|
||||
# Copyright (C) 2018 Xilinx
|
||||
# Written by: Chandana Kalluri <ckalluri@xilinx.com>
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool menuconfig plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
import glob
|
||||
from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
|
||||
from devtool import check_workspace_recipe
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand"""

    rd = ""
    kconfigpath = ""
    pn_src = ""
    localfilesdir = ""
    workspace_dir = ""
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1

        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN')

        # menuconfig only makes sense for recipes that define the task
        if not rd.getVarFlag('do_menuconfig','task'):
            raise DevtoolError("This recipe does not support menuconfig option")

        workspace_dir = os.path.join(config.workspace_path,'sources')
        # NOTE(review): kconfigpath is assigned but never used below - confirm
        # whether it can be dropped.
        kconfigpath = rd.getVar('B')
        pn_src = os.path.join(workspace_dir,pn)

        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src,'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir,'.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')

    finally:
        # Always release the bitbake server before running bitbake again below
        tinfoil.shutdown()

    logger.info('Launching menuconfig')
    exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    # Write the config fragment with the changes made in menuconfig
    res = standard._create_kconfig_diff(pn_src,rd,fragment)

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """register devtool subcommands from this plugin"""
    parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
    # Fixed typo in the user-visible help text ('compenent' -> 'component')
    parser_menuconfig.add_argument('component', help='component to alter config')
    parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
|
||||
50
sources/poky/scripts/lib/devtool/package.py
Normal file
50
sources/poky/scripts/lib/devtool/package.py
Normal file
@@ -0,0 +1,50 @@
|
||||
# Development tool - package command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool plugin containing the package subcommands"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Builds the packages (of the configured IMAGE_PKGTYPE) for a workspace
    recipe and reports the deploy directory containing the results.
    Returns 0 on success, or the bitbake exit code on failure.
    """
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # The devtool config may override the package type; otherwise use
        # whatever the build configuration selects (rpm/deb/ipk)
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')

        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    # Fix: only ExecutionError is imported (from bb.process import ExecutionError);
    # referring to bb.process.ExecutionError raised a NameError as 'bb' itself
    # is not bound in this module.
    except ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin"""
    # The package command is only available within a fixed (installed SDK) setup
    if not context.fixed_setup:
        return
    parser_package = subparsers.add_parser(
        'package',
        help='Build packages for a recipe',
        description='Builds packages for a recipe\'s output files',
        group='testbuild', order=-5)
    parser_package.add_argument('recipename', help='Recipe to package')
    parser_package.set_defaults(func=package)
|
||||
64
sources/poky/scripts/lib/devtool/runqemu.py
Normal file
64
sources/poky/scripts/lib/devtool/runqemu.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# Development tool - runqemu command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool runqemu plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import glob
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""

    # Look up MACHINE and the native bin directory (where qemu lives)
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
                                     tinfoil.config_data.getVar('BUILD_ARCH'),
                                     tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Fall back to the first configured SDK target if no image was given
    imagename = args.imagename
    if not imagename:
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # runqemu is only offered within a fixed (installed SDK) setup
    if not context.fixed_setup:
        return
    parser_runqemu = subparsers.add_parser(
        'runqemu',
        help='Run QEMU on the specified image',
        description='Runs QEMU to boot the specified image',
        group='testbuild', order=-20)
    parser_runqemu.add_argument(
        'imagename', nargs='?',
        help='Name of built image to boot within QEMU')
    parser_runqemu.add_argument(
        'args', nargs=argparse.REMAINDER,
        help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)')
    parser_runqemu.set_defaults(func=runqemu)
|
||||
330
sources/poky/scripts/lib/devtool/sdk.py
Normal file
330
sources/poky/scripts/lib/devtool/sdk.py
Normal file
@@ -0,0 +1,330 @@
|
||||
# Development tool - sdk-update command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
import re
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def parse_locked_sigs(sigfile_path):
    """Return <pn:task>:<hash> dictionary parsed from a locked-sigs file.

    Only lines containing ':' are considered; the text after the last ':'
    is taken as the hash (first whitespace-separated token). Lines with a
    trailing ':' but no hash token are skipped instead of crashing.

    Args:
        sigfile_path: path to a locked-sigs.inc style file

    Returns:
        dict mapping 'pn:task' strings to their signature hash
    """
    sig_dict = {}
    with open(sigfile_path) as f:
        # Iterate the file lazily instead of materializing all lines first
        for line in f:
            if ':' in line:
                taskkey, _, hashval = line.rpartition(':')
                hash_tokens = hashval.split()
                if hash_tokens:
                    sig_dict[taskkey.strip()] = hash_tokens[0]
    return sig_dict
|
||||
|
||||
def generate_update_dict(sigfile_new, sigfile_old):
    """Return a dict containing <pn:task>:<hash> which indicates what need to be updated"""
    sigdict_new = parse_locked_sigs(sigfile_new)
    sigdict_old = parse_locked_sigs(sigfile_old)
    # Anything that is new, or whose hash changed, needs updating
    return {
        taskkey: sig
        for taskkey, sig in sigdict_new.items()
        if sigdict_old.get(taskkey) != sig
    }
|
||||
|
||||
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list containing sstate objects which are to be installed"""
    sstate_objects = []
    for hashval in update_dict.values():
        # sstate archives may live one or two directory levels deep
        patterns = (
            sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz',
            sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz',
        )
        matches = set()
        for pattern in patterns:
            matches.update(glob.glob(pattern))
        matches = list(matches)
        if len(matches) == 1:
            sstate_objects.extend(matches)
        elif len(matches) > 1:
            logger.error("More than one matching sstate object found for %s" % hashval)

    return sstate_objects
|
||||
|
||||
def mkdir(d):
    """Create directory *d* (including parents), ignoring an existing one.

    FileExistsError corresponds exactly to errno.EEXIST, so this replaces
    the manual errno check; it also avoids 're-raising' with a fresh
    traceback as the old 'raise e' did.
    """
    try:
        os.makedirs(d)
    except FileExistsError:
        pass
|
||||
|
||||
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into destination SDK.

    Args:
        sstate_objects: list of sstate archive paths inside src_sdk
        src_sdk: path of the SDK the objects come from
        dest_sdk: path of the SDK to copy the objects into

    Raises:
        DevtoolError: if dest_sdk has no sstate-cache directory, i.e. it
            does not look like an extensible SDK. (The previous bare
            'raise' here had no active exception and itself failed with
            "RuntimeError: No active exception to re-raise".)
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
    for sb in sstate_objects:
        # Mirror the source path layout below the destination SDK
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
|
||||
|
||||
def check_manifest(fn, basepath):
    """Return the manifest entries whose files under basepath changed checksum."""
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as f:
        for line in f:
            splitline = line.split()
            # Manifest lines are "<sha256> <relative path>"; skip anything else
            if len(splitline) > 1:
                chksum, fpath = splitline[0], splitline[1]
                curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
                if chksum != curr_chksum:
                    logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
                    changedfiles.append(fpath)
    return changedfiles
|
||||
|
||||
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command.

    Fetches an updated manifest/metadata/uninative loader from the update
    server, replaces the local copies, then (unless --skip-prepare) re-runs
    bitbake setscene tasks to warm the build system.  Returns 0 on success,
    a non-zero value on failure.
    """
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # Fetch manifest from server
        # NOTE(review): updateserver is interpolated into a shell=True command;
        # it originates from the user's own config/CLI, but quoting it (or
        # using an argument list) would be more robust.
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        if ret != 0:
            # Fix: "dowload" -> "download" in user-facing error message
            logger.error("Cannot download files from %s" % updateserver)
            return ret
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        # Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.")
                logger.error("Changed files:\n%s" % out)
                return -1
        else:
            ret = -1
        if ret != 0:
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                """Collect (buildarch, checksum) pairs from UNINATIVE_CHECKSUM lines."""
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
|
||||
|
||||
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs development artifacts (sysroot contents and package data) for the
    requested recipes into the extensible SDK, then refreshes the combined
    sysroots.  Returns 1 when a recipe fails to parse; raises DevtoolError on
    installation failures.
    """

    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}

    def checkstamp(recipe):
        """Return True if a (non-sigdata) stamp exists for the recipe's sysroot task."""
        stampprefix = stampprefixes[recipe]
        for stamp in glob.glob(stampprefix + '*'):
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # -native recipes have no package data to install
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        # Fix: removed dead code - the old loop set "failed = True" AFTER an
        # unconditional raise (unreachable), making the subsequent
        # "if failed: return 2" check impossible to trigger.
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                raise DevtoolError('Failed to install %s - unavailable' % recipe)

        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
            exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin"""
    # These commands only make sense inside an installed extensible SDK.
    if not context.fixed_setup:
        return

    parser_sdk = subparsers.add_parser(
        'sdk-update',
        help='Update SDK components',
        description='Updates installed SDK components from a remote server',
        group='sdk')
    default_server = context.config.get('SDK', 'updateserver', '')
    if default_server:
        parser_sdk.add_argument('updateserver', nargs='?',
                                help='The update server to fetch latest SDK components from (default %s)' % default_server)
    else:
        parser_sdk.add_argument('updateserver',
                                help='The update server to fetch latest SDK components from')
    parser_sdk.add_argument('--skip-prepare', action="store_true",
                            help='Skip re-preparing the build system after updating (for debugging only)')
    parser_sdk.set_defaults(func=sdk_update)

    parser_sdk_install = subparsers.add_parser(
        'sdk-install',
        help='Install additional SDK components',
        description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
        group='sdk')
    parser_sdk_install.add_argument('recipename', nargs='+',
                                    help='Name of the recipe to install the development artifacts for')
    parser_sdk_install.add_argument('-s', '--allow-build', action='store_true',
                                    help='Allow building requested item(s) from source')
    parser_sdk_install.set_defaults(func=sdk_install)
|
||||
109
sources/poky/scripts/lib/devtool/search.py
Normal file
109
sources/poky/scripts/lib/devtool/search.py
Normal file
@@ -0,0 +1,109 @@
|
||||
# Development tool - search command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool search plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
from devtool import setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand"""

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        keyword_rc = re.compile(args.keyword)

        def print_match(pn):
            """Print recipe name plus summary (blank if it's just the default)."""
            rd = parse_recipe(config, tinfoil, pn, True)
            if not rd:
                return
            summary = rd.getVar('SUMMARY')
            if summary == rd.expand(defsummary):
                summary = ''
            print("%s %s" % (pn.ljust(20), summary))

        matches = []
        if os.path.exists(pkgdata_dir):
            # First pass: match against built package data
            for fn in os.listdir(pkgdata_dir):
                pfn = os.path.join(pkgdata_dir, fn)
                if not os.path.isfile(pfn):
                    continue

                packages = []
                match = bool(keyword_rc.search(fn))

                if not match:
                    with open(pfn, 'r') as f:
                        for line in f:
                            if line.startswith('PACKAGES:'):
                                packages = line.split(':', 1)[1].strip().split()

                    for pkg in packages:
                        if keyword_rc.search(pkg):
                            match = True
                            break
                        if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                            with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                                for line in f:
                                    if ': ' not in line:
                                        continue
                                    key, value = line.split(': ', 1)
                                    value = value.strip()
                                    key = key.replace(":" + pkg, "")
                                    if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
                                        if keyword_rc.search(value):
                                            match = True
                                            break
                if match:
                    print_match(fn)
                    matches.append(fn)
        else:
            logger.warning('Package data is not available, results may be limited')

        # Second pass: match against parsed recipes not already reported
        for recipe in tinfoil.all_recipes():
            if args.fixed_setup and 'nativesdk' in recipe.inherits():
                continue

            match = bool(keyword_rc.search(recipe.pn))
            if not match:
                for prov in recipe.provides:
                    if keyword_rc.search(prov):
                        match = True
                        break
                if not match:
                    for rprov in recipe.rprovides:
                        if keyword_rc.search(rprov):
                            match = True
                            break
            if match and not recipe.pn in matches:
                print_match(recipe.pn)
    finally:
        tinfoil.shutdown()

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    parser_search = subparsers.add_parser(
        'search',
        help='Search available recipes',
        description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
        group='info')
    parser_search.add_argument('keyword',
                               help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
    parser_search.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
|
||||
2474
sources/poky/scripts/lib/devtool/standard.py
Normal file
2474
sources/poky/scripts/lib/devtool/standard.py
Normal file
File diff suppressed because it is too large
Load Diff
693
sources/poky/scripts/lib/devtool/upgrade.py
Normal file
693
sources/poky/scripts/lib/devtool/upgrade.py
Normal file
@@ -0,0 +1,693 @@
|
||||
# Development tool - upgrade command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
"""Devtool upgrade plugin"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import scriptutils
|
||||
import errno
|
||||
import bb
|
||||
|
||||
devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
|
||||
sys.path = sys.path + [devtool_path]
|
||||
|
||||
import oe.recipeutils
|
||||
from devtool import standard
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _run(cmd, cwd=''):
    """Run cmd via bb.process in directory cwd, returning its (stdout, stderr)."""
    logger.debug("Running command %s> %s" % (cwd, cmd))
    return bb.process.run('%s' % cmd, cwd=cwd)
|
||||
|
||||
def _get_srctree(tmpdir):
    """Return the single source subdirectory found after unpacking into tmpdir."""
    subdirs = scriptutils.filter_src_subdirs(tmpdir)
    # Exactly one candidate directory is required; anything else is ambiguous.
    if len(subdirs) != 1:
        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, subdirs))
    return os.path.join(tmpdir, subdirs[0])
|
||||
|
||||
def _copy_source_code(orig, dest):
    """Move every tracked file under orig to the same relative path under dest."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
|
||||
|
||||
def _remove_patch_dirs(recipefolder):
|
||||
for root, dirs, files in os.walk(recipefolder):
|
||||
for d in dirs:
|
||||
shutil.rmtree(os.path.join(root,d))
|
||||
|
||||
def _recipe_contains(rd, var):
    """Return True if var is set in the recipe file itself or a file beside it."""
    recipefile = rd.getVar('FILE')
    varfiles = oe.recipeutils.get_var_files(recipefile, [var], rd)
    recipedir_prefix = os.path.dirname(recipefile) + os.sep
    return any(fn and fn.startswith(recipedir_prefix) for fn in varfiles.values())
|
||||
|
||||
def _rename_recipe_dirs(oldpv, newpv, path):
|
||||
for root, dirs, files in os.walk(path):
|
||||
# Rename directories with the version in their name
|
||||
for olddir in dirs:
|
||||
if olddir.find(oldpv) != -1:
|
||||
newdir = olddir.replace(oldpv, newpv)
|
||||
if olddir != newdir:
|
||||
shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
|
||||
# Rename any inc files with the version in their name (unusual, but possible)
|
||||
for oldfile in files:
|
||||
if oldfile.endswith('.inc'):
|
||||
if oldfile.find(oldpv) != -1:
|
||||
newfile = oldfile.replace(oldpv, newpv)
|
||||
if oldfile != newfile:
|
||||
bb.utils.rename(os.path.join(path, oldfile),
|
||||
os.path.join(path, newfile))
|
||||
|
||||
def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
|
||||
oldrecipe = os.path.basename(oldrecipe)
|
||||
if oldrecipe.endswith('_%s.bb' % oldpv):
|
||||
newrecipe = '%s_%s.bb' % (bpn, newpv)
|
||||
if oldrecipe != newrecipe:
|
||||
shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
|
||||
else:
|
||||
newrecipe = oldrecipe
|
||||
return os.path.join(path, newrecipe)
|
||||
|
||||
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename version-specific dirs/inc files under path, then the recipe file itself."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
|
||||
|
||||
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
    """Writes an append file

    Creates <workspace>/appends/<recipe>.bbappend pointing the recipe at the
    external source tree; records initial revisions and copied files as
    comments for later bookkeeping.  Returns the path of the written file.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0])  # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        # Fix: the two implicitly-concatenated string literals were missing a
        # separating space, emitting "affectingmultiple" into the bbappend.
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if revs:
            for name, rev in revs.items():
                f.write('# initial_rev %s: %s\n' % (name, rev))
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
|
||||
|
||||
def _cleanup_on_error(rd, srctree):
|
||||
if os.path.exists(rd):
|
||||
shutil.rmtree(rd)
|
||||
srctree = os.path.abspath(srctree)
|
||||
if os.path.exists(srctree):
|
||||
shutil.rmtree(srctree)
|
||||
|
||||
def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    """Log an upgrade failure, clean up unless --keep-failure, then exit(1)."""
    if not keep_failure:
        _cleanup_on_error(rd, srctree)
    logger.error(e)
    if extramsg:
        logger.error(extramsg)
    if keep_failure:
        logger.info('Preserving failed upgrade files (--keep-failure)')
    sys.exit(1)
|
||||
|
||||
def _get_uri(rd):
|
||||
srcuris = rd.getVar('SRC_URI').split()
|
||||
if not len(srcuris):
|
||||
raise DevtoolError('SRC_URI not found on recipe')
|
||||
# Get first non-local entry in SRC_URI - usually by convention it's
|
||||
# the first entry, but not always!
|
||||
srcuri = None
|
||||
for entry in srcuris:
|
||||
if not entry.startswith('file://'):
|
||||
srcuri = entry
|
||||
break
|
||||
if not srcuri:
|
||||
raise DevtoolError('Unable to find non-local entry in SRC_URI')
|
||||
srcrev = '${AUTOREV}'
|
||||
if '://' in srcuri:
|
||||
# Fetch a URL
|
||||
rev_re = re.compile(';rev=([^;]+)')
|
||||
res = rev_re.search(srcuri)
|
||||
if res:
|
||||
srcrev = res.group(1)
|
||||
srcuri = rev_re.sub('', srcuri)
|
||||
return srcuri, srcrev
|
||||
|
||||
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version

    Returns (revs, checksums, srcbranch, srcsubdir_rel) for the freshly
    checked-out / fetched source.
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    paths = [srctree]
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        # Git recipe: fetch and check out the requested revision in place
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        __run('git submodule update --recursive')
        __run('git submodule foreach \'git tag -f devtool-base-new\'')
        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
        checksums = {}
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Tarball/other URL: fetch into a temp dir and replace the tree
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout, _) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Batch the adds to keep command lines a sane length
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f devtool-base-%s' % newpv)

    revs = {}
    for path in paths:
        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
        revs[os.path.relpath(path, srctree)] = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        for path in paths:
            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
            branches_to_rebase = [branch] + stdout.split()
            target_branch = revs[os.path.relpath(path, srctree)]

            # There is a bug (or feature?) in git rebase where if a commit with
            # a note is fully rebased away by being part of an old commit, the
            # note is still attached to the old commit. Avoid this by making
            # sure all old devtool related commits have a note attached to them
            # (this assumes git config notes.rewriteMode is set to ignore).
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                if not oe.patch.GitApplyTree.getNotes(path, rev):
                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")

            for b in branches_to_rebase:
                logger.info("Rebasing {} onto {}".format(b, target_branch))
                _run('git checkout %s' % b, cwd=path)
                try:
                    _run('git rebase %s' % target_branch, cwd=path)
                except bb.process.ExecutionError as e:
                    if 'conflict' in e.stdout:
                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                        _run('git rebase --abort', cwd=path)
                    else:
                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

            # Remove any dummy notes added above.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")

            _run('git checkout %s' % branch, cwd=path)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (revs, checksums, srcbranch, srcsubdir_rel)
|
||||
|
||||
def _add_license_diff_to_recipe(path, diff):
|
||||
notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
|
||||
# The following is the difference between the old and the new license text.
|
||||
# Please update the LICENSE value if needed, and summarize the changes in
|
||||
# the commit message via 'License-Update:' tag.
|
||||
# (example: 'License-Update: copyright years updated.')
|
||||
#
|
||||
# The changes:
|
||||
#
|
||||
"""
|
||||
commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
|
||||
with open(path, 'rb') as f:
|
||||
orig_content = f.read()
|
||||
with open(path, 'wb') as f:
|
||||
f.write(notice_text.encode())
|
||||
f.write(commented_diff.encode())
|
||||
f.write("\n#\n\n".encode())
|
||||
f.write(orig_content)
|
||||
|
||||
def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
    """Creates the new recipe under workspace

    Copies and renames the recipe files, computes the variable changes
    (SRCREV, SRC_URI, checksums, S, LIC_FILES_CHKSUM) and patches them into
    the renamed recipe.  Returns (fullpath, copied).
    """

    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    oldsums = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            for checksum in oldsums:
                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None

    nameprefix = '%s.' % addnames[0] if addnames else ''

    # md5sum is deprecated, remove any traces of it. If it was the only old
    # checksum, then replace it with the default checksums.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]

    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    if license_diff:
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)

    tinfoil.modified_files()
    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied
|
||||
|
||||
|
||||
def _check_git_config():
|
||||
def getconfig(name):
|
||||
try:
|
||||
value = bb.process.run('git config %s' % name)[0].strip()
|
||||
except bb.process.ExecutionError as e:
|
||||
if e.exitcode == 1:
|
||||
value = None
|
||||
else:
|
||||
raise
|
||||
return value
|
||||
|
||||
username = getconfig('user.name')
|
||||
useremail = getconfig('user.email')
|
||||
configerr = []
|
||||
if not username:
|
||||
configerr.append('Please set your name using:\n git config --global user.name')
|
||||
if not useremail:
|
||||
configerr.append('Please set your email using:\n git config --global user.email')
|
||||
if configerr:
|
||||
raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
|
||||
|
||||
def _extract_licenses(srcpath, recipe_licenses):
|
||||
licenses = []
|
||||
for url in recipe_licenses.split():
|
||||
license = {}
|
||||
(type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
|
||||
license['path'] = path
|
||||
license['md5'] = parm.get('md5', '')
|
||||
license['beginline'], license['endline'] = 0, 0
|
||||
if 'beginline' in parm:
|
||||
license['beginline'] = int(parm['beginline'])
|
||||
if 'endline' in parm:
|
||||
license['endline'] = int(parm['endline'])
|
||||
license['text'] = []
|
||||
with open(os.path.join(srcpath, path), 'rb') as f:
|
||||
import hashlib
|
||||
actual_md5 = hashlib.md5()
|
||||
lineno = 0
|
||||
for line in f:
|
||||
lineno += 1
|
||||
if (lineno >= license['beginline']) and ((lineno <= license['endline']) or not license['endline']):
|
||||
license['text'].append(line.decode(errors='ignore'))
|
||||
actual_md5.update(line)
|
||||
license['actual_md5'] = actual_md5.hexdigest()
|
||||
licenses.append(license)
|
||||
return licenses
|
||||
|
||||
def _generate_license_diff(old_licenses, new_licenses):
|
||||
need_diff = False
|
||||
for l in new_licenses:
|
||||
if l['md5'] != l['actual_md5']:
|
||||
need_diff = True
|
||||
break
|
||||
if need_diff == False:
|
||||
return None
|
||||
|
||||
import difflib
|
||||
diff = ''
|
||||
for old, new in zip(old_licenses, new_licenses):
|
||||
for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
|
||||
diff = diff + line
|
||||
return diff
|
||||
|
||||
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand.

    Extracts the current and the upgraded source trees, computes license
    diffs, writes the upgraded recipe + append into the workspace and
    reports the result. Returns 0 on success, 1 if the recipe could not
    be parsed; raises DevtoolError for user-facing failures.

    Bug fix vs. original: the --srcbranch-without---srcrev error applied
    '%' to a message with no placeholder ("..." % args.recipename), which
    raised TypeError instead of the intended DevtoolError.
    """
    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        # Fixed: no '% args.recipename' — the message has no placeholder
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    # Rebases during patch application need a complete git identity
    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))

        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                    args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                    tinfoil, rd)
            # License files are re-read after the upgrade so checksum
            # changes can be surfaced to the user
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            # _upgrade_error cleans up and re-raises with context
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0
|
||||
|
||||
def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest_version' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        upstream = version_info['version']
        revision = version_info['revision']
        # "new-commits-available" is an indication that upstream never issues version tags
        if upstream.endswith("new-commits-available"):
            logger.info("Latest commit: {}".format(revision))
        else:
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(upstream))
            if revision:
                logger.info("Latest version's commit: {}".format(revision))
    finally:
        tinfoil.shutdown()
    return 0
|
||||
|
||||
def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand.

    Prints one row per recipe that needs (or, with --all, has) an
    upstream status, in the same column layout as before.
    """
    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    for pn, status, current, latest, maintainer, latest_commit, no_update_reason in \
            oe.recipeutils.get_recipe_upgrade_status(args.recipe):
        if not args.all and status == 'MATCH':
            continue
        if status == 'UPDATE':
            # Show the target version, or a marker when upstream has no tags
            shown_status = "new commits" if latest.endswith("new-commits-available") else latest
        else:
            shown_status = status
        print("{:25} {:15} {:15} {} {} {}".format(
            pn,
            current,
            shown_status,
            maintainer,
            latest_commit if latest_commit != 'N/A' else "",
            "cannot be updated due to: %s" % (no_update_reason) if no_update_reason else ""))
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    defsrctree = standard.get_default_srctree(context.config)

    # devtool upgrade
    parser_upgrade = subparsers.add_parser(
        'upgrade',
        help='Upgrade an existing recipe',
        description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
        group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    builddir_group = parser_upgrade.add_mutually_exclusive_group()
    builddir_group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    builddir_group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)

    # devtool latest-version
    parser_latest_version = subparsers.add_parser(
        'latest-version',
        help='Report the latest version of an existing recipe',
        description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
        group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)

    # devtool check-upgrade-status
    parser_check_upgrade_status = subparsers.add_parser(
        'check-upgrade-status',
        help="Report upgradability for multiple (or all) recipes",
        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
|
||||
242
sources/poky/scripts/lib/devtool/utilcmds.py
Normal file
242
sources/poky/scripts/lib/devtool/utilcmds.py
Normal file
@@ -0,0 +1,242 @@
|
||||
# Development tool - utility commands plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
"""Devtool utility plugins"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import subprocess
|
||||
import scriptutils
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
from devtool import parse_recipe
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _find_recipe_path(args, config, basepath, workspace):
|
||||
if args.any_recipe:
|
||||
logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
|
||||
if args.recipename in workspace:
|
||||
recipefile = workspace[args.recipename]['recipefile']
|
||||
else:
|
||||
recipefile = None
|
||||
if not recipefile:
|
||||
tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
|
||||
try:
|
||||
rd = parse_recipe(config, tinfoil, args.recipename, True)
|
||||
if not rd:
|
||||
raise DevtoolError("Failed to find specified recipe")
|
||||
recipefile = rd.getVar('FILE')
|
||||
finally:
|
||||
tinfoil.shutdown()
|
||||
return recipefile
|
||||
|
||||
|
||||
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand"""
    # Resolution failures raise DevtoolError inside the helper
    print(_find_recipe_path(args, config, basepath, workspace))
    return 0
|
||||
|
||||
|
||||
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand"""
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    # run_editor returns the editor's exit status
    return scriptutils.run_editor(recipefile, logger)
|
||||
|
||||
|
||||
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand"""
    import oe.utils

    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    if 'doc' in packageconfig:
        del packageconfig['doc']

    # For autotools recipes the configure script may not exist until
    # do_configure has run; best-effort, errors are ignored
    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            pass

    # Guard clauses (the original used an if/elif/else ladder)
    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    if args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")

    configopttext = ''
    if autotools and configureopts:
        configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
        if extra_oeconf:
            configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

    elif cmake:
        # Reconstruct the (possibly line-continued) cmake invocation from
        # the body of cmake_do_configure
        in_cmake = False
        cmake_cmd = ''
        for line in cmake_do_configure.splitlines():
            if in_cmake:
                cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                if not line.endswith('\\'):
                    break
            if line.lstrip().startswith('cmake '):
                cmake_cmd = line.strip().rstrip('\\')
                if line.endswith('\\'):
                    in_cmake = True
                else:
                    break
        if cmake_cmd:
            configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
        else:
            configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

    elif do_configure:
        configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
        if '${EXTRA_OECONF}' in do_configure_noexpand:
            configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

    if packageconfig:
        configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

    if args.arg:
        helpargs = ' '.join(args.arg)
    elif cmake:
        helpargs = '-LH'
    else:
        helpargs = '--help'

    msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)
    if cmake:
        msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
    elif os.path.exists(configurescript):
        msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

    olddir = os.getcwd()
    tmppath = tempfile.mkdtemp()
    with tempfile.NamedTemporaryFile('w', delete=False) as tf:
        if not args.no_header:
            tf.write(msg + '\n')
        tf.close()
        try:
            try:
                # The header is cat'ed ahead of the actual help output
                cmd = 'cat %s' % tf.name
                if cmake:
                    cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                    os.chdir(b)
                elif os.path.exists(configurescript):
                    cmd += '; %s %s' % (configurescript, helpargs)
                if sys.stdout.isatty() and not args.no_pager:
                    pager = os.environ.get('PAGER', 'less')
                    cmd = '(%s) | %s' % (cmd, pager)
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                return e.returncode
        finally:
            os.chdir(olddir)
            shutil.rmtree(tmppath)
            os.remove(tf.name)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # devtool edit-recipe
    parser_edit_recipe = subparsers.add_parser(
        'edit-recipe',
        help='Edit a recipe file',
        description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
        group='working')
    parser_edit_recipe.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_edit_recipe.set_defaults(func=edit_recipe)

    # devtool find-recipe
    parser_find_recipe = subparsers.add_parser(
        'find-recipe',
        help='Find a recipe file',
        description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
        group='working')
    parser_find_recipe.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_find_recipe.set_defaults(func=find_recipe)

    # devtool configure-help
    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    parser_configure_help = subparsers.add_parser(
        'configure-help',
        help='Get help on configure script options',
        usage='devtool configure-help [options] recipename [--arg ...]',
        description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
        group='working')
    parser_configure_help.add_argument('recipename', help='Recipe to show configure help for')
    parser_configure_help.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    parser_configure_help.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    parser_configure_help.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    parser_configure_help.set_defaults(func=configure_help)
|
||||
Reference in New Issue
Block a user