Complete Yocto mirror with license table for TQMa6UL (2038-compliance)

- 264 license table entries with exact download URLs (224/264 resolved)
- Complete sources/ directory with all BitBake recipes
- Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl)
- Full traceability for Softwarefreigabeantrag
- GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4
- License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
This commit is contained in:
Siggi (OpenClaw Agent)
2026-03-01 20:58:18 +00:00
commit 16accb6b24
15086 changed files with 1292356 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
{
"layers": [
"../meta-poky",
"../meta"
],
"version": "1.0"
}

View File

@@ -0,0 +1 @@
This directory contains various useful scripts for working with OE builds

View File

@@ -0,0 +1,93 @@
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# Script which can be run on new autobuilder workers to check all needed configuration is present.
# Designed to be run in a repo where bitbake/oe-core are already present.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Todo
# Add testtools/subunit import test
# Add python3-git test
# Add pigz test
# vnc tests/checkvnc?
# test sendmail works (for QA email notification)
# test error report submission works
# test buildistory git repo works?
#

# The helper repository must be checked out next to $HOME before running.
if [ ! -x "$HOME/yocto-autobuilder-helper/scripts/checkvnc" ]; then
    echo "$HOME/yocto-autobuilder-helper should be created."
    exit 1
fi
"$HOME/yocto-autobuilder-helper/scripts/checkvnc"

# Enter the build environment; nothing below works without it.
if ! . ./oe-init-build-env > /dev/null; then
    exit 1
fi

# Git identity is needed for buildhistory/error-report commits.
if ! git config --global user.name > /dev/null; then
    echo "Please set git config --global user.name"
    exit 1
fi
if ! git config --global user.email > /dev/null; then
    echo "Please set git config --global user.email"
    exit 1
fi

# jinja2 is required by the autobuilder tooling.
if ! python3 -c "import jinja2"; then
    echo "Please ensure jinja2 is available"
    exit 1
fi

# A full parse proves the metadata and host setup are sane.
if ! bitbake -p; then
    echo "Bitbake parsing failed"
    exit 1
fi

# Check kernel/user limits are high enough for large builds.
WATCHES=$(PATH="/sbin:/usr/sbin:$PATH" sysctl fs.inotify.max_user_watches -n)
if (( WATCHES < 65000 )); then
    # Fixed: the suggested command previously lacked its closing parenthesis
    echo 'Need to increase watches (echo fs.inotify.max_user_watches=65536 | sudo tee -a /etc/sysctl.conf)'
    exit 1
fi
OPEN_FILES=$(ulimit -n)
if (( OPEN_FILES < 65535 )); then
    echo 'Increase maximum open files in /etc/security/limits.conf'
    echo '* soft nofile 131072'
    echo '* hard nofile 131072'
    exit 1
fi
MAX_PROCESSES=$(ulimit -u)
if (( MAX_PROCESSES < 514542 )); then
    echo 'Increase maximum user processes in /etc/security/limits.conf'
    echo '* hard nproc 515294'
    echo '* soft nproc 514543'
    exit 1
fi

# Fetch prebuilt qemux86-64 artefacts so runqemu can be exercised without
# a full image build.
mkdir -p tmp/deploy/images/qemux86-64
pushd tmp/deploy/images/qemux86-64
for artefact in core-image-minimal-qemux86-64.ext4 \
                core-image-minimal-qemux86-64.qemuboot.conf \
                bzImage-qemux86-64.bin; do
    if [ ! -e "$artefact" ]; then
        wget "http://downloads.yoctoproject.org/releases/yocto/yocto-4.0/machines/qemu/qemux86-64/$artefact"
    fi
done
popd

bitbake qemu-helper-native

# Boot the image twice: once plain, once with KVM acceleration.
if ! DISPLAY=:1 runqemu serialstdio qemux86-64; then
    echo "Unable to use runqemu"
    exit 1
fi
if ! DISPLAY=:1 runqemu serialstdio qemux86-64 kvm; then
    echo "Unable to use runqemu with kvm"
    exit 1
fi

184
sources/poky/scripts/bblock Executable file
View File

@@ -0,0 +1,184 @@
#!/usr/bin/env python3
# bblock
# lock/unlock task to latest signature
#
# Copyright (c) 2023 BayLibre, SAS
# Author: Julien Stepahn <jstephan@baylibre.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys
import logging

# Make scripts/lib importable regardless of the current working directory
scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + "/lib"
sys.path = sys.path + [lib_path]
import scriptpath

# Locate bitbake's lib directory so the bb.* imports below resolve
scriptpath.add_bitbake_lib_path()
import bb.tinfoil
import bb.msg
import argparse_oe

# Script-wide logger, named after this executable
myname = os.path.basename(sys.argv[0])
logger = bb.msg.logger_create(myname)
def getTaskSignatures(tinfoil, pn, tasks):
    """Query the bitbake server for the signatures of the given tasks.

    pn is a list of recipe names and tasks a list of task names (or None
    for all tasks).  Returns the signature list from the server's
    GetTaskSignatureResult event; exits the script on any failure.
    """
    tinfoil.set_event_mask(
        [
            "bb.event.GetTaskSignatureResult",
            "logging.LogRecord",
            "bb.command.CommandCompleted",
            "bb.command.CommandFailed",
        ]
    )
    ret = tinfoil.run_command("getTaskSignatures", pn, tasks)
    if not ret:
        logger.error("No result returned from getTaskSignatures command")
        sys.exit(2)

    # Fix: initialise sig so a CommandCompleted without a preceding
    # GetTaskSignatureResult no longer raises NameError.
    sig = None
    while True:
        event = tinfoil.wait_event(1)
        if not event:
            continue
        if isinstance(event, bb.command.CommandCompleted):
            break
        elif isinstance(event, bb.command.CommandFailed):
            logger.error(str(event))
            sys.exit(2)
        elif isinstance(event, bb.event.GetTaskSignatureResult):
            sig = event.sig
        elif isinstance(event, logging.LogRecord):
            logger.handle(event)
    if sig is None:
        logger.error("No task signatures received from getTaskSignatures command")
        sys.exit(2)
    return sig
def parseRecipe(tinfoil, recipe):
    """Parse a single recipe via tinfoil and return its datastore.

    Exits the script with status 1 if the recipe cannot be parsed.
    """
    try:
        tinfoil.parse_recipes()
        return tinfoil.parse_recipe(recipe)
    except Exception:
        logger.error("Failed to get recipe info for: %s" % recipe)
        sys.exit(1)
def bblockDump(lockfile):
    """Print every line of the bblock.conf lockfile, stripped of whitespace.

    Returns 0 on success, 1 if the file cannot be read.
    """
    try:
        with open(lockfile, "r") as conf:
            for entry in conf:
                print(entry.strip())
    except IOError:
        return 1
    return 0
def bblockReset(lockfile, pns, package_archs, tasks):
    """Remove locked-signature entries from the lockfile.

    With an empty pns list the whole lockfile is deleted (unlock
    everything).  Otherwise only lines matching one of the given recipe
    names AND one of the package archs are dropped; if tasks is given,
    only matching lines that also mention one of those tasks are dropped.
    """
    if not pns:
        logger.info("Unlocking all recipes")
        try:
            os.remove(lockfile)
        except FileNotFoundError:
            pass
        return

    logger.info("Unlocking {pns}".format(pns=pns))
    tmp_lockfile = lockfile + ".tmp"
    archs = package_archs.split()
    try:
        with open(lockfile, "r") as infile, open(tmp_lockfile, "w") as outfile:
            for line in infile:
                matches = any(p in line for p in pns) and any(
                    a in line for a in archs
                )
                if not matches:
                    outfile.write(line)
                elif tasks and not any(t in line for t in tasks):
                    # Line matches the recipe/arch but not a requested task:
                    # keep it locked.
                    outfile.write(line)
    except FileNotFoundError:
        # Fix: nothing is locked yet, so unlocking specific recipes is a
        # no-op instead of an unhandled exception (the unlock-all branch
        # above already tolerated a missing lockfile).
        return
    os.remove(lockfile)
    os.rename(tmp_lockfile, lockfile)
def main():
    """Entry point: lock the current task signatures of the given recipes.

    Writes SIGGEN_LOCKEDSIGS_* entries into $TOPDIR/conf/bblock.conf so
    subsequent builds reuse the locked signatures.  --dump prints the
    file, --reset removes entries.
    """
    parser = argparse_oe.ArgumentParser(description="Lock and unlock a recipe")
    parser.add_argument("pn", nargs="*", help="Space separated list of recipe to lock")
    parser.add_argument(
        "-t",
        "--tasks",
        help="Comma separated list of tasks",
        # Normalise bare task names ("compile") to their full form ("do_compile")
        type=lambda s: [
            task if task.startswith("do_") else "do_" + task for task in s.split(",")
        ],
    )
    parser.add_argument(
        "-r",
        "--reset",
        action="store_true",
        help="Unlock pn recipes, or all recipes if pn is empty",
    )
    parser.add_argument(
        "-d",
        "--dump",
        action="store_true",
        help="Dump generated bblock.conf file",
    )
    global_args, unparsed_args = parser.parse_known_args()

    with bb.tinfoil.Tinfoil() as tinfoil:
        # config_only: we only need global configuration (TOPDIR etc.) here;
        # individual recipes are parsed on demand below.
        tinfoil.prepare(config_only=True)
        package_archs = tinfoil.config_data.getVar("PACKAGE_ARCHS")
        builddir = tinfoil.config_data.getVar("TOPDIR")
        lockfile = "{builddir}/conf/bblock.conf".format(builddir=builddir)

        if global_args.dump:
            bblockDump(lockfile)
            return 0
        if global_args.reset:
            bblockReset(lockfile, global_args.pn, package_archs, global_args.tasks)
            return 0

        # NOTE: "lockfile" is rebound from the path string to the open file
        # object for the remainder of this block.
        with open(lockfile, "a") as lockfile:
            s = ""
            # tell() == 0 means the file was just created: emit the header once
            if lockfile.tell() == 0:
                s = "# Generated by bblock\n"
                s += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "info"\n'
                s += 'SIGGEN_LOCKEDSIGS_TYPES += "${PACKAGE_ARCHS}"\n'
                s += "\n"
            for pn in global_args.pn:
                d = parseRecipe(tinfoil, pn)
                package_arch = d.getVar("PACKAGE_ARCH")
                siggen_locked_sigs_package_arch = d.getVar(
                    "SIGGEN_LOCKEDSIGS_{package_arch}".format(package_arch=package_arch)
                )
                sigs = getTaskSignatures(tinfoil, [pn], global_args.tasks)
                for sig in sigs:
                    # sig is a (pn, taskname, signature) tuple
                    new_entry = "{pn}:{taskname}:{sig}".format(
                        pn=sig[0], taskname=sig[1], sig=sig[2]
                    )
                    # Only append entries that are not already locked
                    if (
                        siggen_locked_sigs_package_arch
                        and not new_entry in siggen_locked_sigs_package_arch
                    ) or not siggen_locked_sigs_package_arch:
                        s += 'SIGGEN_LOCKEDSIGS_{package_arch} += "{new_entry}"\n'.format(
                            package_arch=package_arch, new_entry=new_entry
                        )
            lockfile.write(s)
    return 0
if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        # Report the full traceback but still exit with a failure status
        ret = 1
        import traceback

        traceback.print_exc()
    sys.exit(ret)

View File

@@ -0,0 +1,120 @@
#!/usr/bin/env bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Print command-line usage for this tool.
help ()
{
    base=$(basename "$0")
    echo -e "Usage: $base command"
    # Fixed typo: "Avaliable" -> "Available"
    echo "Available commands:"
    echo -e "\texport <file.conf>: export and lock down the AUTOPR values from the PR service into a file for release."
    echo -e "\timport <file.conf>: import the AUTOPR values from the exported file into the PR service."
}
# Remove bitbake's parse-cache files so the next run performs a full reparse.
clean_cache()
{
    local cachedir
    cachedir=$(bitbake -e | grep ^CACHE= | cut -f2 -d\")
    # Stop any active memory resident server
    bitbake -m
    # Remove cache entries since we want to trigger a full reparse
    if [ -n "${cachedir}" ]; then
        rm -f "${cachedir}"/bb_cache*.dat.*
    fi
}
# Export AUTOPR values from the PR service into the file named by $1.
do_export ()
{
    file=$1
    # A target filename is mandatory
    [ "x${file}" == "x" ] && help && exit 1
    rm -f ${file}

    clean_cache
    # prexport.conf enables the PR dump during the parse triggered by -p
    bitbake -R conf/prexport.conf -p
    s=`bitbake -R conf/prexport.conf -e | grep ^PRSERV_DUMPFILE= | cut -f2 -d\"`
    if [ "x${s}" != "x" ];
    then
        # Move the generated dump into place under the requested name
        [ -e $s ] && mv -f $s $file && echo "Exporting to file $file succeeded!"
        return 0
    fi
    echo "Exporting to file $file failed!"
    return 1
}
# Import AUTOPR values from an exported file ($1) into the PR service.
do_import ()
{
    local file="$1"
    # A source filename is mandatory
    if [ -z "$file" ]; then
        help
        exit 1
    fi

    clean_cache
    bitbake -R conf/primport.conf -R "$file" -p
    local ret=$?
    if [ $ret -eq 0 ]; then
        echo "Importing from file $file succeeded!"
    else
        echo "Importing from file $file failed!"
    fi
    return $ret
}
# Migrate old LOCALCOUNT-based PR values into AUTOINC entries in the
# PR service: first dump them via migrate_localcount.conf, then import
# the generated file.
do_migrate_localcount ()
{
    df=`bitbake -R conf/migrate_localcount.conf -e | \
            grep ^LOCALCOUNT_DUMPFILE= | cut -f2 -d\"`
    if [ "x${df}" == "x" ];
    then
        echo "LOCALCOUNT_DUMPFILE is not defined!"
        return 1
    fi

    rm -f $df
    clean_cache
    echo "Exporting LOCALCOUNT to AUTOINCs..."
    # The parse with migrate_localcount.conf writes the dump file
    bitbake -R conf/migrate_localcount.conf -p
    [ ! $? -eq 0 ] && echo "Exporting to file $df failed!" && exit 1

    if [ -e $df ];
    then
        echo "Exporting to file $df succeeded!"
    else
        echo "Exporting to file $df failed!"
        exit 1
    fi

    echo "Importing generated AUTOINC entries..."
    [ -e $df ] && do_import $df

    if [ ! $? -eq 0 ]
    then
        echo "Migration from LOCALCOUNT to AUTOINCs failed!"
        return 1
    fi

    echo "Migration from LOCALCOUNT to AUTOINCs succeeded!"
    return 0
}
# A command argument is required
[ $# -eq 0 ] && help && exit 1

# Validate the filename argument before dispatching.
# NOTE(review): this check also runs for "migrate_localcount", which takes
# no file argument -- an empty $2 then falls into the error branch.  Confirm
# whether migrate_localcount is expected to be invoked with a dummy
# .conf/.inc argument.
case $2 in
*.conf|*.inc)
    ;;
*)
    echo ERROR: $2 must end with .conf or .inc!
    exit 1
    ;;
esac

# Dispatch to the requested sub-command
case $1 in
export)
    do_export $2
    ;;
import)
    do_import $2
    ;;
migrate_localcount)
    do_migrate_localcount
    ;;
*)
    help
    exit 1
    ;;
esac

View File

@@ -0,0 +1,120 @@
#!/bin/sh
# Copyright (c) 2020 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# buildall-qemu: a tool for automating build testing of recipes
# TODO: Add support for selecting which qemu architectures to build
# TODO: Add support for queueing up multiple recipe builds
# TODO: Add more logging options (e.g. local.conf info, bitbake env info)
# Print command-line usage.
usage ()
{
    base=$(basename "$0")
    echo "Usage: $base [options] [recipename/target]"
    # Fixed wording: the two default C libraries are glibc and musl
    echo "    Executes a build of a given target for selected LIBCs. With no options, default to both glibc and musl."
    echo "    Options:"
    echo "    -l, --libc      Specify one of \"glibc\" or \"musl\""
}
# Build target $1 for every qemu MACHINE, once per libc in $2 (defaults to
# "glibc musl"), recording per-machine pass/fail results in a log file.
buildall ()
{
    # Get path to oe-core directory. Since oe-init-build-env prepends $PATH with
    # the path to the scripts directory, get it from there
    SCRIPTS_PATH="$(echo "$PATH" | cut -d ":" -f 1)"
    OE_CORE_PATH=$(echo "$SCRIPTS_PATH" | sed 's|\(.*\)/.*|\1|')

    # Get target list and host machine information
    TARGET_LIST=$(find "$OE_CORE_PATH"/meta/conf/machine -maxdepth 1 -type f | grep qemu | sed 's|.*/||' | sed -e 's/\.conf//')

    # Set LIBC value to use for the builds based on options provided by the user
    if [ -n "$2" ]
    then
        LIBC_LIST="$2"
        echo "$LIBC_LIST"
    else
        LIBC_LIST="glibc musl"
        echo "$LIBC_LIST"
    fi

    START_TIME=$(date "+%Y-%m-%d_%H:%M:%S")
    LOG_FILE="$1-buildall.log"
    OS_INFO=$(grep "PRETTY_NAME=" /etc/os-release | awk -F "=" '{print $2}' | sed -e 's/^"//' -e 's/"$//')

    # Append an existing log file for this build with .old if one exists
    if [ -f "${LOG_FILE}" ]
    then
        mv "${LOG_FILE}" "${LOG_FILE}.old"
    else
        touch "${LOG_FILE}"
    fi

    # Fill the log file with build and host info
    echo "BUILDALL-QEMU LOG FOR $1" >> "${LOG_FILE}"
    echo "START TIME: ${START_TIME}" >> "${LOG_FILE}"
    echo "HOSTNAME: $(uname -n)" >> "${LOG_FILE}"
    echo "HOST OS: ${OS_INFO}" >> "${LOG_FILE}"
    echo "HOST KERNEL: $(uname -r)" >> "${LOG_FILE}"
    echo "===============" >> "${LOG_FILE}"
    echo "BUILD RESULTS:" >> "${LOG_FILE}"

    # start the builds for each MACHINE and TCLIBC
    for j in ${LIBC_LIST}
    do
        echo "[$j]" >> "${LOG_FILE}"
        for i in ${TARGET_LIST}
        do
            echo "$i" "$j"; \
            TCLIBC=$j MACHINE=$i bitbake "$1" && echo "PASS: $i" >> "${LOG_FILE}" || echo "FAIL: $i" >> "${LOG_FILE}"
        done
    done

    # Get pass/fail totals and add them to the end of the log
    PASSED=$(grep "PASS:" "${LOG_FILE}" | wc -l)
    FAILED=$(grep "FAIL:" "${LOG_FILE}" | wc -l)
    echo "===============" >> "${LOG_FILE}"
    echo "PASSED: ${PASSED}" >> "${LOG_FILE}"
    echo "FAILED: ${FAILED}" >> "${LOG_FILE}"
}
# fail entire script if any command fails
set -e

# print usage and exit if not enough args given
[ $# -eq 0 ] && usage && exit 1

# handle arguments: -l/--libc selects a single libc, anything else is the recipe
RECIPE=
while [ $# -gt 0 ]
do
    arg=$1
    case $arg in
    -l|--libc)
        if [ "$2" = "glibc" ] || [ "$2" = "musl" ]
        then
            LIBC_LIST="$2"
        else
            echo "Unrecognized libc option."
            usage && exit 1
        fi
        # consume both the flag and its value
        shift
        shift
        ;;
    *)
        RECIPE="$1"
        shift
        ;;
    esac
done

# restore the recipe name as the sole positional parameter
set -- "$RECIPE"

# run buildall for the given recipe and LIBC
if [ -n "$1" ]
then
    buildall "$1" "$LIBC_LIST"
fi

View File

@@ -0,0 +1,108 @@
#!/usr/bin/env python3
#
# Collects the recorded SRCREV values from buildhistory and reports on them
#
# Copyright 2013 Intel Corporation
# Authored-by: Paul Eggleton <paul.eggleton@intel.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
import collections
import os
import sys
import optparse
import logging
def logger_create():
    """Create and return the console logger used by this script.

    Logs "LEVEL: message" lines to stderr at INFO level and above.
    """
    log = logging.getLogger("buildhistory")
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log

logger = logger_create()
def main():
    """Walk a buildhistory tree and print SRCREV assignments.

    For every latest_srcrev file found, compares the recorded SRCREV
    values against the originals noted in '# SRCREV...' comment lines and
    prints pinning assignments (grouped by parent directory) for those
    that differ, or for all of them with --report-all.
    """
    parser = optparse.OptionParser(
        description = "Collects the recorded SRCREV values from buildhistory and reports on them.",
        usage = """
    %prog [options]""")

    parser.add_option("-a", "--report-all",
            help = "Report all SRCREV values, not just ones where AUTOREV has been used",
            action="store_true", dest="reportall")
    parser.add_option("-f", "--forcevariable",
            help = "Use forcevariable override for all output lines",
            action="store_true", dest="forcevariable")
    parser.add_option("-p", "--buildhistory-dir",
            help = "Specify path to buildhistory directory (defaults to buildhistory/ under cwd)",
            action="store", dest="buildhistory_dir", default='buildhistory/')

    options, args = parser.parse_args(sys.argv)
    if len(args) > 1:
        sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args[1:]))
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(options.buildhistory_dir):
        sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % options.buildhistory_dir)
        parser.print_help()
        sys.exit(1)

    if options.forcevariable:
        forcevariable = ':forcevariable'
    else:
        forcevariable = ''

    # Map of parent directory -> list of (pn, srcrev-name-or-None, srcrev)
    all_srcrevs = collections.defaultdict(list)
    for root, dirs, files in os.walk(options.buildhistory_dir):
        # Sort for deterministic output; skip the history repo's own metadata
        dirs.sort()
        if '.git' in dirs:
            dirs.remove('.git')
        for fn in files:
            if fn == 'latest_srcrev':
                curdir = os.path.basename(os.path.dirname(root))
                fullpath = os.path.join(root, fn)
                pn = os.path.basename(root)
                srcrev = None
                orig_srcrev = None
                orig_srcrevs = {}
                srcrevs = {}
                with open(fullpath) as f:
                    for line in f:
                        # Pre-split any assignment line; "value" is reused by
                        # whichever branch below matches this line
                        if '=' in line:
                            splitval = line.split('=')
                            value = splitval[1].strip('" \t\n\r')
                        if line.startswith('# SRCREV = '):
                            # Original (commented-out) unnamed SRCREV
                            orig_srcrev = value
                        elif line.startswith('# SRCREV_'):
                            # Original named SRCREV (e.g. SRCREV_machine)
                            splitval = line.split('=')
                            name = splitval[0].split('_')[1].strip()
                            orig_srcrevs[name] = value
                        elif line.startswith('SRCREV ='):
                            srcrev = value
                        elif line.startswith('SRCREV_'):
                            name = splitval[0].split('_')[1].strip()
                            srcrevs[name] = value
                if srcrev and (options.reportall or srcrev != orig_srcrev):
                    all_srcrevs[curdir].append((pn, None, srcrev))
                for name, value in srcrevs.items():
                    orig = orig_srcrevs.get(name, orig_srcrev)
                    if options.reportall or value != orig:
                        all_srcrevs[curdir].append((pn, name, value))

    # Emit assignments grouped and sorted by directory
    for curdir, srcrevs in sorted(all_srcrevs.items()):
        if srcrevs:
            print('# %s' % curdir)
            for pn, name, srcrev in srcrevs:
                if name:
                    print('SRCREV_%s:pn-%s%s = "%s"' % (name, pn, forcevariable, srcrev))
                else:
                    print('SRCREV:pn-%s%s = "%s"' % (pn, forcevariable, srcrev))

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,134 @@
#!/usr/bin/env python3
# Report significant differences in the buildhistory repository since a specific revision
#
# Copyright (C) 2013 Intel Corporation
# Author: Paul Eggleton <paul.eggleton@linux.intel.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
# Ensure PythonGit is installed (buildhistory_analysis needs it)
try:
import git
except ImportError:
print("Please install GitPython (python3-git) 0.3.4 or later in order to use this script")
sys.exit(1)
def get_args_parser():
    """Build and return the argument parser for this script.

    Fix: the long form of -s was declared as '---signatures' (three
    dashes), so the documented '--signatures' spelling was rejected by
    argparse; only '-s' worked.
    """
    description = "Reports significant differences in the buildhistory repository."
    parser = argparse.ArgumentParser(description=description,
                                     usage="""
    %(prog)s [options] [from-revision [to-revision]]
    (if not specified, from-revision defaults to build-minus-1, and to-revision defaults to HEAD)""")

    default_dir = os.path.join(os.environ.get('BUILDDIR', '.'), 'buildhistory')
    parser.add_argument('-p', '--buildhistory-dir',
                        action='store',
                        dest='buildhistory_dir',
                        default=default_dir,
                        help="Specify path to buildhistory directory (defaults to buildhistory/ under cwd)")
    parser.add_argument('-v', '--report-version',
                        action='store_true',
                        dest='report_ver',
                        default=False,
                        help="Report changes in PKGE/PKGV/PKGR even when the values are still the default (PE/PV/PR)")
    parser.add_argument('-a', '--report-all',
                        action='store_true',
                        dest='report_all',
                        default=False,
                        help="Report all changes, not just the default significant ones")
    parser.add_argument('-s', '--signatures',
                        action='store_true',
                        dest='sigs',
                        default=False,
                        help="Report list of signatures differing instead of output")
    parser.add_argument('-S', '--signatures-with-diff',
                        action='store_true',
                        dest='sigsdiff',
                        default=False,
                        help="Report on actual signature differences instead of output (requires signature data to have been generated, either by running the actual tasks or using bitbake -S)")
    parser.add_argument('-e', '--exclude-path',
                        action='append',
                        help="Exclude path from the output")
    parser.add_argument('-c', '--colour',
                        choices=('yes', 'no', 'auto'),
                        default="auto",
                        help="Whether to colourise (defaults to auto)")
    parser.add_argument('revisions',
                        default = ['build-minus-1', 'HEAD'],
                        nargs='*',
                        help=argparse.SUPPRESS)
    return parser
def main():
    """Entry point: diff two buildhistory revisions and print the changes."""
    parser = get_args_parser()
    args = parser.parse_args()

    if len(args.revisions) > 2:
        sys.stderr.write('Invalid argument(s) specified: %s\n\n' % ' '.join(args.revisions[2:]))
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(args.buildhistory_dir):
        sys.stderr.write('Buildhistory directory "%s" does not exist\n\n' % args.buildhistory_dir)
        parser.print_help()
        sys.exit(1)

    # Make scripts/lib importable so scriptpath can be loaded
    scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
    lib_path = scripts_path + '/lib'
    sys.path = sys.path + [lib_path]

    import scriptpath

    # Set path to OE lib dir so we can import the buildhistory_analysis module
    scriptpath.add_oe_lib_path()
    # Set path to bitbake lib dir so the buildhistory_analysis module can load bb.utils
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
        sys.exit(1)

    # Resolve from/to revisions: a single argument may be "a..b" or just "from"
    if len(args.revisions) == 1:
        if '..' in args.revisions[0]:
            fromrev, torev = args.revisions[0].split('..')
        else:
            fromrev, torev = args.revisions[0], 'HEAD'
    elif len(args.revisions) == 2:
        fromrev, torev = args.revisions

    from oe.buildhistory_analysis import init_colours, process_changes
    import gitdb

    init_colours({"yes": True, "no": False, "auto": sys.stdout.isatty()}[args.colour])

    try:
        changes = process_changes(args.buildhistory_dir, fromrev, torev,
                                  args.report_all, args.report_ver, args.sigs,
                                  args.sigsdiff, args.exclude_path)
    except gitdb.exc.BadObject as e:
        # NOTE(review): args.revisions has a non-empty default, so the
        # "not args.revisions" branch looks unreachable via normal argparse
        # usage -- confirm whether an empty-list case can actually occur.
        if not args.revisions:
            sys.stderr.write("Unable to find previous build revision in buildhistory repository\n\n")
            parser.print_help()
        else:
            sys.stderr.write('Specified git revision "%s" is not valid\n' % e.args[0])
        sys.exit(1)

    for chg in changes:
        out = str(chg)
        if out:
            print(out)

    sys.exit(0)

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,302 @@
#!/usr/bin/env python3
#
# Script for comparing buildstats from two different builds
#
# Copyright (c) 2016, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import glob
import logging
import math
import os
import sys
from operator import attrgetter
# Import oe libs
scripts_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(scripts_path, 'lib'))
from buildstats import BuildStats, diff_buildstats, taskdiff_fields, BSVerDiff
# Setup logging
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger()
class ScriptError(Exception):
    """Exception for internal, expected error conditions of this script.

    Raised by the helpers below and caught in main() so the user sees a
    clean error message instead of a traceback.
    """
    pass
def read_buildstats(path, multi):
    """Read buildstats from a file or directory.

    Accepts a JSON buildstats file, a single buildstats directory
    (containing a build_stats file), or — with multi=True — a directory of
    several buildstats which are aggregated/averaged together.
    Raises ScriptError on missing paths or ambiguous input.
    """
    if not os.path.exists(path):
        raise ScriptError("No such file or directory: {}".format(path))

    if os.path.isfile(path):
        return BuildStats.from_file_json(path)

    if os.path.isfile(os.path.join(path, 'build_stats')):
        return BuildStats.from_dir(path)

    # Handle a non-buildstat directory
    subpaths = sorted(glob.glob(path + '/*'))
    if len(subpaths) > 1:
        if multi:
            log.info("Averaging over {} buildstats from {}".format(
                len(subpaths), path))
        else:
            # Fixed typo in the message: "of use" -> "or use"
            raise ScriptError("Multiple buildstats found in '{}'. Please give "
                              "a single buildstat directory or use the --multi "
                              "option".format(path))
    bs = None
    for subpath in subpaths:
        if os.path.isfile(subpath):
            _bs = BuildStats.from_file_json(subpath)
        else:
            _bs = BuildStats.from_dir(subpath)
        if bs is None:
            bs = _bs
        else:
            bs.aggregate(_bs)
    if not bs:
        raise ScriptError("No buildstats found under {}".format(path))

    return bs
def print_ver_diff(bs1, bs2):
    """Print package version differences between two buildstats."""
    diff = BSVerDiff(bs1, bs2)

    # Width of the longest recipe name across both builds, for column alignment
    maxlen = max([len(r) for r in set(bs1.keys()).union(set(bs2.keys()))])
    fmt_str = "  {:{maxlen}} ({})"

    if diff.new:
        print("\nNEW RECIPES:")
        print("------------")
        for name, val in sorted(diff.new.items()):
            print(fmt_str.format(name, val.nevr, maxlen=maxlen))

    if diff.dropped:
        print("\nDROPPED RECIPES:")
        print("----------------")
        for name, val in sorted(diff.dropped.items()):
            print(fmt_str.format(name, val.nevr, maxlen=maxlen))

    # Changed recipes print an "old -> new" column plus the full nevr
    fmt_str = "  {0:{maxlen}} {1:<20} ({2})"
    if diff.rchanged:
        print("\nREVISION CHANGED:")
        print("-----------------")
        for name, val in sorted(diff.rchanged.items()):
            field1 = "{} -> {}".format(val.left.revision, val.right.revision)
            field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
            print(fmt_str.format(name, field1, field2, maxlen=maxlen))

    if diff.vchanged:
        print("\nVERSION CHANGED:")
        print("----------------")
        for name, val in sorted(diff.vchanged.items()):
            field1 = "{} -> {}".format(val.left.version, val.right.version)
            field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
            print(fmt_str.format(name, field1, field2, maxlen=maxlen))

    if diff.echanged:
        print("\nEPOCH CHANGED:")
        print("--------------")
        for name, val in sorted(diff.echanged.items()):
            field1 = "{} -> {}".format(val.left.epoch, val.right.epoch)
            field2 = "{} -> {}".format(val.left.nevr, val.right.nevr)
            print(fmt_str.format(name, field1, field2, maxlen=maxlen))
def print_task_diff(bs1, bs2, val_type, min_val=0, min_absdiff=0, sort_by=('absdiff',), only_tasks=[]):
    """Diff task execution times (or other per-task attributes).

    val_type names the buildstat attribute to compare (e.g. 'cputime');
    tasks below min_val or with differences below min_absdiff are
    filtered out, and the table is sorted by the fields in sort_by.
    """
    def val_to_str(val, human_readable=False):
        """Convert raw value to printable string"""
        def hms_time(secs):
            """Get time in human-readable HH:MM:SS format"""
            h = int(secs / 3600)
            m = int((secs % 3600) / 60)
            s = secs % 60
            if h == 0:
                return "{:02d}:{:04.1f}".format(m, s)
            else:
                return "{:d}:{:02d}:{:04.1f}".format(h, m, s)

        if 'time' in val_type:
            if human_readable:
                return hms_time(val)
            else:
                return "{:.1f}s".format(val)
        elif 'bytes' in val_type and human_readable:
            prefix = ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi']
            dec = int(math.log(val, 2) / 10)
            prec = 1 if dec > 0 else 0
            return "{:.{prec}f}{}B".format(val / (2 ** (10 * dec)),
                                           prefix[dec], prec=prec)
        elif 'ops' in val_type and human_readable:
            prefix = ['', 'k', 'M', 'G', 'T', 'P']
            dec = int(math.log(val, 1000))
            prec = 1 if dec > 0 else 0
            return "{:.{prec}f}{}ops".format(val / (1000 ** dec),
                                             prefix[dec], prec=prec)
        return str(int(val))

    def sum_vals(buildstats):
        """Get cumulative sum of all tasks"""
        total = 0.0
        for recipe_data in buildstats.values():
            for name, bs_task in recipe_data.tasks.items():
                if not only_tasks or name in only_tasks:
                    total += getattr(bs_task, val_type)
        return total

    if min_val:
        print("Ignoring tasks less than {} ({})".format(
            val_to_str(min_val, True), val_to_str(min_val)))
    if min_absdiff:
        print("Ignoring differences less than {} ({})".format(
            val_to_str(min_absdiff, True), val_to_str(min_absdiff)))

    # Prepare the data
    tasks_diff = diff_buildstats(bs1, bs2, val_type, min_val, min_absdiff, only_tasks)

    # Sort our list: apply sort keys from least to most significant so the
    # first field in sort_by dominates (stable sort)
    for field in reversed(sort_by):
        if field.startswith('-'):
            field = field[1:]
            reverse = True
        else:
            reverse = False
        tasks_diff = sorted(tasks_diff, key=attrgetter(field), reverse=reverse)

    # First row is the header; field_lens tracks the widest value per column
    linedata = [(' ', 'PKG', ' ', 'TASK', 'ABSDIFF', 'RELDIFF',
                 val_type.upper() + '1', val_type.upper() + '2')]
    field_lens = dict([('len_{}'.format(i), len(f)) for i, f in enumerate(linedata[0])])

    # Prepare fields in string format and measure field lengths
    for diff in tasks_diff:
        task_prefix = diff.task_op if diff.pkg_op == ' ' else ' '
        linedata.append((diff.pkg_op, diff.pkg, task_prefix, diff.task,
                         val_to_str(diff.absdiff),
                         '{:+.1f}%'.format(diff.reldiff),
                         val_to_str(diff.value1),
                         val_to_str(diff.value2)))
        for i, field in enumerate(linedata[-1]):
            key = 'len_{}'.format(i)
            if len(field) > field_lens[key]:
                field_lens[key] = len(field)

    # Print data
    print()
    for fields in linedata:
        print("{:{len_0}}{:{len_1}}  {:{len_2}}{:{len_3}}  {:>{len_4}}  {:>{len_5}}  {:>{len_6}} -> {:{len_7}}".format(
            *fields, **field_lens))

    # Print summary of the diffs
    total1 = sum_vals(bs1)
    total2 = sum_vals(bs2)
    print("\nCumulative {}:".format(val_type))
    print ("  {}  {:+.1f}%  {} ({}) -> {} ({})".format(
        val_to_str(total2 - total1), 100 * (total2-total1) / total1,
        val_to_str(total1, True), val_to_str(total1),
        val_to_str(total2, True), val_to_str(total2)))
def parse_args(argv):
    """Parse cmdline arguments"""
    description="""
Script for comparing buildstats of two separate builds."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=description)

    # Per-attribute defaults for the two filter options; the dicts themselves
    # are used as sentinel defaults (see the identity checks below)
    min_val_defaults = {'cputime': 3.0,
                        'read_bytes': 524288,
                        'write_bytes': 524288,
                        'read_ops': 500,
                        'write_ops': 500,
                        'walltime': 5}
    min_absdiff_defaults = {'cputime': 1.0,
                            'read_bytes': 131072,
                            'write_bytes': 131072,
                            'read_ops': 50,
                            'write_ops': 50,
                            'walltime': 2}

    parser.add_argument('--debug', '-d', action='store_true',
                        help="Verbose logging")
    parser.add_argument('--ver-diff', action='store_true',
                        help="Show package version differences and exit")
    parser.add_argument('--diff-attr', default='cputime',
                        choices=min_val_defaults.keys(),
                        help="Buildstat attribute which to compare")
    parser.add_argument('--min-val', default=min_val_defaults, type=float,
                        help="Filter out tasks less than MIN_VAL. "
                             "Default depends on --diff-attr.")
    parser.add_argument('--min-absdiff', default=min_absdiff_defaults, type=float,
                        help="Filter out tasks whose difference is less than "
                             "MIN_ABSDIFF, Default depends on --diff-attr.")
    parser.add_argument('--sort-by', default='absdiff',
                        help="Comma-separated list of field sort order. "
                             "Prepend the field name with '-' for reversed sort. "
                             "Available fields are: {}".format(', '.join(taskdiff_fields)))
    parser.add_argument('--multi', action='store_true',
                        help="Read all buildstats from the given paths and "
                             "average over them")
    parser.add_argument('--only-task', dest='only_tasks', metavar='TASK', action='append', default=[],
                        help="Only include TASK in report. May be specified multiple times")
    parser.add_argument('buildstats1', metavar='BUILDSTATS1', help="'Left' buildstat")
    parser.add_argument('buildstats2', metavar='BUILDSTATS2', help="'Right' buildstat")

    args = parser.parse_args(argv)

    # We do not nedd/want to read all buildstats if we just want to look at the
    # package versions
    if args.ver_diff:
        args.multi = False

    # Handle defaults for the filter arguments: an "is" identity check
    # distinguishes "user gave no value" (the sentinel dict) from any
    # user-supplied float, then the per-attribute default is selected
    if args.min_val is min_val_defaults:
        args.min_val = min_val_defaults[args.diff_attr]
    if args.min_absdiff is min_absdiff_defaults:
        args.min_absdiff = min_absdiff_defaults[args.diff_attr]

    return args
def main(argv=None):
    """Script entry point"""
    args = parse_args(argv)
    if args.debug:
        log.setLevel(logging.DEBUG)

    # Validate sort fields before doing any heavy reading
    sort_by = []
    for field in args.sort_by.split(','):
        if field.lstrip('-') not in taskdiff_fields:
            log.error("Invalid sort field '%s' (must be one of: %s)" %
                      (field, ', '.join(taskdiff_fields)))
            sys.exit(1)
        sort_by.append(field)

    try:
        bs1 = read_buildstats(args.buildstats1, args.multi)
        bs2 = read_buildstats(args.buildstats2, args.multi)

        if args.ver_diff:
            print_ver_diff(bs1, bs2)
        else:
            print_task_diff(bs1, bs2, args.diff_attr, args.min_val,
                            args.min_absdiff, sort_by, args.only_tasks)
    except ScriptError as err:
        # Expected failures: print a clean error rather than a traceback
        log.error(str(err))
        return 1
    return 0

if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,126 @@
#!/usr/bin/env python3
#
# Dump a summary of the specified buildstats to the terminal, filtering and
# sorting by walltime.
#
# SPDX-License-Identifier: GPL-2.0-only
import argparse
import dataclasses
import datetime
import enum
import os
import pathlib
import sys
scripts_path = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(scripts_path, "lib"))
import buildstats
@dataclasses.dataclass
class Task:
    """A single executed task with its timing, flattened from buildstats."""

    # Recipe (PN) the task belongs to
    recipe: str
    # Task name, e.g. "do_compile"
    task: str
    # Wall-clock start time of the task
    start: datetime.datetime
    # Task duration (whole seconds)
    duration: datetime.timedelta
class Sorting(enum.Enum):
    """Task orderings understood by the --sort option."""

    start = 1
    duration = 2

    # argparse integration: display bare member names in help and errors
    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return str(self)

    @staticmethod
    def from_string(s: str):
        # Map an option string to a member; hand back the raw string on a
        # miss so argparse's choices check can reject it with a clear error.
        return Sorting.__members__.get(s, s)
def read_buildstats(path: pathlib.Path) -> buildstats.BuildStats:
    """Load buildstats from a single JSON export or a buildstats directory."""
    if not path.exists():
        raise Exception(f"No such file or directory: {path}")
    if path.is_file():
        # A single JSON buildstats export
        return buildstats.BuildStats.from_file_json(path)
    if (path / "build_stats").is_file():
        # A per-build buildstats directory (identified by its build_stats file)
        return buildstats.BuildStats.from_dir(path)
    raise Exception(f"Cannot find buildstats in {path}")
def dump_buildstats(args, bs: buildstats.BuildStats):
    """Flatten the buildstats into Task records, then sort, filter and print."""
    tasks = []
    for recipe in bs.values():
        for task, stats in recipe.tasks.items():
            t = Task(
                recipe.name,
                task,
                datetime.datetime.fromtimestamp(stats["start_time"]),
                datetime.timedelta(seconds=int(stats.walltime)),
            )
            tasks.append(t)

    # Sort by the attribute chosen via --sort (start time or duration)
    tasks.sort(key=lambda t: getattr(t, args.sort.name))

    minimum = datetime.timedelta(seconds=args.shortest)
    highlight = datetime.timedelta(seconds=args.highlight)

    for t in tasks:
        if t.duration >= minimum:
            line = f"{t.duration} {t.recipe}:{t.task}"
            # Emphasise (ANSI bold) tasks exceeding the highlight threshold
            if args.highlight and t.duration >= highlight:
                print(f"\033[1m{line}\033[0m")
            else:
                print(line)
def main(argv=None) -> int:
    """Parse arguments, load the buildstats and print the summary."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        "buildstats", metavar="BUILDSTATS", help="Buildstats file", type=pathlib.Path
    )
    parser.add_argument(
        "--sort",
        "-s",
        type=Sorting.from_string,
        choices=list(Sorting),
        default=Sorting.start,
        help="Sort tasks",
    )
    parser.add_argument(
        "--shortest",
        "-t",
        type=int,
        default=1,
        metavar="SECS",
        help="Hide tasks shorter than SECS seconds",
    )
    parser.add_argument(
        "--highlight",
        "-g",
        type=int,
        default=60,
        metavar="SECS",
        help="Highlight tasks longer than SECS seconds (0 disabled)",
    )
    args = parser.parse_args(argv)

    bs = read_buildstats(args.buildstats)
    dump_buildstats(args, bs)

    return 0

if __name__ == "__main__":
    sys.exit(main())

1384
sources/poky/scripts/combo-layer Executable file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,25 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Hook to add source component/revision info to commit message
# Parameter:
#   $1 patch-file
#   $2 revision
#   $3 reponame

patchfile=$1
rev=$2
reponame=$3

# Prefix the Subject line with the component name, dropping any existing
# "$reponame: " prefix(es) first so re-imports do not stack prefixes.
# All $patchfile expansions are quoted so paths with spaces work.
sed -i -e "0,/^Subject:/s#^Subject: \[PATCH\] \($reponame: \)*\(.*\)#Subject: \[PATCH\] $reponame: \2#" "$patchfile"

if grep -q '^Signed-off-by:' "$patchfile"; then
    # Insert before Signed-off-by.
    sed -i -e "0,/^Signed-off-by:/s#\(^Signed-off-by:.*\)#\(From $reponame rev: $rev\)\n\n\1#" "$patchfile"
else
    # Insert before final --- separator, with extra blank lines removed.
    perl -e "\$_ = join('', <>); s/^(.*\S[ \t]*)(\n|\n\s*\n)---\n/\$1\n\nFrom $reponame rev: $rev\n---\n/s; print;" "$patchfile" >"$patchfile.tmp"
    mv "$patchfile.tmp" "$patchfile"
fi

View File

@@ -0,0 +1,93 @@
# combo-layer example configuration file
# Default values for all sections.
[DEFAULT]
# Add 'Signed-off-by' to all commits that get imported automatically.
signoff = True
# component name
[bitbake]
# Override signoff default above (not very useful, but possible).
signoff = False
# mandatory options
# git upstream uri
src_uri = git://git.openembedded.org/bitbake
# the directory to clone the component repo
local_repo_dir = /home/kyu3/src/test/bitbake
# the relative dir within the combo repo to put the component files
# use "." if the files should be in the root dir
dest_dir = bitbake
# the last update revision.
# "init" will set this to the latest revision automatically, however if it
# is empty when "update" is run, the tool will start from the first commit.
# Note that this value will get updated by "update" if the component repo's
# latest revision changed and the operation completes successfully.
last_revision =
# optional options:
# branch: specify the branch in the component repo to pull from
# (master if not specified)
# file_filter: only include the specified file(s)
# file_filter = [path] [path] ...
# example:
# file_filter = src/ : only include the subdir src
# file_filter = src/*.c : only include the src *.c file
# file_filter = src/main.c src/Makefile.am : only include these two files
# file_exclude: filter out these file(s)
# file_exclude = [path] [path] ...
#
# Each entry must match a file name. In contrast to file_filter, matching
# a directory has no effect. To achieve that, append a * wildcard
# at the end.
#
# Wildcards are applied to the complete path and also match slashes.
#
# example:
# file_exclude = src/foobar/* : exclude everything under src/foobar
# file_exclude = src/main.c : filter out main.c after including it with file_filter = src/*.c
# file_exclude = *~ : exclude backup files
# hook: if provided, the tool will call the hook to process the generated
# patch from upstream, and then apply the modified patch to the combo
# repo.
# the hook script is called as follows: ./hook patchpath revision reponame
# example:
# hook = combo-layer-hook-default.sh
# since_revision:
# since_revision = release-1-2
# since_revision = 12345 abcdf
#
# If provided, truncate imported history during "combo-layer --history
# init" at the specified revision(s). More than one can be specified
# to cut off multiple component branches.
#
# The specified commits themselves do not get imported. Instead, an
# artificial commit with "unknown" author is created with a content
# that matches the original commit.
[oe-core]
src_uri = git://git.openembedded.org/openembedded-core
local_repo_dir = /home/kyu3/src/test/oecore
dest_dir = .
last_revision =
since_revision = some-tag-or-commit-on-master-branch
# It is also possible to embed python code in the config values. Similar
# to bitbake it considers every value starting with @ to be a python
# script.
# e.g. local_repo_dir could easily be configured using an environment
# variable:
#
# [bitbake]
# local_repo_dir = @os.getenv("LOCAL_REPO_DIR") + "/bitbake"
#

View File

@@ -0,0 +1,124 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script operates on the .dat file generated by bb-matrix.sh. It tolerates
# the header by skipping the first line, but error messages and bad data records
# need to be removed first. It will generate three views of the plot, and leave
# an interactive view open for further analysis.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#
# Setup the defaults
DATFILE="bb-matrix.dat"
# Axis labels; underscores are escaped for gnuplot's enhanced text mode.
XLABEL="BB\\\\_NUMBER\\\\_THREADS"
YLABEL="PARALLEL\\\\_MAKE"
# Column of the .dat file plotted on the Z axis (3 = elapsed time).
FIELD=3
DEF_TITLE="Elapsed Time (seconds)"
# Default: draw the pm3d surface itself; -w switches to a bottom colormap.
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
SIZE="640,480"
# Print command-line help; the heredoc expands the current default values.
function usage {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-d datfile] [-f field] [-h] [-t title] [-w]
  -d datfile    The data file generated by bb-matrix.sh (default: $DATFILE)
  -f field      The field index to plot as the Z axis from the data file
                (default: $FIELD, "$DEF_TITLE")
  -h            Display this help message
  -s W,H        PNG and window size in pixels (default: $SIZE)
  -t title      The title to display, should describe the field (-f) and units
                (default: "$DEF_TITLE")
  -w            Render the plot as wireframe with a 2D colormap projected on the
                XY plane rather than as the texture for the surface
EOM
}
# Parse and validate arguments
while getopts "d:f:hs:t:w" OPT; do
    case $OPT in
    d)
        DATFILE="$OPTARG"
        ;;
    f)
        FIELD="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    s)
        SIZE="$OPTARG"
        ;;
    t)
        TITLE="$OPTARG"
        ;;
    w)
        # Wireframe mode: project the 2D colormap onto the XY plane.
        PM3D_FRAGMENT="set pm3d at b"
        # Suffix for output filenames so -w runs don't overwrite surface runs.
        W="-w"
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Ensure the data file exists
if [ ! -f "$DATFILE" ]; then
    echo "ERROR: $DATFILE does not exist"
    usage
    exit 1
fi

# Output basename: datfile stem + field index + optional wireframe tag.
PLOT_BASENAME=${DATFILE%.*}-f$FIELD$W

# Set a sane title
# TODO: parse the header and define titles for each format parameter for TIME(1)
if [ -z "$TITLE" ]; then
    if [ ! "$FIELD" == "3" ]; then
        TITLE="Field $FIELD"
    else
        TITLE="$DEF_TITLE"
    fi
fi
# Determine the dgrid3d mesh dimensions size.
# Leading zeroes are stripped so the values can be used in arithmetic.
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | tail -n1)
# Use POSIX $((...)) arithmetic; the historical $[...] form is deprecated.
BB_CNT=$((MAX - MIN + 1))
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | tail -n1)
PM_CNT=$((MAX - MIN + 1))

# Drive gnuplot: render three PNG views (3D, BB projection, PM projection)
# and leave an interactive wxt window open for further analysis.
(cat <<EOF
set title "$TITLE"
set xlabel "$XLABEL"
set ylabel "$YLABEL"
set style line 100 lt 5 lw 1.5
$PM3D_FRAGMENT
set dgrid3d $PM_CNT,$BB_CNT splines
set ticslevel 0.2
set term png size $SIZE
set output "$PLOT_BASENAME.png"
splot "$DATFILE" every ::1 using 1:2:$FIELD with lines ls 100
set view 90,0
set output "$PLOT_BASENAME-bb.png"
replot
set view 90,90
set output "$PLOT_BASENAME-pm.png"
replot
set view 60,30
set term wxt size $SIZE
replot
EOF
) | gnuplot --persist

View File

@@ -0,0 +1,66 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script runs BB_CMD (typically building core-image-sato) for all
# combinations of BB_RANGE and PM_RANGE values. It saves off all the console
# logs, the buildstats directories, and creates a bb-pm-runtime.dat file which
# can be used to postprocess the results with a plotting tool, spreadsheet, etc.
# Before running this script, it is recommended that you pre-download all the
# necessary sources by performing the BB_CMD once manually. It is also a good
# idea to disable cron to avoid runtime variations caused by things like the
# locate process. Be sure to sanitize the dat file prior to post-processing as
# it may contain error messages or bad runs that should be removed.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#

# The following ranges are appropriate for a 4 core system with 8 logical units
# Use leading 0s to ensure all digits are the same string length, this results
# in nice log file names and columnar dat files.
BB_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
PM_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"

# Per-run results directory ($$ = PID of this shell, keeps runs unique).
DATADIR="bb-matrix-$$"
BB_CMD="bitbake core-image-minimal"
RUNTIME_LOG="$DATADIR/bb-matrix.dat"

# See TIME(1) for a description of the time format parameters
# The following all report 0: W K r s t w
TIME_STR="%e %S %U %P %c %w %R %F %M %x"

# Prepare the DATADIR
mkdir $DATADIR
if [ $? -ne 0 ]; then
    echo "Failed to create $DATADIR."
    exit 1
fi

# Add a simple header
echo "BB PM $TIME_STR" > $RUNTIME_LOG
for BB in $BB_RANGE; do
    for PM in $PM_RANGE; do
        RUNDIR="$DATADIR/$BB-$PM-build"
        mkdir $RUNDIR
        BB_LOG=$RUNDIR/$BB-$PM-bitbake.log
        date
        echo "BB=$BB PM=$PM Logging to $BB_LOG"
        echo -n " Preparing the work directory... "
        # Wipe previous build state so every combination starts cold.
        rm -rf pseudodone tmp sstate-cache tmp-eglibc &> /dev/null
        echo "done"
        # Export the variables under test and run the bitbake command
        # Strip any leading zeroes before passing to bitbake
        export BB_NUMBER_THREADS=$(echo $BB | sed 's/^0*//')
        export PARALLEL_MAKE="-j $(echo $PM | sed 's/^0*//')"
        # -a appends one timing record per run to the shared dat file.
        /usr/bin/time -f "$BB $PM $TIME_STR" -a -o $RUNTIME_LOG $BB_CMD &> $BB_LOG
        echo " $(tail -n1 $RUNTIME_LOG)"
        cp -a tmp/buildstats $RUNDIR/$BB-$PM-buildstats
    done
done

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
#
# Produces script data to be consumed by gnuplot. There are two possible plots
# depending if either the -S parameter is present or not:
#
# * without -S: Produces a histogram listing top N recipes/tasks versus
# stats. The first stat defined in the -s parameter is the one taken
# into account for ranking
# * -S: Produces a histogram listing tasks versus stats. In this case,
# the value of each stat is the sum for that particular stat in all recipes found.
# Stats values are in descending order defined by the first stat defined on -s
#
# EXAMPLES
#
# 1. Top recipes' tasks taking into account utime
#
# $ buildstats-plot.sh -s utime | gnuplot -p
#
# 2. Tasks versus utime:stime
#
# $ buildstats-plot.sh -s utime:stime -S | gnuplot -p
#
# 3. Tasks versus IO write_bytes:IO read_bytes
#
# $ buildstats-plot.sh -s 'IO write_bytes:IO read_bytes' -S | gnuplot -p
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#
# Fail fast on unset variables and on any command error.
set -o nounset
set -o errexit

BS_DIR="tmp/buildstats"
N=10
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="utime"
ACCUMULATE=""
SUM=""
OUTDATA_FILE="$PWD/buildstats-plot.out"
# Print command-line help; the heredoc expands the current default values.
function usage {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
  -b buildstats The path where the folder resides
                (default: "$BS_DIR")
  -n N          Top N recipes to display. Ignored if -S is present
                (default: "$N")
  -r recipe     The recipe mask to be searched
  -t tasks      The tasks to be computed
                (default: "$TASKS")
  -s stats      The stats to be matched. If more that one stat, units
                should be the same because data is plot as histogram.
                (see buildstats.sh -h for all options) or any other defined
                (build)stat separated by colons, i.e. stime:utime
                (default: "$STATS")
  -a            Accumulate all stats values for found recipes
  -S            Sum values for a particular stat for found recipes
  -o            Output data file.
                (default: "$OUTDATA_FILE")
  -h            Display this help message
EOM
}
# Parse and validate arguments
while getopts "b:n:r:t:s:o:aSh" OPT; do
    case $OPT in
    b)
        BS_DIR="$OPTARG"
        ;;
    n)
        N="$OPTARG"
        ;;
    r)
        # Stored with its flag so it can be passed straight to buildstats.sh.
        RECIPE="-r $OPTARG"
        ;;
    t)
        TASKS="$OPTARG"
        ;;
    s)
        STATS="$OPTARG"
        ;;
    a)
        ACCUMULATE="-a"
        ;;
    S)
        SUM="y"
        ;;
    o)
        OUTDATA_FILE="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Get number of stats (split the colon-separated list into an array).
IFS=':'; statsarray=(${STATS}); unset IFS
nstats=${#statsarray[@]}

# Get script folder, use to run buildstats.sh
CD=$(dirname $0)

# Parse buildstats recipes to produce a single table
OUTBUILDSTATS="$PWD/buildstats.log"
$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS

# Get headers (quote the first line and escape spaces/underscores for gnuplot)
HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp')

echo -e "set boxwidth 0.9 relative"
echo -e "set style data histograms"
echo -e "set style fill solid 1.0 border lt -1"
echo -e "set xtics rotate by 45 right"

# Get output data
if [ -z "$SUM" ]; then
    cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
    # include task at recipe column
    sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE
    echo -e "set title \"Top task/recipes\""
    echo -e "plot for [COL=3:`expr 3 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(stringcolumn(1).' '.stringcolumn(2)) title columnheader(COL)"
else
    # Construct datamash sum argument (sum 3 sum 4 ...)
    declare -a sumargs
    j=0
    for i in `seq $nstats`; do
        sumargs[j]=sum; j=$(( $j + 1 ))
        sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 ))
    done
    # Do the processing with datamash
    cat $OUTBUILDSTATS | sed -e '1d' | datamash -t ' ' -g1 ${sumargs[*]} | sort -k2 -n -r > $OUTDATA_FILE
    # Include headers into resulted file, so we can include gnuplot xtics
    HEADERS=$(echo $HEADERS | sed -e 's/recipe//1')
    sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE
    # Plot
    echo -e "set title \"Sum stats values per task for all recipes\""
    echo -e "plot for [COL=2:`expr 2 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(1) title columnheader(COL)"
fi

View File

@@ -0,0 +1,167 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# Given 'buildstats' data (generate by bitbake when setting
# USER_CLASSES ?= "buildstats" on local.conf), task names and a stats values
# (these are the ones preset on the buildstats files), outputs
# '<task> <recipe> <value_1> <value_2> ... <value_n>'. The units are the ones
# defined at buildstats, which in turn takes data from /proc/[pid] files
#
# Some useful pipelines
#
# 1. Tasks with largest stime (Amount of time that this process has been scheduled
# in kernel mode) values
# $ buildstats.sh -b <buildstats> -s stime | sort -k3 -n -r | head
#
# 2. Min, max, sum utime (Amount of time that this process has been scheduled
# in user mode) per task (in needs GNU datamash)
# $ buildstats.sh -b <buildstats> -s utime | datamash -t' ' -g1 min 3 max 3 sum 3 | sort -k4 -n -r
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#
# Stats, by type; each group is a colon-separated list of buildstats keys.
TIME="utime:stime:cutime:cstime"
IO="IO wchar:IO write_bytes:IO syscr:IO read_bytes:IO rchar:IO syscw:IO cancelled_write_bytes"
RUSAGE="rusage ru_utime:rusage ru_stime:rusage ru_maxrss:rusage ru_minflt:rusage ru_majflt:\
rusage ru_inblock:rusage ru_oublock:rusage ru_nvcsw:rusage ru_nivcsw"
CHILD_RUSAGE="Child rusage ru_utime:Child rusage ru_stime:Child rusage ru_maxrss:Child rusage ru_minflt:\
Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rusage ru_nvcsw:\
Child rusage ru_nivcsw"

# Option defaults.
BS_DIR="tmp/buildstats"
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="$TIME"
ACCUMULATE=""
HEADER="" # No header by default
# Print command-line help; the heredoc expands the current default values.
function usage {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
  -b buildstats The path where the folder resides
                (default: "$BS_DIR")
  -r recipe     The recipe to be computed
  -t tasks      The tasks to be computed
                (default: "$TASKS")
  -s stats      The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
                or any other defined buildstat separated by colons, i.e. stime:utime
                (default: "$STATS")
                Default stat sets:
                  TIME=$TIME
                  IO=$IO
                  RUSAGE=$RUSAGE
                  CHILD_RUSAGE=$CHILD_RUSAGE
  -a            Accumulate all stats values for found recipes
  -h            Display this help message
EOM
}
# Parse and validate arguments
while getopts "b:r:t:s:aHh" OPT; do
    case $OPT in
    b)
        BS_DIR="$OPTARG"
        ;;
    r)
        RECIPE="$OPTARG"
        ;;
    t)
        TASKS="$OPTARG"
        ;;
    s)
        STATS="$OPTARG"
        ;;
    a)
        ACCUMULATE="y"
        ;;
    H)
        HEADER="y"
        ;;
    h)
        usage
        exit 0
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Ensure the buildstats folder exists
if [ ! -d "$BS_DIR" ]; then
    echo "ERROR: $BS_DIR does not exist"
    usage
    exit 1
fi

# Expand the named stat groups (TIME/IO/...) into concrete stat keys.
# NOTE: IFS stays ":" from here on so colon-lists split in for-loops.
stats=""
IFS=":"
for stat in ${STATS}; do
    case $stat in
    TIME)
        stats="${stats}:${TIME}"
        ;;
    IO)
        stats="${stats}:${IO}"
        ;;
    RUSAGE)
        stats="${stats}:${RUSAGE}"
        ;;
    CHILD_RUSAGE)
        stats="${stats}:${CHILD_RUSAGE}"
        ;;
    *)
        # Not a group name: take the user's list verbatim.
        stats="${STATS}"
        ;;
    esac
done

# remove possible colon at the beginning
stats="$(echo "$stats" | sed -e 's/^://1')"

# Provide a header if required by the user
if [ -n "$HEADER" ] ; then
    if [ -n "$ACCUMULATE" ]; then
        echo "task:recipe:accumulated(${stats//:/;})"
    else
        echo "task:recipe:$stats"
    fi
fi

for task in ${TASKS}; do
    task="do_${task}"
    # awk joins find's results into one colon-separated string so the
    # colon IFS splits it back into per-file items.
    for file in $(find ${BS_DIR} -type f -path *${RECIPE}*/${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
        recipe="$(basename $(dirname $file))"
        times=""
        for stat in ${stats}; do
            [ -z "$stat" ] && { echo "empty stats"; }
            # Extract the value after "<stat>: " from the buildstats file.
            time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
            # in case the stat is not present, set the value as NA
            [ -z "$time" ] && { time="NA"; }
            # Append it to times
            if [ -z "$times" ]; then
                times="${time}"
            else
                times="${times} ${time}"
            fi
        done
        if [ -n "$ACCUMULATE" ]; then
            # Temporarily split on spaces to sum the collected values.
            IFS=' '; valuesarray=(${times}); IFS=':'
            times=0
            for value in "${valuesarray[@]}"; do
                [ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
                times=$(( $times + $value ))
            done
        fi
        echo "${task} ${recipe} ${times}"
    done
done

View File

@@ -0,0 +1,168 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010
import sys
import getopt
import os
import os.path
import re
# Set up sys.path to let us import tinfoil
scripts_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
lib_path = scripts_path + '/lib'
sys.path.insert(0, lib_path)
import scriptpath
scriptpath.add_bitbake_lib_path()
import bb.tinfoil
def usage():
    """Print command-line help for this script to stdout."""
    print('Usage: %s -d FILENAME [-d FILENAME]*' % os.path.basename(sys.argv[0]))
    print(' -d FILENAME documentation file to search')
    print(' -h, --help display this help and exit')
    print(' -t FILENAME documentation config file (for doc tags)')
    print(' -T Only display variables with doc tags (requires -t)')
def bbvar_is_documented(var, documented_vars):
    '''Return True if *var* appears in the list of documented variables.'''
    return var in documented_vars
def collect_documented_vars(docfiles):
    '''Walk the docfiles and collect the documented variables.

    Returns the variable names extracted from <glossentry id='var-NAME'>
    anchors, in file order; duplicates across files are kept.
    '''
    documented_vars = []
    # Matches the glossary anchor each documented variable gets in the docs.
    # (A second, unused pre-filter regex was removed from the original.)
    var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
    for d in docfiles:
        with open(d) as f:
            documented_vars += var_prog.findall(f.read())
    return documented_vars
def bbvar_doctag(var, docconf):
    '''Look up the [doc] tag string for *var* in the doc config file.

    Returns "?" when no config file was given, the tag text when found,
    "" when the variable has no tag, or the OS error message when the
    config file cannot be opened.
    '''
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
    if docconf == "":
        return "?"
    try:
        f = open(docconf)
    except IOError as err:
        return err.args[1]
    # 'with' guarantees the handle is closed even on the mid-loop return;
    # the original leaked the file when a match was found.
    with f:
        for line in f:
            m = prog.search(line)
            if m:
                return m.group(1)
    return ""
def main():
    """Collect every BitBake variable name via tinfoil and report the ones
    missing from the given documentation files, with their doc tags."""
    docfiles = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    # Variable names are all-uppercase by convention: anything containing a
    # lowercase letter is rejected as not a real BitBake variable.
    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            # Record the variable itself plus every variable it references,
            # parsing python-flagged variables with the code parser and
            # expanding ordinary ones through the datastore.
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files(variants=False):
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation; track the longest name for alignment.
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))
# Allow use both as a script and as an importable module.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,247 @@
#!/bin/bash
#
# Build performance test script wrapper
#
# Copyright (c) 2016, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script is a simple wrapper around the actual build performance tester
# script. This script initializes the build environment, runs
# oe-build-perf-test and archives the results.
# Script name/location (used in help text and to find helper scripts).
script=`basename $0`
script_dir=$(realpath $(dirname $0))
# Default location for result tarballs; -a overrides (empty disables).
archive_dir=~/perf-results/archives
# Print command-line help; the heredoc expands the current default values.
usage () {
cat << EOF
Usage: $script [-h] [-c COMMITISH] [-C GIT_REPO]

Optional arguments:
  -h                show this help and exit.
  -a ARCHIVE_DIR    archive results tarball here, give an empty string to
                    disable tarball archiving (default: $archive_dir)
  -c COMMITISH      test (checkout) this commit, <branch>:<commit> can be
                    specified to test specific commit of certain branch
  -C GIT_REPO       commit results into Git
  -d DOWNLOAD_DIR   directory to store downloaded sources in
  -E EMAIL_ADDR     send email report
  -g GLOBALRES_DIR  where to place the globalres file
  -P GIT_REMOTE     push results to a remote Git repository
  -R DEST           rsync reports to a remote destination
  -w WORK_DIR       work dir for this script
                    (default: GIT_TOP_DIR/build-perf-test)
  -x                create xml report (instead of json)
EOF
}
# Evaluate one variable from /etc/os-release; the subshell keeps the
# sourced assignments from leaking into this script's environment.
get_os_release_var () {
    ( source /etc/os-release; eval echo '$'$1 )
}
# Parse command line arguments
commitish=""
oe_build_perf_test_extra_opts=()
oe_git_archive_extra_opts=()
while getopts "ha:c:C:d:E:g:P:R:w:x" opt; do
    case $opt in
        h)  usage
            exit 0
            ;;
        a)  mkdir -p "$OPTARG"
            archive_dir=`realpath -s "$OPTARG"`
            ;;
        c)  commitish=$OPTARG
            ;;
        C)  mkdir -p "$OPTARG"
            results_repo=`realpath -s "$OPTARG"`
            ;;
        d)  download_dir=`realpath -s "$OPTARG"`
            ;;
        E)  email_to="$OPTARG"
            ;;
        g)  mkdir -p "$OPTARG"
            globalres_dir=`realpath -s "$OPTARG"`
            ;;
        P)  oe_git_archive_extra_opts+=("--push" "$OPTARG")
            ;;
        R)  rsync_dst="$OPTARG"
            ;;
        w)  base_dir=`realpath -s "$OPTARG"`
            ;;
        x)  oe_build_perf_test_extra_opts+=("--xml")
            ;;
        *)  usage
            exit 1
            ;;
    esac
done

# Check positional args
shift "$((OPTIND - 1))"
if [ $# -ne 0 ]; then
    echo "ERROR: No positional args are accepted."
    usage
    exit 1
fi
# Open a file descriptor for flock and acquire the lock, so only one
# instance of this wrapper runs at a time on this machine.
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
    # Fixed garbled message (was: "Unable to open loemack file").
    echo "ERROR: Unable to open lock file"
    exit 1
fi
if ! flock -n 3; then
    echo "ERROR: Another instance of this script is running"
    exit 1
fi
echo "Running on `uname -n`"
if ! git_topdir=$(git rev-parse --show-toplevel); then
    echo "The current working dir doesn't seem to be a git clone. Please cd there before running `basename $0`"
    exit 1
fi
cd "$git_topdir"

if [ -n "$commitish" ]; then
    echo "Running git fetch"
    git fetch &> /dev/null
    # Detach HEAD so the target branch can be deleted and re-created below.
    git checkout HEAD^0 &> /dev/null
    # Handle <branch>:<commit> format
    if echo "$commitish" | grep -q ":"; then
        commit=`echo "$commitish" | cut -d":" -f2`
        branch=`echo "$commitish" | cut -d":" -f1`
    else
        commit="$commitish"
        branch="$commitish"
    fi
    echo "Checking out $commitish"
    git branch -D $branch &> /dev/null
    if ! git checkout -f $branch &> /dev/null; then
        echo "ERROR: Git checkout failed"
        exit 1
    fi
    # Check that the specified branch really contains the commit
    commit_hash=`git rev-parse --revs-only $commit --`
    if [ -z "$commit_hash" -o "`git merge-base $branch $commit`" != "$commit_hash" ]; then
        echo "ERROR: branch $branch does not contain commit $commit"
        exit 1
    fi
    git reset --hard $commit > /dev/null
fi
# Determine name of the current branch
branch=`git symbolic-ref HEAD 2> /dev/null`
# Strip refs/heads/
branch=${branch:11}

# Setup build environment
if [ -z "$base_dir" ]; then
    base_dir="$git_topdir/build-perf-test"
fi
echo "Using working dir $base_dir"

if [ -z "$download_dir" ]; then
    download_dir="$base_dir/downloads"
fi
if [ -z "$globalres_dir" ]; then
    globalres_dir="$base_dir"
fi

# Per-run directories keyed by short git rev plus a timestamp.
timestamp=`date "+%Y%m%d%H%M%S"`
git_rev=$(git rev-parse --short HEAD) || exit 1
build_dir="$base_dir/build-$git_rev-$timestamp"
results_dir="$base_dir/results-$git_rev-$timestamp"
globalres_log="$globalres_dir/globalres.log"
machine="qemux86"

mkdir -p "$base_dir"

source ./oe-init-build-env $build_dir >/dev/null || exit 1

# Additional config: fixed MACHINE/thread counts keep runs comparable.
auto_conf="$build_dir/conf/auto.conf"
echo "MACHINE = \"$machine\"" > "$auto_conf"
echo 'BB_NUMBER_THREADS = "8"' >> "$auto_conf"
echo 'PARALLEL_MAKE = "-j 8"' >> "$auto_conf"
echo "DL_DIR = \"$download_dir\"" >> "$auto_conf"
# Disabling network sanity check slightly reduces the variance of timing results
echo 'CONNECTIVITY_CHECK_URIS = ""' >> "$auto_conf"

# Possibility to define extra settings
if [ -f "$base_dir/auto.conf.extra" ]; then
    cat "$base_dir/auto.conf.extra" >> "$auto_conf"
fi
# Run actual test script
oe-build-perf-test --out-dir "$results_dir" \
                   --globalres-file "$globalres_log" \
                   "${oe_build_perf_test_extra_opts[@]}" \
                   --lock-file "$base_dir/oe-build-perf.lock"

# Exit 1 is fatal; exit 2 means individual tests failed but results exist.
case $? in
    1)  echo "ERROR: oe-build-perf-test script failed!"
        exit 1
        ;;
    2)  echo "NOTE: some tests failed!"
        ;;
esac

# Commit results to git
if [ -n "$results_repo" ]; then
    echo -e "\nArchiving results in $results_repo"
    oe-git-archive \
        --git-dir "$results_repo" \
        --branch-name "{hostname}/{branch}/{machine}" \
        --tag-name "{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}" \
        --exclude "buildstats.json" \
        --notes "buildstats/{branch_name}" "$results_dir/buildstats.json" \
        "${oe_git_archive_extra_opts[@]}" \
        "$results_dir"

    # Generate test reports
    sanitized_branch=`echo $branch | tr / _`
    report_txt=`hostname`_${sanitized_branch}_${machine}.txt
    report_html=`hostname`_${sanitized_branch}_${machine}.html
    echo -e "\nGenerating test report"
    oe-build-perf-report -r "$results_repo" > $report_txt
    oe-build-perf-report -r "$results_repo" --html > $report_html

    # Send email report
    if [ -n "$email_to" ]; then
        echo "Emailing test report"
        os_name=`get_os_release_var PRETTY_NAME`
        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
    fi

    # Upload report files, unless we're on detached head
    if [ -n "$rsync_dst" -a -n "$branch" ]; then
        echo "Uploading test report"
        rsync $report_txt $report_html $rsync_dst
    fi
fi

echo -ne "\n\n-----------------\n"
echo "Global results file:"
echo -ne "\n"
cat "$globalres_log"

if [ -n "$archive_dir" ]; then
    echo -ne "\n\n-----------------\n"
    echo "Archiving results in $archive_dir"
    mkdir -p "$archive_dir"
    results_basename=`basename "$results_dir"`
    results_dirname=`dirname "$results_dir"`
    tar -czf "$archive_dir/`uname -n`-${results_basename}.tar.gz" -C "$results_dirname" "$results_basename"
fi

# Clean up the per-run build and results trees (archives/repo keep the data).
rm -rf "$build_dir"
rm -rf "$results_dir"

echo "DONE"

View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python3
#
# Conversion script to add new override syntax to existing bitbake metadata
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
#
# To use this script on a new layer you need to list the overrides the
# layer is known to use in the list below.
#
# Known constraint: Matching is 'loose' and in particular will find variable
# and function names with "_append" and "_remove" in them. Those need to be
# filtered out manually or in the skip list below.
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
import argparse
parser = argparse.ArgumentParser(description="Convert override syntax")
parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
parser.add_argument("path", nargs="+", help="Paths to convert")
args = parser.parse_args()

# List of strings to treat as overrides
vars = args.override
vars += ["append", "prepend", "remove"]
vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
vars += ["tune-", "pn-", "forcevariable"]
vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
vars += ["linux-gnueabi", "eabi"]
vars += ["virtclass-multilib", "virtclass-mcextend"]

# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override

# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars

# Expressions to skip if encountered, these are not overrides
skips = args.skip
skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]

# Image variables are handled the same way as package variables.
imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
packagevars += imagevars

skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext

# Pre-compile one (pattern, replacement) pair per override / short
# override / package variable; processfile() applies them per line.
vars_re = {}
for exp in vars:
    vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)

shortvars_re = {}
for exp in shortvars:
    shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")

package_re = {}
for exp in packagevars:
    package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")

# Other substitutions to make
subs = {
    'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
}
def processfile(fn):
    """Rewrite file *fn* in place, converting old-style '_' override
    syntax to the new ':' syntax using the module-level tables.

    Lines matching an entry in 'skips' are left untouched (unless they
    concern ptest overrides, which are always converted), lines matching
    'subs' receive a literal replacement, and all remaining lines are run
    through the package/var/shortvar regex substitutions.  Undecodable
    (binary) files are skipped.
    """
    print("processing file '%s'" % fn)
    fh, abs_path = tempfile.mkstemp()
    try:
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    skip = False
                    for s in skips:
                        if s in line:
                            skip = True
                            if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
                                skip = False
                    for sub in subs:
                        if sub in line:
                            line = line.replace(sub, subs[sub])
                            skip = True
                    if not skip:
                        for pvar in packagevars:
                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
                        for var in vars:
                            line = vars_re[var][0].sub(vars_re[var][1], line)
                        for shortvar in shortvars:
                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
                        if "pkg_postinst:ontarget" in line:
                            # this one is genuinely underscore-named; undo the conversion
                            line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
                    new_file.write(line)
        shutil.copymode(fn, abs_path)
        os.remove(fn)
        shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        # Not a text file: drop the partially written temp copy instead of
        # leaking it in the temp directory (the original left it behind)
        os.remove(abs_path)
# Walk every path given on the command line and convert each regular,
# non-symlink, non-git, non-skipped file in place.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"
for p in args.path:
    if os.path.isfile(p):
        processfile(p)
    else:
        print("processing directory '%s'" % p)
        for root, dirs, files in os.walk(p):
            for name in files:
                # Never rewrite this script itself
                if name == ourname:
                    continue
                fn = os.path.join(root, name)
                if os.path.islink(fn):
                    continue
                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
                    continue
                processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,145 @@
#!/usr/bin/env python3
#
# Conversion script to change LICENSE entries to SPDX identifiers
#
# Copyright (C) 2021-2022 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# At least one file or directory argument is required
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
# Mapping of legacy OE license names to their canonical SPDX identifiers.
# Substitution is done longest-name-first (see processfile), so overlapping
# entries such as "GPLv3" / "GPLv3+" are safe.
license_map = {
    "AGPL-3" : "AGPL-3.0-only",
    "AGPL-3+" : "AGPL-3.0-or-later",
    "AGPLv3" : "AGPL-3.0-only",
    "AGPLv3+" : "AGPL-3.0-or-later",
    "AGPLv3.0" : "AGPL-3.0-only",
    "AGPLv3.0+" : "AGPL-3.0-or-later",
    "AGPL-3.0" : "AGPL-3.0-only",
    "AGPL-3.0+" : "AGPL-3.0-or-later",
    "BSD-0-Clause" : "0BSD",
    "GPL-1" : "GPL-1.0-only",
    "GPL-1+" : "GPL-1.0-or-later",
    "GPLv1" : "GPL-1.0-only",
    "GPLv1+" : "GPL-1.0-or-later",
    "GPLv1.0" : "GPL-1.0-only",
    "GPLv1.0+" : "GPL-1.0-or-later",
    "GPL-1.0" : "GPL-1.0-only",
    "GPL-1.0+" : "GPL-1.0-or-later",
    "GPL-2" : "GPL-2.0-only",
    "GPL-2+" : "GPL-2.0-or-later",
    "GPLv2" : "GPL-2.0-only",
    "GPLv2+" : "GPL-2.0-or-later",
    "GPLv2.0" : "GPL-2.0-only",
    "GPLv2.0+" : "GPL-2.0-or-later",
    "GPL-2.0" : "GPL-2.0-only",
    "GPL-2.0+" : "GPL-2.0-or-later",
    "GPL-3" : "GPL-3.0-only",
    "GPL-3+" : "GPL-3.0-or-later",
    "GPLv3" : "GPL-3.0-only",
    "GPLv3+" : "GPL-3.0-or-later",
    "GPLv3.0" : "GPL-3.0-only",
    "GPLv3.0+" : "GPL-3.0-or-later",
    "GPL-3.0" : "GPL-3.0-only",
    "GPL-3.0+" : "GPL-3.0-or-later",
    "LGPLv2" : "LGPL-2.0-only",
    "LGPLv2+" : "LGPL-2.0-or-later",
    "LGPLv2.0" : "LGPL-2.0-only",
    "LGPLv2.0+" : "LGPL-2.0-or-later",
    "LGPL-2.0" : "LGPL-2.0-only",
    "LGPL-2.0+" : "LGPL-2.0-or-later",
    "LGPL2.1" : "LGPL-2.1-only",
    "LGPL2.1+" : "LGPL-2.1-or-later",
    "LGPLv2.1" : "LGPL-2.1-only",
    "LGPLv2.1+" : "LGPL-2.1-or-later",
    "LGPL-2.1" : "LGPL-2.1-only",
    "LGPL-2.1+" : "LGPL-2.1-or-later",
    "LGPLv3" : "LGPL-3.0-only",
    "LGPLv3+" : "LGPL-3.0-or-later",
    "LGPL-3.0" : "LGPL-3.0-only",
    "LGPL-3.0+" : "LGPL-3.0-or-later",
    "MPL-1" : "MPL-1.0",
    "MPLv1" : "MPL-1.0",
    "MPLv1.1" : "MPL-1.1",
    "MPLv2" : "MPL-2.0",
    "MIT-X" : "MIT",
    "MIT-style" : "MIT",
    "openssl" : "OpenSSL",
    "PSF" : "PSF-2.0",
    "PSFv2" : "PSF-2.0",
    "Python-2" : "Python-2.0",
    "Apachev2" : "Apache-2.0",
    "Apache-2" : "Apache-2.0",
    "Artisticv1" : "Artistic-1.0",
    "Artistic-1" : "Artistic-1.0",
    "AFL-2" : "AFL-2.0",
    "AFL-1" : "AFL-1.2",
    "AFLv2" : "AFL-2.0",
    "AFLv1" : "AFL-1.2",
    "CDDLv1" : "CDDL-1.0",
    "CDDL-1" : "CDDL-1.0",
    "EPLv1.0" : "EPL-1.0",
    "FreeType" : "FTL",
    "Nauman" : "Naumen",
    "tcl" : "TCL",
    "vim" : "Vim",
    "SGIv1" : "SGI-1",
}
def processfile(fn):
    """Rewrite LICENSE lines in *fn*, mapping legacy license names to
    SPDX identifiers via the module-level license_map.

    Only lines starting with "LICENSE" are touched.  Longer names are
    substituted first so e.g. "GPLv3+" is not clobbered by "GPLv3", and a
    trailing delimiter is required so partial names never match.  The file
    is replaced only when something actually changed; undecodable (binary)
    files are skipped.
    """
    print("processing file '%s'" % fn)
    fh, abs_path = tempfile.mkstemp()
    modified = False
    try:
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if not line.startswith("LICENSE"):
                        new_file.write(line)
                        continue
                    orig = line
                    for license in sorted(license_map, key=len, reverse=True):
                        for ending in ['"', "'", " ", ")"]:
                            line = line.replace(license + ending, license_map[license] + ending)
                    if orig != line:
                        modified = True
                    new_file.write(line)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Nothing changed: remove the temp copy rather than leaking
            # one temp file per unmodified input (original behavior)
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Binary file: drop the partial temp copy and move on
        os.remove(abs_path)
# Single-file mode, or recursive directory walk over every argument
ourname = os.path.basename(sys.argv[0])
ourversion = "0.01"
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            # Never rewrite this script itself
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
#
# Conversion script to update SRC_URI to add branch to git urls
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# At least one file or directory argument is required
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
def processfile(fn):
    """Rewrite git/gitsm SRC_URI entries in *fn* in place.

    Adds ';branch=master' to URLs with no explicit branch, and forces
    ';protocol=https' for github.com URLs.  Mirror/wildcard lines and a
    couple of known-special files are left untouched.  The file is only
    replaced when a line actually changed; undecodable (binary) files
    are skipped.
    """
    def matchline(line):
        # Mirror definitions, wildcard URLs and GNOME_GIT expansions are
        # not plain fetcher URLs - leave them alone
        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
            return False
        return True
    print("processing file '%s'" % fn)
    # These files intentionally contain branch-less example/alias URLs
    if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
        return
    fh, abs_path = tempfile.mkstemp()
    modified = False
    try:
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
                        if line.endswith('"\n'):
                            line = line.replace('"\n', ';branch=master"\n')
                        elif re.search(r'\s*\\$', line):
                            # URL ends with a line-continuation backslash
                            line = re.sub(r'\s*\\$', r';branch=master \\', line)
                        modified = True
                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
                        if "protocol=git" in line:
                            line = line.replace('protocol=git', 'protocol=https')
                        elif line.endswith('"\n'):
                            line = line.replace('"\n', ';protocol=https"\n')
                        elif re.search(r'\s*\\$', line):
                            line = re.sub(r'\s*\\$', r';protocol=https \\', line)
                        modified = True
                    new_file.write(line)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # No changes: drop the temp copy instead of leaking it
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Binary file: clean up the partial temp copy
        os.remove(abs_path)
# Single-file mode, or recursive directory walk over every argument
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            # Never rewrite this script itself
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
                continue
            processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
#
# Conversion script to rename variables to versions with improved terminology.
# Also highlights potentially problematic language and removed variables.
#
# Copyright (C) 2021 Richard Purdie
# Copyright (C) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# At least one file or directory argument is required
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
# Old variable name -> inclusive-terminology replacement
renames = {
    "BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
    "BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
    "BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
    "BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
    "BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
    "BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
    "CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
    "CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
    "MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
    "PNBLACKLIST" : "SKIP_RECIPE",
    "SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
    "SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
    "SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
    "SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
    "SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
    "UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
    "ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
    "ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
    "ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
    "ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
    "ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
    "LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
}
# Variables dropped entirely - uses must be reworked by hand
removed_list = [
    "BB_STAMP_WHITELIST",
    "BB_STAMP_POLICY",
    "INHERIT_BLACKLIST",
    "TUNEABI_WHITELIST",
]
# Words that merely WARRANT a manual review when seen in any line
context_check_list = [
    "blacklist",
    "whitelist",
    "abort",
]
def processfile(fn):
    """Apply the terminology renames to *fn* in place and warn about
    lines needing manual attention.

    Lines mentioning BB_RENAMED_VARIABLE are copied through untouched
    (they intentionally reference old names).  Renamed variables are
    substituted, removed variables and flagged words are reported with
    their line number, and the file is only replaced when a rename
    actually happened.  Undecodable (binary) files are skipped.
    """
    print("processing file '%s'" % fn)
    fh, abs_path = tempfile.mkstemp()
    modified = False
    try:
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                lineno = 0
                for line in old_file:
                    lineno += 1
                    if not line or "BB_RENAMED_VARIABLE" in line:
                        # Copy through unchanged - the original code
                        # 'continue'd without writing, silently DELETING
                        # these lines whenever the file was rewritten.
                        new_file.write(line)
                        continue
                    # Do the renames
                    for old_name, new_name in renames.items():
                        if old_name in line:
                            line = line.replace(old_name, new_name)
                            modified = True
                    # Find removed names
                    for removed_name in removed_list:
                        if removed_name in line:
                            print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
                    for check_word in context_check_list:
                        if re.search(check_word, line, re.IGNORECASE):
                            print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
                    new_file.write(line)
        if modified:
            print("*** Modified file '%s'" % (fn))
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Nothing renamed: remove the temp copy instead of leaking it
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Binary file: drop the partial temp copy
        os.remove(abs_path)
# Single-file mode, or recursive directory walk over every argument
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            # Never rewrite this script itself
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,172 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# 1MB blocksize
BLOCKSIZE=1048576
# Print brief command-line usage
usage() {
    echo "Usage: $(basename $0) IMAGE DEVICE"
}
# Print size/mtime/type details of the image file passed as $1
image_details() {
    IMG=$1
    echo "Image details"
    echo "============="
    echo "    image: $(basename $IMG)"
    # stat format is different on Mac OS and Linux
    if [ "$(uname)" = "Darwin" ]; then
        echo "     size: $(stat -L -f '%z bytes' $IMG)"
        echo " modified: $(stat -L -f '%Sm' $IMG)"
    else
        echo "     size: $(stat -L -c '%s bytes' $IMG)"
        echo " modified: $(stat -L -c '%y' $IMG)"
    fi
    echo "     type: $(file -L -b $IMG)"
    echo ""
}
# Print vendor/model/size details of the target device ($DEVICE global)
device_details() {
    BLOCK_SIZE=512
    echo "Device details"
    echo "=============="
    # Collect disk info using diskutil on Mac OS
    if [ "$(uname)" = "Darwin" ]; then
        diskutil info $DEVICE | egrep "(Device Node|Media Name|Total Size)"
        return
    fi
    # Default / Linux information collection
    # Resolve symlinks (e.g. /dev/disk/by-id/...) to the real node
    ACTUAL_DEVICE=`readlink -f $DEVICE`
    DEV=`basename $ACTUAL_DEVICE`
    if [ "$ACTUAL_DEVICE" != "$DEVICE" ] ; then
        echo "  device: $DEVICE -> $ACTUAL_DEVICE"
    else
        echo "  device: $DEVICE"
    fi
    if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
        echo "  vendor: $(cat /sys/class/block/$DEV/device/vendor)"
    else
        echo "  vendor: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/device/model" ]; then
        echo "   model: $(cat /sys/class/block/$DEV/device/model)"
    else
        echo "   model: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/size" ]; then
        # sysfs 'size' is in 512-byte sectors regardless of hardware sector size
        echo "    size: $(($(cat /sys/class/block/$DEV/size) * $BLOCK_SIZE)) bytes"
    else
        echo "    size: UNKNOWN"
    fi
    echo ""
}
# Return 0 (success) if $1 appears as a mounted /dev device in the
# current mount table, 1 otherwise
check_mount_device() {
    # Read /proc/self/mounts directly instead of the original's
    # useless 'cat | awk' pipeline
    if awk '{ print $1 }' /proc/self/mounts | grep /dev/ | grep -q -E "^$1$" ; then
        return 0
    fi
    return 1
}
# Return 0 if device $1, its parent disk, or any of its partitions is
# currently mounted; 1 otherwise
is_mounted() {
    if [ "$(uname)" = "Darwin" ]; then
        # On macOS check df output; partitions look like ${DEV}s1, ${DEV}s2...
        if df | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1(s[0-9]+)?$" ; then
            return 0
        fi
    else
        # Direct match in the mount table
        if check_mount_device $1 ; then
            return 0
        fi
        DEV=`basename $1`
        if [ -d /sys/class/block/$DEV/ ] ; then
            # If $1 is a partition, check whether its parent disk is mounted
            PARENT_BLKDEV=`basename $(readlink -f "/sys/class/block/$DEV/..")`
            if [ "$PARENT_BLKDEV" != "block" ] ; then
                if check_mount_device $PARENT_BLKDEV ; then
                    return 0
                fi
            fi
            # If $1 is a whole disk, check each of its partitions
            for CHILD_BLKDEV in `find /sys/class/block/$DEV/ -mindepth 1 -maxdepth 1 -name "$DEV*" -type d`
            do
                if check_mount_device /dev/`basename $CHILD_BLKDEV` ; then
                    return 0
                fi
            done
        fi
    fi
    return 1
}
# Return 0 if block device $1 has holders (e.g. it is part of an LVM
# volume group or RAID array), 1 otherwise
is_inuse() {
    HOLDERS_DIR="/sys/class/block/`basename $1`/holders"
    # Use an explicit -n "..." test: the original bare [ `ls -A ...` ]
    # breaks with "binary operator expected" when there are 2+ holders
    if [ -d "$HOLDERS_DIR" ] && [ -n "$(ls -A $HOLDERS_DIR)" ] ; then
        return 0
    fi
    return 1
}
# Argument validation: exactly IMAGE and DEVICE are required
if [ $# -ne 2 ]; then
    usage
    exit 1
fi
IMAGE=$1
DEVICE=$2
if [ ! -e "$IMAGE" ]; then
    echo "ERROR: Image $IMAGE does not exist"
    usage
    exit 1
fi
if [ ! -e "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE does not exist"
    usage
    exit 1
fi
if [ "$(uname)" = "Darwin" ]; then
    # readlink doesn't support -f on MacOS, just assume it isn't a symlink
    ACTUAL_DEVICE=$DEVICE
else
    ACTUAL_DEVICE=`readlink -f $DEVICE`
fi
# Safety checks before destroying the device's contents
if is_mounted $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently mounted - check if this is the right device, and unmount it first if so"
    device_details
    exit 1
fi
if is_inuse $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently in use (possibly part of LVM) - check if this is the right device!"
    device_details
    exit 1
fi
if [ ! -w "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE is not writable - possibly use sudo?"
    usage
    exit 1
fi
# Show what will be written where, then require explicit confirmation
image_details $IMAGE
device_details
printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
if [ "$RESPONSE" != "y" ]; then
    echo "Write aborted"
    exit 0
fi
echo "Writing image..."
# Prefer pv for a progress bar when it is installed
if which pv >/dev/null 2>&1; then
    pv "$IMAGE" | dd of="$DEVICE" bs="$BLOCKSIZE"
else
    dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
fi
sync

View File

@@ -0,0 +1,245 @@
#!/usr/bin/env python3
# devtool stress tester
#
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
#
# Copyright 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import os.path
import subprocess
import re
import argparse
import logging
import tempfile
import shutil
import signal
import fnmatch
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
sys.path.insert(0, scripts_lib_path)
import scriptutils
import argparse_oe
logger = scriptutils.logger_create('devtool-stress')
def select_recipes(args):
    """Build the list of recipe names (PNs) to stress-test.

    Queries bitbake (via tinfoil) for all known recipes, drops those
    inheriting any class in args.skip_classes, then applies the
    --resume-from / --only / --skip filters.

    NOTE(review): on an invalid --resume-from or --only value this
    returns the int 1 instead of a list; callers iterate the result, so
    that path would raise TypeError rather than exit cleanly - confirm
    whether this matches the upstream intent.
    """
    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
    (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecaches[''], pkg_pn)
    skip_classes = args.skip_classes.split(',')
    recipelist = []
    for pn in sorted(pkg_pn):
        pref = preferred_versions[pn]
        # Class names are derived from the basenames of the inherited files
        inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecaches[''].inherits[pref[1]]]
        for cls in skip_classes:
            if cls in inherits:
                break
        else:
            # for/else: only append when no skip class matched
            recipelist.append(pn)
    tinfoil.shutdown()
    resume_from = args.resume_from
    if resume_from:
        if not resume_from in recipelist:
            print('%s is not a testable recipe' % resume_from)
            return 1
    if args.only:
        only = args.only.split(',')
        # Validate each --only pattern matches at least one recipe
        for onlyitem in only:
            for pn in recipelist:
                if fnmatch.fnmatch(pn, onlyitem):
                    break
            else:
                print('%s does not match any testable recipe' % onlyitem)
                return 1
    else:
        only = None
    if args.skip:
        skip = args.skip.split(',')
    else:
        skip = []
    recipes = []
    for pn in recipelist:
        # Skip everything before the --resume-from recipe (inclusive start)
        if resume_from:
            if pn == resume_from:
                resume_from = None
            else:
                continue
        if args.only:
            for item in only:
                if fnmatch.fnmatch(pn, item):
                    break
            else:
                continue
        skipit = False
        for item in skip:
            if fnmatch.fnmatch(pn, item):
                skipit = True
        if skipit:
            continue
        recipes.append(pn)
    return recipes
def stress_extract(args):
    """Run 'devtool extract' on every selected recipe.

    Prints one status line per recipe (ok / skipped / failed), logs each
    failure to stress_<pn>_extract.log, and returns 1 if any recipe
    failed, 0 otherwise.
    """
    import bb.process
    recipes = select_recipes(args)
    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Own process group so Ctrl+C can kill all spawned children at once
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            skipped = None
            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool extract %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                # devtool uses exit code 4 for "recipe not compatible"
                if exc.exitcode == 4:
                    skipped = 'incompatible'
                else:
                    failed = True
                    with open('stress_%s_extract.log' % pn, 'w') as f:
                        f.write(str(exc))
            if os.path.exists(srctree):
                shutil.rmtree(srctree)
            if failed:
                print('failed')
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)
    if failures:
        return 1
    else:
        return 0
def stress_modify(args):
    """Run 'devtool modify' + 'bitbake -c install' + 'devtool reset' on
    every selected recipe.

    Prints one status line per recipe, logs failures to
    stress_<pn>_{modify,install}.log, and returns 1 if any recipe
    failed, 0 otherwise.
    """
    import bb.process
    recipes = select_recipes(args)
    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Own process group so Ctrl+C can kill all spawned children at once
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            reset = True
            skipped = None
            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool modify -x %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                # devtool uses exit code 4 for "recipe not compatible"
                if exc.exitcode == 4:
                    skipped = 'incompatible'
                else:
                    with open('stress_%s_modify.log' % pn, 'w') as f:
                        f.write(str(exc))
                    failed = 'modify'
                    # modify failed, so there is nothing to reset
                    reset = False
            if not skipped:
                if not failed:
                    try:
                        bb.process.run('bitbake -c install %s' % pn)
                    except bb.process.CmdError as exc:
                        with open('stress_%s_install.log' % pn, 'w') as f:
                            f.write(str(exc))
                        failed = 'build'
                if reset:
                    try:
                        bb.process.run('devtool reset %s' % pn)
                    except bb.process.CmdError as exc:
                        # If reset fails the workspace is wedged; stop here
                        print('devtool reset failed: %s' % str(exc))
                        break
            if os.path.exists(srctree):
                shutil.rmtree(srctree)
            if failed:
                print('failed (%s)' % failed)
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)
    if failures:
        return 1
    else:
        return 0
def main():
    """Parse command-line arguments and dispatch to the chosen
    stress subcommand.

    Returns the subcommand's exit status (0 success, 1 failure) so the
    caller can propagate it; the original discarded it.
    """
    parser = argparse_oe.ArgumentParser(description="devtool stress tester",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
    parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
    parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST', default='gcc-source-*,kernel-devsrc,package-index,perf,meta-world-pkgdata,glibc-locale,glibc-mtrace,glibc-scripts,os-release')
    parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True
    parser_modify = subparsers.add_parser('modify',
                                          help='Run "devtool modify" followed by a build with bitbake on matching recipes',
                                          description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
    parser_modify.set_defaults(func=stress_modify)
    parser_extract = subparsers.add_parser('extract',
                                           help='Run "devtool extract" on matching recipes',
                                           description='Runs "devtool extract" on matching recipes')
    parser_extract.set_defaults(func=stress_extract)
    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        return 1
    logger.debug('Found bitbake path: %s' % bitbakepath)
    # Propagate the subcommand's status (the original assigned it to a
    # local and returned None, so failures exited with status 0)
    return args.func(args)
if __name__ == "__main__":
    # Exit with main()'s return code so CI can detect stress failures
    sys.exit(main())

View File

@@ -0,0 +1,57 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Simple script to show a manual power prompt for when you want to use
# automated hardware testing with testimage.bbclass but you don't have a
# web-enabled power strip or similar to do the power on/off/cycle.
#
# You can enable it by enabling testimage (see the Yocto Project
# Development manual "Performing Automated Runtime Testing" section)
# and setting the following in your local.conf:
#
# TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
#
# Map each power action argument to the human prompt text; the last
# action on the command line wins
PROMPT=""
while true; do
    case $1 in
        on)
            PROMPT="Please turn device power on";;
        off)
            PROMPT="Please turn device power off";;
        cycle)
            PROMPT="Please click Done, then turn the device power off then on";;
        "")
            break;;
    esac
    shift
done
if [ "$PROMPT" = "" ] ; then
    echo "ERROR: no power action specified on command line"
    exit 2
fi
# Pick whichever GUI dialog tool is available
if [ "`which kdialog 2>/dev/null`" != "" ] ; then
    DIALOGUTIL="kdialog"
elif [ "`which zenity 2>/dev/null`" != "" ] ; then
    DIALOGUTIL="zenity"
else
    echo "ERROR: couldn't find program to display a message, install kdialog or zenity"
    exit 3
fi
if [ "$DIALOGUTIL" = "kdialog" ] ; then
    kdialog --yesno "$PROMPT" --title "TestImage Power Control" --yes-label "Done" --no-label "Cancel test"
elif [ "$DIALOGUTIL" = "zenity" ] ; then
    zenity --question --text="$PROMPT" --title="TestImage Power Control" --ok-label="Done" --cancel-label="Cancel test"
fi
# Non-zero status from the dialog means the user chose to cancel
if [ "$?" != "0" ] ; then
    echo "User cancelled test at power prompt"
    exit 1
fi

View File

@@ -0,0 +1,97 @@
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Perform an audit of which packages provide documentation and which
# are missing -doc packages.
#
# Setup requirements: be sure to be building for MACHINE=qemux86. Run
# this script after source'ing the build environment script, so you're
# running it from build/ directory.
#
# Output report files (simple = package list, detail = with explanations)
REPORT_DOC_SIMPLE="documentation_exists.txt"
REPORT_DOC_DETAIL="documentation_exists_detail.txt"
REPORT_MISSING_SIMPLE="documentation_missing.txt"
REPORT_MISSING_DETAIL="documentation_missing_detail.txt"
REPORT_BUILD_ERRORS="build_errors.txt"
rm -rf $REPORT_DOC_SIMPLE $REPORT_DOC_DETAIL $REPORT_MISSING_SIMPLE $REPORT_MISSING_DETAIL
BITBAKE=`which bitbake`
if [ -z "$BITBAKE" ]; then
    echo "Error: bitbake command not found."
    echo "Did you forget to source the build environment script?"
    exit 1
fi
echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
# Iterate over every recipe name reported by 'bitbake -s'
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
    if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
        "$pkg" == "Recipe"  ||
        "$pkg" == "Parsing" || "$pkg" == "Package" ||
        "$pkg" == "NOTE:"   || "$pkg" == "WARNING:" ||
        "$pkg" == "done."   || "$pkg" == "===========" ]]
    then
        # Skip initial bitbake output
        continue
    fi
    if [[ "$pkg" =~ -native$ || "$pkg" =~ -nativesdk$ ||
        "$pkg" =~ -cross-canadian ]]; then
        # Skip native/nativesdk/cross-canadian recipes
        continue
    fi
    if [[ "$pkg" =~ ^meta- || "$pkg" =~ ^packagegroup- || "$pkg" =~ -image ]]; then
        # Skip meta, task and image recipes
        continue
    fi
    if [[ "$pkg" =~ ^glibc- || "$pkg" =~ ^libiconv$ ||
        "$pkg" =~ -toolchain$ || "$pkg" =~ ^package-index$ ||
        "$pkg" =~ ^linux- || "$pkg" =~ ^adt-installer$ ||
        "$pkg" =~ ^eds-tools$ || "$pkg" =~ ^external-python-tarball$ ||
        "$pkg" =~ ^qt4-embedded$ || "$pkg" =~ ^qt-mobility ]]; then
        # Skip glibc, libiconv, -toolchain, and other recipes known
        # to cause build conflicts or trigger false positives.
        continue
    fi
    echo "Building package $pkg..."
    bitbake $pkg > /dev/null
    if [ $? -ne 0 ]; then
        echo "There was an error building package $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_BUILD_ERRORS
        # Do not skip the remaining tests, as sometimes the
        # exit status is 1 due to QA errors, and we can still
        # perform the -doc checks.
    fi
    echo "$pkg built successfully, checking for a documentation package..."
    # Extract the recipe's WORKDIR from bitbake's environment dump
    WORKDIR=`bitbake -e $pkg | grep ^WORKDIR | awk -F '=' '{ print \$2 }' | awk -F '"' '{ print \$2 }'`
    FIND_DOC_PKG=`find $WORKDIR/packages-split/*-doc -maxdepth 0 -type d`
    if [ -z "$FIND_DOC_PKG" ]; then
        # No -doc package was generated:
        echo "No -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi
    FIND_DOC_FILES=`find $FIND_DOC_PKG -type f`
    if [ -z "$FIND_DOC_FILES" ]; then
        # No files shipped with the -doc package:
        echo "No files shipped with the -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi
    echo "Documentation shipped with $pkg:" >> "$REPORT_DOC_DETAIL"
    echo "$FIND_DOC_FILES" >> "$REPORT_DOC_DETAIL"
    echo "" >> "$REPORT_DOC_DETAIL"
    echo "$pkg" >> "$REPORT_DOC_SIMPLE"
done

View File

@@ -0,0 +1,118 @@
#!/usr/bin/env python3
# Simple graph query utility
# useful for getting answers from .dot files produced by bitbake -g
#
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
#
# Copyright 2013 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
sys.path.insert(0, scripts_lib_path)
import argparse_oe
def get_path_networkx(dotfile, fromnode, tonode):
    """Yield all simple paths from *fromnode* to *tonode* in *dotfile*.

    Requires the third-party networkx module; exits with an error (and,
    for a missing node, close-match suggestions) rather than raising.
    """
    try:
        import networkx
    except ImportError:
        print('ERROR: Please install the networkx python module')
        sys.exit(1)
    graph = networkx.DiGraph(networkx.nx_pydot.read_dot(dotfile))
    def node_missing(node):
        # Suggest similarly named nodes to help typo'd queries
        import difflib
        close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
        if close_matches:
            print('ERROR: no node "%s" in graph. Close matches:\n  %s' % (node, '\n  '.join(close_matches)))
        sys.exit(1)
    if not fromnode in graph:
        node_missing(fromnode)
    if not tonode in graph:
        node_missing(tonode)
    return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
def find_paths(args):
    """Print every simple path from args.fromnode to args.tonode found
    in args.dotfile; return 1 when no path exists."""
    last = None
    for last in get_path_networkx(args.dotfile, args.fromnode, args.tonode):
        print(" -> ".join(str(node) for node in last))
    if not last:
        print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode))
        return 1
def filter_graph(args):
    """Filter a bitbake task-depends.dot graph down to the given references.

    Prints the graph header/footer, every edge whose endpoints both match
    an entry in args.ref (either a plain target name or a full
    target.taskname, optionally excluding tasks named in
    args.exclude_tasks, wildcards allowed), and - unless --no-nodes -
    matching node-formatting lines.
    """
    import fnmatch
    exclude_tasks = []
    if args.exclude_tasks:
        for task in args.exclude_tasks.split(','):
            # Allow users to omit the do_ prefix
            if not task.startswith('do_'):
                task = 'do_%s' % task
            exclude_tasks.append(task)
    def checkref(strval):
        # References look like "target.do_taskname" (possibly quoted)
        strval = strval.strip().strip('"')
        target, taskname = strval.rsplit('.', 1)
        if exclude_tasks:
            for extask in exclude_tasks:
                if fnmatch.fnmatch(taskname, extask):
                    return False
        if strval in args.ref or target in args.ref:
            return True
        return False
    with open(args.infile, 'r') as f:
        for line in f:
            line = line.rstrip()
            if not line:
                # Guard: the original crashed with IndexError on blank
                # lines (''.split()[0] in the node branch below)
                continue
            if line.startswith(('digraph', '}')):
                print(line)
            elif '->' in line:
                linesplit = line.split('->')
                if checkref(linesplit[0]) and checkref(linesplit[1]):
                    print(line)
            elif (not args.no_nodes) and checkref(line.split()[0]):
                print(line)
def main():
    """Parse the command line and dispatch to the selected subcommand.

    Returns the subcommand's return value (None or an error code), which
    the __main__ guard passes to sys.exit().
    """
    parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True
    parser_find_paths = subparsers.add_parser('find-paths',
                                              help='Find all of the paths between two nodes in a dot graph',
                                              description='Finds all of the paths between two nodes in a dot graph')
    parser_find_paths.add_argument('dotfile', help='.dot graph to search in')
    parser_find_paths.add_argument('fromnode', help='starting node name')
    parser_find_paths.add_argument('tonode', help='ending node name')
    parser_find_paths.set_defaults(func=find_paths)
    parser_filter = subparsers.add_parser('filter',
                                          help='Pare down a task graph to contain only the specified references',
                                          description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references')
    parser_filter.add_argument('infile', help='Input file')
    parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)')
    parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines')
    parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)')
    parser_filter.set_defaults(func=filter_graph)
    args = parser.parse_args()
    ret = args.func(args)
    return ret
if __name__ == "__main__":
    # Propagate the subcommand's return value as the process exit status
    ret = main()
    sys.exit(ret)

View File

@@ -0,0 +1,523 @@
#!/usr/bin/env python3
# Script to extract information from image manifests
#
# Copyright (C) 2018 Intel Corporation
# Copyright (C) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import logging
import json
import shutil
import tempfile
import tarfile
from collections import OrderedDict
# Make the sibling scripts/lib directory importable, then locate the
# bitbake and OE python libraries before importing them
scripts_path = os.path.dirname(__file__)
lib_path = scripts_path + '/../lib'
sys.path = sys.path + [lib_path]
import scriptutils
logger = scriptutils.logger_create(os.path.basename(__file__))
import argparse_oe
import scriptpath
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    # Cannot do anything useful without bitbake's python modules
    logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
    sys.exit(1)
logger.debug('Using standard bitbake path %s' % bitbakepath)
scriptpath.add_oe_lib_path()
import bb.tinfoil
import bb.utils
import oe.utils
import oe.recipeutils
def get_pkg_list(manifest):
    """Return the sorted package names listed in *manifest*.

    Accepts both image manifest lines ("pkg arch version") and build
    dependency lines (a bare package name); any other line shape is
    ignored.
    """
    packages = []
    with open(manifest, 'r') as mf:
        for entry in mf:
            fields = entry.split()
            # 3 fields: image manifest entry; 1 field: dependency entry
            if len(fields) in (3, 1):
                packages.append(fields[0])
    return sorted(packages)
def list_packages(args):
    """Print each package named in args.manifest, one per line."""
    for pkg in get_pkg_list(args.manifest):
        print('%s' % pkg)
def pkg2recipe(tinfoil, pkg):
    """Map a runtime package name to the recipe (PN) that builds it.

    Looks *pkg* up in the pkgdata runtime-reverse index.  Returns None
    for native packages and for packages with no readable pkgdata entry.
    """
    if "-native" in pkg:
        # Native packages have no target pkgdata
        logger.info('skipping %s' % pkg)
        return None
    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
    logger.debug('pkgdatafile %s' % pkgdatafile)
    try:
        # 'with' guarantees the pkgdata file is closed; the original
        # opened it without ever closing, leaking a handle per lookup
        with open(pkgdatafile, 'r') as f:
            for line in f:
                if line.startswith('PN:'):
                    return line.split(':', 1)[1].strip()
    except Exception:
        logger.warning('%s is missing' % pkgdatafile)
    return None
def get_recipe_list(manifest, tinfoil):
    """Return the sorted, de-duplicated recipes providing the packages
    listed in *manifest* (unresolvable packages are silently dropped)."""
    seen = []
    for pkg in get_pkg_list(manifest):
        recipe = pkg2recipe(tinfoil, pkg)
        if recipe and recipe not in seen:
            seen.append(recipe)
    return sorted(seen)
def list_recipes(args):
    """Print, one per line, the recipes that provide the packages named
    in args.manifest."""
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        found = get_recipe_list(args.manifest, tinfoil)
    # tinfoil is shut down before any output is produced
    for name in sorted(found):
        print('%s' % name)
def list_layers(args):
    """Dump JSON metadata (branch, URL, subdir, commit) for every layer
    in the current BBLAYERS configuration to args.output."""

    def find_git_repo(pth):
        # Walk upwards until a directory containing .git is found
        checkpth = pth
        while checkpth != os.sep:
            if os.path.exists(os.path.join(checkpth, '.git')):
                return checkpth
            checkpth = os.path.dirname(checkpth)
        return None

    def get_git_remote_branch(repodir):
        # "remote/branch" of the checked-out branch's upstream, or None
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_head_commit(repodir):
        # Full SHA of HEAD, or None if git fails
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_repo_url(repodir, remote='origin'):
        import bb.process
        # Try to get upstream repo location from origin remote
        try:
            stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            for line in stdout.splitlines():
                splitline = line.split()
                if len(splitline) > 1:
                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
                        return splitline[1]
        return None

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        layers = OrderedDict()
        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
            layerdata = OrderedDict()
            layername = os.path.basename(layerdir)
            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
            if layername in layers:
                # Disambiguate duplicate basenames with the parent dir name
                logger.warning('layername %s is not unique in configuration' % layername)
                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
                logger.debug('trying layername %s' % layername)
                if layername in layers:
                    logger.error('Layer name %s is not unique in configuration' % layername)
                    sys.exit(2)
            repodir = find_git_repo(layerdir)
            if repodir:
                remotebranch = get_git_remote_branch(repodir)
                remote = 'origin'
                if remotebranch and '/' in remotebranch:
                    rbsplit = remotebranch.split('/', 1)
                    layerdata['actual_branch'] = rbsplit[1]
                    remote = rbsplit[0]
                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
                if os.path.abspath(repodir) != os.path.abspath(layerdir):
                    # Layer lives in a subdirectory of the repository
                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
                commit = get_git_head_commit(repodir)
                if commit:
                    layerdata['vcs_commit'] = commit
            layers[layername] = layerdata
    json.dump(layers, args.output, indent=2)
def get_recipe(args):
    """Look up and print which recipe provides args.package."""
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        provider = pkg2recipe(tinfoil, args.package)
        print(' %s package provided by %s' % (args.package, provider))
def pkg_dependencies(args):
    """Print all recipes (and optionally native/cross recipes) needed to
    build the recipe that provides args.package."""
    def get_recipe_info(tinfoil, recipe):
        # Parse the recipe (with its bbappends) and annotate the
        # datastore with pn/pv for later reporting; exits on failure.
        try:
            info = tinfoil.get_recipe_info(recipe)
        except Exception:
            logger.error('Failed to get recipe info for: %s' % recipe)
            sys.exit(1)
        if not info:
            logger.warning('No recipe info found for: %s' % recipe)
            sys.exit(1)
        append_files = tinfoil.get_file_appends(info.fn)
        appends = True
        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
        data.pn = info.pn
        data.pv = info.pv
        return data
    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
        # Depth-first walk of DEPENDS; 'packages' and 'recipe_info' are
        # shared accumulators mutated across recursive calls.
        spaces = ' ' * order
        data = recipe_info[rn]
        if args.native:
            logger.debug('%s- %s' % (spaces, data.pn))
        elif "-native" not in data.pn:
            if "cross" not in data.pn:
                logger.debug('%s- %s' % (spaces, data.pn))
        depends = []
        for dep in data.depends:
            if dep not in assume_provided:
                depends.append(dep)
        # First find all dependencies not in package list.
        for dep in depends:
            if dep not in packages:
                packages.append(dep)
                dep_data = get_recipe_info(tinfoil, dep)
                # Do this once now to reduce the number of bitbake calls.
                dep_data.depends = dep_data.getVar('DEPENDS').split()
                recipe_info[dep] = dep_data
        # Then recursively analyze all of the dependencies for the current recipe.
        for dep in depends:
            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare()
        # ASSUME_PROVIDED entries are host-supplied and excluded from the walk
        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        logger.debug('assumed provided:')
        for ap in sorted(assume_provided):
            logger.debug(' - %s' % ap)
        recipe = pkg2recipe(tinfoil, args.package)
        data = get_recipe_info(tinfoil, recipe)
        data.depends = []
        depends = data.getVar('DEPENDS').split()
        for dep in depends:
            if dep not in assume_provided:
                data.depends.append(dep)
        recipe_info = dict([(recipe, data)])
        packages = []
        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)
        print('\nThe following packages are required to build %s' % recipe)
        for p in sorted(packages):
            data = recipe_info[p]
            if "-native" not in data.pn:
                if "cross" not in data.pn:
                    print(" %s (%s)" % (data.pn,p))
        if args.native:
            print('\nThe following native packages are required to build %s' % recipe)
            for p in sorted(packages):
                data = recipe_info[p]
                if "-native" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
                if "cross" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
def default_config():
    """Build the default configuration for manifest-info export.

    'variables' maps each recipe variable to 'yes'/'no' (record it or
    not); the remaining keys enable optional export features.
    """
    tracked_vars = [
        ('PV', 'yes'),
        ('SUMMARY', 'no'),
        ('DESCRIPTION', 'no'),
        ('SECTION', 'no'),
        ('LICENSE', 'yes'),
        ('HOMEPAGE', 'no'),
        ('BUGTRACKER', 'no'),
        ('PROVIDES', 'no'),
        ('BBCLASSEXTEND', 'no'),
        ('DEPENDS', 'no'),
        ('PACKAGECONFIG', 'no'),
        ('SRC_URI', 'yes'),
        ('SRCREV', 'yes'),
        ('EXTRA_OECONF', 'no'),
        ('EXTRA_OESCONS', 'no'),
        ('EXTRA_OECMAKE', 'no'),
        ('EXTRA_OEMESON', 'no'),
    ]
    features = [
        ('variables', OrderedDict(tracked_vars)),
        ('filepath', 'no'),
        ('sha256sum', 'no'),
        ('layerdir', 'no'),
        ('layer', 'no'),
        ('inherits', 'no'),
        ('source_urls', 'no'),
        ('packageconfig_opts', 'no'),
        ('patches', 'no'),
        ('packagedir', 'no'),
    ]
    return OrderedDict(features)
def dump_config(args):
    """Write the default export configuration to default_config.json.

    args is the parsed command-line namespace (unused; kept for the
    subcommand dispatch interface).
    """
    config = default_config()
    # 'with' ensures the file is flushed and closed even on error
    # (the original left the handle open)
    with open('default_config.json', 'w') as f:
        json.dump(config, f, indent=2)
    logger.info('Default config list dumped to default_config.json')
def export_manifest_info(args):
    """Export recipe information for every package in the manifest into
    a gzipped tarball (manifest, package/recipe lists, per-recipe JSON,
    patches), driven by the configuration from --config or defaults."""
    def handle_value(value):
        # Normalise whitespace in recorded variable values
        if value:
            return oe.utils.squashspaces(value)
        else:
            return value
    if args.config:
        logger.debug('config: %s' % args.config)
        # NOTE(review): handle is never closed; consider 'with open(...)'
        f = open(args.config, 'r')
        config = json.load(f, object_pairs_hook=OrderedDict)
    else:
        config = default_config()
    if logger.isEnabledFor(logging.DEBUG):
        print('Configuration:')
        json.dump(config, sys.stdout, indent=2)
        print('')
    # All output is staged in a temp dir, then tarred up at the end
    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
    logger.debug('tmp dir: %s' % tmpoutdir)
    # export manifest
    shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))
    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        pkglist = get_pkg_list(args.manifest)
        # export pkg list
        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
        for pkg in pkglist:
            f.write('%s\n' % pkg)
        f.close()
        recipelist = []
        for pkg in pkglist:
            recipe = pkg2recipe(tinfoil,pkg)
            if recipe:
                if not recipe in recipelist:
                    recipelist.append(recipe)
        recipelist.sort()
        # export recipe list
        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
        for recipe in recipelist:
            f.write('%s\n' % recipe)
        f.close()
        try:
            rvalues = OrderedDict()
            for pn in sorted(recipelist):
                logger.debug('Package: %s' % pn)
                rd = tinfoil.parse_recipe(pn)
                rvalues[pn] = OrderedDict()
                # Record each variable the config marks as 'yes'
                for varname in config['variables']:
                    if config['variables'][varname] == 'yes':
                        rvalues[pn][varname] = handle_value(rd.getVar(varname))
                fpth = rd.getVar('FILE')
                layerdir = oe.recipeutils.find_layerdir(fpth)
                if config['filepath'] == 'yes':
                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
                if config['sha256sum'] == 'yes':
                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
                if config['layerdir'] == 'yes':
                    rvalues[pn]['layerdir'] = layerdir
                if config['layer'] == 'yes':
                    rvalues[pn]['layer'] = os.path.basename(layerdir)
                if config['inherits'] == 'yes':
                    # Classes inherited by the recipe itself, i.e. not
                    # those inherited globally by the configuration
                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
                    lr = set(rd.getVar("__inherit_cache") or [])
                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})
                if config['source_urls'] == 'yes':
                    rvalues[pn]['source_urls'] = []
                    # Record remote source URLs only, stripped of options
                    for url in (rd.getVar('SRC_URI') or '').split():
                        if not url.startswith('file://'):
                            url = url.split(';')[0]
                            rvalues[pn]['source_urls'].append(url)
                if config['packageconfig_opts'] == 'yes':
                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
                        if key == 'doc':
                            continue
                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
                if config['patches'] == 'yes':
                    patches = oe.recipeutils.get_recipe_patches(rd)
                    rvalues[pn]['patches'] = []
                    if patches:
                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
                        bb.utils.mkdirhier(recipeoutdir)
                        for patch in patches:
                            # Patches may be in other layers too
                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
                            # patchlayerdir will be None for remote patches, which we ignore
                            # (since currently they are considered as part of sources)
                            if patchlayerdir:
                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
                                shutil.copy(patch, recipeoutdir)
                if config['packagedir'] == 'yes':
                    pn_dir = os.path.join(tmpoutdir, pn)
                    bb.utils.mkdirhier(pn_dir)
                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
                    json.dump(rvalues[pn], f, indent=2)
                    f.close()
            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
                json.dump(rvalues, f, indent=2)
            # Derive the tarball name from --output or the manifest name
            if args.output:
                outname = os.path.basename(args.output)
            else:
                outname = os.path.splitext(os.path.basename(args.manifest))[0]
            if outname.endswith('.tar.gz'):
                outname = outname[:-7]
            elif outname.endswith('.tgz'):
                outname = outname[:-4]
            tarfn = outname
            if tarfn.endswith(os.sep):
                tarfn = tarfn[:-1]
            if not tarfn.endswith(('.tar.gz', '.tgz')):
                tarfn += '.tar.gz'
            with open(tarfn, 'wb') as f:
                with tarfile.open(None, "w:gz", f) as tar:
                    tar.add(tmpoutdir, outname)
        finally:
            # Always clean up the staging directory
            shutil.rmtree(tmpoutdir)
def main():
    """Build the CLI parser, dispatch to the selected subcommand handler
    and return its exit status."""
    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True
    # get recipe info
    parser_get_recipes = subparsers.add_parser('recipe-info',
                                               help='Get recipe info',
                                               description='Get recipe information for a package')
    parser_get_recipes.add_argument('package', help='Package name')
    parser_get_recipes.set_defaults(func=get_recipe)
    # list runtime dependencies
    parser_pkg_dep = subparsers.add_parser('list-depends',
                                           help='List dependencies',
                                           description='List dependencies required to build the package')
    parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
    parser_pkg_dep.add_argument('package', help='Package name')
    parser_pkg_dep.set_defaults(func=pkg_dependencies)
    # list recipes
    parser_recipes = subparsers.add_parser('list-recipes',
                                           help='List recipes producing packages within an image',
                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
    parser_recipes.add_argument('manifest', help='Manifest file')
    parser_recipes.set_defaults(func=list_recipes)
    # list packages
    parser_packages = subparsers.add_parser('list-packages',
                                            help='List packages within an image',
                                            description='Lists packages that went into an image, using the manifest')
    parser_packages.add_argument('manifest', help='Manifest file')
    parser_packages.set_defaults(func=list_packages)
    # list layers
    parser_layers = subparsers.add_parser('list-layers',
                                          help='List included layers',
                                          description='Lists included layers')
    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
                               default=sys.stdout, type=argparse.FileType('w'))
    parser_layers.set_defaults(func=list_layers)
    # dump default configuration file
    parser_dconfig = subparsers.add_parser('dump-config',
                                           help='Dump default config',
                                           description='Dump default config to default_config.json')
    parser_dconfig.set_defaults(func=dump_config)
    # export recipe info for packages in manifest
    parser_export = subparsers.add_parser('manifest-info',
                                          help='Export recipe info for a manifest',
                                          description='Export recipe information using the manifest')
    parser_export.add_argument('-c', '--config', help='load config from json file')
    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
    parser_export.add_argument('manifest', help='Manifest file')
    parser_export.set_defaults(func=export_manifest_info)
    args = parser.parse_args()
    # Global verbosity flags apply to every subcommand
    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Debug Enabled")
    elif args.quiet:
        logger.setLevel(logging.ERROR)
    ret = args.func(args)
    return ret
# Script entry point: report any uncaught exception as a traceback and
# exit non-zero so callers can detect failure.
if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)

View File

@@ -0,0 +1,167 @@
#!/usr/bin/env python3
# Copyright (C) 2013 Wind River Systems, Inc.
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# - list available recipes which have PACKAGECONFIG flags
# - list available PACKAGECONFIG flags and all affected recipes
# - list all recipes and PACKAGECONFIG information
import sys
import optparse
import os
# Make this script's ../lib directory importable so 'scriptpath' can be used
scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
lib_path = os.path.abspath(scripts_path + '/../lib')
sys.path = sys.path + [lib_path]
import scriptpath
# For importing the following modules
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
    sys.exit(1)
import bb.cooker
import bb.providers
import bb.tinfoil
def get_fnlist(bbhandler, pkg_pn, preferred):
    '''Return recipe filenames for every PN in pkg_pn, sorted by PN.

    With preferred=True only the preferred version's file is returned
    per PN; otherwise all known recipe files are included.
    '''
    if preferred:
        _, preferred_versions, _ = bb.providers.findProviders(
            bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
        return [preferred_versions[pn][1] for pn in sorted(pkg_pn)]
    filenames = []
    for pn in sorted(pkg_pn):
        filenames.extend(pkg_pn[pn])
    return filenames
def get_recipesdata(bbhandler, preferred):
    '''Parse every available recipe and keep only those that define
    PACKAGECONFIG flags, returning {filename: datastore}.'''
    recipe_cache = bbhandler.cooker.recipecaches['']
    parsed = {}
    for filename in get_fnlist(bbhandler, recipe_cache.pkg_pn, preferred):
        datastore = bbhandler.parse_recipe_file(filename)
        flags = datastore.getVarFlags("PACKAGECONFIG")
        flags.pop('doc', None)
        if flags:
            parsed[filename] = datastore
    return parsed
def collect_pkgs(data_dict):
    '''Map each recipe's PN to its sorted PACKAGECONFIG flag names.

    Returns e.g. {'pkg1': ['flag1', 'flag2', ...]}.
    '''
    pkg_dict = {}
    for recipe_fn, data in data_dict.items():
        flags = data.getVarFlags("PACKAGECONFIG")
        flags.pop('doc', None)
        pkg_dict[data.getVar("PN")] = sorted(flags)
    return pkg_dict
def collect_flags(pkg_dict):
    '''Invert the package->flags mapping: for each PACKAGECONFIG flag,
    list the packages that define it, e.g. {'flag': ['pkg1', 'pkg2']}.'''
    flag_dict = {}
    for pkgname, flaglist in pkg_dict.items():
        for flag in flaglist:
            flag_dict.setdefault(flag, []).append(pkgname)
    return flag_dict
def display_pkgs(pkg_dict):
    '''Print a two-column table of recipe names and their PACKAGECONFIG flags.'''
    title = "RECIPE NAME"
    # First column is one wider than the longest name (or the title)
    width = max([len(title) + 1] + [len(name) for name in pkg_dict]) + 1
    header = '%-*s%s' % (width, title, "PACKAGECONFIG FLAGS")
    print(header)
    print('=' * len(header))
    for name in sorted(pkg_dict):
        print('%-*s%s' % (width, name, ' '.join(pkg_dict[name])))
def display_flags(flag_dict):
    '''Print a two-column table of PACKAGECONFIG flags and the recipes
    that define each one.'''
    title = "PACKAGECONFIG FLAG"
    width = len(title) + 5  # fixed-width first column
    header = '%-*s%s' % (width, title, "RECIPE NAMES")
    print(header)
    print('=' * len(header))
    for flag in sorted(flag_dict):
        print('%-*s%s' % (width, flag, ' '.join(sorted(flag_dict[flag]))))
def display_all(data_dict):
    '''Print, per recipe: P, the recipe filename, the PACKAGECONFIG value
    ("None" when effectively empty) and each PACKAGECONFIG[flag] entry.'''
    print('=' * 50)
    for fn, data in data_dict.items():
        print(data.getVar("P"))
        print(fn)
        packageconfig = data.getVar("PACKAGECONFIG") or ''
        if not packageconfig.strip():
            packageconfig = 'None'
        print('PACKAGECONFIG %s' % packageconfig)
        for flag, value in data.getVarFlags("PACKAGECONFIG").items():
            if flag != "doc":
                print('PACKAGECONFIG[%s] %s' % (flag, value))
        print('')
def main():
    """Parse command-line options and run the selected listing mode
    (recipes, flags, or all) against the available recipe data."""
    pkg_dict = {}
    flag_dict = {}
    # Collect and validate input
    parser = optparse.OptionParser(
        description = "Lists recipes and PACKAGECONFIG flags. Without -a or -f, recipes and their available PACKAGECONFIG flags are listed.",
        usage = """
    %prog [options]""")
    parser.add_option("-f", "--flags",
            help = "list available PACKAGECONFIG flags and affected recipes",
            action="store_const", dest="listtype", const="flags", default="recipes")
    parser.add_option("-a", "--all",
            help = "list all recipes and PACKAGECONFIG information",
            action="store_const", dest="listtype", const="all")
    parser.add_option("-p", "--preferred-only",
            help = "where multiple recipe versions are available, list only the preferred version",
            action="store_true", dest="preferred", default=False)
    options, args = parser.parse_args(sys.argv)
    with bb.tinfoil.Tinfoil() as bbhandler:
        bbhandler.prepare()
        print("Gathering recipe data...")
        data_dict = get_recipesdata(bbhandler, options.preferred)
        # Dispatch on the mode chosen via -f / -a (default: recipes)
        if options.listtype == 'flags':
            pkg_dict = collect_pkgs(data_dict)
            flag_dict = collect_flags(pkg_dict)
            display_flags(flag_dict)
        elif options.listtype == 'recipes':
            pkg_dict = collect_pkgs(data_dict)
            display_pkgs(pkg_dict)
        elif options.listtype == 'all':
            display_all(data_dict)
# Entry point when run as a script
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,121 @@
#!/usr/bin/python3
#
# Send build performance test report emails
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import base64
import logging
import os
import pwd
import re
import shutil
import smtplib
import socket
import subprocess
import sys
import tempfile
from email.mime.text import MIMEText
# Setup logging
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger('oe-build-perf-report')
def parse_args(argv):
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="""Email build perf test report""")
    parser.add_argument('--debug', '-d', action='store_true',
                        help="Verbose logging")
    parser.add_argument('--quiet', '-q', action='store_true',
                        help="Only print errors")
    # The three recipient classes share the same option shape
    for opt, kind in (('--to', "Recipients"),
                      ('--cc', "Carbon copy recipients"),
                      ('--bcc', "Blind carbon copy recipients")):
        parser.add_argument(opt, action='append',
                            help="{} of the email".format(kind))
    parser.add_argument('--subject', default="Yocto build perf test report",
                        help="Email subject")
    parser.add_argument('--outdir', '-o',
                        help="Store files in OUTDIR. Can be used to preserve "
                             "the email parts")
    parser.add_argument('--text',
                        help="Plain text message")
    parsed = parser.parse_args(argv)
    # --text is effectively mandatory
    if not parsed.text:
        parser.error("Please specify --text")
    return parsed
def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
    """Compose and send the report email via the local SMTP server.

    text_fn: path of the plain-text report body
    subject: email subject line
    recipients: list of To: addresses
    copy / blind_copy: optional Cc: / Bcc: address lists

    NOTE(review): the mutable default arguments ([]) are never mutated
    here, but None defaults would be the safer idiom.
    """
    # Generate email message
    with open(text_fn) as f:
        msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
    pw_data = pwd.getpwuid(os.getuid())
    full_name = pw_data.pw_gecos.split(',')[0]
    # Sender defaults to <user>@<fqdn> unless EMAIL is set in the environment
    email = os.environ.get('EMAIL',
                           '{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
    msg['From'] = "{} <{}>".format(full_name, email)
    msg['To'] = ', '.join(recipients)
    if copy:
        msg['Cc'] = ', '.join(copy)
    if blind_copy:
        msg['Bcc'] = ', '.join(blind_copy)
    msg['Subject'] = subject
    # Send email
    with smtplib.SMTP('localhost') as smtp:
        smtp.send_message(msg)
def main(argv=None):
    """Script entry point"""
    args = parse_args(argv)
    if args.quiet:
        log.setLevel(logging.ERROR)
    if args.debug:
        log.setLevel(logging.DEBUG)
    # Use the caller-supplied output directory, or a throwaway temp dir
    if args.outdir:
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.mkdir(outdir)
    else:
        outdir = tempfile.mkdtemp(dir='.')
    try:
        log.debug("Storing email parts in %s", outdir)
        if args.to:
            log.info("Sending email to %s", ', '.join(args.to))
            if args.cc:
                log.info("Copying to %s", ', '.join(args.cc))
            if args.bcc:
                log.info("Blind copying to %s", ', '.join(args.bcc))
            send_email(args.text, args.subject, args.to, args.cc, args.bcc)
    except subprocess.CalledProcessError as err:
        # NOTE(review): nothing in this try block runs a subprocess any
        # more — presumably left over from an earlier version that
        # generated attachments; confirm before removing.
        log.error("%s, with output:\n%s", str(err), err.output.decode())
        return 1
    finally:
        # Temp dir is only preserved when the user asked for --outdir
        if not args.outdir:
            log.debug("Wiping %s", outdir)
            shutil.rmtree(outdir)
    return 0
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,277 @@
#! /usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import collections
import json
import os
import os.path
import pathlib
import re
import subprocess
# TODO
# - option to just list all broken files
# - test suite
# - validate signed-off-by
status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
class Result:
    """Review findings for a single patch file.

    Class-level defaults describe a clean patch; patchreview() overrides
    the relevant attributes on each instance as problems are found.
    """
    # Whether the patch has an Upstream-Status or not
    missing_upstream_status = False
    # If the Upstream-Status tag is malformed in some way (string for bad bit)
    malformed_upstream_status = None
    # If the Upstream-Status value is unknown (boolean)
    unknown_upstream_status = False
    # The upstream status value (Pending, etc)
    upstream_status = None
    # Whether the patch has a Signed-off-by or not
    missing_sob = False
    # Whether the Signed-off-by tag is malformed in some way
    malformed_sob = False
    # The Signed-off-by tag value
    sob = None
    # Whether a patch looks like a CVE but doesn't have a CVE tag
    missing_cve = False
def blame_patch(patch):
    """
    From a patch filename, return a list of "commit summary (author name <author
    email>)" strings representing the history.
    """
    cmd = ("git", "log",
           "--follow", "--find-renames", "--diff-filter=A",
           "--format=%s (%aN <%aE>)",
           "--", patch)
    return subprocess.check_output(cmd).decode("utf-8").splitlines()
def patchreview(patches):
    """Scan each patch file and return {path: Result} describing problems
    with its Signed-off-by, Upstream-Status and CVE tags."""
    # General pattern: start of line, optional whitespace, tag with optional
    # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
    # insensitive.
    sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
    cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
    cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
    results = {}
    for patch in patches:
        result = Result()
        results[patch] = result
        # Read as ASCII with errors ignored: patches can contain arbitrary
        # bytes and only the tag lines matter.  The context manager closes
        # the handle (the original leaked one file handle per patch).
        with open(patch, encoding='ascii', errors='ignore') as f:
            content = f.read()
        # Find the Signed-off-by tag
        match = sob_re.search(content)
        if match:
            value = match.group(1)
            if value != "Signed-off-by:":
                result.malformed_sob = value
            result.sob = match.group(2)
        else:
            result.missing_sob = True
        # Find the Upstream-Status tag
        match = status_re.search(content)
        if match:
            value = match.group(1)
            if value != "Upstream-Status:":
                result.malformed_upstream_status = value
            value = match.group(2).lower()
            # TODO: check case
            if value not in status_values:
                result.unknown_upstream_status = True
            result.upstream_status = value
        else:
            result.missing_upstream_status = True
        # Check that patches which looks like CVEs have CVE tags
        if cve_re.search(patch) or cve_re.search(content):
            if not cve_tag_re.search(content):
                result.missing_cve = True
            # TODO: extract CVE list
    return results
def analyse(results, want_blame=False, verbose=True):
    """
    want_blame: display blame data for each malformed patch
    verbose: display per-file results instead of just summary
    """
    # want_blame requires verbose, so disable blame if we're not verbose
    if want_blame and not verbose:
        want_blame = False
    total_patches = 0
    missing_sob = 0
    malformed_sob = 0
    missing_status = 0
    malformed_status = 0
    missing_cve = 0
    pending_patches = 0
    for patch in sorted(results):
        r = results[patch]
        total_patches += 1
        need_blame = False
        # Build statistics
        if r.missing_sob:
            missing_sob += 1
        if r.malformed_sob:
            malformed_sob += 1
        if r.missing_upstream_status:
            missing_status += 1
        if r.malformed_upstream_status or r.unknown_upstream_status:
            malformed_status += 1
            # Count patches with no status as pending
            pending_patches +=1
        if r.missing_cve:
            missing_cve += 1
        if r.upstream_status == "pending":
            pending_patches += 1
        # Output warnings
        if r.missing_sob:
            need_blame = True
            if verbose:
                print("Missing Signed-off-by tag (%s)" % patch)
        if r.malformed_sob:
            need_blame = True
            if verbose:
                print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
        if r.missing_cve:
            need_blame = True
            if verbose:
                print("Missing CVE tag (%s)" % patch)
        if r.missing_upstream_status:
            need_blame = True
            if verbose:
                print("Missing Upstream-Status tag (%s)" % patch)
        if r.malformed_upstream_status:
            need_blame = True
            if verbose:
                print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
        if r.unknown_upstream_status:
            need_blame = True
            if verbose:
                print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))
        if want_blame and need_blame:
            print("\n".join(blame_patch(patch)) + "\n")
    def percent(num):
        # Render a count plus its percentage of all patches
        try:
            return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
        except ZeroDivisionError:
            return "N/A"
    if verbose:
        print()
    print("""Total patches found: %d
Patches missing Signed-off-by: %s
Patches with malformed Signed-off-by: %s
Patches missing CVE: %s
Patches missing Upstream-Status: %s
Patches with malformed Upstream-Status: %s
Patches in Pending state: %s""" % (total_patches,
                                   percent(missing_sob),
                                   percent(malformed_sob),
                                   percent(missing_cve),
                                   percent(missing_status),
                                   percent(malformed_status),
                                   percent(pending_patches)))
def histogram(results):
    """Print a crude horizontal bar chart of patches per upstream status."""
    import math
    from toolz import dicttoolz, recipes
    counts = recipes.countby(lambda r: r.upstream_status, results.values())
    # One '#' per started percentage point of the total
    bars = dicttoolz.valmap(
        lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
    for status, bar in bars.items():
        label = status.capitalize() if status else "No status"
        print("%-20s %s (%d)" % (label, bar, counts[status]))
def find_layers(candidate):
    """Return the absolute layer directories contained in *candidate*.

    candidate can either be the path to a layer directly (eg meta-intel),
    or a repository that contains other layers (meta-arm).  A
    conf/layer.conf file marks candidate itself as a single layer;
    otherwise its subdirectories named meta or meta-* are assumed to be
    the layers.
    """
    if (candidate / "conf" / "layer.conf").exists():
        return [candidate.absolute()]
    return [d.absolute() for d in candidate.iterdir()
            if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
# TODO these don't actually handle dynamic-layers/
def gather_patches(layers):
    """Return absolute paths of every git-tracked .patch/.diff file under
    the recipes-* trees of the given layers."""
    collected = []
    for layer in layers:
        tracked = subprocess.check_output(
            ("git", "-C", layer, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"),
            universal_newlines=True)
        collected.extend(os.path.join(layer, name) for name in tracked.split())
    return collected
def count_recipes(layers):
    """Count the git-tracked .bb recipe files across the given layers."""
    total = 0
    for layer in layers:
        listing = subprocess.check_output(
            ["git", "-C", layer, "ls-files", "recipes-*/**/*.bb"],
            universal_newlines=True)
        total += len(listing.splitlines())
    return total
if __name__ == "__main__":
    # Command line: scan a layer (or repository of layers), report tag
    # problems, and optionally append a summary row to a JSON time series.
    args = argparse.ArgumentParser(description="Patch Review Tool")
    args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
    args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
    args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
    args.add_argument("-j", "--json", help="update JSON")
    args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
    args = args.parse_args()
    layers = find_layers(args.directory)
    print(f"Found layers {' '.join((d.name for d in layers))}")
    patches = gather_patches(layers)
    results = patchreview(patches)
    analyse(results, want_blame=args.blame, verbose=args.verbose)
    if args.json:
        # Load the existing series (if any), append one row for this run,
        # and write it back.  'with' closes both handles (the original
        # left them open).
        if os.path.isfile(args.json):
            with open(args.json) as f:
                data = json.load(f)
        else:
            data = []
        row = collections.Counter()
        row["total"] = len(results)
        row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
        row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
        row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
        row['recipe_count'] = count_recipes(layers)
        for r in results.values():
            if r.upstream_status in status_values:
                row[r.upstream_status] += 1
            if r.malformed_upstream_status or r.missing_upstream_status:
                row['malformed-upstream-status'] += 1
            if r.malformed_sob or r.missing_sob:
                row['malformed-sob'] += 1
        data.append(row)
        with open(args.json, "w") as f:
            json.dump(data, f, sort_keys=True, indent="\t")
    if args.histogram:
        print()
        histogram(results)

View File

@@ -0,0 +1,104 @@
#!/bin/bash
#
# patchtest: Run patchtest on commits starting at master
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
set -o errexit
# Default values
pokydir=''
# Print usage to stderr and exit with failure.
# Fix: the original had '>&2' on its own line after the heredoc, which is
# a no-op null command - the usage text went to stdout.  Attach the
# redirect to the cat invocation so it actually goes to stderr.
usage() {
    CMD=$(basename "$0")
    cat <<EOM >&2
Usage: $CMD [-h] [-p pokydir]
  -p pokydir  Defaults to current directory
EOM
    exit 1
}
# Clone the repository at $1 into directory $2, or pull if it already
# exists.  Expansions are quoted so paths containing spaces work, and
# the pull only runs if the cd succeeded (otherwise it would pull in
# whatever directory the subshell happened to be in).
function clone() {
    local REPOREMOTE=$1
    local REPODIR=$2
    if [ ! -d "$REPODIR" ]; then
        git clone "$REPOREMOTE" "$REPODIR" --quiet
    else
        ( cd "$REPODIR" && git pull --quiet )
    fi
}
# Parse command-line options (-p pokydir, -h help)
while getopts ":p:h" opt; do
    case $opt in
    p)
        pokydir=$OPTARG
        ;;
    h)
        usage
        ;;
    \?)
        echo "Invalid option: -$OPTARG" >&2
        usage
        ;;
    :)
        echo "Option -$OPTARG requires an argument." >&2
        usage
        ;;
    esac
done
shift $((OPTIND-1))
CDIR="$PWD"
# default pokydir to current directory if user did not specify one
if [ -z "$pokydir" ]; then
    pokydir="$CDIR"
fi
# Everything (virtualenv, patchtest and patchtest-oe checkouts) lives
# under ./patchtest
PTENV="$PWD/patchtest"
PT="$PTENV/patchtest"
PTOE="$PTENV/patchtest-oe"
if ! which virtualenv > /dev/null; then
    echo "Install virtualenv before proceeding"
    exit 1;
fi
# activate the virtual env
virtualenv $PTENV --quiet
source $PTENV/bin/activate
cd $PTENV
# clone or pull
clone git://git.yoctoproject.org/patchtest $PT
clone git://git.yoctoproject.org/patchtest-oe $PTOE
# install requirements
pip install -r $PT/requirements.txt --quiet
pip install -r $PTOE/requirements.txt --quiet
PATH="$PT:$PT/scripts:$PATH"
# loop through parent to HEAD and execute patchtest on each commit
for commit in $(git rev-list master..HEAD --reverse)
do
    shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
    # Format the commit as a patch and run patchtest over it; an empty
    # summary means no test failures for this commit
    log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
    if [ -z "$log" ]; then
        shortlog="$shortlog: OK"
    else
        shortlog="$shortlog: FAIL"
    fi
    echo "$shortlog"
    echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
    echo ""
done
deactivate
cd $CDIR

View File

@@ -0,0 +1,61 @@
#!/bin/sh
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# Show usage when invoked without arguments or with --help
if [ "$1" = "" -o "$1" = "--help" ] ; then
    echo "Usage: $0 <serial terminal command>"
    echo
    echo "Simple script to handle maintaining a terminal for serial devices that"
    echo "disappear when a device is powered down or reset, such as the USB"
    echo "serial console on the original BeagleBone (white version)."
    echo
    echo "e.g. $0 picocom -b 115200 /dev/ttyUSB0"
    echo
    exit
fi
args="$@"
DEVICE=""
# Find the first /dev/* argument in the command line - that is the
# device node whose presence we monitor
while [ "$1" != "" ]; do
    case "$1" in
        /dev/*)
            DEVICE=$1
            break;;
    esac
    shift
done
if [ "$DEVICE" != "" ] ; then
    while true; do
        # Wait for the device node to (re)appear
        if [ ! -e $DEVICE ] ; then
            echo "serdevtry: waiting for $DEVICE to exist..."
            while [ ! -e $DEVICE ]; do
                sleep 0.1
            done
        fi
        if [ ! -w $DEVICE ] ; then
            # Sometimes (presumably because of a race with udev) we get to
            # the device before its permissions have been set up
            RETRYNUM=0
            while [ ! -w $DEVICE ]; do
                if [ "$RETRYNUM" = "2" ] ; then
                    echo "Device $DEVICE exists but is not writable!"
                    exit 1
                fi
                RETRYNUM=$((RETRYNUM+1))
                sleep 0.1
            done
        fi
        # Run the terminal command; when it exits, retry only if the
        # device node vanished (device reset), otherwise we are done
        $args
        if [ -e $DEVICE ] ; then
            break
        fi
    done
else
    echo "Unable to determine device node from command: $args"
    exit 1
fi

View File

@@ -0,0 +1,223 @@
#!/bin/bash
# Build performance regression test script
#
# Copyright 2011 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script is intended to be used in conjunction with "git bisect run"
# in order to find regressions in build time, however it can also be used
# independently. It cleans out the build output directories, runs a
# specified worker script (an example is test_build_time_worker.sh) under
# TIME(1), logs the results to TEST_LOGDIR (default /tmp) and returns a
# value telling "git bisect run" whether the build time is good (under
# the specified threshold) or bad (over it). There is also a tolerance
# option but it is not particularly useful as it only subtracts the
# tolerance from the given threshold and uses it as the actual threshold.
#
# It is also capable of taking a file listing git revision hashes to be
# test-applied to the repository in order to get past build failures that
# would otherwise cause certain revisions to have to be skipped; if a
# revision does not apply cleanly then the script assumes it does not
# need to be applied and ignores it.
#
# Please see the help output (syntax below) for some important setup
# instructions.
#
# AUTHORS
# Paul Eggleton <paul.eggleton@linux.intel.com>

syntax() {
    echo "syntax: $0 <script> <time> <tolerance> [patchrevlist]"
    echo ""
    echo " script - worker script file (if in current dir, prefix with ./)"
    echo " time - time threshold (in seconds, suffix m for minutes)"
    echo " tolerance - tolerance (in seconds, suffix m for minutes or % for"
    echo " percentage, can be 0)"
    echo " patchrevlist - optional file listing revisions to apply as patches on top"
    echo ""
    echo "You must set TEST_BUILDDIR to point to a previously created build directory,"
    echo "however please note that this script will wipe out the TMPDIR defined in"
    echo "TEST_BUILDDIR/conf/local.conf as part of its initial setup (as well as your"
    echo "~/.ccache)"
    echo ""
    echo "To get rid of the sudo prompt, please add the following line to /etc/sudoers"
    echo "(use 'visudo' to edit this; also it is assumed that the user you are running"
    echo "as is a member of the 'wheel' group):"
    echo ""
    echo "%wheel ALL=(ALL) NOPASSWD: /sbin/sysctl -w vm.drop_caches=[1-3]"
    echo ""
    echo "Note: it is recommended that you disable crond and any other process that"
    echo "may cause significant CPU or I/O usage during build performance tests."
}

# Note - we exit with 250 here because that will tell git bisect run that
# something bad happened and stop
if [ "$1" = "" ] ; then
    syntax
    exit 250
fi

if [ "$2" = "" ] ; then
    syntax
    exit 250
fi

if [ "$3" = "" ] ; then
    syntax
    exit 250
fi

if ! [[ "$2" =~ ^[0-9][0-9m.]*$ ]] ; then
    echo "'$2' is not a valid number for threshold"
    exit 250
fi

if ! [[ "$3" =~ ^[0-9][0-9m.%]*$ ]] ; then
    echo "'$3' is not a valid number for tolerance"
    exit 250
fi

if [ "$TEST_BUILDDIR" = "" ] ; then
    echo "Please set TEST_BUILDDIR to a previously created build directory"
    exit 250
fi

if [ ! -d "$TEST_BUILDDIR" ] ; then
    echo "TEST_BUILDDIR $TEST_BUILDDIR not found"
    exit 250
fi

git diff --quiet
if [ $? != 0 ] ; then
    echo "Working tree is dirty, cannot proceed"
    exit 251
fi

# BUGFIX: this previously tested the literal string
# "BB_ENV_PASSTHROUGH_ADDITIONS" (always non-empty, so the warning fired on
# every run); test the variable's value instead.
if [ "$BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
    echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
fi

runscript=$1
timethreshold=$2
tolerance=$3

if [ "$4" != "" ] ; then
    patchrevlist=`cat $4`
else
    patchrevlist=""
fi

# BUGFIX: this previously compared the literal word 'timethreshold' (missing
# $), so an 'm' suffix on the threshold was never converted to seconds.
if [[ $timethreshold == *m* ]] ; then
    timethreshold=`echo $timethreshold | sed s/m/*60/ | bc`
fi

if [[ $tolerance == *m* ]] ; then
    tolerance=`echo $tolerance | sed s/m/*60/ | bc`
elif [[ $tolerance == *%* ]] ; then
    tolerance=`echo $tolerance | sed s/%//`
    tolerance=`echo "scale = 2; (($tolerance * $timethreshold) / 100)" | bc`
fi

tmpdir=`grep "^TMPDIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/TMPDIR[ \t]*=[ \t\?]*"//' -e 's/"//'`
if [ "x$tmpdir" = "x" ]; then
    echo "Unable to determine TMPDIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi
sstatedir=`grep "^SSTATE_DIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/SSTATE_DIR[ \t\?]*=[ \t]*"//' -e 's/"//'`
if [ "x$sstatedir" = "x" ]; then
    echo "Unable to determine SSTATE_DIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi

# Refuse to 'rm -rf' suspiciously short paths (e.g. "/")
if [ `expr length $tmpdir` -lt 4 ] ; then
    echo "TMPDIR $tmpdir is less than 4 characters, bailing out"
    exit 250
fi

if [ `expr length $sstatedir` -lt 4 ] ; then
    echo "SSTATE_DIR $sstatedir is less than 4 characters, bailing out"
    exit 250
fi

# Last chance to abort before the destructive part
echo -n "About to wipe out TMPDIR $tmpdir, press Ctrl+C to break out... "
for i in 9 8 7 6 5 4 3 2 1
do
    echo -ne "\x08$i"
    sleep 1
done
echo

pushd . > /dev/null

rm -f pseudodone
echo "Removing TMPDIR $tmpdir..."
rm -rf $tmpdir
echo "Removing TMPDIR $tmpdir-*libc..."
rm -rf $tmpdir-*libc
echo "Removing SSTATE_DIR $sstatedir..."
rm -rf $sstatedir
echo "Removing ~/.ccache..."
rm -rf ~/.ccache

echo "Syncing..."
sync
sync
echo "Dropping VM cache..."
#echo 3 > /proc/sys/vm/drop_caches
sudo /sbin/sysctl -w vm.drop_caches=3 > /dev/null

if [ "$TEST_LOGDIR" = "" ] ; then
    logdir="/tmp"
else
    logdir="$TEST_LOGDIR"
fi
rev=`git rev-parse HEAD`
logfile="$logdir/timelog_$rev.log"
echo -n > $logfile

gitroot=`git rev-parse --show-toplevel`
cd $gitroot
for patchrev in $patchrevlist ; do
    echo "Applying $patchrev"
    patchfile=`mktemp`
    git show $patchrev > $patchfile
    git apply --check $patchfile &> /dev/null
    if [ $? != 0 ] ; then
        echo " ... patch does not apply without errors, ignoring"
    else
        echo "Applied $patchrev" >> $logfile
        git apply $patchfile &> /dev/null
    fi
    rm $patchfile
done

sync
echo "Quiescing for 5s..."
sleep 5

echo "Running $runscript at $rev..."
timeoutfile=`mktemp`
/usr/bin/time -o $timeoutfile -f "%e\nreal\t%E\nuser\t%Us\nsys\t%Ss\nmaxm\t%Mk" $runscript 2>&1 | tee -a $logfile
# First element of PIPESTATUS is the worker script's status (tee's is ignored)
exitstatus=${PIPESTATUS[0]}

git reset --hard HEAD > /dev/null
popd > /dev/null

timeresult=`head -n1 $timeoutfile`
cat $timeoutfile | tee -a $logfile
rm $timeoutfile

if [ $exitstatus != 0 ] ; then
    # Build failed, exit with 125 to tell git bisect run to skip this rev
    echo "*** Build failed (exit code $exitstatus), skipping..." | tee -a $logfile
    exit 125
fi

ret=`echo "scale = 2; $timeresult > $timethreshold - $tolerance" | bc`
echo "Returning $ret" | tee -a $logfile
exit $ret

View File

@@ -0,0 +1,41 @@
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This is an example script to be used in conjunction with test_build_time.sh
# It sources the build environment for $TEST_BUILDDIR from the current
# checkout, builds one sato image and propagates bitbake's exit status.
if [ "$TEST_BUILDDIR" = "" ] ; then
echo "TEST_BUILDDIR is not set"
exit 1
fi
# The build dir must be a subdirectory of the current (checkout) directory
buildsubdir=`basename $TEST_BUILDDIR`
if [ ! -d $buildsubdir ] ; then
echo "Unable to find build subdir $buildsubdir in current directory"
exit 1
fi
# Source whichever init script this checkout provides (OE-core vs older poky)
if [ -f oe-init-build-env ] ; then
. ./oe-init-build-env $buildsubdir
elif [ -f poky-init-build-env ] ; then
. ./poky-init-build-env $buildsubdir
else
echo "Unable to find build environment setup script"
exit 1
fi
# Pick the sato image name matching the metadata version present
if [ -f ../meta/recipes-sato/images/core-image-sato.bb ] ; then
target="core-image-sato"
else
target="poky-image-sato"
fi
echo "Build started at `date "+%Y-%m-%d %H:%M:%S"`"
echo "bitbake $target"
bitbake $target
ret=$?
echo "Build finished at `date "+%Y-%m-%d %H:%M:%S"`"
exit $ret

View File

@@ -0,0 +1,26 @@
#!/bin/bash -eur
#
# Find python modules uncovered by oe-seltest
#
# Copyright (c) 2016, Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Author: Ed Bartosh <ed.bartosh@linux.intel.com>
#
# Usage: pipe a 'coverage report' into this script and pass a directory;
# prints the Python scripts under <dir> that the report does not mention.
# Require exactly one argument and data on stdin (-t 0 is true when stdin
# is a terminal, i.e. nothing was piped in)
if [ ! "$#" -eq 1 -o -t 0 ] ; then
echo 'Usage: coverage report | ./scripts/contrib/uncovered <dir>' 1>&2
exit 1
fi
# Canonicalize the directory argument; readlink -e insists that it exists
path=$(readlink -ev $1)
if [ ! -d "$path" ] ; then
echo "directory $1 doesn't exist" 1>&2
exit 1
fi
# Left input: files under $path mentioned in the coverage report on stdin
# (lines ending in '0%' are dropped). Right input: every Python script found
# under $path. Lines unique to the right side ('+' prefix) are the modules
# with no coverage at all; cut -c2- strips the '+'.
diff -u <(grep "$path" | grep -v '0%$' | cut -f1 -d: | sort) \
<(find $path | xargs file | grep 'Python script' | cut -f1 -d:| sort) | \
grep "^+$path" | cut -c2-

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script can be used to verify HOMEPAGE values for all recipes in
# the current configuration.
# The result is influenced by network environment, since the timeout of connect url is 5 seconds as default.
import sys
import os
import subprocess
import urllib.request
# Allow importing scripts/lib modules
scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptpath
import scriptutils
# Allow importing bitbake modules
bitbakepath = scriptpath.add_bitbake_lib_path()
import bb.tinfoil
logger = scriptutils.logger_create('verify_homepage')
def wgetHomepage(pn, homepage):
    """Second-chance HOMEPAGE check using 'wget --spider'.

    Called after urllib failed; some sites reject urllib's user agent but
    answer wget. Logs a warning and returns 1 if the URL is unreachable,
    returns 0 on success.
    """
    # Pass the URL as a separate argv element instead of interpolating it
    # into a shell command line (previously shell=True with string concat),
    # so shell metacharacters in a HOMEPAGE value cannot be interpreted.
    try:
        result = subprocess.call(['wget', '-q', '-T', '5', '-t', '1', '--spider', homepage])
    except OSError:
        # wget missing/not executable: with shell=True this surfaced as a
        # non-zero exit status; keep counting it as a verification failure.
        result = 1
    if result:
        logger.warning("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
        return 1
    else:
        return 0
def verifyHomepage(bbhandler):
    """Check the HOMEPAGE of every recipe known to the cooker.

    Returns the number of recipes whose HOMEPAGE could not be verified.
    """
    recipe_map = bbhandler.cooker.recipecaches[''].pkg_pn
    failures = 0
    seen = set()
    for recipe_name in sorted(recipe_map):
        for virtualfn in recipe_map[recipe_name]:
            # There's no point checking multiple BBCLASSEXTENDed variants
            # of the same recipe
            realfn, _, _ = bb.cache.virtualfn2realfn(virtualfn)
            if realfn in seen:
                continue
            seen.add(realfn)
            homepage = bbhandler.parse_recipe_file(realfn).getVar("HOMEPAGE")
            if not homepage:
                continue
            try:
                urllib.request.urlopen(homepage, timeout=5)
            except Exception:
                # urllib failed; fall back to wget before counting a failure
                failures += wgetHomepage(os.path.basename(realfn), homepage)
    return failures
if __name__=='__main__':
    # Tinfoil gives us access to the recipe cache without running a build
    with bb.tinfoil.Tinfoil() as bbhandler:
        bbhandler.prepare()
        logger.info("Start verifying HOMEPAGE:")
        failcount = verifyHomepage(bbhandler)
        logger.info("Finished verifying HOMEPAGE.")
        logger.info("Summary: %s failed" % failcount)

56
sources/poky/scripts/cp-noerror Executable file
View File

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Allow copying of $1 to $2 but if files in $1 disappear during the copy operation,
# don't error.
# Also don't error if $1 disappears.
#
import sys
import os
import shutil
def copytree(src, dst, symlinks=False, ignore=None):
    """Copy the contents of directory *src* into *dst*, tolerating races.

    Based on shutil.copytree, but:
      - an already-existing destination directory is not an error;
      - entries that already exist in the destination are skipped;
      - files are hard-linked when possible, copied (with metadata) otherwise.

    Raises shutil.Error with a list of (src, dst, reason) tuples if any
    individual entry could not be transferred.

    NOTE(review): despite the shutil-compatible signature, 'symlinks' and
    'ignore' are currently unused.
    """
    names = os.listdir(src)
    try:
        os.makedirs(dst)
    except OSError:
        # Already exists
        pass
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            d = dstname
            if os.path.isdir(dstname):
                d = os.path.join(dstname, os.path.basename(srcname))
            if os.path.exists(d):
                continue
            try:
                # Hard-link when src and dst share a filesystem; fall back
                # to a full copy otherwise.
                os.link(srcname, dstname)
            except OSError:
                shutil.copy2(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except shutil.Error as err:
            errors.extend(err.args[0])
        except EnvironmentError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        shutil.copystat(src, dst)
    except OSError as why:
        # BUGFIX: was errors.extend(...), which flattened the tuple into
        # three separate string entries instead of one (src, dst, reason)
        # record like every other entry in 'errors'.
        errors.append((src, dst, str(why)))
    if errors:
        raise shutil.Error(errors)
# Entry point: cp-noerror <srcdir> <dstdir>
# Races where individual files disappear mid-copy (shutil.Error) or the
# whole source tree vanishes (OSError) are deliberately swallowed — that
# is exactly the tolerance this tool exists to provide.
try:
    copytree(sys.argv[1], sys.argv[2])
except (shutil.Error, OSError):
    pass

View File

@@ -0,0 +1,279 @@
#!/bin/sh
#
# Copyright (c) 2010-2013, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
#
# This script is intended to be used to prepare a series of patches
# and a cover letter in an appropriate and consistent format for
# submission to Open Embedded and The Yocto Project, as well as to
# related projects and layers.
#

ODIR=pull-$$
RELATIVE_TO="master"
COMMIT_ID="HEAD"
PREFIX="PATCH"
RFC=0

usage() {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-h] [-o output_dir] [-m msg_body_file] [-s subject] [-r relative_to] [-i commit_id] [-d relative_dir] -u remote [-b branch] [-- <format-patch options>]
-b branch Branch name in the specified remote (default: current branch)
-l local branch Local branch name (default: HEAD)
-c Create an RFC (Request for Comment) patch series
-h Display this help message
-a Automatically push local branch (-l) to remote branch (-b),
or set CPR_CONTRIB_AUTO_PUSH in env
-i commit_id Ending commit (default: HEAD)
-m msg_body_file The file containing a blurb to be inserted into the summary email
-o output_dir Specify the output directory for the messages (default: pull-PID)
-p prefix Use [prefix N/M] instead of [PATCH N/M] as the subject prefix
-r relative_to Starting commit (default: master)
-s subject The subject to be inserted into the summary email
-u remote The git remote where the branch is located, or set CPR_CONTRIB_REMOTE in env
-d relative_dir Generate patches relative to directory
Examples:
$CMD -u contrib -b nitin/basic
$CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro
$CMD -u contrib -r distro/master -i nitin/distro -b nitin/distro -l distro
$CMD -u contrib -r master -i misc -b nitin/misc -o pull-misc
$CMD -u contrib -p "RFC PATCH" -b nitin/experimental
$CMD -u contrib -i misc -b nitin/misc -d ./bitbake
$CMD -u contrib -r origin/master -o /tmp/out.v3 -- -v3 --in-reply-to=20170511120134.XX7799@site.com
EOM
}

REMOTE="$CPR_CONTRIB_REMOTE"

# Parse and validate arguments
while getopts "b:acd:hi:m:o:p:r:s:u:l:" OPT; do
    case $OPT in
    b)
        BRANCH="$OPTARG"
        ;;
    l)
        L_BRANCH="$OPTARG"
        ;;
    c)
        RFC=1
        ;;
    d)
        RELDIR="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    i)
        COMMIT_ID="$OPTARG"
        ;;
    m)
        BODY="$OPTARG"
        if [ ! -e "$BODY" ]; then
            echo "ERROR: Body file does not exist"
            exit 1
        fi
        ;;
    o)
        ODIR="$OPTARG"
        ;;
    p)
        PREFIX="$OPTARG"
        ;;
    r)
        RELATIVE_TO="$OPTARG"
        ;;
    s)
        SUBJECT="$OPTARG"
        ;;
    u)
        REMOTE="$OPTARG"
        ;;
    a)
        CPR_CONTRIB_AUTO_PUSH="1"
        ;;
    --)
        shift
        break
        ;;
    esac
done
shift "$((OPTIND - 1))"
extraopts="$@"

if [ -z "$REMOTE" ]; then
    echo "ERROR: Missing parameter -u or CPR_CONTRIB_REMOTE in env, no git remote!"
    usage
    exit 1
fi

REMOTE_URL=$(git config remote.$REMOTE.url)
if [ $? -ne 0 ]; then
    echo "ERROR: git config failed to find a url for '$REMOTE'"
    echo
    echo "To add a remote url for $REMOTE, use:"
    echo " git config remote.$REMOTE.url <url>"
    exit 1
fi

# Rewrite private URLs to public URLs
# Determine the repository name for use in the WEB_URL later
USER_RE="[A-Za-z0-9_.@][A-Za-z0-9_.@-]*\$\?"
PROTO_RE="[a-z][a-z+]*://"
GIT_RE="\(^\($PROTO_RE\)\?\)\($USER_RE@\)\?\([^:/]*\)[:/]\(.*\)"
REMOTE_URL=${REMOTE_URL%.git}
REMOTE_REPO=$(echo $REMOTE_URL | sed "s#$GIT_RE#\5#")
REMOTE_URL=$(echo $REMOTE_URL | sed "s#$GIT_RE#https://\4/\5#")

if [ -z "$BRANCH" ]; then
    BRANCH=$(git branch | grep -e "^\* " | cut -d' ' -f2)
    echo "NOTE: Assuming remote branch '$BRANCH', use -b to override."
fi

if [ -z "$L_BRANCH" ]; then
    L_BRANCH=HEAD
    echo "NOTE: Assuming local branch HEAD, use -l to override."
fi

if [ $RFC -eq 1 ]; then
    PREFIX="RFC $PREFIX"
fi

# Set WEB_URL from known remotes
WEB_URL=""
case "$REMOTE_URL" in
    *git.yoctoproject.org*)
        WEB_URL="https://git.yoctoproject.org/$REMOTE_REPO/log/?h=$BRANCH"
        ;;
    *git.openembedded.org*)
        WEB_URL="https://git.openembedded.org/$REMOTE_REPO/log/?h=$BRANCH"
        ;;
    *github.com*)
        WEB_URL="https://github.com/$REMOTE_REPO/tree/$BRANCH"
        ;;
esac

# Perform a sanity test on the web URL. Issue a warning if it is not
# accessible, but do not abort as users may want to run offline.
if [ -n "$WEB_URL" ]; then
    if [ "$CPR_CONTRIB_AUTO_PUSH" = "1" ]; then
        echo "Pushing '$BRANCH' on '$REMOTE' as requested..."
        git push $REMOTE $L_BRANCH:$BRANCH
        echo ""
    fi
    wget --no-check-certificate -q $WEB_URL -O /dev/null
    if [ $? -ne 0 ]; then
        echo "WARNING: Branch '$BRANCH' was not found on the contrib git tree."
        echo " Please check your remote and branch parameter before sending."
        echo ""
    fi
fi

if [ -e $ODIR ]; then
    echo "ERROR: output directory $ODIR exists."
    exit 1
fi
mkdir $ODIR

if [ -n "$RELDIR" ]; then
    ODIR=$(realpath $ODIR)
    pdir=$(pwd)
    cd $RELDIR
    extraopts="$extraopts --relative"
fi

# Generate the patches and cover letter
git format-patch $extraopts -M40 --subject-prefix="$PREFIX" -n -o $ODIR --thread=shallow --cover-letter $RELATIVE_TO..$COMMIT_ID > /dev/null

if [ -z "$(ls -A $ODIR 2> /dev/null)" ]; then
    # (typo fix: the first variable name in this message used to read
    # $RRELATIVE_TO)
    echo "ERROR: $ODIR is empty, no cover letter and patches was generated!"
    echo " This is most likely due to that \$RELATIVE_TO..\$COMMIT_ID"
    echo " ($RELATIVE_TO..$COMMIT_ID) don't contain any differences."
    rmdir $ODIR
    exit 1
fi

[ -n "$RELDIR" ] && cd $pdir

# Customize the cover letter
CL="$(echo $ODIR/*0000-cover-letter.patch)"
PM="$ODIR/pull-msg"
# BUGFIX: 'git --version' was previously wrapped in backticks inside $(),
# which executed git's *output* as a command instead of piping its text
# through tr/sed; pipe the command itself.
GIT_VERSION=$(git --version | tr -d '[:alpha:][:space:].' | sed 's/\(...\).*/\1/')
NEWER_GIT_VERSION=210
if [ $GIT_VERSION -lt $NEWER_GIT_VERSION ]; then
    git request-pull $RELATIVE_TO $REMOTE_URL $COMMIT_ID >> "$PM"
else
    git request-pull $RELATIVE_TO $REMOTE_URL $L_BRANCH:$BRANCH >> "$PM"
fi
if [ $? -ne 0 ]; then
    echo "ERROR: git request-pull reported an error"
    rm -rf $ODIR
    exit 1
fi

# The cover letter already has a diffstat, remove it from the pull-msg
# before inserting it.
sed -n "0,\#$REMOTE_URL# p" "$PM" | sed -i "/BLURB HERE/ r /dev/stdin" "$CL"

rm "$PM"

# If this is an RFC, make that clear in the cover letter
if [ $RFC -eq 1 ]; then
(cat <<EOM
Please review the following changes for suitability for inclusion. If you have
any objections or suggestions for improvement, please respond to the patches. If
you agree with the changes, please provide your Acked-by.
EOM
) | sed -i "/BLURB HERE/ r /dev/stdin" "$CL"
fi

# Insert the WEB_URL if there is one
if [ -n "$WEB_URL" ]; then
    echo " $WEB_URL" | sed -i "\#$REMOTE_URL# r /dev/stdin" "$CL"
fi

# If the user specified a message body, insert it into the cover letter and
# remove the BLURB token.
if [ -n "$BODY" ]; then
    sed -i "/BLURB HERE/ r $BODY" "$CL"
    sed -i "/BLURB HERE/ d" "$CL"
fi

# Set subject automatically if there is only one patch
patch_cnt=`git log --pretty=oneline ${RELATIVE_TO}..${L_BRANCH} | wc -l`
if [ -z "$SUBJECT" -a $patch_cnt -eq 1 ]; then
    SUBJECT="`git log --format=%s ${RELATIVE_TO}..${L_BRANCH}`"
fi

# Replace the SUBJECT token with it.
if [ -n "$SUBJECT" ]; then
    sed -i -e "s\`\*\*\* SUBJECT HERE \*\*\*\`$SUBJECT\`" "$CL"
fi

# Generate report for user
cat <<EOM
The following patches have been prepared:
$(for PATCH in $(ls $ODIR/*); do echo " $PATCH"; done)
Review their content, especially the summary mail:
$CL
When you are satisfied, you can send them with:
send-pull-request -a -p $ODIR
EOM

# Check the patches for trailing white space
egrep -q -e "^\+.*\s+$" $ODIR/*
if [ $? -ne 1 ]; then
    echo
    echo "WARNING: Trailing white space detected at these locations"
    egrep -nH --color -e "^\+.*\s+$" $ODIR/*
fi

View File

@@ -0,0 +1 @@
../native-intercept/ar

458
sources/poky/scripts/crosstap Executable file
View File

@@ -0,0 +1,458 @@
#!/usr/bin/env python3
#
# Build a systemtap script for a given image, kernel
#
# Effectively script extracts needed information from set of
# 'bitbake -e' commands and contructs proper invocation of stap on
# host to build systemtap script for a given target.
#
# By default script will compile scriptname.ko that could be copied
# to taget and activated with 'staprun scriptname.ko' command. Or if
# --remote user@hostname option is specified script will build, load
# execute script on target.
#
# This script is very similar and inspired by crosstap shell script.
# The major difference that this script supports user-land related
# systemtap script, whereas crosstap could deal only with scripts
# related to kernel.
#
# Copyright (c) 2018, Cisco Systems.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import re
import subprocess
import os
import optparse
class Stap(object):
    """Assembles the cross 'stap' invocation for one systemtap script.

    Only script/module/remote are known at construction; the remaining
    attributes are populated later by ParamDiscovery.fill_stap().
    """
    def __init__(self, script, module, remote):
        self.script = script
        self.module = module
        self.remote = remote

        # Filled in by ParamDiscovery.fill_stap()
        self.stap = None
        self.sysroot = None
        self.runtime = None
        self.tapset = None
        self.arch = None
        self.cross_compile = None
        self.kernel_release = None
        self.target_path = None
        self.target_ld_library_path = None

        if not self.remote and not self.module:
            # Derive a module name from the script name: strip any .stp
            # suffix and replace '-' (not valid in module names) with '_'
            name = os.path.basename(self.script)
            if name[-4:] == ".stp":
                name = name[:-4]
            self.module = name.replace("-", "_")

    def command(self, args):
        """Build the full stap argv; *args* are inserted before the script."""
        cmd = [self.stap]
        if self.remote:
            cmd += ["--remote", self.remote]
        else:
            # local build: stop after pass 4 (module built) under this name
            cmd += ["-p4", "-m", self.module]
        cmd += ["-a", self.arch,
                "-B", "CROSS_COMPILE=" + self.cross_compile,
                "-r", self.kernel_release,
                "-I", self.tapset,
                "-R", self.runtime]
        if self.sysroot:
            cmd += ["--sysroot", self.sysroot]
            cmd += ["--sysenv=PATH=" + self.target_path]
            cmd += ["--sysenv=LD_LIBRARY_PATH=" + self.target_ld_library_path]
        cmd = cmd + args
        cmd.append(self.script)
        return cmd

    def additional_environment(self):
        """Environment additions stap needs to locate split debug info."""
        return {"SYSTEMTAP_DEBUGINFO_PATH": "+:.debug:build"}

    def environment(self):
        """Current process environment plus the stap-specific additions."""
        env = os.environ.copy()
        env.update(self.additional_environment())
        return env

    def display_command(self, args):
        """Print an equivalent standalone shell script instead of running stap."""
        print("#!/bin/sh")
        for name, value in self.additional_environment().items():
            print("export %s=\"%s\"" % (name, value))
        print(" ".join(self.command(args)))
class BitbakeEnvInvocationException(Exception):
    """Raised when a 'bitbake -e' invocation fails; carries its output."""
    def __init__(self, message):
        self.message = message
class BitbakeEnv(object):
    """Runs 'bitbake -e <package>' once and extracts variables from its output."""
    BITBAKE="bitbake"

    def __init__(self, package):
        self.package = package
        self.cmd = "%s -e %s" % (BitbakeEnv.BITBAKE, self.package)
        self.popen = subprocess.Popen(self.cmd, shell=True,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.STDOUT)
        raw_lines = self.popen.stdout.readlines()
        self.popen.wait()
        self.lines = [raw.decode('utf-8') for raw in raw_lines]

    def get_vars(self, vars):
        """Return the values of *vars* (None for absent ones).

        Returns a single value for a one-element request, otherwise a list
        in the requested order. Raises BitbakeEnvInvocationException if the
        bitbake invocation failed.
        """
        if self.popen.returncode:
            raise BitbakeEnvInvocationException(
                "\nFailed to execute '" + self.cmd +
                "' with the following message:\n" +
                ''.join(self.lines))

        # bitbake -e prints both plain and exported assignments; match either
        patterns = []
        for var in vars:
            patterns.append((var, re.compile('^' + var + '="(.*)"')))
            patterns.append((var, re.compile('^export ' + var + '="(.*)"')))

        found = {}
        for line in self.lines:
            for var, pattern in patterns:
                match = pattern.match(line)
                if match:
                    found[var] = match.group(1)

        # fill variables values in order how they were requested
        values = [found.get(var) for var in vars]
        # if it is single value list return it as scalar, not the list
        return values[0] if len(values) == 1 else values
class ParamDiscovery(object):
    """Discovers, via 'bitbake -e', everything needed for a cross stap build.

    Usage: construct with an image name (or None), call discover(), then
    check(); fill_stap() finally copies the results into a Stap instance.
    """
    SYMBOLS_CHECK_MESSAGE = """
WARNING: image '%s' does not have dbg-pkgs IMAGE_FEATURES enabled and no
"image-combined-dbg" in inherited classes is specified. As result the image
does not have symbols for user-land processes DWARF based probes. Consider
adding 'dbg-pkgs' to EXTRA_IMAGE_FEATURES or adding "image-combined-dbg" to
USER_CLASSES. I.e add this line 'USER_CLASSES += "image-combined-dbg"' to
local.conf file.
Or you may use IMAGE_GEN_DEBUGFS="1" option, and then after build you need
recombine/unpack image and image-dbg tarballs and pass resulting dir location
with --sysroot option.
"""

    def __init__(self, image):
        self.image = image

        # All fields below are filled in by discover()
        self.image_rootfs = None
        self.image_features = None
        self.image_gen_debugfs = None
        self.inherit = None
        self.base_bindir = None
        self.base_sbindir = None
        self.base_libdir = None
        self.bindir = None
        self.sbindir = None
        self.libdir = None

        self.staging_bindir_toolchain = None
        self.target_prefix = None
        self.target_arch = None
        self.target_kernel_builddir = None

        self.staging_dir_native = None

        self.image_combined_dbg = False

    def discover(self):
        """Query bitbake for image, virtual/kernel and systemtap-native vars."""
        if self.image:
            benv_image = BitbakeEnv(self.image)
            (self.image_rootfs,
             self.image_features,
             self.image_gen_debugfs,
             self.inherit,
             self.base_bindir,
             self.base_sbindir,
             self.base_libdir,
             self.bindir,
             self.sbindir,
             self.libdir
             ) = benv_image.get_vars(
                 ("IMAGE_ROOTFS",
                  "IMAGE_FEATURES",
                  "IMAGE_GEN_DEBUGFS",
                  "INHERIT",
                  "base_bindir",
                  "base_sbindir",
                  "base_libdir",
                  "bindir",
                  "sbindir",
                  "libdir"
                  ))

        benv_kernel = BitbakeEnv("virtual/kernel")
        (self.staging_bindir_toolchain,
         self.target_prefix,
         self.target_arch,
         self.target_kernel_builddir
         ) = benv_kernel.get_vars(
             ("STAGING_BINDIR_TOOLCHAIN",
              "TARGET_PREFIX",
              "TRANSLATED_TARGET_ARCH",
              "B"
              ))

        benv_systemtap = BitbakeEnv("systemtap-native")
        (self.staging_dir_native
         ) = benv_systemtap.get_vars(["STAGING_DIR_NATIVE"])

        if self.inherit:
            if "image-combined-dbg" in self.inherit.split():
                self.image_combined_dbg = True

    def check(self, sysroot_option):
        """Validate discovered values; print errors and return False if the
        stap invocation cannot be constructed."""
        ret = True
        if self.image_rootfs:
            sysroot = self.image_rootfs
            if not os.path.isdir(self.image_rootfs):
                print("ERROR: Cannot find '" + sysroot +
                      "' directory. Was '" + self.image + "' image built?")
                ret = False

        stap = self.staging_dir_native + "/usr/bin/stap"
        if not os.path.isfile(stap):
            print("ERROR: Cannot find '" + stap +
                  "'. Was 'systemtap-native' built?")
            ret = False

        if not os.path.isdir(self.target_kernel_builddir):
            # BUGFIX: message previously said 'kernel/virtual'; the provider
            # queried in discover() is 'virtual/kernel'.
            print("ERROR: Cannot find '" + self.target_kernel_builddir +
                  "' directory. Was 'virtual/kernel' built?")
            ret = False

        if not sysroot_option and self.image_rootfs:
            dbg_pkgs_found = False
            if self.image_features:
                image_features = self.image_features.split()
                if "dbg-pkgs" in image_features:
                    dbg_pkgs_found = True
            if not dbg_pkgs_found \
               and not self.image_combined_dbg:
                print(ParamDiscovery.SYMBOLS_CHECK_MESSAGE % (self.image))

        if not ret:
            print("")

        return ret

    def __map_systemtap_arch(self):
        """Translate OE's TRANSLATED_TARGET_ARCH into stap's '-a' arch name."""
        a = self.target_arch
        ret = a
        if re.match('(athlon|x86.64)$', a):
            ret = 'x86_64'
        elif re.match('i.86$', a):
            ret = 'i386'
        elif re.match('arm$', a):
            ret = 'arm'
        elif re.match('aarch64$', a):
            ret = 'arm64'
        elif re.match('mips(isa|)(32|64|)(r6|)(el|)$', a):
            ret = 'mips'
        elif re.match('p(pc|owerpc)(|64)', a):
            ret = 'powerpc'
        return ret

    def fill_stap(self, stap):
        """Copy the discovered paths and settings into the given Stap object."""
        stap.stap = self.staging_dir_native + "/usr/bin/stap"
        if not stap.sysroot:
            if self.image_rootfs:
                if self.image_combined_dbg:
                    stap.sysroot = self.image_rootfs + "-dbg"
                else:
                    stap.sysroot = self.image_rootfs
        stap.runtime = self.staging_dir_native + "/usr/share/systemtap/runtime"
        stap.tapset = self.staging_dir_native + "/usr/share/systemtap/tapset"
        stap.arch = self.__map_systemtap_arch()
        stap.cross_compile = self.staging_bindir_toolchain + "/" + \
            self.target_prefix
        stap.kernel_release = self.target_kernel_builddir

        # do we have standard that tells in which order these need to appear
        target_path = []
        if self.sbindir:
            target_path.append(self.sbindir)
        if self.bindir:
            target_path.append(self.bindir)
        if self.base_sbindir:
            target_path.append(self.base_sbindir)
        if self.base_bindir:
            target_path.append(self.base_bindir)
        stap.target_path = ":".join(target_path)

        target_ld_library_path = []
        if self.libdir:
            target_ld_library_path.append(self.libdir)
        if self.base_libdir:
            target_ld_library_path.append(self.base_libdir)
        stap.target_ld_library_path = ":".join(target_ld_library_path)
def main():
    """Parse arguments (modern or legacy style), discover build parameters
    and either run stap or print an equivalent shell command."""
    usage = """usage: %prog -s <systemtap-script> [options] [-- [systemtap options]]
%prog cross compile given SystemTap script against given image, kernel
It needs to run in environtment set for bitbake - it uses bitbake -e
invocations to retrieve information to construct proper stap cross build
invocation arguments. It assumes that systemtap-native is built in given
bitbake workspace.
Anything after -- option is passed directly to stap.
Legacy script invocation style supported but deprecated:
%prog <user@hostname> <sytemtap-script> [systemtap options]
To enable most out of systemtap the following site.conf or local.conf
configuration is recommended:
# enables symbol + target binaries rootfs-dbg in workspace
IMAGE_GEN_DEBUGFS = "1"
IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
USER_CLASSES += "image-combined-dbg"
# enables kernel debug symbols
KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
# minimal, just run-time systemtap configuration in target image
PACKAGECONFIG:pn-systemtap = "monitor"
# add systemtap run-time into target image if it is not there yet
IMAGE_INSTALL:append = " systemtap"
"""
    option_parser = optparse.OptionParser(usage=usage)

    option_parser.add_option("-s", "--script", dest="script",
                             help="specify input script FILE name",
                             metavar="FILE")

    option_parser.add_option("-i", "--image", dest="image",
                             help="specify image name for which script should be compiled")

    option_parser.add_option("-r", "--remote", dest="remote",
                             help="specify username@hostname of remote target to run script "
                             "optional, it assumes that remote target can be accessed through ssh")

    option_parser.add_option("-m", "--module", dest="module",
                             help="specify module name, optional, has effect only if --remote is not used, "
                             "if not specified module name will be derived from passed script name")

    option_parser.add_option("-y", "--sysroot", dest="sysroot",
                             help="explicitely specify image sysroot location. May need to use it in case "
                             "when IMAGE_GEN_DEBUGFS=\"1\" option is used and recombined with symbols "
                             "in different location",
                             metavar="DIR")

    option_parser.add_option("-o", "--out", dest="out",
                             action="store_true",
                             help="output shell script that equvivalent invocation of this script with "
                             "given set of arguments, in given bitbake environment. It could be stored in "
                             "separate shell script and could be repeated without incuring bitbake -e "
                             "invocation overhead",
                             default=False)

    option_parser.add_option("-d", "--debug", dest="debug",
                             action="store_true",
                             help="enable debug output. Use this option to see resulting stap invocation",
                             default=False)

    # is invocation follow syntax from orignal crosstap shell script
    legacy_args = False

    # check if we called the legacy way
    if len(sys.argv) >= 3:
        if sys.argv[1].find("@") != -1 and os.path.exists(sys.argv[2]):
            legacy_args = True

            # fill options values for legacy invocation case
            # BUGFIX: this previously assigned the optparse.Values *class*
            # itself, so the assignments below became class attributes and
            # mutated shared optparse state; instantiate it instead.
            options = optparse.Values()
            options.script = sys.argv[2]
            options.remote = sys.argv[1]
            options.image = None
            options.module = None
            options.sysroot = None
            options.out = None
            options.debug = None
            remaining_args = sys.argv[3:]

    if not legacy_args:
        (options, remaining_args) = option_parser.parse_args()

    if not options.script or not os.path.exists(options.script):
        print("'-s FILE' option is missing\n")
        option_parser.print_help()
    else:
        stap = Stap(options.script, options.module, options.remote)
        discovery = ParamDiscovery(options.image)
        discovery.discover()
        if not discovery.check(options.sysroot):
            option_parser.print_help()
        else:
            stap.sysroot = options.sysroot
            discovery.fill_stap(stap)

            if options.out:
                stap.display_command(remaining_args)
            else:
                cmd = stap.command(remaining_args)
                env = stap.environment()

                if options.debug:
                    print(" ".join(cmd))

                # replace this process with stap in the prepared environment
                os.execve(cmd[0], cmd, env)


# Guard the entry point so importing this file (e.g. for tooling) does not
# immediately parse argv and exec stap; running it as a script is unchanged.
if __name__ == "__main__":
    main()

355
sources/poky/scripts/devtool Executable file
View File

@@ -0,0 +1,355 @@
#!/usr/bin/env python3
# OpenEmbedded Development tool
#
# Copyright (C) 2014-2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import glob
import re
import configparser
import subprocess
import logging
basepath = ''
workspace = {}
config = None
context = None
scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
from devtool import DevtoolError, setup_tinfoil
import scriptutils
import argparse_oe
logger = scriptutils.logger_create('devtool')
plugins = []
class ConfigHandler(object):
    """Loads, queries and persists the devtool configuration file."""
    config_file = ''
    config_obj = None
    init_path = ''
    workspace_path = ''

    def __init__(self, filename):
        self.config_file = filename
        self.config_obj = configparser.ConfigParser()

    def get(self, section, option, default=None):
        """Return an option's value, or *default* (if given) when the
        option or section is absent; re-raises otherwise."""
        try:
            return self.config_obj.get(section, option)
        except (configparser.NoOptionError, configparser.NoSectionError):
            if default is None:
                raise
            return default

    def read(self):
        """Load the config file; returns False when init_path is invalid."""
        if os.path.exists(self.config_file):
            self.config_obj.read(self.config_file)
            if self.config_obj.has_option('General', 'init_path'):
                pth = self.get('General', 'init_path')
                self.init_path = os.path.join(basepath, pth)
                if not os.path.exists(self.init_path):
                    logger.error('init_path %s specified in config file cannot be found' % pth)
                    return False
        else:
            self.config_obj.add_section('General')
        self.workspace_path = self.get('General', 'workspace_path',
                                       os.path.join(basepath, 'workspace'))
        return True

    def write(self):
        """Persist the current workspace path back to the config file."""
        logger.debug('writing to config file %s' % self.config_file)
        self.config_obj.set('General', 'workspace_path', self.workspace_path)
        with open(self.config_file, 'w') as f:
            self.config_obj.write(f)

    def set(self, section, option, value):
        """Set an option, creating its section on demand."""
        if not self.config_obj.has_section(section):
            self.config_obj.add_section(section)
        self.config_obj.set(section, option, value)
class Context:
    """Lightweight attribute bag holding devtool runtime state
    (e.g. fixed_setup, config, pluginpaths)."""

    def __init__(self, **kwargs):
        # Copy every keyword argument straight onto the instance
        for name, value in kwargs.items():
            setattr(self, name, value)
def read_workspace():
    """Populate the global ``workspace`` dict from the workspace layer.

    Scans appends/*.bbappend under config.workspace_path for EXTERNALSRC
    assignments and records, per recipe name (pn): srctree, bbappend path,
    recipefile path and srctreebase.  Creates (and, outside a fixed SDK
    setup, enables) the workspace layer if it does not exist yet.

    Raises DevtoolError if a bbappend contains no EXTERNALSRC:pn-* line.
    """
    global workspace
    workspace = {}
    if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')):
        if context.fixed_setup:
            # In a fixed (eSDK) setup the layer must already exist
            logger.error("workspace layer not set up")
            sys.exit(1)
        else:
            logger.info('Creating workspace layer in %s' % config.workspace_path)
            _create_workspace(config.workspace_path, config, basepath)
            if not context.fixed_setup:
                _enable_workspace_layer(config.workspace_path, config, basepath)

    logger.debug('Reading workspace in %s' % config.workspace_path)
    # Matches e.g.: EXTERNALSRC:pn-foo = "/path/to/src"
    externalsrc_re = re.compile(r'^EXTERNALSRC(:pn-([^ =]+))? *= *"([^"]*)"$')
    for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')):
        with open(fn, 'r') as f:
            pnvalues = {}
            pn = None
            for line in f:
                res = externalsrc_re.match(line.rstrip())
                if res:
                    # Recipe name defaults to the bbappend file name stem
                    recipepn = os.path.splitext(os.path.basename(fn))[0].split('_')[0]
                    pn = res.group(2) or recipepn
                    # Find the recipe file within the workspace, if any
                    bbfile = os.path.basename(fn).replace('.bbappend', '.bb').replace('%', '*')
                    recipefile = glob.glob(os.path.join(config.workspace_path,
                                                        'recipes',
                                                        recipepn,
                                                        bbfile))
                    if recipefile:
                        recipefile = recipefile[0]
                    pnvalues['srctree'] = res.group(3)
                    pnvalues['bbappend'] = fn
                    pnvalues['recipefile'] = recipefile
                elif line.startswith('# srctreebase: '):
                    pnvalues['srctreebase'] = line.split(':', 1)[1].strip()
            if pnvalues:
                if not pn:
                    raise DevtoolError("Found *.bbappend in %s, but could not determine EXTERNALSRC:pn-*. "
                                       "Maybe still using old syntax?" % config.workspace_path)
                if not pnvalues.get('srctreebase', None):
                    # Fall back to the source tree itself
                    pnvalues['srctreebase'] = pnvalues['srctree']
                logger.debug('Found recipe %s' % pnvalues)
                workspace[pn] = pnvalues
def create_workspace(args, config, basepath, workspace):
    """Implementation of the 'create-workspace' subcommand: create the
    workspace layer (optionally at args.layerpath) and, unless
    --create-only was given, enable it in bblayers.conf."""
    if args.layerpath:
        target = os.path.abspath(args.layerpath)
    else:
        target = os.path.abspath(os.path.join(basepath, 'workspace'))
    series = args.layerseries if args.layerseries else None
    _create_workspace(target, config, basepath, series)
    if not args.create_only:
        _enable_workspace_layer(target, config, basepath)
def _create_workspace(workspacedir, config, basepath, layerseries=None):
    """Create the workspace layer on disk (conf/layer.conf plus a README).

    If the layer already exists it is left untouched.  layerseries defaults
    to the current build's LAYERSERIES_CORENAMES, looked up via tinfoil.
    """
    import bb

    confdir = os.path.join(workspacedir, 'conf')
    if os.path.exists(os.path.join(confdir, 'layer.conf')):
        logger.info('Specified workspace already set up, leaving as-is')
    else:
        if not layerseries:
            tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
            try:
                layerseries = tinfoil.config_data.getVar('LAYERSERIES_CORENAMES')
            finally:
                tinfoil.shutdown()
        # Add a config file
        # The '$' + '{...}' concatenations keep the literal ${LAYERDIR}
        # references from being expanded if this script is ever templated.
        bb.utils.mkdirhier(confdir)
        with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
            f.write('# ### workspace layer auto-generated by devtool ###\n')
            f.write('BBPATH =. "$' + '{LAYERDIR}:"\n')
            f.write('BBFILES += "$' + '{LAYERDIR}/recipes/*/*.bb \\\n')
            f.write('            $' + '{LAYERDIR}/appends/*.bbappend"\n')
            f.write('BBFILE_COLLECTIONS += "workspacelayer"\n')
            f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
            f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
            f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
            f.write('LAYERSERIES_COMPAT_workspacelayer = "%s"\n' % layerseries)
        # Add a README file
        with open(os.path.join(workspacedir, 'README'), 'w') as f:
            f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
            f.write('contain recipes and bbappends that are currently being worked on. The idea\n')
            f.write('is that the contents is temporary - once you have finished working on a\n')
            f.write('recipe you use the appropriate method to move the files you have been\n')
            f.write('working on to a proper layer. In most instances you should use the\n')
            f.write('devtool utility to manage files within it rather than modifying files\n')
            f.write('directly (although recipes added with "devtool add" will often need\n')
            f.write('direct modification.)\n')
            f.write('\nIf you no longer need to use devtool or the workspace layer\'s contents\n')
            f.write('you can remove the path to this workspace layer from your conf/bblayers.conf\n')
            f.write('file (and then delete the layer, if you wish).\n')
            f.write('\nNote that by default, if devtool fetches and unpacks source code, it\n')
            f.write('will place it in a subdirectory of a "sources" subdirectory of the\n')
            f.write('layer. If you prefer it to be elsewhere you can specify the source\n')
            f.write('tree path on the command line.\n')
def _enable_workspace_layer(workspacedir, config, basepath):
    """Ensure the workspace layer is in bblayers.conf"""
    import bb
    bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf')
    if not os.path.exists(bblayers_conf):
        logger.error('Unable to find bblayers.conf')
        return
    # When switching to a new workspace location, drop the old layer entry
    if os.path.abspath(workspacedir) != os.path.abspath(config.workspace_path):
        removedir = config.workspace_path
    else:
        removedir = None
    _, added = bb.utils.edit_bblayers_conf(bblayers_conf, workspacedir, removedir)
    if added:
        logger.info('Enabling workspace layer in bblayers.conf')
    if config.workspace_path != workspacedir:
        # Update our config to point to the new location
        config.workspace_path = workspacedir
        config.write()
def main():
    """devtool entry point: parse global options, locate the build
    environment, load plugins, and dispatch to the chosen subcommand.

    Returns the subcommand's exit code (passed to sys.exit by the
    __main__ handler below); may also exit directly on setup errors.
    """
    global basepath
    global config
    global context

    if sys.getfilesystemencoding() != "utf-8":
        sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")

    context = Context(fixed_setup=False)

    # Default basepath
    basepath = os.path.dirname(os.path.abspath(__file__))

    parser = argparse_oe.ArgumentParser(description="OpenEmbedded development tool",
                                        add_help=False,
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('--basepath', help='Base directory of SDK / build directory')
    parser.add_argument('--bbpath', help='Explicitly specify the BBPATH, rather than getting it from the metadata')
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
    global_args, unparsed_args = parser.parse_known_args()

    # Help is added here rather than via add_help=True, as we don't want it to
    # be handled by parse_known_args()
    parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
                        help='show this help message and exit')

    if global_args.debug:
        logger.setLevel(logging.DEBUG)
    elif global_args.quiet:
        logger.setLevel(logging.ERROR)

    if global_args.basepath:
        # Override
        basepath = global_args.basepath
        if os.path.exists(os.path.join(basepath, '.devtoolbase')):
            context.fixed_setup = True
    else:
        # A .devtoolbase marker in a parent directory indicates a fixed
        # (extensible SDK) setup
        pth = basepath
        while pth != '' and pth != os.sep:
            if os.path.exists(os.path.join(pth, '.devtoolbase')):
                context.fixed_setup = True
                basepath = pth
                break
            pth = os.path.dirname(pth)

        if not context.fixed_setup:
            # Fall back to the initialised build directory
            basepath = os.environ.get('BUILDDIR')
            if not basepath:
                logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
                sys.exit(1)

    logger.debug('Using basepath %s' % basepath)

    config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf'))
    if not config.read():
        return -1
    context.config = config

    bitbake_subdir = config.get('General', 'bitbake_subdir', '')
    if bitbake_subdir:
        # Normally set for use within the SDK
        logger.debug('Using bitbake subdir %s' % bitbake_subdir)
        sys.path.insert(0, os.path.join(basepath, bitbake_subdir, 'lib'))
        core_meta_subdir = config.get('General', 'core_meta_subdir')
        sys.path.insert(0, os.path.join(basepath, core_meta_subdir, 'lib'))
    else:
        # Standard location
        import scriptpath
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)
        logger.debug('Using standard bitbake path %s' % bitbakepath)
        scriptpath.add_oe_lib_path()

    scriptutils.logger_setup_color(logger, global_args.color)

    if global_args.bbpath is None:
        try:
            tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
            try:
                global_args.bbpath = tinfoil.config_data.getVar('BBPATH')
            finally:
                tinfoil.shutdown()
        # NOTE(review): 'bb' is never imported in this module's scope, so
        # evaluating bb.BBHandledException here would NameError - confirm
        # whether a plugin/lib import binds it before this point.
        except bb.BBHandledException:
            return 2

    # Search BBPATH first to allow layers to override plugins in scripts_path
    pluginpaths = [os.path.join(path, 'lib', 'devtool') for path in global_args.bbpath.split(':') + [scripts_path]]
    context.pluginpaths = pluginpaths
    for pluginpath in pluginpaths:
        scriptutils.load_plugins(logger, plugins, pluginpath)

    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # Subcommand groups ordered by priority (larger shows earlier in --help)
    subparsers.add_subparser_group('sdk', 'SDK maintenance', -2)
    subparsers.add_subparser_group('advanced', 'Advanced', -1)
    subparsers.add_subparser_group('starting', 'Beginning work on a recipe', 100)
    subparsers.add_subparser_group('info', 'Getting information')
    subparsers.add_subparser_group('working', 'Working on a recipe in the workspace')
    subparsers.add_subparser_group('testbuild', 'Testing changes on target')

    if not context.fixed_setup:
        parser_create_workspace = subparsers.add_parser('create-workspace',
                                                        help='Set up workspace in an alternative location',
                                                        description='Sets up a new workspace. NOTE: other devtool subcommands will create a workspace automatically as needed, so you only need to use %(prog)s if you want to specify where the workspace should be located.',
                                                        group='advanced')
        parser_create_workspace.add_argument('layerpath', nargs='?', help='Path in which the workspace layer should be created')
        parser_create_workspace.add_argument('--layerseries', help='Layer series the workspace should be set to be compatible with')
        parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace layer, do not alter configuration')
        parser_create_workspace.set_defaults(func=create_workspace, no_workspace=True)

    # Let each discovered plugin register its subcommands
    for plugin in plugins:
        if hasattr(plugin, 'register_commands'):
            plugin.register_commands(subparsers, context)

    args = parser.parse_args(unparsed_args, namespace=global_args)

    try:
        if not getattr(args, 'no_workspace', False):
            read_workspace()
        ret = args.func(args, config, basepath, workspace)
    except DevtoolError as err:
        if str(err):
            logger.error(str(err))
        ret = err.exitcode
    except argparse_oe.ArgumentUsageError as ae:
        # error_subcommand() exits via sys.exit, so 'ret' is never read here
        parser.error_subcommand(ae.message, ae.subcommand)

    return ret
if __name__ == "__main__":
try:
ret = main()
except Exception:
ret = 1
import traceback
traceback.print_exc()
sys.exit(ret)

View File

@@ -0,0 +1 @@
../devtool

View File

@@ -0,0 +1 @@
../oe-find-native-sysroot

View File

@@ -0,0 +1 @@
../recipetool

View File

@@ -0,0 +1 @@
../runqemu

View File

@@ -0,0 +1 @@
../runqemu-addptable2image

View File

@@ -0,0 +1 @@
../runqemu-export-rootfs

View File

@@ -0,0 +1 @@
../runqemu-extract-sdk

View File

@@ -0,0 +1 @@
../runqemu-gen-tapdevs

View File

@@ -0,0 +1 @@
../runqemu-ifdown

View File

@@ -0,0 +1 @@
../runqemu-ifup

View File

@@ -0,0 +1 @@
../wic

View File

@@ -0,0 +1,122 @@
#!/usr/bin/env python3
#
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import sys
import shutil
import errno
import time
def mkdir(d):
    """Create directory *d* (including parents); a pre-existing directory
    is not an error.

    Improvements over the original errno-checking version: ``raise e``
    discarded the traceback, and swallowing every EEXIST also hid the case
    where a plain file blocks the path - makedirs(exist_ok=True) still
    raises for that.
    """
    os.makedirs(d, exist_ok=True)
# Pull the hash out of an sstate file name: the colon-separated field at
# index 7 starts with the hash; everything after the first underscore in
# that field is discarded. Raises IndexError for names with fewer fields.
def extract_sha(filename):
    fields = filename.split(':')
    hash_field = fields[7]
    return hash_field.split('_')[0]
# Scan <dir_>/<prefix>/ and append each file's full path to sha_map,
# keyed by the hash extracted from its name. A missing directory is
# silently skipped; names without a parsable hash are ignored.
def map_sha_to_files(dir_, prefix, sha_map):
    base = dir_ + '/' + prefix + '/'
    if not os.path.exists(base):
        return
    for entry in os.listdir(base):
        try:
            sha = extract_sha(entry)
        except IndexError:
            # Not enough colon-separated fields - not an sstate object
            continue
        sha_map.setdefault(sha, []).append(base + entry)
# given a prefix build a map of hash to list of files
def build_sha_cache(prefix):
    """Return a hash -> file-path-list map for one prefix directory.

    Scans both the main sstate dir (sys.argv[2]) and its native-lsbstring
    subdirectory (sys.argv[2]/sys.argv[4]).
    NOTE(review): relies on the script's positional arguments having been
    validated before this is called.
    """
    sha_map = {}
    sstate_dir = sys.argv[2]
    map_sha_to_files(sstate_dir, prefix, sha_map)
    native_sstate_dir = sys.argv[2] + '/' + sys.argv[4]
    map_sha_to_files(native_sstate_dir, prefix, sha_map)
    return sha_map
# ---- flat script body: runs at import time ----
if len(sys.argv) < 5:
    print("Incorrect number of arguments specified")
    print("syntax: gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir> <nativelsbstring> [filterfile]")
    sys.exit(1)

# Optional filter file: only tasks listed in it are copied
filterlist = []
if len(sys.argv) > 5:
    print('Reading filter file %s' % sys.argv[5])
    with open(sys.argv[5]) as f:
        for l in f.readlines():
            if ":" in l:
                filterlist.append(l.rstrip())

# Parse the locked-sigs file: relevant lines contain <task>:<sig>
print('Reading %s' % sys.argv[1])
sigs = []
with open(sys.argv[1]) as f:
    for l in f.readlines():
        if ":" in l:
            task, sig = l.split()[0].rsplit(':', 1)
            if filterlist and not task in filterlist:
                print('Filtering out %s' % task)
            else:
                sigs.append(sig)

# Locate the sstate files for each signature.  Files live under two
# levels of hash-prefix directories; directory listings are cached so
# each prefix pair is only scanned once.
print('Gathering file list')
start_time = time.perf_counter()
files = set()
sstate_content_cache = {}
for s in sigs:
    prefix = s[:2]
    prefix2 = s[2:4]
    if prefix not in sstate_content_cache:
        sstate_content_cache[prefix] = {}
    if prefix2 not in sstate_content_cache[prefix]:
        sstate_content_cache[prefix][prefix2] = build_sha_cache(prefix + "/" + prefix2)
    if s in sstate_content_cache[prefix][prefix2]:
        for f in sstate_content_cache[prefix][prefix2][s]:
            files.add(f)
elapsed = time.perf_counter() - start_time
print("Gathering file list took %.1fs" % elapsed)

# Copy the selected files into the output cache dir, hard-linking when
# source and destination are on the same filesystem.
print('Processing files')
for f in files:
    sys.stdout.write('Processing %s... ' % f)
    if not f.endswith(('.tar.zst', '.siginfo', '.sig')):
        # Most likely a temp file, skip it
        print('skipping')
        continue
    dst = os.path.join(sys.argv[3], os.path.relpath(f, sys.argv[2]))
    destdir = os.path.dirname(dst)
    mkdir(destdir)
    src = os.path.realpath(f)
    if os.path.exists(dst):
        os.remove(dst)
    if (os.stat(src).st_dev == os.stat(destdir).st_dev):
        print('linking')
        try:
            os.link(src, dst)
        except OSError as e:
            # Cross-device or permission problem - fall back to a copy
            print('hard linking failed, copying')
            shutil.copyfile(src, dst)
    else:
        print('copying')
        shutil.copyfile(src, dst)
print('Done!')

View File

@@ -0,0 +1,43 @@
#! /bin/sh
# Copyright (c) 2005-2008 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Emit a configure.ac skeleton that probes the entries listed in the
# $1/types, $1/funcs and $1/headers files (one name per line) so a site
# cache can be generated for cross builds.
cat << EOF
AC_PREREQ(2.57)
AC_INIT([site_wide],[1.0.0])
EOF
# Disable as endian is set in the default config
#echo AC_C_BIGENDIAN
#echo
if [ -e $1/types ] ; then
    while read type ; do
        echo "AC_CHECK_SIZEOF([$type])"
    done < $1/types
    echo
fi
if [ -e $1/funcs ]; then
    while read func ; do
        echo "AC_CHECK_FUNCS([$func])"
    done < $1/funcs
    echo
fi
if [ -e $1/headers ]; then
    while read header ; do
        echo "AC_CHECK_HEADERS([$header])"
    done < $1/headers
    echo
fi
cat << EOF
AC_OUTPUT
EOF

30
sources/poky/scripts/git Executable file
View File

@@ -0,0 +1,30 @@
#!/usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#
# Wrapper around 'git' that doesn't think we are root
import os
import shutil
import sys

# Keep pseudo (fakeroot) from intercepting the real git we exec below
os.environ['PSEUDO_UNLOAD'] = '1'
# calculate path to the real 'git'
path = os.environ['PATH']
# we need to remove our path but also any other copy of this script which
# may be present, e.g. eSDK.
replacements = [os.path.dirname(sys.argv[0])]
for p in path.split(":"):
    if p.endswith("/scripts"):
        replacements.append(p)
for r in replacements:
    # NOTE(review): plain substring replace - assumes these fragments do
    # not occur inside unrelated PATH entries; confirm acceptable.
    path = path.replace(r, '/ignoreme')
real_git = shutil.which('git', path=path)
# Replace this process with the real git, preserving our arguments
if len(sys.argv) == 1:
    os.execl(real_git, 'git')
os.execv(real_git, sys.argv)

View File

@@ -0,0 +1,360 @@
#!/usr/bin/env python3
# Buildtools and buildtools extended installer helper script
#
# Copyright (C) 2017-2020 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# NOTE: --with-extended-buildtools is on by default
#
# Example usage (extended buildtools from milestone):
# (1) using --url and --filename
# $ install-buildtools \
# --url http://downloads.yoctoproject.org/releases/yocto/milestones/yocto-3.1_M3/buildtools \
# --filename x86_64-buildtools-extended-nativesdk-standalone-3.0+snapshot-20200315.sh
# (2) using --base-url, --release, --installer-version and --build-date
# $ install-buildtools \
# --base-url http://downloads.yoctoproject.org/releases/yocto \
# --release yocto-3.1_M3 \
# --installer-version 3.0+snapshot
# --build-date 202000315
#
# Example usage (standard buildtools from release):
# (3) using --url and --filename
# $ install-buildtools --without-extended-buildtools \
# --url http://downloads.yoctoproject.org/releases/yocto/yocto-3.0.2/buildtools \
# --filename x86_64-buildtools-nativesdk-standalone-3.0.2.sh
# (4) using --base-url, --release and --installer-version
# $ install-buildtools --without-extended-buildtools \
# --base-url http://downloads.yoctoproject.org/releases/yocto \
# --release yocto-3.0.2 \
# --installer-version 3.0.2
#
import argparse
import logging
import os
import platform
import re
import shutil
import shlex
import stat
import subprocess
import sys
import tempfile
from urllib.parse import quote
scripts_path = os.path.dirname(os.path.realpath(__file__))
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptutils
import scriptpath
PROGNAME = 'install-buildtools'
logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout)

# Defaults used when --url/--filename are not given explicitly
DEFAULT_INSTALL_DIR = os.path.join(os.path.split(scripts_path)[0],'buildtools')
DEFAULT_BASE_URL = 'https://downloads.yoctoproject.org/releases/yocto'
DEFAULT_RELEASE = 'yocto-5.0.11'
DEFAULT_INSTALLER_VERSION = '5.0.11'
# Placeholder build date for milestone installers; expected to be
# overridden with --build-date
DEFAULT_BUILDDATE = '202110XX'

# Python version sanity check
if not (sys.version_info.major == 3 and sys.version_info.minor >= 4):
    logger.error("This script requires Python 3.4 or greater")
    logger.error("You have Python %s.%s" %
                 (sys.version_info.major, sys.version_info.minor))
    sys.exit(1)
# The following three functions are copied directly from
# bitbake/lib/bb/utils.py, in order to allow this script
# to run on versions of python earlier than what bitbake
# supports (e.g. less than Python 3.5 for YP 3.1 release)
def _hasher(method, filename):
    """Feed the contents of *filename* into *method* (a hashlib object)
    via mmap and return the resulting hex digest."""
    import mmap
    with open(filename, "rb") as f:
        try:
            with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
                chunk = mm.read(8192)
                while chunk:
                    method.update(chunk)
                    chunk = mm.read(8192)
        except ValueError:
            # You can't mmap() an empty file, and an empty file
            # contributes no data anyway - ignore.
            pass
    return method.hexdigest()
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    import hashlib
    digest = hashlib.md5()
    return _hasher(digest, filename)
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename.
    """
    import hashlib
    digest = hashlib.sha256()
    return _hasher(digest, filename)
def remove_quotes(var):
    """Strip a pair of surrounding double quotes from *var*.

    Assumption (unchanged from the original): a value that starts with a
    double quote also ends with one.  Fix: the original indexed var[0]
    unconditionally and raised IndexError on an empty string; empty input
    is now returned unchanged.
    """
    if var and var[0] == '"':
        var = var[1:-1]
    return var
def main():
    """Download a buildtools SDK installer, verify its checksum, install
    it and smoke-test the result.

    Returns 0 on success, non-zero otherwise (the value is handed to
    sys.exit by the __main__ handler below).
    """
    global DEFAULT_INSTALL_DIR
    global DEFAULT_BASE_URL
    global DEFAULT_RELEASE
    global DEFAULT_INSTALLER_VERSION
    global DEFAULT_BUILDDATE
    filename = ""
    release = ""
    buildtools_url = ""
    install_dir = ""
    arch = platform.machine()

    parser = argparse.ArgumentParser(
        description="Buildtools installation helper",
        add_help=False)
    parser.add_argument('-u', '--url',
                        help='URL from where to fetch buildtools SDK installer, not '
                             'including filename (optional)\n'
                             'Requires --filename.',
                        action='store')
    parser.add_argument('-f', '--filename',
                        help='filename for the buildtools SDK installer to be installed '
                             '(optional)\nRequires --url',
                        action='store')
    parser.add_argument('-d', '--directory',
                        default=DEFAULT_INSTALL_DIR,
                        help='directory where buildtools SDK will be installed (optional)',
                        action='store')
    parser.add_argument('-r', '--release',
                        default=DEFAULT_RELEASE,
                        help='Yocto Project release string for SDK which will be '
                             'installed (optional)',
                        action='store')
    parser.add_argument('-V', '--installer-version',
                        default=DEFAULT_INSTALLER_VERSION,
                        help='version string for the SDK to be installed (optional)',
                        action='store')
    parser.add_argument('-b', '--base-url',
                        default=DEFAULT_BASE_URL,
                        help='base URL from which to fetch SDK (optional)', action='store')
    parser.add_argument('-t', '--build-date',
                        default=DEFAULT_BUILDDATE,
                        help='Build date of pre-release SDK (optional)', action='store')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--with-extended-buildtools', action='store_true',
                       dest='with_extended_buildtools',
                       default=True,
                       help='enable extended buildtools tarball (on by default)')
    group.add_argument('--without-extended-buildtools', action='store_false',
                       dest='with_extended_buildtools',
                       help='disable extended buildtools (traditional buildtools tarball)')
    group.add_argument('--make-only', action='store_true',
                       help='only install make tarball')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-c', '--check', help='enable checksum validation',
                       default=True, action='store_true')
    group.add_argument('-n', '--no-check', help='disable checksum validation',
                       dest="check", action='store_false')
    parser.add_argument('-D', '--debug', help='enable debug output',
                        action='store_true')
    parser.add_argument('-q', '--quiet', help='print only errors',
                        action='store_true')
    parser.add_argument('-h', '--help', action='help',
                        default=argparse.SUPPRESS,
                        help='show this help message and exit')
    args = parser.parse_args()

    if args.make_only:
        # --make-only implies the plain (non-extended) make-only tarball
        args.with_extended_buildtools = False

    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    if args.url and args.filename:
        logger.debug("--url and --filename detected. Ignoring --base-url "
                     "--release --installer-version arguments.")
        filename = args.filename
        buildtools_url = "%s/%s" % (args.url, filename)
    else:
        if args.base_url:
            base_url = args.base_url
        else:
            base_url = DEFAULT_BASE_URL
        if args.release:
            # check if this is a pre-release "milestone" SDK
            m = re.search(r"^(?P<distro>[a-zA-Z\-]+)(?P<version>[0-9.]+)(?P<milestone>_M[1-9])$",
                          args.release)
            logger.debug("milestone regex: %s" % m)
            if m and m.group('milestone'):
                logger.debug("release[distro]: %s" % m.group('distro'))
                logger.debug("release[version]: %s" % m.group('version'))
                logger.debug("release[milestone]: %s" % m.group('milestone'))
                if not args.build_date:
                    # NOTE(review): execution falls through with an empty
                    # filename and the download below fails; kept as-is to
                    # preserve existing behaviour.
                    logger.error("Milestone installers require --build-date")
                else:
                    if args.make_only:
                        filename = "%s-buildtools-make-nativesdk-standalone-%s-%s.sh" % (
                            arch, args.installer_version, args.build_date)
                    elif args.with_extended_buildtools:
                        filename = "%s-buildtools-extended-nativesdk-standalone-%s-%s.sh" % (
                            arch, args.installer_version, args.build_date)
                    else:
                        filename = "%s-buildtools-nativesdk-standalone-%s-%s.sh" % (
                            arch, args.installer_version, args.build_date)
                    safe_filename = quote(filename)
                    buildtools_url = "%s/milestones/%s/buildtools/%s" % (base_url, args.release, safe_filename)
            # regular release SDK
            else:
                if args.make_only:
                    filename = "%s-buildtools-make-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
                # BUGFIX: this branch was a plain 'if', so with --make-only
                # (which forces with_extended_buildtools to False) the else
                # below overwrote the make-only filename chosen above.
                elif args.with_extended_buildtools:
                    filename = "%s-buildtools-extended-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
                else:
                    filename = "%s-buildtools-nativesdk-standalone-%s.sh" % (arch, args.installer_version)
                safe_filename = quote(filename)
                buildtools_url = "%s/%s/buildtools/%s" % (base_url, args.release, safe_filename)

    tmpsdk_dir = tempfile.mkdtemp()
    try:
        # Fetch installer
        logger.info("Fetching buildtools installer")
        tmpbuildtools = os.path.join(tmpsdk_dir, filename)
        ret = subprocess.call("wget -q -O %s %s" %
                              (tmpbuildtools, buildtools_url), shell=True)
        if ret != 0:
            logger.error("Could not download file from %s" % buildtools_url)
            return ret

        # Verify checksum
        if args.check:
            logger.info("Fetching buildtools installer checksum")
            checksum_type = "sha256sum"
            check_url = "{}.{}".format(buildtools_url, checksum_type)
            checksum_filename = "{}.{}".format(filename, checksum_type)
            tmpbuildtools_checksum = os.path.join(tmpsdk_dir, checksum_filename)
            ret = subprocess.call("wget -q -O %s %s" %
                                  (tmpbuildtools_checksum, check_url), shell=True)
            if ret != 0:
                logger.error("Could not download file from %s" % check_url)
                return ret
            # Checksum files look like: "<hex>  [path/]filename"
            regex = re.compile(r"^(?P<checksum>[0-9a-f]+)\s+(?P<path>.*/)?(?P<filename>.*)$")
            with open(tmpbuildtools_checksum, 'rb') as f:
                original = f.read()
                m = re.search(regex, original.decode("utf-8"))
                logger.debug("checksum regex match: %s" % m)
                logger.debug("checksum: %s" % m.group('checksum'))
                logger.debug("path: %s" % m.group('path'))
                logger.debug("filename: %s" % m.group('filename'))
            if filename != m.group('filename'):
                logger.error("Filename does not match name in checksum")
                return 1
            checksum = m.group('checksum')
            checksum_value = sha256_file(tmpbuildtools)
            if checksum == checksum_value:
                logger.info("Checksum success")
            else:
                logger.error("Checksum %s expected. Actual checksum is %s." %
                             (checksum, checksum_value))
                return 1

        # Make installer executable
        logger.info("Making installer executable")
        st = os.stat(tmpbuildtools)
        os.chmod(tmpbuildtools, st.st_mode | stat.S_IEXEC)
        logger.debug(os.stat(tmpbuildtools))
        if args.directory:
            install_dir = os.path.abspath(args.directory)
            ret = subprocess.call("%s -d %s -y" %
                                  (tmpbuildtools, install_dir), shell=True)
        else:
            install_dir = "/opt/poky/%s" % args.installer_version
            ret = subprocess.call("%s -y" % tmpbuildtools, shell=True)
        if ret != 0:
            logger.error("Could not run buildtools installer")
            return ret

        # Setup the environment: import the exports from the SDK's
        # environment-setup script into our own process environment
        logger.info("Setting up the environment")
        regex = re.compile(r'^(?P<export>export )?(?P<env_var>[A-Z_]+)=(?P<env_val>.+)$')
        with open("%s/environment-setup-%s-pokysdk-linux" %
                  (install_dir, arch), 'rb') as f:
            for line in f:
                match = regex.search(line.decode('utf-8'))
                logger.debug("export regex: %s" % match)
                if match:
                    env_var = match.group('env_var')
                    logger.debug("env_var: %s" % env_var)
                    env_val = remove_quotes(match.group('env_val'))
                    logger.debug("env_val: %s" % env_val)
                    os.environ[env_var] = env_val

        # Test installation: a representative tool must resolve from the
        # freshly installed buildtools
        logger.info("Testing installation")
        tool = ""
        m = re.search("extended", tmpbuildtools)
        logger.debug("extended regex: %s" % m)
        if args.with_extended_buildtools and not m:
            logger.info("Ignoring --with-extended-buildtools as filename "
                        "does not contain 'extended'")
        if args.make_only:
            tool = 'make'
        elif args.with_extended_buildtools and m:
            tool = 'gcc'
        else:
            tool = 'tar'
        logger.debug("install_dir: %s" % install_dir)
        cmd = shlex.split("/usr/bin/which %s" % tool)
        logger.debug("cmd: %s" % cmd)
        logger.debug("tool: %s" % tool)
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, errors = proc.communicate()
        logger.debug("proc.args: %s" % proc.args)
        logger.debug("proc.communicate(): output %s" % output)
        logger.debug("proc.communicate(): errors %s" % errors)
        which_tool = output.decode('utf-8')
        logger.debug("which %s: %s" % (tool, which_tool))
        ret = proc.returncode
        if not which_tool.startswith(install_dir):
            logger.error("Something went wrong: %s not found in %s" %
                         (tool, install_dir))
        if ret != 0:
            logger.error("Something went wrong: installation failed")
        else:
            logger.info("Installation successful. Remember to source the "
                        "environment setup script now and in any new session.")
        return ret
    finally:
        # cleanup tmp directory
        shutil.rmtree(tmpsdk_dir)
if __name__ == '__main__':
    # Any unexpected exception becomes exit code 1 with a traceback
    try:
        ret = main()
    except Exception:
        import traceback
        traceback.print_exc()
        ret = 1
    sys.exit(ret)

View File

@@ -0,0 +1,182 @@
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import argparse
from collections import defaultdict, OrderedDict
class ArgumentUsageError(Exception):
    """Exception class you can raise (and catch) in order to show the help"""

    def __init__(self, message, subcommand=None):
        # Stored for the caller (e.g. devtool's main) to route the message
        # to the right subparser's error handler.
        self.message = message
        self.subcommand = subcommand
class ArgumentParser(argparse.ArgumentParser):
    """Our own version of argparse's ArgumentParser

    Adds: full help on error, error routing to a named subparser, grouped
    subcommand listings (via OeHelpFormatter), and unrecognised-argument
    errors reported against the subparser that was actually used.
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('formatter_class', OeHelpFormatter)
        # groupname -> (description, sort order); used by OeHelpFormatter
        self._subparser_groups = OrderedDict()
        super(ArgumentParser, self).__init__(*args, **kwargs)
        self._positionals.title = 'arguments'
        self._optionals.title = 'options'

    def error(self, message):
        """error(message: string)

        Prints a help message incorporating the message to stderr and
        exits.
        """
        self._print_message('%s: error: %s\n' % (self.prog, message), sys.stderr)
        self.print_help(sys.stderr)
        sys.exit(2)

    def error_subcommand(self, message, subcommand):
        """Report *message* through the named subcommand's parser (so its
        help is shown); falls back to our own error() otherwise. Exits."""
        if subcommand:
            action = self._get_subparser_action()
            try:
                subparser = action._name_parser_map[subcommand]
            except KeyError:
                self.error('no subparser for name "%s"' % subcommand)
            else:
                subparser.error(message)
        self.error(message)

    def add_subparsers(self, *args, **kwargs):
        if 'dest' not in kwargs:
            kwargs['dest'] = '_subparser_name'
        ret = super(ArgumentParser, self).add_subparsers(*args, **kwargs)
        # Need a way of accessing the parent parser
        ret._parent_parser = self
        # Ensure our class gets instantiated
        ret._parser_class = ArgumentSubParser
        # Hacky way of adding a method to the subparsers object
        ret.add_subparser_group = self.add_subparser_group
        return ret

    def add_subparser_group(self, groupname, groupdesc, order=0):
        # Register a named group for the subcommand help listing
        self._subparser_groups[groupname] = (groupdesc, order)

    def parse_args(self, args=None, namespace=None):
        """Parse arguments, using the correct subparser to show the error."""
        args, argv = self.parse_known_args(args, namespace)
        if argv:
            message = 'unrecognized arguments: %s' % ' '.join(argv)
            if self._subparsers:
                subparser = self._get_subparser(args)
                subparser.error(message)
            else:
                self.error(message)
            sys.exit(2)
        return args

    def _get_subparser(self, args):
        # Map the parsed subcommand name back to its parser object
        action = self._get_subparser_action()
        if action.dest == argparse.SUPPRESS:
            self.error('cannot get subparser, the subparser action dest is suppressed')
        name = getattr(args, action.dest)
        try:
            return action._name_parser_map[name]
        except KeyError:
            self.error('no subparser for name "%s"' % name)

    def _get_subparser_action(self):
        # Find the _SubParsersAction among our registered group actions
        if not self._subparsers:
            self.error('cannot return the subparser action, no subparsers added')
        for action in self._subparsers._group_actions:
            if isinstance(action, argparse._SubParsersAction):
                return action
class ArgumentSubParser(ArgumentParser):
    """Subcommand parser that tolerates optional positional arguments
    being intermixed with options (workaround for an argparse bug).

    Accepts extra 'group' and 'order' keywords used by OeHelpFormatter
    to group and sort the subcommand help listing.
    """
    def __init__(self, *args, **kwargs):
        if 'group' in kwargs:
            self._group = kwargs.pop('group')
        if 'order' in kwargs:
            self._order = kwargs.pop('order')
        super(ArgumentSubParser, self).__init__(*args, **kwargs)

    def parse_known_args(self, args=None, namespace=None):
        # This works around argparse not handling optional positional arguments being
        # intermixed with other options. A pretty horrible hack, but we're not left
        # with much choice given that the bug in argparse exists and it's difficult
        # to subclass.
        # Borrowed from http://stackoverflow.com/questions/20165843/argparse-how-to-handle-variable-number-of-arguments-nargs
        # with an extra workaround (in format_help() below) for the positional
        # arguments disappearing from the --help output, as well as structural tweaks.
        # Originally simplified from http://bugs.python.org/file30204/test_intermixed.py
        positionals = self._get_positional_actions()
        for action in positionals:
            # deactivate positionals
            action.save_nargs = action.nargs
            action.nargs = 0
        namespace, remaining_args = super(ArgumentSubParser, self).parse_known_args(args, namespace)
        for action in positionals:
            # remove the empty positional values from namespace
            if hasattr(namespace, action.dest):
                delattr(namespace, action.dest)
        for action in positionals:
            # reactivate positionals
            action.nargs = action.save_nargs
        # parse positionals
        namespace, extras = super(ArgumentSubParser, self).parse_known_args(remaining_args, namespace)
        return namespace, extras

    def format_help(self):
        # Quick, restore the positionals!
        # (their nargs may still be 0 from parse_known_args above)
        positionals = self._get_positional_actions()
        for action in positionals:
            if hasattr(action, 'save_nargs'):
                action.nargs = action.save_nargs
        return super(ArgumentParser, self).format_help()
class OeHelpFormatter(argparse.HelpFormatter):
    """Help formatter that renders the subcommand list grouped by the
    '_group' attribute and sorted by the '_order' attribute that
    ArgumentSubParser stashes on each subcommand parser."""
    def _format_action(self, action):
        # Only the subcommand listing gets custom treatment; everything
        # else falls through to stock argparse formatting.
        if hasattr(action, '_get_subactions'):
            # subcommands list
            groupmap = defaultdict(list)
            ordermap = {}
            subparser_groups = action._parent_parser._subparser_groups
            # Registered groups first, ordered by their priority value
            # (second tuple element), highest first
            groups = sorted(subparser_groups.keys(), key=lambda item: subparser_groups[item][1], reverse=True)
            for subaction in self._iter_indented_subactions(action):
                parser = action._name_parser_map[subaction.dest]
                group = getattr(parser, '_group', None)
                groupmap[group].append(subaction)
                if group not in groups:
                    groups.append(group)
                order = getattr(parser, '_order', 0)
                ordermap[subaction.dest] = order
            lines = []
            if len(groupmap) > 1:
                # Indent subcommands under group headings only when
                # there is more than one group to show
                groupindent = ' '
            else:
                groupindent = ''
            for group in groups:
                subactions = groupmap[group]
                if not subactions:
                    continue
                if groupindent:
                    if not group:
                        group = 'other'
                    # Fall back to the group key itself when the group
                    # was never registered with a description
                    groupdesc = subparser_groups.get(group, (group, 0))[0]
                    lines.append(' %s:' % groupdesc)
                # Within a group, higher '_order' values are listed first
                for subaction in sorted(subactions, key=lambda item: ordermap[item.dest], reverse=True):
                    lines.append('%s%s' % (groupindent, self._format_action(subaction).rstrip()))
            return '\n'.join(lines)
        else:
            return super(OeHelpFormatter, self)._format_action(action)
def int_positive(value):
    """argparse type function accepting only strictly positive integers."""
    parsed = int(value)
    if parsed <= 0:
        raise argparse.ArgumentTypeError(
            "%s is not a positive int value" % value)
    return parsed

View File

@@ -0,0 +1,24 @@
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Build performance test library functions"""
def print_table(rows, row_fmt=None):
    """Print data table.

    Each cell is rendered with the matching entry of row_fmt; the
    default format pads every column to the width of its widest cell.
    """
    if not rows:
        return
    if not row_fmt:
        row_fmt = ['{:{wid}} '] * len(rows[0])
    # First pass: find the widest cell in every column
    widths = [0] * len(row_fmt)
    for row in rows:
        for idx, cell in enumerate(row):
            widths[idx] = max(widths[idx], len(str(cell)))
    # Second pass: render each row using the computed column widths
    for row in rows:
        print(*(row_fmt[idx].format(cell, wid=widths[idx])
                for idx, cell in enumerate(row)))

View File

@@ -0,0 +1,12 @@
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Helper module for HTML reporting"""
from jinja2 import Environment, PackageLoader
# Jinja2 environment loading templates from the 'html' directory inside
# the build_perf package
env = Environment(loader=PackageLoader('build_perf', 'html'))
# Top-level report template rendered by the report generator
template = env.get_template('report.html')

View File

@@ -0,0 +1,50 @@
{# Renders one Google Charts line chart for a single measurement of a
   single test; included by report.html with chart_elem_id, chart_opts
   and measurement set in the including context. #}
<script type="text/javascript">
  chartsDrawing += 1;
  google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }});
  function drawChart_{{ chart_elem_id }}() {
    var data = new google.visualization.DataTable();
    // Chart options
    var options = {
      theme : 'material',
      legend: 'none',
      hAxis: { format: '', title: 'Commit number',
               minValue: {{ chart_opts.haxis.min }},
               maxValue: {{ chart_opts.haxis.max }} },
      {% if measurement.type == 'time' %}
      vAxis: { format: 'h:mm:ss' },
      {% else %}
      vAxis: { format: '' },
      {% endif %}
      pointSize: 5,
      chartArea: { left: 80, right: 15 },
    };
    // Define data columns
    data.addColumn('number', 'Commit');
    data.addColumn('{{ measurement.value_type.gv_data_type }}',
                   '{{ measurement.value_type.quantity }}');
    // Add data rows
    data.addRows([
      {% for sample in measurement.samples %}
      [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}],
      {% endfor %}
    ]);
    // Finally, draw the chart
    chart_div = document.getElementById('{{ chart_elem_id }}');
    var chart = new google.visualization.LineChart(chart_div);
    google.visualization.events.addListener(chart, 'ready', function () {
      //chart_div = document.getElementById('{{ chart_elem_id }}');
      //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">';
      // Swap the placeholder div for a link to a PNG rendering of the chart
      png_div = document.getElementById('{{ chart_elem_id }}_png');
      png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>';
      // These console messages are watched by the page-scraping script
      console.log("CHART READY: {{ chart_elem_id }}");
      chartsDrawing -= 1;
      if (chartsDrawing == 0)
        console.log("ALL CHARTS READY");
    });
    chart.draw(data, options);
  }
</script>

View File

@@ -0,0 +1,289 @@
<!DOCTYPE html>
<html lang="en">
<head>
{# Scripts, for visualization#}
<!--START-OF-SCRIPTS-->
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript">
google.charts.load('current', {'packages':['corechart']});
var chartsDrawing = 0;
</script>
{# Render measurement result charts #}
{% for test in test_data %}
{% if test.status == 'SUCCESS' %}
{% for measurement in test.measurements %}
{% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %}
{% include 'measurement_chart.html' %}
{% endfor %}
{% endif %}
{% endfor %}
<!--END-OF-SCRIPTS-->
{# Styles #}
<style>
.meta-table {
font-size: 14px;
text-align: left;
border-collapse: collapse;
}
.meta-table tr:nth-child(even){background-color: #f2f2f2}
meta-table th, .meta-table td {
padding: 4px;
}
.summary {
margin: 0;
font-size: 14px;
text-align: left;
border-collapse: collapse;
}
summary th, .meta-table td {
padding: 4px;
}
.measurement {
padding: 8px 0px 8px 8px;
border: 2px solid #f0f0f0;
margin-bottom: 10px;
}
.details {
margin: 0;
font-size: 12px;
text-align: left;
border-collapse: collapse;
}
.details th {
padding-right: 8px;
}
.details.plain th {
font-weight: normal;
}
.preformatted {
font-family: monospace;
white-space: pre-wrap;
background-color: #f0f0f0;
margin-left: 10px;
}
hr {
color: #f0f0f0;
}
h2 {
font-size: 20px;
margin-bottom: 0px;
color: #707070;
}
h3 {
font-size: 16px;
margin: 0px;
color: #707070;
}
</style>
<title>{{ title }}</title>
</head>
{% macro poky_link(commit) -%}
<a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a>
{%- endmacro %}
<body><div style="width: 700px">
{# Test metadata #}
<h2>General</h2>
<hr>
<table class="meta-table" style="width: 100%">
<tr>
<th></th>
<th>Current commit</th>
<th>Comparing with</th>
</tr>
{% for key, item in metadata.items() %}
<tr>
<th>{{ item.title }}</th>
{%if key == 'commit' %}
<td>{{ poky_link(item.value) }}</td>
<td>{{ poky_link(item.value_old) }}</td>
{% else %}
<td>{{ item.value }}</td>
<td>{{ item.value_old }}</td>
{% endif %}
</tr>
{% endfor %}
</table>
{# Test result summary #}
<h2>Test result summary</h2>
<hr>
<table class="summary" style="width: 100%">
{% for test in test_data %}
{% if loop.index is even %}
{% set row_style = 'style="background-color: #f2f2f2"' %}
{% else %}
{% set row_style = 'style="background-color: #ffffff"' %}
{% endif %}
{% if test.status == 'SUCCESS' %}
{% for measurement in test.measurements %}
<tr {{ row_style }}>
{% if loop.index == 1 %}
<td>{{ test.name }}: {{ test.description }}</td>
{% else %}
{# add empty cell in place of the test name#}
<td></td>
{% endif %}
{% if measurement.absdiff > 0 %}
{% set result_style = "color: red" %}
{% elif measurement.absdiff == measurement.absdiff %}
{% set result_style = "color: green" %}
{% else %}
{% set result_style = "color: orange" %}
{%endif %}
{% if measurement.reldiff|abs > 2 %}
{% set result_style = result_style + "; font-weight: bold" %}
{% endif %}
<td>{{ measurement.description }}</td>
<td style="font-weight: bold">{{ measurement.value.mean }}</td>
<td style="{{ result_style }}">{{ measurement.absdiff_str }}</td>
<td style="{{ result_style }}">{{ measurement.reldiff_str }}</td>
</tr>
{% endfor %}
{% else %}
<td style="font-weight: bold; color: red;">{{test.status }}</td>
<td></td> <td></td> <td></td> <td></td>
{% endif %}
{% endfor %}
</table>
{# Detailed test results #}
{% for test in test_data %}
<h2>{{ test.name }}: {{ test.description }}</h2>
<hr>
{% if test.status == 'SUCCESS' %}
{% for measurement in test.measurements %}
<div class="measurement">
<h3>{{ measurement.description }}</h3>
<div style="font-weight:bold;">
<span style="font-size: 23px;">{{ measurement.value.mean }}</span>
<span style="font-size: 20px; margin-left: 12px">
{% if measurement.absdiff > 0 %}
<span style="color: red">
{% elif measurement.absdiff == measurement.absdiff %}
<span style="color: green">
{% else %}
<span style="color: orange">
{% endif %}
{{ measurement.absdiff_str }} ({{measurement.reldiff_str}})
</span></span>
</div>
{# Table for trendchart and the statistics #}
<table style="width: 100%">
<tr>
<td style="width: 75%">
{# Linechart #}
<div id="{{ test.name }}_{{ measurement.name }}_chart"></div>
</td>
<td>
{# Measurement statistics #}
<table class="details plain">
<tr>
<th>Test runs</th><td>{{ measurement.value.sample_cnt }}</td>
</tr><tr>
<th>-/+</th><td>-{{ measurement.value.minus }} / +{{ measurement.value.plus }}</td>
</tr><tr>
<th>Min</th><td>{{ measurement.value.min }}</td>
</tr><tr>
<th>Max</th><td>{{ measurement.value.max }}</td>
</tr><tr>
<th>Stdev</th><td>{{ measurement.value.stdev }}</td>
</tr><tr>
<th><div id="{{ test.name }}_{{ measurement.name }}_chart_png"></div></th>
<td></td>
</tr>
</table>
</td>
</tr>
</table>
{# Task and recipe summary from buildstats #}
{% if 'buildstats' in measurement %}
Task resource usage
<table class="details" style="width:100%">
<tr>
<th>Number of tasks</th>
<th>Top consumers of cputime</th>
</tr>
<tr>
<td style="vertical-align: top">{{ measurement.buildstats.tasks.count }} ({{ measurement.buildstats.tasks.change }})</td>
{# Table of most resource-hungry tasks #}
<td>
<table class="details plain">
{% for diff in measurement.buildstats.top_consumer|reverse %}
<tr>
<th>{{ diff.pkg }}.{{ diff.task }}</th>
<td>{{ '%0.0f' % diff.value2 }} s</td>
</tr>
{% endfor %}
</table>
</td>
</tr>
<tr>
<th>Biggest increase in cputime</th>
<th>Biggest decrease in cputime</th>
</tr>
<tr>
{# Table biggest increase in resource usage #}
<td>
<table class="details plain">
{% for diff in measurement.buildstats.top_increase|reverse %}
<tr>
<th>{{ diff.pkg }}.{{ diff.task }}</th>
<td>{{ '%+0.0f' % diff.absdiff }} s</td>
</tr>
{% endfor %}
</table>
</td>
{# Table biggest decrease in resource usage #}
<td>
<table class="details plain">
{% for diff in measurement.buildstats.top_decrease %}
<tr>
<th>{{ diff.pkg }}.{{ diff.task }}</th>
<td>{{ '%+0.0f' % diff.absdiff }} s</td>
</tr>
{% endfor %}
</table>
</td>
</tr>
</table>
{# Recipe version differences #}
{% if measurement.buildstats.ver_diff %}
<div style="margin-top: 16px">Recipe version changes</div>
<table class="details">
{% for head, recipes in measurement.buildstats.ver_diff.items() %}
<tr>
<th colspan="2">{{ head }}</th>
</tr>
{% for name, info in recipes|sort %}
<tr>
<td>{{ name }}</td>
<td>{{ info }}</td>
</tr>
{% endfor %}
{% endfor %}
</table>
{% else %}
<div style="margin-top: 16px">No recipe version changes detected</div>
{% endif %}
{% endif %}
</div>
{% endfor %}
{# Unsuccessful test #}
{% else %}
<span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }}
{% if test.err_type %}<span style="font-size: 75%; font-weight: normal">({{ test.err_type }})</span>{% endif %}
</span>
<div class="preformatted">{{ test.message }}</div>
{% endif %}
{% endfor %}
</div></body>
</html>

View File

@@ -0,0 +1,339 @@
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Handling of build perf test reports"""
from collections import OrderedDict, namedtuple
from collections.abc import Mapping
from datetime import datetime, timezone
from numbers import Number
from statistics import mean, stdev, variance
AggregateTestData = namedtuple('AggregateTestData', ['metadata', 'results'])
def isofmt_to_timestamp(string):
    """Convert timestamp string in ISO 8601 format into unix timestamp"""
    # Timestamps may or may not carry a fractional-seconds part
    fmt = '%Y-%m-%dT%H:%M:%S.%f' if '.' in string else '%Y-%m-%dT%H:%M:%S'
    parsed = datetime.strptime(string, fmt)
    # Input is interpreted as UTC
    return parsed.replace(tzinfo=timezone.utc).timestamp()
def metadata_xml_to_json(elem):
    """Convert metadata xml into JSON format.

    The root element must be <metadata>; nested elements become nested
    OrderedDicts keyed by the child's 'name' attribute (falling back to
    its tag), leaf elements map to their text content.
    """
    assert elem.tag == 'metadata', "Invalid metadata file format"

    def _xml_to_json(elem):
        """Convert xml element to JSON object"""
        out = OrderedDict()
        # BUGFIX: Element.getchildren() was removed in Python 3.9;
        # iterating the element directly is the supported equivalent
        for child in elem:
            key = child.attrib.get('name', child.tag)
            if len(child):
                out[key] = _xml_to_json(child)
            else:
                out[key] = child.text
        return out
    return _xml_to_json(elem)
def results_xml_to_json(elem):
    """Convert results xml into JSON format.

    Expects a <testsuites> root containing exactly one <testsuite>;
    returns an OrderedDict with suite-level timing and a 'tests' dict of
    per-testcase data (status, timing and measurements).
    """
    rusage_fields = ('ru_utime', 'ru_stime', 'ru_maxrss', 'ru_minflt',
                     'ru_majflt', 'ru_inblock', 'ru_oublock', 'ru_nvcsw',
                     'ru_nivcsw')
    iostat_fields = ('rchar', 'wchar', 'syscr', 'syscw', 'read_bytes',
                     'write_bytes', 'cancelled_write_bytes')

    def _read_measurement(elem):
        """Convert measurement to JSON"""
        data = OrderedDict()
        data['type'] = elem.tag
        data['name'] = elem.attrib['name']
        data['legend'] = elem.attrib['legend']
        values = OrderedDict()
        # SYSRES measurement
        if elem.tag == 'sysres':
            for subel in elem:
                if subel.tag == 'time':
                    values['start_time'] = isofmt_to_timestamp(subel.attrib['timestamp'])
                    values['elapsed_time'] = float(subel.text)
                elif subel.tag == 'rusage':
                    rusage = OrderedDict()
                    for field in rusage_fields:
                        # Time fields are fractional, counters integral
                        if 'time' in field:
                            rusage[field] = float(subel.attrib[field])
                        else:
                            rusage[field] = int(subel.attrib[field])
                    values['rusage'] = rusage
                elif subel.tag == 'iostat':
                    values['iostat'] = OrderedDict([(f, int(subel.attrib[f]))
                                                    for f in iostat_fields])
                elif subel.tag == 'buildstats_file':
                    values['buildstats_file'] = subel.text
                else:
                    raise TypeError("Unknown sysres value element '{}'".format(subel.tag))
        # DISKUSAGE measurement
        elif elem.tag == 'diskusage':
            values['size'] = int(elem.find('size').text)
        else:
            raise Exception("Unknown measurement tag '{}'".format(elem.tag))
        data['values'] = values
        return data

    def _read_testcase(elem):
        """Convert testcase into JSON"""
        assert elem.tag == 'testcase', "Expecting 'testcase' element instead of {}".format(elem.tag)
        data = OrderedDict()
        data['name'] = elem.attrib['name']
        data['description'] = elem.attrib['description']
        data['status'] = 'SUCCESS'
        data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
        data['elapsed_time'] = float(elem.attrib['time'])
        measurements = OrderedDict()
        # BUGFIX: Element.getchildren() was removed in Python 3.9;
        # iterate the element directly instead (also below)
        for subel in elem:
            if subel.tag == 'error' or subel.tag == 'failure':
                data['status'] = subel.tag.upper()
                data['message'] = subel.attrib['message']
                data['err_type'] = subel.attrib['type']
                data['err_output'] = subel.text
            elif subel.tag == 'skipped':
                data['status'] = 'SKIPPED'
                data['message'] = subel.text
            else:
                measurements[subel.attrib['name']] = _read_measurement(subel)
        data['measurements'] = measurements
        return data

    def _read_testsuite(elem):
        """Convert suite to JSON"""
        assert elem.tag == 'testsuite', \
               "Expecting 'testsuite' element instead of {}".format(elem.tag)
        data = OrderedDict()
        if 'hostname' in elem.attrib:
            data['tester_host'] = elem.attrib['hostname']
        data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp'])
        data['elapsed_time'] = float(elem.attrib['time'])
        tests = OrderedDict()
        for case in elem:
            tests[case.attrib['name']] = _read_testcase(case)
        data['tests'] = tests
        return data

    # Main function
    assert elem.tag == 'testsuites', "Invalid test report format"
    assert len(elem) == 1, "Too many testsuites"
    return _read_testsuite(elem[0])
def aggregate_metadata(metadata):
    """Aggregate metadata into one, basically a sanity check"""
    mutable_keys = ('pretty_name', 'version_id')

    def merge(acc, obj, assert_str=True):
        """Aggregate objects together"""
        assert type(acc) is type(obj), \
            "Type mismatch: {} != {}".format(type(acc), type(obj))
        if isinstance(obj, Mapping):
            assert set(acc.keys()) == set(obj.keys())
            for key, val in obj.items():
                # Keys listed in mutable_keys are allowed to differ
                merge(acc[key], val, key not in mutable_keys)
        elif isinstance(obj, list):
            assert len(acc) == len(obj)
            for idx, val in enumerate(obj):
                merge(acc[idx], val)
        elif not isinstance(obj, str) or (isinstance(obj, str) and assert_str):
            assert acc == obj, "Data mismatch {} != {}".format(acc, obj)

    if not metadata:
        return {}
    # Do the aggregation: keep the first run's values, verifying that
    # every subsequent run agrees on the immutable ones
    aggregate = metadata[0].copy()
    for testrun in metadata[1:]:
        merge(aggregate, testrun)
    aggregate['testrun_count'] = len(metadata)
    return aggregate
def aggregate_data(data):
    """Aggregate multiple test results JSON structures into one.

    Numeric leaves are collected into SampleList containers (one sample
    per testrun); strings are sanity-checked for equality unless their
    key is in mutable_keys; dicts and lists are merged recursively.
    """
    mutable_keys = ('status', 'message', 'err_type', 'err_output')

    class SampleList(list):
        """Container for numerical samples"""
        pass

    def new_aggregate_obj(obj):
        """Create new object for aggregate"""
        if isinstance(obj, Number):
            new_obj = SampleList()
            new_obj.append(obj)
        elif isinstance(obj, str):
            new_obj = obj
        else:
            # Lists and dicts are kept as is
            new_obj = obj.__class__()
            aggregate_obj(new_obj, obj)
        return new_obj

    def aggregate_obj(aggregate, obj, assert_str=True):
        """Recursive "aggregation" of JSON objects"""
        if isinstance(obj, Number):
            assert isinstance(aggregate, SampleList)
            aggregate.append(obj)
            return
        assert type(aggregate) == type(obj), \
            "Type mismatch: {} != {}".format(type(aggregate), type(obj))
        if isinstance(obj, Mapping):
            for key, val in obj.items():
                if not key in aggregate:
                    aggregate[key] = new_aggregate_obj(val)
                else:
                    aggregate_obj(aggregate[key], val, key not in mutable_keys)
        elif isinstance(obj, list):
            for i, val in enumerate(obj):
                if i >= len(aggregate):
                    # BUGFIX: was 'aggregate[key] = new_aggregate_obj(val)'
                    # — 'key' is not defined in this scope (NameError);
                    # new elements must be appended to the aggregate list
                    aggregate.append(new_aggregate_obj(val))
                else:
                    aggregate_obj(aggregate[i], val)
        elif isinstance(obj, str):
            # Sanity check for data
            if assert_str:
                assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj)
        else:
            raise Exception("BUG: unable to aggregate '{}' ({})".format(type(obj), str(obj)))

    if not data:
        return {}
    # Do the aggregation
    aggregate = data[0].__class__()
    for testrun in data:
        aggregate_obj(aggregate, testrun)
    return aggregate
class MeasurementVal(float):
    """Base class representing measurement values"""
    gv_data_type = 'number'
    def gv_value(self):
        """Value formatting for visualization"""
        # NaN (the only value where self != self) renders as JS null
        if self != self:
            return "null"
        else:
            return self
class TimeVal(MeasurementVal):
    """Class representing time values"""
    quantity = 'time'
    gv_title = 'elapsed time'
    gv_data_type = 'timeofday'
    def hms(self):
        """Split time into hours, minutes and seconds"""
        # BUGFIX (doc): fixed 'seconeds' typo
        hhh = int(abs(self) / 3600)
        mmm = int((abs(self) % 3600) / 60)
        sss = abs(self) % 60
        return hhh, mmm, sss
    def __str__(self):
        if self != self:
            return "nan"
        hh, mm, ss = self.hms()
        sign = '-' if self < 0 else ''
        if hh > 0:
            return '{}{:d}:{:02d}:{:02.0f}'.format(sign, hh, mm, ss)
        elif mm > 0:
            return '{}{:d}:{:04.1f}'.format(sign, mm, ss)
        elif ss > 1:
            return '{}{:.1f} s'.format(sign, ss)
        else:
            return '{}{:.2f} s'.format(sign, ss)
    def gv_value(self):
        """Value formatting for visualization"""
        if self != self:
            return "null"
        hh, mm, ss = self.hms()
        # Google Charts 'timeofday': [hours, minutes, seconds, millis]
        return [hh, mm, int(ss), int(ss*1000) % 1000]
class SizeVal(MeasurementVal):
    """Class representing size values, stored in kiB"""
    # BUGFIX (doc): docstring previously said "time values" — a
    # copy-paste from TimeVal; __str__/gv_value below treat the raw
    # value as kiB
    quantity = 'size'
    gv_title = 'size in MiB'
    gv_data_type = 'number'
    def __str__(self):
        if self != self:
            return "nan"
        if abs(self) < 1024:
            return '{:.1f} kiB'.format(self)
        elif abs(self) < 1048576:
            return '{:.2f} MiB'.format(self / 1024)
        else:
            return '{:.2f} GiB'.format(self / 1048576)
    def gv_value(self):
        """Value formatting for visualization"""
        if self != self:
            return "null"
        # Charts plot MiB (see gv_title); the stored value is kiB
        return self / 1024
def measurement_stats(meas, prefix=''):
    """Get statistics of a measurement"""
    if not meas:
        # No samples at all: everything is NaN except the sample count
        nan = MeasurementVal('nan')
        empty = {prefix + field: nan for field in
                 ('mean', 'stdev', 'variance', 'min', 'max', 'minus', 'plus')}
        empty[prefix + 'sample_cnt'] = 0
        return empty
    # Map measurement type to its value class and the values key to read
    kind_map = {'sysres': (TimeVal, 'elapsed_time'),
                'diskusage': (SizeVal, 'size')}
    if meas['type'] not in kind_map:
        raise Exception("Unknown measurement type '{}'".format(meas['type']))
    val_cls, value_key = kind_map[meas['type']]
    values = meas['values'][value_key]
    stats = {'name': meas['name']}
    stats['val_cls'] = val_cls
    stats['quantity'] = val_cls.quantity
    stats[prefix + 'sample_cnt'] = len(values)
    mean_val = val_cls(mean(values))
    min_val = val_cls(min(values))
    max_val = val_cls(max(values))
    stats[prefix + 'mean'] = mean_val
    if len(values) > 1:
        stats[prefix + 'stdev'] = val_cls(stdev(values))
        stats[prefix + 'variance'] = val_cls(variance(values))
    else:
        # Spread is undefined for a single sample
        stats[prefix + 'stdev'] = float('nan')
        stats[prefix + 'variance'] = float('nan')
    stats[prefix + 'min'] = min_val
    stats[prefix + 'max'] = max_val
    stats[prefix + 'minus'] = val_cls(mean_val - min_val)
    stats[prefix + 'plus'] = val_cls(max_val - mean_val)
    return stats

View File

@@ -0,0 +1,56 @@
// PhantomJS script: load a generated build-perf report HTML file, wait
// for the Google Charts in it to finish drawing, then save the fully
// rendered page (and per-chart PNG link markup) into OUT_DIR.
var fs = require('fs');
var system = require('system');
var page = require('webpage').create();
// Examine console log for message from chart drawing
// (the report's chart scripts log "CHART READY <id>" per chart and
// "ALL CHARTS READY" once the in-page counter reaches zero)
page.onConsoleMessage = function(msg) {
    console.log(msg);
    if (msg === "ALL CHARTS READY") {
        window.charts_ready = true;
    }
    else if (msg.slice(0, 11) === "CHART READY") {
        var chart_id = msg.split(" ")[2];
        console.log('grabbing ' + chart_id);
        // NOTE(review): png_data is the anchor element's outerHTML (a
        // link whose href is the chart's image URI), not raw PNG bytes,
        // even though it is written to a .png-named file
        var png_data = page.evaluate(function (chart_id) {
            var chart_div = document.getElementById(chart_id + '_png');
            return chart_div.outerHTML;
        }, chart_id);
        fs.write(args[2] + '/' + chart_id + '.png', png_data, 'w');
    }
};
// Check command line arguments
var args = system.args;
if (args.length != 3) {
    console.log("USAGE: " + args[0] + " REPORT_HTML OUT_DIR\n");
    phantom.exit(1);
}
// Open the web page
page.open(args[1], function(status) {
    if (status == 'fail') {
        console.log("Failed to open file '" + args[1] + "'");
        phantom.exit(1);
    }
});
// Check status every 100 ms
interval = window.setInterval(function () {
    //console.log('waiting');
    if (window.charts_ready) {
        clearTimeout(timer);
        clearInterval(interval);
        // Save the rendered page under its original basename in OUT_DIR
        var fname = args[1].replace(/\/+$/, "").split("/").pop()
        console.log("saving " + fname);
        fs.write(args[2] + '/' + fname, page.content, 'w');
        phantom.exit(0);
    }
}, 100);
// Time-out after 10 seconds
timer = window.setTimeout(function () {
    clearInterval(interval);
    console.log("ERROR: timeout");
    phantom.exit(1);
}, 10000);

View File

@@ -0,0 +1,368 @@
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Functionality for analyzing buildstats"""
import json
import logging
import os
import re
from collections import namedtuple
from statistics import mean
# Root logger; handlers/levels are configured by the CLI entry points
log = logging.getLogger()
# Field names for one row of task-to-task comparison output
taskdiff_fields = ('pkg', 'pkg_op', 'task', 'task_op', 'value1', 'value2',
                   'absdiff', 'reldiff')
TaskDiff = namedtuple('TaskDiff', ' '.join(taskdiff_fields))
class BSError(Exception):
    """Error handling of buildstats"""
    # Raised for malformed buildstats files/directories (see
    # BSTask.from_file, BuildStats.from_dir/from_json)
    pass
class BSTask(dict):
    """Buildstats of a single task execution.

    A dict with fixed top-level keys ('start_time', 'elapsed_time',
    'status', 'iostat', 'rusage', 'child_rusage') plus properties for
    values derived from them.
    """
    def __init__(self, *args, **kwargs):
        self['start_time'] = None
        self['elapsed_time'] = None
        self['status'] = None
        self['iostat'] = {}
        self['rusage'] = {}
        self['child_rusage'] = {}
        super(BSTask, self).__init__(*args, **kwargs)
    @property
    def cputime(self):
        """Sum of user and system time taken by the task"""
        rusage = self['rusage']['ru_stime'] + self['rusage']['ru_utime']
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return rusage + self['child_rusage']['ru_stime'] + self['child_rusage']['ru_utime']
        else:
            return rusage
    @property
    def walltime(self):
        """Elapsed wall clock time"""
        return self['elapsed_time']
    @property
    def read_bytes(self):
        """Bytes read from the block layer"""
        return self['iostat']['read_bytes']
    @property
    def write_bytes(self):
        """Bytes written to the block layer"""
        return self['iostat']['write_bytes']
    @property
    def read_ops(self):
        """Number of read operations on the block layer"""
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return self['rusage']['ru_inblock'] + self['child_rusage']['ru_inblock']
        else:
            return self['rusage']['ru_inblock']
    @property
    def write_ops(self):
        """Number of write operations on the block layer"""
        if self['child_rusage']:
            # Child rusage may have been optimized out
            return self['rusage']['ru_oublock'] + self['child_rusage']['ru_oublock']
        else:
            return self['rusage']['ru_oublock']
    @classmethod
    def from_file(cls, buildstat_file, fallback_end=0):
        """Read buildstat text file. fallback_end is an optional end time for tasks that are not recorded as finishing."""
        bs_task = cls()
        log.debug("Reading task buildstats from %s", buildstat_file)
        # NOTE(review): start_time is only bound when a 'Started' line is
        # present; a file without one raises NameError below rather than
        # BSError — confirm inputs always contain 'Started'
        end_time = None
        with open(buildstat_file) as fobj:
            for line in fobj.readlines():
                # Each line is a "Key: value" pair
                key, val = line.split(':', 1)
                val = val.strip()
                if key == 'Started':
                    start_time = float(val)
                    bs_task['start_time'] = start_time
                elif key == 'Ended':
                    end_time = float(val)
                elif key.startswith('IO '):
                    # e.g. "IO read_bytes: 1234"
                    split = key.split()
                    bs_task['iostat'][split[1]] = int(val)
                elif key.find('rusage') >= 0:
                    # Both "rusage ru_xxx" and "Child rusage ru_xxx" lines
                    split = key.split()
                    ru_key = split[-1]
                    if ru_key in ('ru_stime', 'ru_utime'):
                        val = float(val)
                    else:
                        val = int(val)
                    ru_type = 'rusage' if split[0] == 'rusage' else \
                                                'child_rusage'
                    bs_task[ru_type][ru_key] = val
                elif key == 'Status':
                    bs_task['status'] = val
        # If the task didn't finish, fill in the fallback end time if specified
        if start_time and not end_time and fallback_end:
            end_time = fallback_end
        if start_time and end_time:
            bs_task['elapsed_time'] = end_time - start_time
        else:
            raise BSError("{} looks like a invalid buildstats file".format(buildstat_file))
        return bs_task
class BSTaskAggregate(object):
    """Class representing multiple runs of the same task"""
    properties = ('cputime', 'walltime', 'read_bytes', 'write_bytes',
                  'read_ops', 'write_ops')

    def __init__(self, tasks=None):
        self._tasks = tasks or []
        self._properties = {}

    def __getattr__(self, name):
        if name not in self.properties:
            raise AttributeError("'BSTaskAggregate' has no attribute '{}'".format(name))
        # Calculate properties on demand only. We only provide mean
        # value, so far
        if name not in self._properties:
            self._properties[name] = mean([getattr(t, name) for t in self._tasks])
        return self._properties[name]

    def append(self, task):
        """Append new task"""
        assert isinstance(task, BSTask), "Type is '{}' instead of 'BSTask'".format(type(task))
        # Reset pre-calculated properties
        self._properties = {}
        self._tasks.append(task)
class BSRecipe(object):
    """Class representing buildstats of one recipe"""
    def __init__(self, name, epoch, version, revision):
        self.name = name
        self.epoch = epoch
        self.version = version
        self.revision = revision
        # Epoch is folded into the version string only when one is set
        if epoch is None:
            self.evr = "{}-{}".format(version, revision)
        else:
            self.evr = "{}_{}-{}".format(epoch, version, revision)
        self.tasks = {}

    def aggregate(self, bsrecipe):
        """Aggregate data of another recipe buildstats"""
        if self.nevr != bsrecipe.nevr:
            raise ValueError("Refusing to aggregate buildstats, recipe version "
                             "differs: {} vs. {}".format(self.nevr, bsrecipe.nevr))
        if set(self.tasks.keys()) != set(bsrecipe.tasks.keys()):
            raise ValueError("Refusing to aggregate buildstats, set of tasks "
                             "in {} differ".format(self.name))
        for taskname, taskdata in bsrecipe.tasks.items():
            current = self.tasks[taskname]
            if not isinstance(current, BSTaskAggregate):
                # Promote single-run data to an aggregate on first merge
                current = BSTaskAggregate([current])
                self.tasks[taskname] = current
            current.append(taskdata)

    @property
    def nevr(self):
        return self.name + '-' + self.evr
class BuildStats(dict):
    """Class representing buildstats of one build (recipe name -> BSRecipe)"""
    @property
    def num_tasks(self):
        """Get number of tasks"""
        num = 0
        for recipe in self.values():
            num += len(recipe.tasks)
        return num
    @classmethod
    def from_json(cls, bs_json):
        """Create new BuildStats object from JSON object"""
        buildstats = cls()
        for recipe in bs_json:
            # Only one version per recipe name is representable
            if recipe['name'] in buildstats:
                raise BSError("Cannot handle multiple versions of the same "
                              "package ({})".format(recipe['name']))
            bsrecipe = BSRecipe(recipe['name'], recipe['epoch'],
                                recipe['version'], recipe['revision'])
            for task, data in recipe['tasks'].items():
                bsrecipe.tasks[task] = BSTask(data)
            buildstats[recipe['name']] = bsrecipe
        return buildstats
    @staticmethod
    def from_file_json(path):
        """Load buildstats from a JSON file"""
        with open(path) as fobj:
            bs_json = json.load(fobj)
        return BuildStats.from_json(bs_json)
    @staticmethod
    def split_nevr(nevr):
        """Split name and version information from recipe "nevr" string"""
        n_e_v, revision = nevr.rsplit('-', 1)
        # Preferred form: name-[epoch_]version with version starting
        # with a digit
        match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[0-9]\S*)$',
                         n_e_v)
        if not match:
            # If we're not able to parse a version starting with a number, just
            # take the part after last dash
            match = re.match(r'^(?P<name>\S+)-((?P<epoch>[0-9]{1,5})_)?(?P<version>[^-]+)$',
                             n_e_v)
        name = match.group('name')
        version = match.group('version')
        epoch = match.group('epoch')
        return name, epoch, version, revision
    @staticmethod
    def parse_top_build_stats(path):
        """
        Parse the top-level build_stats file for build-wide start and duration.
        """
        start = elapsed = 0
        with open(path) as fobj:
            for line in fobj.readlines():
                key, val = line.split(':', 1)
                val = val.strip()
                if key == 'Build Started':
                    start = float(val)
                elif key == "Elapsed time":
                    # Keep only the leading number of the value
                    elapsed = float(val.split()[0])
        return start, elapsed
    @classmethod
    def from_dir(cls, path):
        """Load buildstats from a buildstats directory"""
        top_stats = os.path.join(path, 'build_stats')
        if not os.path.isfile(top_stats):
            raise BSError("{} does not look like a buildstats directory".format(path))
        log.debug("Reading buildstats directory %s", path)
        buildstats = cls()
        build_started, build_elapsed = buildstats.parse_top_build_stats(top_stats)
        # Used as the fallback end time for tasks without an 'Ended' record
        build_end = build_started + build_elapsed
        subdirs = os.listdir(path)
        for dirname in subdirs:
            recipe_dir = os.path.join(path, dirname)
            # Skip plain files and the pressure-regulation bookkeeping dir
            if dirname == "reduced_proc_pressure" or not os.path.isdir(recipe_dir):
                continue
            name, epoch, version, revision = cls.split_nevr(dirname)
            bsrecipe = BSRecipe(name, epoch, version, revision)
            for task in os.listdir(recipe_dir):
                bsrecipe.tasks[task] = BSTask.from_file(
                    os.path.join(recipe_dir, task), build_end)
            if name in buildstats:
                raise BSError("Cannot handle multiple versions of the same "
                              "package ({})".format(name))
            buildstats[name] = bsrecipe
        return buildstats
    def aggregate(self, buildstats):
        """Aggregate other buildstats into this"""
        if set(self.keys()) != set(buildstats.keys()):
            raise ValueError("Refusing to aggregate buildstats, set of "
                             "recipes is different: %s" % (set(self.keys()) ^ set(buildstats.keys())))
        for pkg, data in buildstats.items():
            self[pkg].aggregate(data)
def diff_buildstats(bs1, bs2, stat_attr, min_val=None, min_absdiff=None, only_tasks=None):
    """Compare the tasks of two buildstats.

    Args:
        bs1, bs2: buildstats mappings (pkg name -> recipe buildstats)
        stat_attr: task attribute to compare (e.g. 'cputime')
        min_val: if set, drop tasks whose larger value is below this
        min_absdiff: if set, drop tasks whose absolute change is below this
        only_tasks: optional list of task names to restrict the diff to

    Returns a list of TaskDiff tuples; '+'/'-' ops mark packages/tasks
    present on only one side.
    """
    # BUGFIX: default changed from a shared mutable list ([]) to None,
    # following Python best practice; behavior is unchanged
    if only_tasks is None:
        only_tasks = []
    tasks_diff = []
    pkgs = set(bs1.keys()).union(set(bs2.keys()))
    for pkg in pkgs:
        tasks1 = bs1[pkg].tasks if pkg in bs1 else {}
        tasks2 = bs2[pkg].tasks if pkg in bs2 else {}
        if only_tasks:
            tasks1 = {k: v for k, v in tasks1.items() if k in only_tasks}
            tasks2 = {k: v for k, v in tasks2.items() if k in only_tasks}
        if not tasks1:
            pkg_op = '+'
        elif not tasks2:
            pkg_op = '-'
        else:
            pkg_op = ' '
        for task in set(tasks1.keys()).union(set(tasks2.keys())):
            task_op = ' '
            if task in tasks1:
                val1 = getattr(bs1[pkg].tasks[task], stat_attr)
            else:
                task_op = '+'
                val1 = 0
            if task in tasks2:
                val2 = getattr(bs2[pkg].tasks[task], stat_attr)
            else:
                val2 = 0
                task_op = '-'
            # Relative change is undefined for a zero baseline
            if val1 == 0:
                reldiff = float('inf')
            else:
                reldiff = 100 * (val2 - val1) / val1
            if min_val and max(val1, val2) < min_val:
                log.debug("Filtering out %s:%s (%s)", pkg, task,
                          max(val1, val2))
                continue
            if min_absdiff and abs(val2 - val1) < min_absdiff:
                log.debug("Filtering out %s:%s (difference of %s)", pkg, task,
                          val2-val1)
                continue
            tasks_diff.append(TaskDiff(pkg, pkg_op, task, task_op, val1, val2,
                                       val2-val1, reldiff))
    return tasks_diff
class BSVerDiff(object):
    """Class representing recipe version differences between two buildstats"""
    def __init__(self, bs1, bs2):
        RecipeVerDiff = namedtuple('RecipeVerDiff', 'left right')

        names1 = set(bs1.keys())
        names2 = set(bs2.keys())
        # Recipes present on only one side
        self.new = {r: bs2[r] for r in sorted(names2 - names1)}
        self.dropped = {r: bs1[r] for r in sorted(names1 - names2)}
        self.echanged = {}
        self.vchanged = {}
        self.rchanged = {}
        self.unchanged = {}
        self.empty_diff = False

        shared = names2.intersection(names1)
        if shared:
            # Bucket shared recipes by the first differing version field
            for name in shared:
                pair = RecipeVerDiff(bs1[name], bs2[name])
                if bs1[name].epoch != bs2[name].epoch:
                    self.echanged[name] = pair
                elif bs1[name].version != bs2[name].version:
                    self.vchanged[name] = pair
                elif bs1[name].revision != bs2[name].revision:
                    self.rchanged[name] = pair
                else:
                    self.unchanged[name] = pair
            if len(names1) == len(names2) == len(self.unchanged):
                self.empty_diff = True

    def __bool__(self):
        # Truthy when there is any difference to report
        return not self.empty_diff

View File

@@ -0,0 +1,454 @@
# Yocto Project layer check tool
#
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
import os
import re
import subprocess
from enum import Enum
import bb.tinfoil
class LayerType(Enum):
    # Classification assigned by _detect_layer() below.
    BSP = 0                     # layer provides conf/machine/*.conf
    DISTRO = 1                  # layer provides conf/distro/*.conf
    SOFTWARE = 2                # neither machines nor distros, just recipes
    CORE = 3                    # the "meta" layer carrying the "core" collection
    ERROR_NO_LAYER_CONF = 98    # conf/layer.conf is missing
    ERROR_BSP_DISTRO = 99       # layer defines both machines and distros
def _get_configurations(path):
configs = []
for f in os.listdir(path):
file_path = os.path.join(path, f)
if os.path.isfile(file_path) and f.endswith('.conf'):
configs.append(f[:-5]) # strip .conf
return configs
def _get_layer_collections(layer_path, lconf=None, data=None):
    """Parse the layer's conf/layer.conf and return its BBFILE_COLLECTIONS
    as a dict:

        {collection: {'priority': ..., 'pattern': ..., 'depends': ..., 'compat': ...}}

    lconf can point at an alternative layer.conf file; data, when given, is an
    existing bitbake datastore that is copied instead of creating a fresh one.
    Raises RuntimeError when layer.conf fails to parse; bb.fatal()s on a bad
    LAYERDEPENDS version string.
    """
    import bb.parse
    import bb.data

    if lconf is None:
        lconf = os.path.join(layer_path, 'conf', 'layer.conf')

    if data is None:
        ldata = bb.data.init()
        bb.parse.init_parser(ldata)
    else:
        ldata = data.createCopy()
    # layer.conf files reference ${LAYERDIR}, so it must be set before parsing
    ldata.setVar('LAYERDIR', layer_path)
    try:
        ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
    except:
        raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
    ldata.expandVarref('LAYERDIR')

    collections = (ldata.getVar('BBFILE_COLLECTIONS') or '').split()
    if not collections:
        # Fall back to the directory name when no collection is declared
        name = os.path.basename(layer_path)
        collections = [name]

    collections = {c: {} for c in collections}
    for name in collections:
        priority = ldata.getVar('BBFILE_PRIORITY_%s' % name)
        pattern = ldata.getVar('BBFILE_PATTERN_%s' % name)
        depends = ldata.getVar('LAYERDEPENDS_%s' % name)
        compat = ldata.getVar('LAYERSERIES_COMPAT_%s' % name)
        try:
            depDict = bb.utils.explode_dep_versions2(depends or "")
        except bb.utils.VersionStringException as vse:
            bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (name, str(vse)))

        collections[name]['priority'] = priority
        collections[name]['pattern'] = pattern
        # Version constraints are dropped; only the dependency names are kept
        collections[name]['depends'] = ' '.join(depDict.keys())
        collections[name]['compat'] = compat

    return collections
def _detect_layer(layer_path):
    """
    Scans layer directory to detect what type of layer
    is BSP, Distro or Software.

    Returns a dictionary with layer name, type and path.
    """
    layer = {}
    layer_name = os.path.basename(layer_path)

    layer['name'] = layer_name
    layer['path'] = layer_path
    layer['conf'] = {}

    if not os.path.isfile(os.path.join(layer_path, 'conf', 'layer.conf')):
        layer['type'] = LayerType.ERROR_NO_LAYER_CONF
        return layer

    machine_conf = os.path.join(layer_path, 'conf', 'machine')
    distro_conf = os.path.join(layer_path, 'conf', 'distro')

    is_bsp = False
    is_distro = False
    # Initialize both lists up front: the CORE branch below reads them even
    # when the corresponding conf directory does not exist, which previously
    # raised NameError.
    machines = []
    distros = []

    if os.path.isdir(machine_conf):
        machines = _get_configurations(machine_conf)
        if machines:
            is_bsp = True

    if os.path.isdir(distro_conf):
        distros = _get_configurations(distro_conf)
        if distros:
            is_distro = True

    layer['collections'] = _get_layer_collections(layer['path'])

    if layer_name == "meta" and "core" in layer['collections']:
        layer['type'] = LayerType.CORE
        layer['conf']['machines'] = machines
        layer['conf']['distros'] = distros
    elif is_bsp and is_distro:
        layer['type'] = LayerType.ERROR_BSP_DISTRO
    elif is_bsp:
        layer['type'] = LayerType.BSP
        layer['conf']['machines'] = machines
    elif is_distro:
        layer['type'] = LayerType.DISTRO
        layer['conf']['distros'] = distros
    else:
        layer['type'] = LayerType.SOFTWARE

    return layer
def detect_layers(layer_directories, no_auto):
    """Detect layers under each directory in layer_directories.

    With no_auto set, only each directory itself is probed (it must contain
    a conf/ subdirectory); otherwise each tree is walked and every directory
    containing conf/ is probed as a potential layer.

    Returns a list of layer dictionaries as produced by _detect_layer().
    """
    layers = []

    for directory in layer_directories:
        directory = os.path.realpath(directory)
        if directory[-1] == '/':
            directory = directory[0:-1]

        if no_auto:
            conf_dir = os.path.join(directory, 'conf')
            if os.path.isdir(conf_dir):
                layer = _detect_layer(directory)
                if layer:
                    layers.append(layer)
        else:
            for root, dirs, files in os.walk(directory):
                # Any directory carrying a conf/ subdirectory may be a layer.
                # (The previously-computed basename of root was unused and has
                # been removed.)
                conf_dir = os.path.join(root, 'conf')
                if os.path.isdir(conf_dir):
                    layer = _detect_layer(root)
                    if layer:
                        layers.append(layer)

    return layers
def _find_layer(depend, layers):
for layer in layers:
if 'collections' not in layer:
continue
for collection in layer['collections']:
if depend == collection:
return layer
return None
def sanity_check_layers(layers, logger):
    """
    Check that we didn't find duplicate collection names, as the layer that will
    be used is non-deterministic. The precise check is duplicate collections
    with different patterns, as the same pattern being repeated won't cause
    problems.
    """
    import collections

    # collection name -> set of BBFILE_PATTERN values seen for it
    patterns_by_name = collections.defaultdict(set)
    for entry in layers:
        for cname, cdata in entry.get("collections", {}).items():
            patterns_by_name[cname].add(cdata["pattern"])

    ok = True
    for cname, patterns in patterns_by_name.items():
        if len(patterns) > 1:
            ok = False
            logger.error("Collection %s found multiple times: %s" % (cname, ", ".join(patterns)))
    return ok
def get_layer_dependencies(layer, layers, logger):
    """Resolve the transitive layer dependencies of 'layer' against the
    detected 'layers' list.

    Returns a list of layer dictionaries (an empty list is a valid result),
    or None when at least one dependency could not be found.
    """
    def recurse_dependencies(depends, layer, layers, logger, ret):
        # 'ret' is threaded through the recursion explicitly; previously it
        # had a mutable default argument ([]), which is an accident waiting
        # to happen — every call site already passes it.  It becomes None as
        # soon as a dependency is missing, but we keep recursing so that all
        # errors can be reported in one pass.
        logger.debug('Processing dependencies %s for layer %s.' % \
                     (depends, layer['name']))

        for depend in depends.split():
            # core (oe-core) is suppose to be provided
            if depend == 'core':
                continue

            layer_depend = _find_layer(depend, layers)
            if not layer_depend:
                logger.error('Layer %s depends on %s and isn\'t found.' % \
                             (layer['name'], depend))
                ret = None
                continue

            # We keep processing, even if ret is None, this allows us to report
            # multiple errors at once
            if ret is not None and layer_depend not in ret:
                ret.append(layer_depend)
            else:
                # we might have processed this dependency already, in which case
                # we should not do it again (avoid recursive loop)
                continue

            # Recursively process...
            if 'collections' not in layer_depend:
                continue

            for collection in layer_depend['collections']:
                collect_deps = layer_depend['collections'][collection]['depends']
                if not collect_deps:
                    continue
                ret = recurse_dependencies(collect_deps, layer_depend, layers, logger, ret)

        return ret

    layer_depends = []
    for collection in layer['collections']:
        depends = layer['collections'][collection]['depends']
        if not depends:
            continue

        layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)

    # Note: [] (empty) is allowed, None is not!
    return layer_depends
def add_layer_dependencies(bblayersconf, layer, layers, logger):
    """Append the dependencies of 'layer' to bblayersconf.

    Returns False when a dependency could not be resolved, True otherwise.
    """
    deps = get_layer_dependencies(layer, layers, logger)
    if deps is None:
        return False
    add_layers(bblayersconf, deps, logger)
    return True
def add_layers(bblayersconf, layers, logger):
    """Append the given layers to bblayersconf, skipping paths that
    bitbake-layers already reports as present."""
    # Don't add a layer that is already present.
    present = set()
    output = check_command('Getting existing layers failed.', 'bitbake-layers show-layers').decode('utf-8')
    for _lname, lpath, _pri in re.findall(r'^(\S+) +([^\n]*?) +(\d+)$', output, re.MULTILINE):
        present.add(lpath)

    with open(bblayersconf, 'a+') as conf:
        for entry in layers:
            logger.info('Adding layer %s' % entry['name'])
            name = entry['name']
            path = entry['path']
            if path in present:
                logger.info('%s is already in %s' % (name, bblayersconf))
            else:
                present.add(path)
                conf.write("\nBBLAYERS += \"%s\"\n" % path)
    return True
def check_bblayers(bblayersconf, layer_path, logger):
    '''
    If layer_path found in BBLAYERS return True
    '''
    import bb.parse
    import bb.data

    parsed = bb.parse.handle(bblayersconf, bb.data.init(), include=True)
    target = os.path.normpath(layer_path)
    for entry in (parsed.getVar('BBLAYERS') or '').split():
        if os.path.normpath(entry) == target:
            return True
    return False
def check_command(error_msg, cmd, cwd=None):
    '''
    Run a command under a shell, capture stdout and stderr in a single stream,
    throw an error when command returns non-zero exit code. Returns the output.
    '''
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, cwd=cwd)
    out, _ = proc.communicate()
    if proc.returncode:
        raise RuntimeError("%s\nCommand: %s\nOutput:\n%s" % (error_msg, cmd, out.decode('utf-8')))
    return out
def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
    """Run 'bitbake -S lockedsigs world' in builddir and return the tuple
    (sigs, tune2tasks) where sigs maps '<pn>:<taskname>' to its signature
    hash and tune2tasks maps a tune name (None before the first tune marker)
    to the list of tasks locked for that tune.

    failsafe adds -k and tolerates a failing bitbake run as long as
    locked-sigs.inc was still produced; machine and extravars are prepended
    to the bitbake command line.  Raises RuntimeError when no signatures
    could be loaded.
    """
    import re

    # some recipes needs to be excluded like meta-world-pkgdata
    # because a layer can add recipes to a world build so signature
    # will be change
    exclude_recipes = ('meta-world-pkgdata',)

    sigs = {}
    tune2tasks = {}

    # Force a deterministic signature handler and pass it through bitbake's
    # environment filter.
    cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
    if extravars:
        cmd += extravars
        cmd += ' '
    if machine:
        cmd += 'MACHINE=%s ' % machine
    cmd += 'bitbake '
    if failsafe:
        cmd += '-k '
    cmd += '-S lockedsigs world'

    # Remove any stale locked-sigs.inc so we only parse this run's output.
    sigs_file = os.path.join(builddir, 'locked-sigs.inc')
    if os.path.exists(sigs_file):
        os.unlink(sigs_file)
    try:
        check_command('Generating signatures failed. This might be due to some parse error and/or general layer incompatibilities.',
                      cmd, builddir)
    except RuntimeError as ex:
        if failsafe and os.path.exists(sigs_file):
            # Ignore the error here. Most likely some recipes active
            # in a world build lack some dependencies. There is a
            # separate test_machine_world_build which exposes the
            # failure.
            pass
        else:
            raise

    # '<pn>:<taskname>:<hash> \' lines carry signatures; SIGGEN_LOCKEDSIGS_t-*
    # assignments open a new tune section.
    sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
    tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
    current_tune = None
    with open(sigs_file, 'r') as f:
        for line in f.readlines():
            line = line.strip()
            t = tune_regex.search(line)
            if t:
                current_tune = t.group('tune')
            s = sig_regex.match(line)
            if s:
                exclude = False
                for er in exclude_recipes:
                    (recipe, task) = s.group('task').split(':')
                    if er == recipe:
                        exclude = True
                        break
                if exclude:
                    continue
                sigs[s.group('task')] = s.group('hash')
                tune2tasks.setdefault(current_tune, []).append(s.group('task'))

    if not sigs:
        raise RuntimeError('Can\'t load signatures from %s' % sigs_file)

    return (sigs, tune2tasks)
def get_depgraph(targets=['world'], failsafe=False):
    '''
    Returns the dependency graph for the given target(s).
    The dependency graph is taken directly from DepTreeEvent.
    '''
    # NOTE(review): 'targets' has a mutable default list; it is only read
    # here, never mutated, so the shared default is harmless.
    depgraph = None
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        # Only the events we act on below are delivered to wait_event().
        tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted'])
        if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'):
            raise RuntimeError('starting generateDepTreeEvent failed')
        while True:
            event = tinfoil.wait_event(timeout=1000)
            if event:
                if isinstance(event, bb.command.CommandFailed):
                    raise RuntimeError('Generating dependency information failed: %s' % event.error)
                elif isinstance(event, bb.command.CommandCompleted):
                    # Command finished; depgraph should have been captured.
                    break
                elif isinstance(event, bb.event.NoProvider):
                    if failsafe:
                        # The event is informational, we will get information about the
                        # remaining dependencies eventually and thus can ignore this
                        # here like we do in get_signatures(), if desired.
                        continue
                    if event._reasons:
                        raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons))
                    else:
                        raise RuntimeError('Nothing provides %s.' % (event._item))
                elif isinstance(event, bb.event.DepTreeGenerated):
                    depgraph = event._depgraph

    if depgraph is None:
        raise RuntimeError('Could not retrieve the depgraph.')
    return depgraph
def compare_signatures(old_sigs, curr_sigs):
    '''
    Compares the result of two get_signatures() calls. Returns None if no
    problems found, otherwise a string that can be used as additional
    explanation in self.fail().
    '''
    # task -> (old signature, new signature); only tasks present in both runs
    # with differing hashes count as changed.
    sig_diff = {}
    for task in old_sigs:
        if task in curr_sigs and \
           old_sigs[task] != curr_sigs[task]:
            sig_diff[task] = (old_sigs[task], curr_sigs[task])
    if not sig_diff:
        return None

    # Beware, depgraph uses task=<pn>.<taskname> whereas get_signatures()
    # uses <pn>:<taskname>. Need to convert sometimes. The output follows
    # the convention from get_signatures() because that seems closer to
    # normal bitbake output.
    def sig2graph(task):
        pn, taskname = task.rsplit(':', 1)
        return pn + '.' + taskname

    def graph2sig(task):
        pn, taskname = task.rsplit('.', 1)
        return pn + ':' + taskname

    depgraph = get_depgraph(failsafe=True)
    depends = depgraph['tdepends']

    # If a task A has a changed signature, but none of its
    # dependencies, then we need to report it because it is
    # the one which introduces a change. Any task depending on
    # A (directly or indirectly) will also have a changed
    # signature, but we don't need to report it. It might have
    # its own changes, which will become apparent once the
    # issues that we do report are fixed and the test gets run
    # again.
    sig_diff_filtered = []
    for task, (old_sig, new_sig) in sig_diff.items():
        deps_tainted = False
        for dep in depends.get(sig2graph(task), ()):
            if graph2sig(dep) in sig_diff:
                deps_tainted = True
                break
        if not deps_tainted:
            sig_diff_filtered.append((task, old_sig, new_sig))

    msg = []
    msg.append('%d signatures changed, initial differences (first hash before, second after):' %
               len(sig_diff))
    for diff in sorted(sig_diff_filtered):
        recipe, taskname = diff[0].rsplit(':', 1)
        cmd = 'bitbake-diffsigs --task %s %s --signature %s %s' % \
              (recipe, taskname, diff[1], diff[2])
        msg.append(' %s: %s -> %s' % diff)
        msg.append(' %s' % cmd)
        # Run bitbake-diffsigs for the pair; a failure is folded into the
        # report instead of aborting the comparison.
        try:
            output = check_command('Determining signature difference failed.',
                                   cmd).decode('utf-8')
        except RuntimeError as error:
            output = str(error)
        if output:
            msg.extend([' ' + line for line in output.splitlines()])
            msg.append('')
    return '\n'.join(msg)

View File

@@ -0,0 +1,9 @@
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
from oeqa.core.case import OETestCase

class OECheckLayerTestCase(OETestCase):
    """Common base class for yocto-check-layer test cases.

    Carries no behavior of its own; subclasses read the layer under test
    from the shared test context (self.tc.layer).
    """
    pass

View File

@@ -0,0 +1,206 @@
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
import unittest
from checklayer import LayerType, get_signatures, check_command, get_depgraph
from checklayer.case import OECheckLayerTestCase
class BSPCheckLayer(OECheckLayerTestCase):
    """Checks that are only run against BSP (and Core) layers: machine
    definitions and machine-independence of shared task signatures."""

    @classmethod
    def setUpClass(self):
        # Skip the whole class unless the layer under test is a BSP layer.
        if self.tc.layer['type'] not in (LayerType.BSP, LayerType.CORE):
            raise unittest.SkipTest("BSPCheckLayer: Layer %s isn't BSP one." %\
                self.tc.layer['name'])

    def test_bsp_defines_machines(self):
        # A BSP layer must ship at least one conf/machine/*.conf file.
        self.assertTrue(self.tc.layer['conf']['machines'],
                "Layer is BSP but doesn't defines machines.")

    def test_bsp_no_set_machine(self):
        from oeqa.utils.commands import get_bb_var
        # Merely adding the layer must not change the configured MACHINE.
        machine = get_bb_var('MACHINE')
        self.assertEqual(self.td['bbvars']['MACHINE'], machine,
                msg="Layer %s modified machine %s -> %s" % \
                    (self.tc.layer['name'], self.td['bbvars']['MACHINE'], machine))

    def test_machine_world(self):
        '''
        "bitbake world" is expected to work regardless which machine is selected.
        BSP layers sometimes break that by enabling a recipe for a certain machine
        without checking whether that recipe actually can be built in the current
        distro configuration (for example, OpenGL might not enabled).

        This test iterates over all machines. It would be nicer to instantiate
        it once per machine. It merely checks for errors during parse
        time. It does not actually attempt to build anything.
        '''
        if not self.td['machines']:
            self.skipTest('No machines set with --machines.')
        msg = []
        for machine in self.td['machines']:
            # In contrast to test_machine_signatures() below, errors are fatal here.
            try:
                get_signatures(self.td['builddir'], failsafe=False, machine=machine)
            except RuntimeError as ex:
                msg.append(str(ex))
        if msg:
            msg.insert(0, 'The following machines broke a world build:')
            self.fail('\n'.join(msg))

    def test_machine_signatures(self):
        '''
        Selecting a machine may only affect the signature of tasks that are specific
        to that machine. In other words, when MACHINE=A and MACHINE=B share a recipe
        foo and the output of foo, then both machine configurations must build foo
        in exactly the same way. Otherwise it is not possible to use both machines
        in the same distribution.

        This criteria can only be tested by testing different machines in combination,
        i.e. one main layer, potentially several additional BSP layers and an explicit
        choice of machines:
        yocto-check-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale
        '''
        if not self.td['machines']:
            self.skipTest('No machines set with --machines.')

        # Collect signatures for all machines that we are testing
        # and merge that into a hash:
        # tune -> task -> signature -> list of machines with that combination
        #
        # It is an error if any tune/task pair has more than one signature,
        # because that implies that the machines that caused those different
        # signatures do not agree on how to execute the task.
        tunes = {}
        # Preserve ordering of machines as chosen by the user.
        for machine in self.td['machines']:
            curr_sigs, tune2tasks = get_signatures(self.td['builddir'], failsafe=True, machine=machine)
            # Invert the tune -> [tasks] mapping.
            tasks2tune = {}
            for tune, tasks in tune2tasks.items():
                for task in tasks:
                    tasks2tune[task] = tune
            for task, sighash in curr_sigs.items():
                tunes.setdefault(tasks2tune[task], {}).setdefault(task, {}).setdefault(sighash, []).append(machine)

        msg = []
        pruned = 0
        last_line_key = None
        # do_fetch, do_unpack, ..., do_build
        taskname_list = []
        if tunes:
            # The output below is most useful when we start with tasks that are at
            # the bottom of the dependency chain, i.e. those that run first. If
            # those tasks differ, the rest also does.
            #
            # To get an ordering of tasks, we do a topological sort of the entire
            # depgraph for the base configuration, then on-the-fly flatten that list by stripping
            # out the recipe names and removing duplicates. The base configuration
            # is not necessarily representative, but should be close enough. Tasks
            # that were not encountered get a default priority.
            depgraph = get_depgraph()
            depends = depgraph['tdepends']
            # Depth-first search colors: WHITE = unvisited, GRAY = on the
            # current DFS path, BLACK = finished.
            WHITE = 1
            GRAY = 2
            BLACK = 3
            color = {}
            found = set()
            def visit(task):
                # Post-order DFS: a task's name is appended only after all of
                # its dependencies, yielding a topological task-name order.
                color[task] = GRAY
                for dep in depends.get(task, ()):
                    if color.setdefault(dep, WHITE) == WHITE:
                        visit(dep)
                color[task] = BLACK
                pn, taskname = task.rsplit('.', 1)
                if taskname not in found:
                    taskname_list.append(taskname)
                    found.add(taskname)
            for task in depends.keys():
                if color.setdefault(task, WHITE) == WHITE:
                    visit(task)

        # Tasks not present in the depgraph sort last (default priority).
        taskname_order = dict([(task, index) for index, task in enumerate(taskname_list) ])
        def task_key(task):
            pn, taskname = task.rsplit(':', 1)
            return (pn, taskname_order.get(taskname, len(taskname_list)), taskname)

        for tune in sorted(tunes.keys()):
            tasks = tunes[tune]
            # As for test_signatures it would be nicer to sort tasks
            # by dependencies here, but that is harder because we have
            # to report on tasks from different machines, which might
            # have different dependencies. We resort to pruning the
            # output by reporting only one task per recipe if the set
            # of machines matches.
            #
            # "bitbake-diffsigs -t -s" is intelligent enough to print
            # diffs recursively, so often it does not matter that much
            # if we don't pick the underlying difference
            # here. However, sometimes recursion fails
            # (https://bugzilla.yoctoproject.org/show_bug.cgi?id=6428).
            #
            # To mitigate that a bit, we use a hard-coded ordering of
            # tasks that represents how they normally run and prefer
            # to print the ones that run first.
            for task in sorted(tasks.keys(), key=task_key):
                signatures = tasks[task]
                # do_build can be ignored: it is know to have
                # different signatures in some cases, for example in
                # the allarch ca-certificates due to RDEPENDS=openssl.
                # That particular dependency is marked via
                # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up
                # in the sstate signature hash because filtering it
                # out would be hard and running do_build multiple
                # times doesn't really matter.
                if len(signatures.keys()) > 1 and \
                   not task.endswith(':do_build'):
                    # Error!
                    #
                    # Sort signatures by machines, because the hex values don't mean anything.
                    # => all-arch adwaita-icon-theme:do_build: 1234... (beaglebone, qemux86) != abcdf... (qemux86-64)
                    #
                    # Skip the line if it is covered already by the predecessor (same pn, same sets of machines).
                    pn, taskname = task.rsplit(':', 1)
                    next_line_key = (pn, sorted(signatures.values()))
                    if next_line_key != last_line_key:
                        line = ' %s %s: ' % (tune, task)
                        line += ' != '.join(['%s (%s)' % (signature, ', '.join([m for m in signatures[signature]])) for
                                             signature in sorted(signatures.keys(), key=lambda s: signatures[s])])
                        last_line_key = next_line_key
                        msg.append(line)
                        # Randomly pick two mismatched signatures and remember how to invoke
                        # bitbake-diffsigs for them.
                        iterator = iter(signatures.items())
                        a = next(iterator)
                        b = next(iterator)
                        diffsig_machines = '(%s) != (%s)' % (', '.join(a[1]), ', '.join(b[1]))
                        diffsig_params = '-t %s %s -s %s %s' % (pn, taskname, a[0], b[0])
                    else:
                        pruned += 1

        if msg:
            msg.insert(0, 'The machines have conflicting signatures for some shared tasks:')
            if pruned > 0:
                msg.append('')
                msg.append('%d tasks where not listed because some other task of the recipe already differed.' % pruned)
                msg.append('It is likely that differences from different recipes also have the same root cause.')
            msg.append('')
            # Explain how to investigate...
            msg.append('To investigate, run bitbake-diffsigs -t recipename taskname -s fromsig tosig.')
            cmd = 'bitbake-diffsigs %s' % diffsig_params
            msg.append('Example: %s in the last line' % diffsig_machines)
            msg.append('Command: %s' % cmd)
            # ... and actually do it automatically for that example, but without aborting
            # when that fails.
            try:
                output = check_command('Comparing signatures failed.', cmd).decode('utf-8')
            except RuntimeError as ex:
                output = str(ex)
            msg.extend([' ' + line for line in output.splitlines()])
            self.fail('\n'.join(msg))

View File

@@ -0,0 +1,104 @@
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
import glob
import os
import unittest
import re
from checklayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures
from checklayer.case import OECheckLayerTestCase
class CommonCheckLayer(OECheckLayerTestCase):
    """Checks that apply to every layer type (README, parseability,
    world-buildability and signature stability)."""

    def test_readme(self):
        if self.tc.layer['type'] == LayerType.CORE:
            raise unittest.SkipTest("Core layer's README is top level")

        # The top-level README file may have a suffix (like README.rst or README.txt).
        readme_files = glob.glob(os.path.join(self.tc.layer['path'], '[Rr][Ee][Aa][Dd][Mm][Ee]*'))
        self.assertTrue(len(readme_files) > 0,
                msg="Layer doesn't contain a README file.")

        # There might be more than one file matching the file pattern above
        # (for example, README.rst and README-COPYING.rst). The one with the shortest
        # name is considered the "main" one.
        readme_file = sorted(readme_files)[0]
        data = ''
        with open(readme_file, 'r') as f:
            data = f.read()
        self.assertTrue(data,
                msg="Layer contains a README file but it is empty.")

        # If a layer's README references another README, then the checks below are not valid
        if re.search('README', data, re.IGNORECASE):
            return

        # Every self-contained README must name a maintainer and explain how
        # to submit patches, and contain at least one email address.
        self.assertIn('maintainer', data.lower())
        self.assertIn('patch', data.lower())
        # Check that there is an email address in the README
        email_regex = re.compile(r"[^@]+@[^@]+")
        self.assertTrue(email_regex.match(data))

    def test_parse(self):
        check_command('Layer %s failed to parse.' % self.tc.layer['name'],
                      'bitbake -p')

    def test_show_environment(self):
        check_command('Layer %s failed to show environment.' % self.tc.layer['name'],
                      'bitbake -e')

    def test_world(self):
        '''
        "bitbake world" is expected to work. test_signatures does not cover that
        because it is more lenient and ignores recipes in a world build that
        are not actually buildable, so here we fail when "bitbake -S none world"
        fails.
        '''
        get_signatures(self.td['builddir'], failsafe=False)

    def test_world_inherit_class(self):
        '''
        This also does "bitbake -S none world" along with inheriting "yocto-check-layer"
        class, which can do additional per-recipe test cases.
        '''
        msg = []
        try:
            get_signatures(self.td['builddir'], failsafe=False, machine=None, extravars='BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS INHERIT" INHERIT="yocto-check-layer"')
        except RuntimeError as ex:
            msg.append(str(ex))
        if msg:
            msg.insert(0, 'Layer %s failed additional checks from yocto-check-layer.bbclass\nSee below log for specific recipe parsing errors:\n' % \
                self.tc.layer['name'])
            self.fail('\n'.join(msg))

    @unittest.expectedFailure
    def test_patches_upstream_status(self):
        import sys
        # oe.qa lives in meta/lib relative to this script's location
        sys.path.append(os.path.join(sys.path[0], '../../../../meta/lib/'))
        import oe.qa
        patches = []
        for dirpath, dirs, files in os.walk(self.tc.layer['path']):
            for filename in files:
                if filename.endswith(".patch"):
                    ppath = os.path.join(dirpath, filename)
                    if oe.qa.check_upstream_status(ppath):
                        patches.append(ppath)
        self.assertEqual(len(patches), 0 , \
            msg="Found following patches with malformed or missing upstream status:\n%s" % '\n'.join([str(patch) for patch in patches]))

    def test_signatures(self):
        if self.tc.layer['type'] == LayerType.SOFTWARE and \
           not self.tc.test_software_layer_signatures:
            raise unittest.SkipTest("Not testing for signature changes in a software layer %s." \
                     % self.tc.layer['name'])

        curr_sigs, _ = get_signatures(self.td['builddir'], failsafe=True)
        msg = compare_signatures(self.td['sigs'], curr_sigs)
        if msg is not None:
            self.fail('Adding layer %s changed signatures.\n%s' % (self.tc.layer['name'], msg))

    def test_layerseries_compat(self):
        for collection_name, collection_data in self.tc.layer['collections'].items():
            self.assertTrue(collection_data['compat'], "Collection %s from layer %s does not set compatible oe-core versions via LAYERSERIES_COMPAT_collection." \
                % (collection_name, self.tc.layer['name']))

View File

@@ -0,0 +1,28 @@
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
import unittest
from checklayer import LayerType
from checklayer.case import OECheckLayerTestCase
class DistroCheckLayer(OECheckLayerTestCase):
    """Checks that are only run against Distro (and Core) layers."""

    @classmethod
    def setUpClass(self):
        # Skip the whole class unless the layer under test is a distro layer.
        if self.tc.layer['type'] not in (LayerType.DISTRO, LayerType.CORE):
            raise unittest.SkipTest("DistroCheckLayer: Layer %s isn't Distro one." %\
                self.tc.layer['name'])

    def test_distro_defines_distros(self):
        # A distro layer must ship at least one conf/distro/*.conf file.
        # The failure message previously read "Layer is BSP but doesn't
        # defines machines." — copy-pasted from the BSP checker.
        self.assertTrue(self.tc.layer['conf']['distros'],
                "Layer is Distro but doesn't define distros.")

    def test_distro_no_set_distros(self):
        from oeqa.utils.commands import get_bb_var
        # Merely adding the layer must not change the configured DISTRO.
        distro = get_bb_var('DISTRO')
        self.assertEqual(self.td['bbvars']['DISTRO'], distro,
                msg="Layer %s modified distro %s -> %s" % \
                    (self.tc.layer['name'], self.td['bbvars']['DISTRO'], distro))

View File

@@ -0,0 +1,17 @@
# Copyright (C) 2017 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
import os
import sys
import glob
import re
from oeqa.core.context import OETestContext
class CheckLayerTestContext(OETestContext):
    """Test context for yocto-check-layer runs.

    Carries the layer under test plus whether signature checks should also
    apply to software layers; check-layer test cases read both via self.tc.
    """
    def __init__(self, td=None, logger=None, layer=None, test_software_layer_signatures=True):
        super(CheckLayerTestContext, self).__init__(td, logger)
        # Layer dictionary under test (name/path/type/collections/...)
        self.layer = layer
        # When False, signature checks are skipped for software layers
        self.test_software_layer_signatures = test_software_layer_signatures

View File

@@ -0,0 +1,404 @@
#!/usr/bin/env python3
# Development tool - utility functions for plugins
#
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool plugins module"""
import os
import sys
import subprocess
import logging
import re
import codecs
logger = logging.getLogger('devtool')
class DevtoolError(Exception):
    """Exception used for errors reported by devtool.

    exitcode carries the process exit status the caller should use when
    this exception reaches the top level (defaults to 1).
    """

    def __init__(self, message, exitcode=1):
        super().__init__(message)
        self.exitcode = exitcode
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context.

    init_path, when set, is the OE init script that is sourced (with builddir
    as its argument) before cmd runs.  With watch=True the combined output is
    streamed to stdout via exec_watch(); otherwise bb.process.run() captures
    it.  Remaining keyword options go to the underlying runner; cwd defaults
    to builddir.
    """
    import bb
    if not 'cwd' in options:
        options["cwd"] = builddir
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        if not 'executable' in options:
            options['executable'] = 'bash'
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if watch:
        if sys.stdout.isatty():
            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
            cmd = 'script -e -q -c "%s" /dev/null' % cmd
        return exec_watch('%s%s' % (init_prefix, cmd), **options)
    else:
        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
def exec_watch(cmd, **options):
    """Run program with stdout shown on sys.stdout.

    stderr is folded into stdout; the combined output is echoed as it
    arrives and also accumulated, then returned as (buf, None).  Raises
    bb.process.ExecutionError on a non-zero exit status.
    """
    import bb
    if isinstance(cmd, str) and not "shell" in options:
        options["shell"] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    reader = codecs.getreader('utf-8')(process.stdout)
    buf = ''
    while True:
        # Read one character at a time so output appears promptly
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            buf += out
        elif out == '' and process.poll() != None:
            # EOF reached and the process has exited
            break

    if process.returncode != 0:
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
    # Grab the command and check it actually exists
    fakerootcmd = d.getVar('FAKEROOTCMD')
    fakerootenv = d.getVar('FAKEROOTENV')
    # Unpack the keyword arguments and propagate the exit status.  Previously
    # the kwargs dict was passed as a fourth positional argument, which made
    # exec_fakeroot_no_d() raise TypeError, and the return value was dropped.
    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
    """Run cmd prefixed by the given pseudo executable.

    fakerootenv is a whitespace-separated list of VAR=value settings merged
    into a copy of the current environment.  Returns the command's exit
    status, or 2 when the pseudo executable is missing.  Extra keyword
    arguments are forwarded to subprocess.call().
    """
    if not os.path.exists(fakerootcmd):
        # Supply the missing path lazily; the original format string had a
        # %s placeholder but never passed the argument.
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment
    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake.

    Temporarily chdirs into basepath (if given) so bitbake picks up the
    right build directory, locates the bitbake library via scriptpath, and
    returns a prepared bb.tinfoil.Tinfoil instance.  The caller owns the
    instance and must call shutdown() on it.  Raises DevtoolError when the
    bitbake UI fails to start; exits the process when bitbake cannot be
    found at all.
    """
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Any other failure during prepare(): shut the server down
            # before propagating so it does not linger.
            tinfoil.shutdown()
            raise
    finally:
        # Always restore the caller's working directory
        os.chdir(orig_cwd)

    return tinfoil
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe.

    Returns the parsed datastore, or None (after logging the error) when the
    recipe cannot be found or fails to parse.  With appends enabled, bbappend
    files are applied too; filter_workspace drops appends coming from the
    devtool workspace itself.
    """
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None

    append_files = None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [p for p in append_files
                            if not p.startswith(config.workspace_path)]

    try:
        return tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.

    Returns the workspace recipe name that provides pn: pn itself, or (with
    bbclassextend) a workspace recipe whose BBCLASSEXTEND targets include pn.
    Raises DevtoolError when no recipe matches or, with checksrc, when the
    recipe's source tree is missing or empty.
    """
    workspacepn = pn

    for recipe, value in workspace.items():
        if recipe == pn:
            break
        if bbclassextend:
            recipefile = value['recipefile']
            if recipefile:
                targets = get_bbclassextend_targets(recipefile, recipe)
                if pn in targets:
                    workspacepn = recipe
                    break
    else:
        # for/else: the loop finished without break, so nothing matched
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not.

    Returns True when the source tree doubles as the build directory
    (B == S), False when a separate build directory should be used.
    """
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        return False
    if same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
        return True
    if bb.data.inherits_class('autotools-brokensep', d):
        logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
        return True
    if os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
        logger.info('Using source tree as build directory since that would be the default for this recipe')
        return True
    return False
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree

    Initialises a repository in repodir (if needed), makes an initial commit of
    the unpacked sources, creates the devtool development branch and base tag,
    and registers any nested git repositories as submodules so patches can
    later be extracted from them.
    """
    import bb.process
    import oe.patch
    if not os.path.exists(os.path.join(repodir, '.git')):
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
        # -f: sources may contain paths a global gitignore would otherwise skip
        bb.process.run('git add -f -A .', cwd=repodir)
        commit_cmd = ['git']
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged at all - still create a commit so the branch/tag exist
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
    gitinfodir = os.path.join(repodir, '.git', 'info')
    try:
        os.mkdir(gitinfodir)
    except FileExistsError:
        pass
    excludes = []
    excludefile = os.path.join(gitinfodir, 'exclude')
    try:
        with open(excludefile, 'r') as f:
            excludes = f.readlines()
    except FileNotFoundError:
        pass
    if 'singletask.lock\n' not in excludes:
        excludes.append('singletask.lock\n')
    with open(excludefile, 'w') as f:
        for line in excludes:
            f.write(line)

    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    # -f: basetag may already exist from a previous devtool run
    bb.process.run('git tag -f %s' % basetag, cwd=repodir)

    # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
    found = False
    for line in stdout.splitlines():
        # Untracked directories show up in porcelain output with a trailing '/'
        if line.endswith("/"):
            new_dir = line.split()[1]
            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
                if ".git" in dirs + files:
                    (stdout, _) = bb.process.run('git remote', cwd=root)
                    remote = stdout.splitlines()[0]
                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
                    remote_url = stdout.splitlines()[0]
                    # NOTE(review): looks like leftover debug output at error level - confirm intent
                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
                    found = True
                if found:
                    oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
                    found = False
    if os.path.exists(os.path.join(repodir, '.gitmodules')):
        # Propagate the base tag into all (possibly nested) submodules
        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.

    With wildcard=True the version part of the file name is replaced
    by '%' so the append matches any version of the recipe.
    """
    base = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Replace everything from the first underscore (the version) with '%'
        base = re.sub(r'_.*', '_%', base)
    return os.path.join(config.workspace_path, 'appends', '%s.bbappend' % base)
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.

    Reads only the textual BBCLASSEXTEND assignment from the recipe file
    (no full parse), so it is fast but does not evaluate overrides.
    """
    import bb.utils

    captured = {}
    def _capture(varname, origvalue, op, newlines):
        captured[varname] = origvalue
        return origvalue, None, 0, True
    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], _capture)

    targets = []
    for variant in captured.get('BBCLASSEXTEND', '').split():
        # nativesdk is a prefix; native/cross/crosssdk are suffixes
        if variant == 'nativesdk':
            targets.append('%s-%s' % (variant, pn))
        elif variant in ('native', 'cross', 'crosssdk'):
            targets.append('%s-%s' % (pn, variant))
    return targets
def replace_from_file(path, old, new):
    """Replace strings on a file

    Rewrites the file at *path* replacing every occurrence of *old* with
    *new* on each line. Silently returns when *old* is None or the file
    does not exist. The rewritten file is right-stripped and terminated
    with a single newline (same as before).
    """
    # In case old is None, return immediately
    if old is None:
        return
    try:
        with open(path) as f:
            rdata = f.read()
    except FileNotFoundError:
        # if file does not exist, just quit; other IO errors still propagate
        return

    # NOTE: the previous version wrapped str.replace() in try/except ValueError,
    # but str.replace never raises ValueError - that handler was dead code.
    new_contents = [line.replace(old, new) for line in rdata.splitlines()]
    with open(path, "w") as f:
        f.write("\n".join(new_contents).rstrip() + "\n")
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
    workspace plus any extras we want unlocked.

    basepath: build directory root (conf/unlocked-sigs.inc lives under it)
    fixed_setup: True only inside the eSDK; otherwise this is a no-op
    extra: optional additional recipe names to unlock
    """
    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        # Capture the existing value without modifying the file
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
def check_prerelease_version(ver, operation):
    """Warn the user when *ver* looks like a pre-release version.

    Pre-release version strings (containing 'pre' or 'rc') can make the
    version appear to go backwards on the final upgrade, so advise the
    <current>+<pre-release> convention. *operation* names the devtool
    operation in progress (used in the message text).
    """
    if 'pre' in ver or 'rc' in ver:
        # Fixed typo in the user-facing message: "recommmended" -> "recommended"
        logger.warning('Version "%s" looks like a pre-release version. '
                       'If that is the case, in order to ensure that the '
                       'version doesn\'t appear to go backwards when you '
                       'later upgrade to the final release version, it is '
                       'recommended that instead you use '
                       '<current version>+<pre-release version> e.g. if '
                       'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
                       'If you prefer not to reset and re-try, you can change '
                       'the version after %s succeeds using "devtool rename" '
                       'with -V/--version.' % (ver, operation))
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not

    Returns the 'git status --porcelain' output: an empty string when the
    repository is clean, non-empty (truthy) when there are local changes.
    """
    status_output, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return status_output
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase

    Raises DevtoolError when an in-progress rebase or am/apply is detected;
    repositories whose toplevel is listed in ignoredirs are skipped.
    """
    stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    toplevel = stdout.strip()
    if ignoredirs and toplevel in ignoredirs:
        return
    gitdir = os.path.join(toplevel, '.git')
    # git leaves these state directories behind while an operation is in progress
    checks = (
        ('rebase-merge',
         "Source tree %s appears to be in the middle of a rebase - please resolve this first" % srctree),
        ('rebase-apply',
         "Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first" % srctree),
    )
    for statedir, message in checks:
        if os.path.exists(os.path.join(gitdir, statedir)):
            raise DevtoolError(message)

View File

@@ -0,0 +1,92 @@
# Development tool - build command plugin
#
# Copyright (C) 2014-2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool build plugin"""
import os
import bb
import logging
import argparse
import tempfile
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
from devtool import parse_recipe
logger = logging.getLogger('devtool')
def _set_file_values(fn, values):
    """Set variable assignments in file *fn* to the given *values*.

    Existing assignments are rewritten in place; variables not present in
    the file are appended as new 'VAR = "value"' lines. Returns True when
    the file was modified.
    """
    remaining = list(values.keys())

    def varfunc(varname, origvalue, op, newlines):
        newvalue = values.get(varname, origvalue)
        # varfunc is invoked once per assignment; guard against a variable
        # being set more than once in the file (remove() would raise
        # ValueError on the second occurrence)
        if varname in remaining:
            remaining.remove(varname)
        return (newvalue, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)

    # Append any variables that were not already present in the file
    for item in remaining:
        updated = True
        newlines.append('%s = "%s"' % (item, values[item]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)

    return updated
def _get_build_tasks(config):
tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
return ['do_%s' % task.strip() for task in tasks]
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand

    Builds (or with --clean, cleans) the workspace recipe given in
    args.recipename via bitbake. Returns 0 on success, or the bitbake
    exit code / 1 on failure.
    """
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            # Error already reported by parse_recipe
            return 1
        # Recipes with a do_deploy task need it run as well to produce output
        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # -native recipes have no packaging tasks
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        # Restore the append file even if the build failed
        if args.disable_parallel_make:
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'build' subcommand; argument registration order determines
    the order shown in --help output.
    """
    parser_build = subparsers.add_parser('build', help='Build a recipe',
                                         description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)),
                                         group='working', order=50)
    parser_build.add_argument('recipename', help='Recipe to build')
    parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
    parser_build.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
    parser_build.set_defaults(func=build)

View File

@@ -0,0 +1,164 @@
# Development tool - build-image plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool plugin containing the build-image subcommand."""
import os
import errno
import logging
from bb.process import ExecutionError
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
logger = logging.getLogger('devtool')
class TargetNotImageError(Exception):
    """Raised when the requested build target is not an image recipe."""
    pass
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    Only target (non-native/cross) recipes that produce a package with the
    same name as the recipe are included.
    """
    result = []
    for recipe in workspace:
        data = parse_recipe(config, tinfoil, recipe, True)
        # Skip native/cross variants - only target recipes go into an image
        if 'class-target' in data.getVar('OVERRIDES').split(':'):
            if recipe in data.getVar('PACKAGES').split():
                result.append(recipe)
            else:
                logger.warning("Skipping recipe %s as it doesn't produce a "
                               "package with the same name", recipe)
    return result
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand.

    Determines the image to build (explicit argument or the first
    configured SDK target) and delegates to build_image_task().
    """
    image = args.imagename
    auto_image = False
    if not image:
        # Fall back to the first sdk_targets entry from the configuration
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    try:
        if args.add_packages:
            add_packages = args.add_packages.split(',')
        else:
            add_packages = None
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        # The message depends on whether the image name was user-supplied
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build *image* (or run *task* on it), temporarily extending it with
    packages from workspace recipes via a generated bbappend.

    Returns (exitcode, outputdir); outputdir is None on failure.
    Raises TargetNotImageError when *image* does not inherit image.bbclass.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Missing file is fine; anything else is a real error
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            if packages or extra_append:
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            # SDK tasks deploy to a different output directory than images
            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut down tinfoil before launching bitbake, clearing the local
            # so the outer finally block doesn't shut it down twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always remove the temporary append, success or failure
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin

    Adds the 'build-image' subcommand with an optional image name and an
    optional explicit package list.
    """
    parser = subparsers.add_parser('build-image',
                                   help='Build image including workspace recipe packages',
                                   description='Builds an image, extending it to include '
                                               'packages from recipes in the workspace',
                                   group='testbuild', order=-10)
    parser.add_argument('imagename', help='Image recipe to build', nargs='?')
    parser.add_argument('-p', '--add-packages', help='Instead of adding packages for the '
                        'entire workspace, specify packages to be added to the image '
                        '(separate multiple packages by commas)',
                        metavar='PACKAGES')
    parser.set_defaults(func=build_image)

View File

@@ -0,0 +1,55 @@
# Development tool - build-sdk command plugin
#
# Copyright (C) 2015-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import subprocess
import logging
import glob
import shutil
import errno
import sys
import tempfile
from devtool import DevtoolError
from devtool import build_image
logger = logging.getLogger('devtool')
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command

    Builds a derivative extensible SDK by running populate_sdk_ext on the
    first configured SDK target image, marking it via SDK_DERIVATIVE.
    """
    sdk_targets = config.get('SDK', 'sdk_targets', '').split()
    if sdk_targets:
        image = sdk_targets[0]
    else:
        raise DevtoolError('Unable to determine image to build SDK for')

    # Mark the resulting SDK as derived from this one
    extra_append = ['SDK_DERIVATIVE = "1"']
    try:
        result, outputdir = build_image.build_image_task(config,
                                                         basepath,
                                                         workspace,
                                                         image,
                                                         task='populate_sdk_ext',
                                                         extra_append=extra_append)
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
def register_commands(subparsers, context):
    """Register devtool subcommands

    The build-sdk command only makes sense inside an extensible SDK
    (fixed_setup), so it is registered conditionally.
    """
    if context.fixed_setup:
        parser_build_sdk = subparsers.add_parser('build-sdk',
                                                 help='Build a derivative SDK of this one',
                                                 description='Builds an extensible SDK based upon this one and the items in your workspace',
                                                 group='advanced')
        parser_build_sdk.set_defaults(func=build_sdk)

View File

@@ -0,0 +1,378 @@
# Development tool - deploy/undeploy command plugin
#
# Copyright (C) 2014-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool plugin containing the deploy subcommands"""
import logging
import os
import shutil
import subprocess
import tempfile
import bb.utils
import argparse_oe
import oe.types
from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
# Directory on the target device where per-recipe deployment manifests
# (<recipe>.list) and preserved-file trees are kept
deploylist_path = '/.devtool'
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    deploy: True to generate a deploy script, False for undeploy
    verbose: tee the extracted file list to the console as well
    dryrun: only print what would be done, change nothing
    undeployall: loop over every manifest in deploylist_path
    nopreserve: do not save pre-existing files before overwriting
    nocheckspace: skip the free-space check before deploying

    Positional parameters of the generated script:
    $1 = recipe name, $2 = destination directory, $3 = file list path.
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append(' sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append(' do')
    if dryrun:
        lines.append(' if [ ! -d $file ] ; then')
        lines.append(' echo $file')
        lines.append(' fi')
    else:
        lines.append(' if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append(' if [ ! -d $preservedir/$file ] ; then')
        lines.append(' rmdir $file > /dev/null 2>&1 || true')
        lines.append(' fi')
        lines.append(' else')
        lines.append(' rm -f $file')
        lines.append(' fi')
    lines.append(' done')
    if not dryrun:
        lines.append(' rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append(' rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append(' checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append(' echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append(' exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append(' if [ -e $file ] ; then')
            lines.append(' dest="$preservedir/$file"')
            lines.append(' mkdir -p `dirname $dest`')
            lines.append(' mv $file $dest')
            lines.append(' fi')
            lines.append('done')
            lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The deploy script reads the tar stream from stdin (piped over ssh)
        if verbose:
            lines.append(' tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append(' tar xv -C $2 -f - > $manifest')
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append(' cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append(' find . -type f | while read file')
        lines.append(' do')
        lines.append(' mv $file /$file')
        lines.append(' done')
        lines.append(' cd /')
        lines.append(' rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand

    Gathers the variables needed from the recipe datastore, shuts tinfoil
    down (so bitbake is not held while ssh/scp run), then delegates the
    actual transfer to deploy_no_d().
    """
    import oe.utils

    # Source presence is not required for deploying already-built output
    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            rd = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))

        # Capture everything deploy_no_d needs before shutting down tinfoil
        srcdir = rd.getVar('D')
        workdir = rd.getVar('WORKDIR')
        path = rd.getVar('PATH')
        strip_cmd = rd.getVar('STRIP')
        libdir = rd.getVar('libdir')
        base_libdir = rd.getVar('base_libdir')
        max_process = oe.utils.get_bb_number_threads(rd)
        fakerootcmd = rd.getVar('FAKEROOTCMD')
        fakerootenv = rd.getVar('FAKEROOTENV')
    finally:
        tinfoil.shutdown()
    return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
    """Deploy a recipe's do_install output (srcdir == ${D}) to a target
    machine over ssh, without needing a bitbake datastore.

    Optionally strips executables first, generates a remote helper script
    plus a size-annotated file list, copies both via scp, then streams the
    files as a tar over ssh into the helper script. Returns 0 on success.
    """
    import math
    import oe.package

    # args.target is user@host[:destdir]; default destdir to /
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    recipe_outdir = srcdir
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        raise DevtoolError('No files to deploy - have you built the %s '
                           'recipe? If so, the install step has not installed '
                           'any files.' % args.recipename)

    if args.strip and not args.dry_run:
        # Fakeroot copy to new destination
        srcdir = recipe_outdir
        recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
        if os.path.isdir(recipe_outdir):
            exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
        exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
        # Make sure the cross strip tool is findable
        os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
        oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)

    filelist = []
    inodes = set({})
    ftotalsize = 0
    for root, _, files in os.walk(recipe_outdir):
        for fn in files:
            fstat = os.lstat(os.path.join(root, fn))
            # Get the size in kiB (since we'll be comparing it to the output of du -k)
            # MUST use lstat() here not stat() or getfilesize() since we don't want to
            # dereference symlinks
            # Hardlinked files are only counted once (same inode)
            if fstat.st_ino in inodes:
                fsize = 0
            else:
                fsize = int(math.ceil(float(fstat.st_size)/1024))
            inodes.add(fstat.st_ino)
            ftotalsize += fsize
            # The path as it would appear on the target
            fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
            filelist.append((fpath, fsize))

    if args.dry_run:
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for item, _ in filelist:
            print(' %s' % item)
        return 0

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    if args.key:
        extraoptions += ' -i %s' % args.key

    # In order to delete previously deployed files and have the manifest file on
    # the target, we write out a shell script and then copy it to the target
    # so we can then run it (piping tar output to it).
    # (We cannot use scp here, because it doesn't preserve symlinks.)
    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_deploy.sh'
        tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
        shellscript = _prepare_remote_script(deploy=True,
                                             verbose=args.show_status,
                                             nopreserve=args.no_preserve,
                                             nocheckspace=args.no_check_space)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Write out the file list
        with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
            # First line is the total size, used for the remote free-space check
            f.write('%d\n' % ftotalsize)
            for fpath, fsize in filelist:
                f.write('%s %d\n' % (fpath, fsize))
        # Copy them to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script (streaming the output tree as a tar archive on stdin)
    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
    if ret != 0:
        raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                           'error message')

    logger.info('Successfully deployed %s' % recipe_outdir)

    # NOTE(review): files_list is built but never used below - possibly a
    # leftover from a removed feature; confirm before deleting
    files_list = []
    for root, _, files in os.walk(recipe_outdir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
            files_list.append(os.path.join(destdir, filename))

    return 0
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand

    Removes files previously deployed by deploy-target from a target
    machine, either for a single recipe or (with -a/--all) for every
    recorded deployment. Returns 0 on success.
    """
    # recipename and --all are mutually exclusive, but one must be given
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    elif not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_sshexec = ''
    ssh_sshexec = 'ssh'
    if args.ssh_exec:
        scp_sshexec = "-S %s" % args.ssh_exec
        ssh_sshexec = args.ssh_exec
    scp_port = ''
    ssh_port = ''
    if args.port:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    # Drop any :destdir suffix - undeploy only needs the host part
    args.target = args.target.split(':')[0]

    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Copy it to the target
        ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script
    ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
    if ret != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin

    Adds the 'deploy-target' and 'undeploy-target' subcommands.
    """
    parser_deploy = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    parser_deploy.add_argument('recipename', help='Recipe to deploy')
    parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    parser_deploy.add_argument('-I', '--key',
                               help='Specify ssh private key for connection to the target')
    # --strip / --no-strip share the 'strip' dest; the default comes from
    # the [Deploy] section of the devtool configuration
    strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
    strip_opts.add_argument('-S', '--strip',
                            help='Strip executables prior to deploying (default: %(default)s). '
                                 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                            default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                            action='store_true')
    strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
    parser_deploy.set_defaults(func=deploy)

    parser_undeploy = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
    parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
    parser_undeploy.add_argument('-I', '--key',
                                 help='Specify ssh private key for connection to the target')
    parser_undeploy.set_defaults(func=undeploy)

View File

@@ -0,0 +1,109 @@
# Development tool - export command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool export plugin"""
import os
import argparse
import tarfile
import logging
import datetime
import json
logger = logging.getLogger('devtool')
# output files
default_arcname_prefix = "workspace-export"  # prefix for generated archive names
metadata = '.export_metadata'  # workspace metadata file stored inside the archive
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand

    Archives the workspace (or a subset selected via --include/--exclude)
    into a tar.gz, including a metadata file so the archive can later be
    re-imported. Returns 0 on success, 1 on error.
    """

    def add_metadata(tar):
        """Archive the workspace object"""
        # finally store the workspace metadata
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive recipe with proper arcname"""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where are located, goes into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and possible listed include/excluded recipes are in workspace
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    else:
        for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
            for recipe in recipes:
                if recipe not in workspace:
                    logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                    return 1

    name = args.file

    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    else:
        # if name is a directory, append the default name
        if os.path.isdir(name):
            name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # BUGFIX: the format argument was missing, so the message printed a
        # literal '%s' instead of the archive name
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
        return 1

    # if all workspace is excluded, quit
    if not len(set(workspace.keys()).difference(set(args.exclude))):
        logger.warning('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
def register_commands(subparsers, context):
    """Register devtool export subcommands"""
    parser_export = subparsers.add_parser('export',
                                          description='Export one or more recipes from current workspace into a tar archive',
                                          help='Export workspace into a tar archive',
                                          group='advanced')
    parser_export.add_argument('--file', '-f', help='Output archive file name')
    parser_export.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    # --include and --exclude are mutually exclusive filters
    filter_group = parser_export.add_mutually_exclusive_group()
    filter_group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    filter_group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes into the tar archive')
    parser_export.set_defaults(func=export)

View File

@@ -0,0 +1,282 @@
#
# Copyright (C) 2023-2024 Siemens AG
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
import errno
import json
import logging
import os
import stat
from enum import Enum, auto
from devtool import DevtoolError
from bb.utils import mkdirhier
logger = logging.getLogger('devtool')
class BuildTool(Enum):
    """Build system detected for the recipe under development."""
    UNDEFINED = auto()
    CMAKE = auto()
    MESON = auto()

    @property
    def is_c_ccp(self):
        """True for build tools that imply a C/C++ project (CMake, Meson)."""
        return self in (BuildTool.CMAKE, BuildTool.MESON)
class GdbCrossConfig:
    """Base class defining the GDB configuration generator interface

    Generate a GDB configuration for a binary on the target device.
    Only one instance per binary is allowed. This allows to assign unique port
    numbers for all gdbserver instances.
    """
    # Next TCP port to hand out to a gdbserver instance; shared across all
    # instances so every binary gets a unique port.
    _gdbserver_port_next = 1234
    # Class-level registry of binaries a config has already been generated
    # for (intentionally shared state to enforce uniqueness).
    _binaries = []

    def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
        """Reserve a port for *binary* and compute the generated file paths.

        Raises DevtoolError if a config for this binary already exists.
        """
        self.image_recipe = image_recipe
        self.modified_recipe = modified_recipe
        self.gdb_cross = modified_recipe.gdb_cross
        self.binary = binary
        if binary in GdbCrossConfig._binaries:
            raise DevtoolError(
                "gdbserver config for binary %s is already generated" % binary)
        GdbCrossConfig._binaries.append(binary)
        self.script_dir = modified_recipe.ide_sdk_scripts_dir
        self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
        self.gdbserver_multi = gdbserver_multi
        # e.g. "/usr/bin/foo" -> "usr-bin-foo", usable in file names
        self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
        self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
        GdbCrossConfig._gdbserver_port_next += 1
        self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
        # gdbserver start script
        gdbserver_script_file = 'gdbserver_' + self.id_pretty
        if self.gdbserver_multi:
            gdbserver_script_file += "_m"
        self.gdbserver_script = os.path.join(
            self.script_dir, gdbserver_script_file)
        # gdbinit file
        self.gdbinit = os.path.join(
            self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
        # gdb start script
        self.gdb_script = os.path.join(
            self.script_dir, 'gdb_' + self.id_pretty)

    def _gen_gdbserver_start_script(self):
        """Generate a shell command starting the gdbserver on the remote device via ssh

        GDB supports two modes:
        multi: gdbserver remains running over several debug sessions
        once: gdbserver terminates after the debugged process terminates
        """
        cmd_lines = ['#!/bin/sh']
        if self.gdbserver_multi:
            # The "\\$" sequences produce a literal backslash-dollar in the
            # generated script, so the variable expands on the remote shell
            # (inside the ssh 'sh -c "..."' invocation), not locally.
            temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
            gdbserver_cmd_start = temp_dir
            gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
            gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
            gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port)
            gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
            gdbserver_cmd_stop = temp_dir
            gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
            gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
            # "script stop" kills the remote gdbserver; any other invocation
            # starts it (idempotently, thanks to the pid-file check above).
            gdbserver_cmd_l = []
            gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
            gdbserver_cmd_l.append(' shift')
            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
            gdbserver_cmd_l.append('else')
            gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
            gdbserver_cmd_l.append('fi')
            gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
        else:
            # "once" mode: gdbserver exits when the debugged process exits
            gdbserver_cmd_start = "%s --once :%s %s" % (
                self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
            gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
                self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
        cmd_lines.append(gdbserver_cmd)
        GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)

    def _gen_gdbinit_config(self):
        """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
        gdbinit_lines = ['# This file is generated by devtool ide-sdk']
        if self.gdbserver_multi:
            target_help = '# gdbserver --multi :%d' % self.gdbserver_port
            remote_cmd = 'target extended-remote'
        else:
            target_help = '# gdbserver :%d %s' % (
                self.gdbserver_port, self.binary)
            remote_cmd = 'target remote'
        gdbinit_lines.append('# On the remote target:')
        gdbinit_lines.append(target_help)
        gdbinit_lines.append('# On the build machine:')
        gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
        gdbinit_lines.append(
            '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
        gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
        gdbinit_lines.append('set substitute-path "/usr/include" "' +
                             os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
        # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
        gdbinit_lines.append('set debuginfod enabled off')
        if self.image_recipe.rootfs_dbg:
            gdbinit_lines.append(
                'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
            # First: Search for sources of this recipe in the workspace folder
            if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
                gdbinit_lines.append('set substitute-path "%s" "%s"' %
                                     (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
                    self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
        gdbinit_lines.append(
            '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
        gdbinit_lines.append('set remote exec-file ' + self.binary)
        gdbinit_lines.append(
            'run ' + os.path.join(self.modified_recipe.d, self.binary))
        GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)

    def _gen_gdb_start_script(self):
        """Generate a script starting GDB with the corresponding gdbinit configuration."""
        cmd_lines = ['#!/bin/sh']
        cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
        cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
                         self.gdbinit + ' "$@"')
        GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)

    def initialize(self):
        """Generate all three artifacts: gdbserver script, gdbinit, gdb script."""
        self._gen_gdbserver_start_script()
        self._gen_gdbinit_config()
        self._gen_gdb_start_script()

    @staticmethod
    def write_file(script_file, cmd_lines, executable=False):
        """Write *cmd_lines* to *script_file*, optionally marking it executable."""
        script_dir = os.path.dirname(script_file)
        mkdirhier(script_dir)
        with open(script_file, 'w') as script_f:
            script_f.write(os.linesep.join(cmd_lines))
            script_f.write(os.linesep)
        if executable:
            # add the execute bit for the owner, keep other mode bits
            st = os.stat(script_file)
            os.chmod(script_file, st.st_mode | stat.S_IEXEC)
        logger.info("Created: %s" % script_file)
class IdeBase:
    """Base class defining the interface for IDE plugins"""

    def __init__(self):
        self.ide_name = 'undefined'
        self.gdb_cross_configs = []

    @classmethod
    def ide_plugin_priority(cls):
        """Used to find the default ide handler if --ide is not passed"""
        return 10

    def setup_shared_sysroots(self, shared_env):
        """Default implementation: shared sysroot mode unsupported."""
        # logger.warn is deprecated; use logger.warning
        logger.warning("Shared sysroot mode is not supported for IDE %s" %
                       self.ide_name)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Default implementation: modified recipe mode unsupported."""
        logger.warning("Modified recipe mode is not supported for IDE %s" %
                       self.ide_name)

    def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
        """Create and initialize a GDB config for each installed binary of the recipe."""
        binaries = modified_recipe.find_installed_binaries()
        for binary in binaries:
            gdb_cross_config = gdb_cross_config_class(
                image_recipe, modified_recipe, binary)
            gdb_cross_config.initialize()
            self.gdb_cross_configs.append(gdb_cross_config)

    @staticmethod
    def gen_oe_scrtips_sym_link(modified_recipe):
        # create a sym-link from sources to the scripts directory
        if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
            IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
                                  os.path.join(modified_recipe.real_srctree, 'oe-scripts'))

    @staticmethod
    def update_json_file(json_dir, json_file, update_dict):
        """Update a json file

        Merges update_dict into the file's current content with dict.update;
        the file (and its directory) is created if it does not exist yet.
        A file that cannot be decoded (e.g. JSON with comments, as VSCode
        allows) is treated as empty and overwritten.
        """
        json_path = os.path.join(json_dir, json_file)
        logger.info("Updating IDE config file: %s (%s)" %
                    (json_file, json_path))
        # exist_ok avoids a race between the exists() check and makedirs()
        os.makedirs(json_dir, exist_ok=True)
        try:
            with open(json_path) as f:
                orig_dict = json.load(f)
        except json.decoder.JSONDecodeError:
            logger.info(
                "Decoding %s failed. Probably because of comments in the json file" % json_path)
            orig_dict = {}
        except FileNotFoundError:
            orig_dict = {}
        orig_dict.update(update_dict)
        with open(json_path, 'w') as f:
            json.dump(orig_dict, f, indent=4)

    @staticmethod
    def symlink_force(tgt, dst):
        """Create symlink dst -> tgt, replacing a pre-existing, different link."""
        try:
            os.symlink(tgt, dst)
        except OSError as err:
            if err.errno == errno.EEXIST:
                if os.readlink(dst) != tgt:
                    os.remove(dst)
                    os.symlink(tgt, dst)
            else:
                # bare raise preserves the original traceback
                raise
def get_devtool_deploy_opts(args):
    """Filter args for devtool deploy-target args.

    Translates the ide-sdk command line options into the equivalent
    'devtool deploy-target' option list. Returns None when no target
    was given (nothing to deploy to).
    """
    if not args.target:
        return None
    devtool_deploy_opts = [args.target]
    if args.no_host_check:
        devtool_deploy_opts += ["-c"]
    if args.show_status:
        devtool_deploy_opts += ["-s"]
    if args.no_preserve:
        devtool_deploy_opts += ["-p"]
    if args.no_check_space:
        devtool_deploy_opts += ["--no-check-space"]
    if args.ssh_exec:
        # Fix: was 'args.ssh.exec', which raises AttributeError - the
        # argparse destination for --ssh-exec is 'ssh_exec'.
        devtool_deploy_opts += ["-e", args.ssh_exec]
    if args.port:
        devtool_deploy_opts += ["-P", args.port]
    if args.key:
        devtool_deploy_opts += ["-I", args.key]
    if args.strip is False:
        devtool_deploy_opts += ["--no-strip"]
    return devtool_deploy_opts

View File

@@ -0,0 +1,463 @@
#
# Copyright (C) 2023-2024 Siemens AG
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
import json
import logging
import os
import shutil
from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
logger = logging.getLogger('devtool')
class GdbCrossConfigVSCode(GdbCrossConfig):
    """VSCode-specific GDB configuration.

    Forces gdbserver "once" mode (gdbserver_multi=False): the generated
    tasks.json restarts gdbserver for every debug session, so a persistent
    --multi server is not needed.
    """
    def __init__(self, image_recipe, modified_recipe, binary):
        super().__init__(image_recipe, modified_recipe, binary, False)

    def initialize(self):
        # Only the gdbserver start script is generated; the gdbinit file and
        # gdb wrapper of the base class are replaced by launch.json.
        self._gen_gdbserver_start_script()
class IdeVSCode(IdeBase):
    """Manage IDE configurations for VSCode

    Modified recipe mode:
    - cmake: use the cmake-preset generated by devtool ide-sdk
    - meson: meson is called via a wrapper script generated by devtool ide-sdk

    Shared sysroot mode:
    In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
    A workspace cannot be created because there is no recipe that defines how a workspace could
    be set up.
    - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
      The cmake-kit uses the environment script and the tool-chain file
      generated by meta-ide-support.
    - meson: Meson needs manual workspace configuration.
    """

    @classmethod
    def ide_plugin_priority(cls):
        """If --ide is not passed this is the default plugin"""
        if shutil.which('code'):
            return 100
        return 0

    def setup_shared_sysroots(self, shared_env):
        """Expose the toolchain of the shared sysroots SDK"""
        datadir = shared_env.ide_support.datadir
        deploy_dir_image = shared_env.ide_support.deploy_dir_image
        real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
        standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
        vscode_ws_path = os.path.join(
            os.environ['HOME'], '.local', 'share', 'CMakeTools')
        cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
        oecmake_generator = "Ninja"
        env_script = os.path.join(
            deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)

        if not os.path.isdir(vscode_ws_path):
            os.makedirs(vscode_ws_path)
        cmake_kits_old = []
        if os.path.exists(cmake_kits_path):
            with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
                cmake_kits_old = json.load(cmake_kits_file)
        cmake_kits = cmake_kits_old.copy()

        cmake_kit_new = {
            "name": "OE " + real_multimach_target_sys,
            "environmentSetupScript": env_script,
            "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
            "preferredGenerator": {
                "name": oecmake_generator
            }
        }

        def merge_kit(cmake_kits, cmake_kit_new):
            # Replace an existing kit with the same environment script,
            # otherwise append the new kit.
            i = 0
            while i < len(cmake_kits):
                if 'environmentSetupScript' in cmake_kits[i] and \
                        cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
                    cmake_kits[i] = cmake_kit_new
                    return
                i += 1
            cmake_kits.append(cmake_kit_new)
        merge_kit(cmake_kits, cmake_kit_new)

        if cmake_kits != cmake_kits_old:
            logger.info("Updating: %s" % cmake_kits_path)
            with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
                json.dump(cmake_kits, cmake_kits_file, indent=4)
        else:
            logger.info("Already up to date: %s" % cmake_kits_path)

        cmake_native = os.path.join(
            shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
        if os.path.isfile(cmake_native):
            logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
        else:
            logger.error("Cannot find cmake native at: %s" % cmake_native)

    def dot_code_dir(self, modified_recipe):
        """Return the .vscode directory inside the recipe's source tree."""
        return os.path.join(modified_recipe.srctree, '.vscode')

    def __vscode_settings_meson(self, settings_dict, modified_recipe):
        """Add Meson specific settings to settings.json."""
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper

        confopts = modified_recipe.mesonopts.split()
        confopts += modified_recipe.meson_cross_file.split()
        confopts += modified_recipe.extra_oemeson.split()
        settings_dict["mesonbuild.configureOptions"] = confopts
        settings_dict["mesonbuild.buildFolder"] = modified_recipe.b

    def __vscode_settings_cmake(self, settings_dict, modified_recipe):
        """Add cmake specific settings to settings.json.

        Note: most settings are passed to the cmake preset.
        """
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        settings_dict["cmake.configureOnOpen"] = True
        settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree

    def vscode_settings(self, modified_recipe, image_recipe):
        """Generate .vscode/settings.json (file excludes, read-only dirs, build tool config)."""
        files_excludes = {
            "**/.git/**": True,
            "**/oe-logs/**": True,
            "**/oe-workdir/**": True,
            "**/source-date-epoch/**": True
        }
        python_exclude = [
            "**/.git/**",
            "**/oe-logs/**",
            "**/oe-workdir/**",
            "**/source-date-epoch/**"
        ]
        files_readonly = {
            modified_recipe.recipe_sysroot + '/**': True,
            modified_recipe.recipe_sysroot_native + '/**': True,
        }
        if image_recipe.rootfs_dbg is not None:
            files_readonly[image_recipe.rootfs_dbg + '/**'] = True
        settings_dict = {
            "files.watcherExclude": files_excludes,
            "files.exclude": files_excludes,
            "files.readonlyInclude": files_readonly,
            "python.analysis.exclude": python_exclude
        }
        self.__vscode_settings_cmake(settings_dict, modified_recipe)
        self.__vscode_settings_meson(settings_dict, modified_recipe)

        settings_file = 'settings.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), settings_file, settings_dict)

    def __vscode_extensions_cmake(self, modified_recipe, recommendations):
        if modified_recipe.build_tool is not BuildTool.CMAKE:
            return
        recommendations += [
            "twxs.cmake",
            "ms-vscode.cmake-tools",
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def __vscode_extensions_meson(self, modified_recipe, recommendations):
        if modified_recipe.build_tool is not BuildTool.MESON:
            return
        recommendations += [
            'mesonbuild.mesonbuild',
            "ms-vscode.cpptools",
            "ms-vscode.cpptools-extension-pack",
            "ms-vscode.cpptools-themes"
        ]

    def vscode_extensions(self, modified_recipe):
        """Generate .vscode/extensions.json recommending build-tool extensions."""
        recommendations = []
        self.__vscode_extensions_cmake(modified_recipe, recommendations)
        self.__vscode_extensions_meson(modified_recipe, recommendations)
        extensions_file = 'extensions.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})

    def vscode_c_cpp_properties(self, modified_recipe):
        """Generate .vscode/c_cpp_properties.json for the C/C++ extension."""
        properties_dict = {
            "name": modified_recipe.recipe_id_pretty,
        }
        if modified_recipe.build_tool is BuildTool.CMAKE:
            properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
        elif modified_recipe.build_tool is BuildTool.MESON:
            properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
            properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
        else:  # no C/C++ build
            return

        properties_dicts = {
            "configurations": [
                properties_dict
            ],
            "version": 4
        }
        prop_file = 'c_cpp_properties.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), prop_file, properties_dicts)

    def vscode_launch_bin_dbg(self, gdb_cross_config):
        """Build one launch.json entry remote-debugging a single binary via gdbserver."""
        modified_recipe = gdb_cross_config.modified_recipe

        launch_config = {
            "name": gdb_cross_config.id_pretty,
            "type": "cppdbg",
            "request": "launch",
            "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
            "stopAtEntry": True,
            "cwd": "${workspaceFolder}",
            "environment": [],
            "externalConsole": False,
            "MIMode": "gdb",
            "preLaunchTask": gdb_cross_config.id_pretty,
            "miDebuggerPath": modified_recipe.gdb_cross.gdb,
            "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
        }

        # Search for header files in recipe-sysroot.
        src_file_map = {
            "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
        }
        # First of all search for not stripped binaries in the image folder.
        # These binaries are copied (and optionally stripped) by deploy-target
        setup_commands = [
            {
                "description": "sysroot",
                "text": "set sysroot " + modified_recipe.d
            }
        ]

        if gdb_cross_config.image_recipe.rootfs_dbg:
            launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
                gdb_cross_config.image_recipe)
            # First: Search for sources of this recipe in the workspace folder
            if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
                src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must contain the recipe name PN.")
            # Second: Search for sources of other recipes in the rootfs-dbg
            if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
                src_file_map["/usr/src/debug"] = os.path.join(
                    gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
            else:
                logger.error(
                    "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
        else:
            logger.warning(
                "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")

        launch_config['sourceFileMap'] = src_file_map
        launch_config['setupCommands'] = setup_commands
        return launch_config

    def vscode_launch(self, modified_recipe):
        """GDB Launch configuration for binaries (elf files)"""
        configurations = []
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is modified_recipe:
                configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
        launch_dict = {
            "version": "0.2.0",
            "configurations": configurations
        }
        launch_file = 'launch.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), launch_file, launch_dict)

    def vscode_tasks_cpp(self, args, modified_recipe):
        """Generate tasks.json for C/C++ recipes: install+deploy plus gdbserver tasks."""
        run_install_deploy = modified_recipe.gen_install_deploy_script(args)
        install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": install_task_name,
                    "type": "shell",
                    "command": run_install_deploy,
                    "problemMatcher": []
                }
            ]
        }
        for gdb_cross_config in self.gdb_cross_configs:
            if gdb_cross_config.modified_recipe is not modified_recipe:
                continue
            tasks_dict['tasks'].append(
                {
                    "label": gdb_cross_config.id_pretty,
                    "type": "shell",
                    "isBackground": True,
                    "dependsOn": [
                        install_task_name
                    ],
                    "command": gdb_cross_config.gdbserver_script,
                    "problemMatcher": [
                        {
                            "pattern": [
                                {
                                    "regexp": ".",
                                    "file": 1,
                                    "location": 2,
                                    "message": 3
                                }
                            ],
                            "background": {
                                "activeOnStart": True,
                                "beginsPattern": ".",
                                "endsPattern": ".",
                            }
                        }
                    ]
                })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks_fallback(self, args, modified_recipe):
        """Generate tasks.json for non-C/C++ recipes: wrap devtool build/deploy calls."""
        oe_init_dir = modified_recipe.oe_init_dir
        oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
        dt_build = "devtool build "
        dt_build_label = dt_build + modified_recipe.recipe_id_pretty
        dt_build_cmd = dt_build + modified_recipe.bpn
        clean_opt = " --clean"
        dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
        dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
        dt_deploy = "devtool deploy-target "
        dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
        dt_deploy_cmd = dt_deploy + modified_recipe.bpn
        dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
        # Fix: get_devtool_deploy_opts returns None when args.target is not
        # set, which would make ' '.join() raise TypeError.
        deploy_opts = ' '.join(get_devtool_deploy_opts(args) or [])
        tasks_dict = {
            "version": "2.0.0",
            "tasks": [
                {
                    "label": dt_build_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_cmd)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_deploy_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s %s" % (
                            oe_init, dt_deploy_cmd, deploy_opts)
                    ],
                    "problemMatcher": []
                },
                {
                    "label": dt_build_deploy_label,
                    "dependsOrder": "sequence",
                    "dependsOn": [
                        dt_build_label,
                        dt_deploy_label
                    ],
                    "problemMatcher": [],
                    "group": {
                        "kind": "build",
                        "isDefault": True
                    }
                },
                {
                    "label": dt_build_clean_label,
                    "type": "shell",
                    "command": "bash",
                    "linux": {
                        "options": {
                            "cwd": oe_init_dir
                        }
                    },
                    "args": [
                        "--login",
                        "-c",
                        "%s%s" % (oe_init, dt_build_clean_cmd)
                    ],
                    "problemMatcher": []
                }
            ]
        }
        if modified_recipe.gdb_cross:
            for gdb_cross_config in self.gdb_cross_configs:
                if gdb_cross_config.modified_recipe is not modified_recipe:
                    continue
                tasks_dict['tasks'].append(
                    {
                        "label": gdb_cross_config.id_pretty,
                        "type": "shell",
                        "isBackground": True,
                        "dependsOn": [
                            dt_build_deploy_label
                        ],
                        "command": gdb_cross_config.gdbserver_script,
                        "problemMatcher": [
                            {
                                "pattern": [
                                    {
                                        "regexp": ".",
                                        "file": 1,
                                        "location": 2,
                                        "message": 3
                                    }
                                ],
                                "background": {
                                    "activeOnStart": True,
                                    "beginsPattern": ".",
                                    "endsPattern": ".",
                                }
                            }
                        ]
                    })
        tasks_file = 'tasks.json'
        IdeBase.update_json_file(
            self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)

    def vscode_tasks(self, args, modified_recipe):
        """Dispatch tasks.json generation based on the recipe's build tool."""
        if modified_recipe.build_tool.is_c_ccp:
            self.vscode_tasks_cpp(args, modified_recipe)
        else:
            self.vscode_tasks_fallback(args, modified_recipe)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """Generate the complete .vscode configuration set for a modified recipe."""
        self.vscode_settings(modified_recipe, image_recipe)
        self.vscode_extensions(modified_recipe)
        self.vscode_c_cpp_properties(modified_recipe)
        if args.target:
            self.initialize_gdb_cross_configs(
                image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
            self.vscode_launch(modified_recipe)
        self.vscode_tasks(args, modified_recipe)
def register_ide_plugin(ide_plugins):
    """Make the VSCode plugin available under the IDE name 'code'."""
    ide_plugins.update({'code': IdeVSCode})

View File

@@ -0,0 +1,53 @@
#
# Copyright (C) 2023-2024 Siemens AG
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool ide-sdk generic IDE plugin"""
import os
import logging
from devtool.ide_plugins import IdeBase, GdbCrossConfig
logger = logging.getLogger('devtool')
class IdeNone(IdeBase):
    """Generate some generic helpers for other IDEs

    Modified recipe mode:
    Generate some helper scripts for remote debugging with GDB

    Shared sysroot mode:
    A wrapper for bitbake meta-ide-support and bitbake build-sysroots
    """

    def __init__(self):
        super().__init__()

    def setup_shared_sysroots(self, shared_env):
        """Point the user at the generated environment setup script."""
        ide_support = shared_env.ide_support
        setup_script = os.path.join(
            ide_support.deploy_dir_image,
            'environment-setup-' + ide_support.real_multimach_target_sys)
        logger.info("To use this SDK please source this: %s", setup_script)

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        """generate some helper scripts and config files

        - Execute the do_install task
        - Execute devtool deploy-target
        - Generate a gdbinit file per executable
        - Generate the oe-scripts sym-link
        """
        deploy_script = modified_recipe.gen_install_deploy_script(args)
        logger.info("Created: %s", deploy_script)

        self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
        IdeBase.gen_oe_scrtips_sym_link(modified_recipe)
def register_ide_plugin(ide_plugins):
    """Make the generic fallback plugin available under the IDE name 'none'."""
    ide_plugins.update({'none': IdeNone})

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,134 @@
# Development tool - import command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool import plugin"""
import os
import tarfile
import logging
import collections
import json
import fnmatch
from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
from devtool import export
logger = logging.getLogger('devtool')
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand.

    Extracts a tar archive previously created by 'devtool export' into the
    current workspace, skipping appends that have no matching recipe in the
    current metadata and rewriting exported paths to the local workspace.

    Returns 0 on success; raises DevtoolError if the archive is missing or
    was not created by 'devtool export'.
    """

    def get_pn(name):
        """ Returns the filename of a workspace recipe/append"""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    with tarfile.open(args.file) as tar:
        # Get exported metadata
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError as ke:
            # chain the original KeyError for easier debugging
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".') from ke

        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # Fix: message typo 'bbapppend' -> 'bbappend'
                logger.warning('No recipe to append %s.bbappend, skipping' % fn)

        # Extract
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                   member.name.startswith('recipes/%s' % nonimp) or \
                   member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warning(pe)
                    else:
                        logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                        # appends created by 'devtool add' need replacement of exported source tree
                        pn = get_pn(member.name).split('_')[0]
                        exported_srctree = export_workspace[pn]['srctree']
                        if exported_srctree:
                            replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warning('No recipes imported into the workspace')

    return 0
def register_commands(subparsers, context):
    """Register devtool import subcommands"""
    parser_import = subparsers.add_parser('import',
                                          description='Import tar archive previously created by "devtool export" into workspace',
                                          help='Import exported tar archive into workspace',
                                          group='advanced')
    parser_import.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
    parser_import.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
    parser_import.set_defaults(func=devimport)

View File

@@ -0,0 +1,81 @@
# OpenEmbedded Development tool - menuconfig command plugin
#
# Copyright (C) 2018 Xilinx
# Written by: Chandana Kalluri <ckalluri@xilinx.com>
#
# SPDX-License-Identifier: MIT
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool menuconfig plugin"""
import os
import bb
import logging
import argparse
import re
import glob
from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
from devtool import check_workspace_recipe
logger = logging.getLogger('devtool')
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand.

    Runs 'bitbake -c menuconfig' for a recipe already in the devtool
    workspace, then writes a config fragment (devtool-fragment.cfg)
    describing the changes into the recipe's oe-local-files directory.
    Returns 0 on success, 1 if the recipe could not be parsed; raises
    DevtoolError if the recipe has no do_menuconfig task.
    """
    rd = ""
    kconfigpath = ""
    pn_src = ""
    localfilesdir = ""
    workspace_dir = ""
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1

        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN')

        # Only recipes that define a do_menuconfig task (kernel, busybox, ...)
        # can be configured this way
        if not rd.getVarFlag('do_menuconfig','task'):
            raise DevtoolError("This recipe does not support menuconfig option")

        workspace_dir = os.path.join(config.workspace_path,'sources')
        # NOTE(review): kconfigpath is computed but never used below — confirm
        kconfigpath = rd.getVar('B')
        pn_src = os.path.join(workspace_dir,pn)

        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src,'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir,'.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')
    finally:
        tinfoil.shutdown()

    logger.info('Launching menuconfig')
    exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    # NOTE(review): the return value of _create_kconfig_diff is unused here
    res = standard._create_kconfig_diff(pn_src,rd,fragment)

    return 0
def register_commands(subparsers, context):
    """register devtool subcommands from this plugin"""
    parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
    # Fixed typo in the user-visible help text: 'compenent' -> 'component'
    parser_menuconfig.add_argument('component', help='component to alter config')
    parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)

View File

@@ -0,0 +1,50 @@
# Development tool - package command plugin
#
# Copyright (C) 2014-2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool plugin containing the package subcommands"""
import os
import subprocess
import logging
from bb.process import ExecutionError
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Builds binary packages (of IMAGE_PKGTYPE, or the devtool-config
    override) for a recipe currently in the workspace.
    Returns 0 on success, or the bitbake exit code on failure.
    """
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # devtool config may override the package type and the task to run
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')
        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    # Fixed: this module only does 'from bb.process import ExecutionError', so
    # referencing bb.process.ExecutionError here raised NameError ('bb' is not
    # bound); use the imported name directly.
    except ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin"""
    # The package command only makes sense inside the extensible SDK
    if not context.fixed_setup:
        return
    pkg_parser = subparsers.add_parser('package',
                                       help='Build packages for a recipe',
                                       description='Builds packages for a recipe\'s output files',
                                       group='testbuild', order=-5)
    pkg_parser.add_argument('recipename', help='Recipe to package')
    pkg_parser.set_defaults(func=package)

View File

@@ -0,0 +1,64 @@
# Development tool - runqemu command plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool runqemu plugin"""
import os
import bb
import logging
import argparse
import glob
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
logger = logging.getLogger('devtool')
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand.

    Boots the specified image (or the first configured SDK target) under
    QEMU via the runqemu script. Returns 0 on success or the runqemu exit
    code on failure; raises DevtoolError if QEMU is unavailable or no
    image name can be determined.
    """
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        # Native qemu binaries live under STAGING_DIR/BUILD_ARCH/<bindir_native>
        bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
                                     tinfoil.config_data.getVar('BUILD_ARCH'),
                                     tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    imagename = args.imagename
    if not imagename:
        # Fall back to the first image configured as an SDK target
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    # Only offered inside the extensible SDK
    if not context.fixed_setup:
        return
    qemu_parser = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
                                        description='Runs QEMU to boot the specified image',
                                        group='testbuild', order=-20)
    qemu_parser.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
    qemu_parser.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
                             nargs=argparse.REMAINDER)
    qemu_parser.set_defaults(func=runqemu)

View File

@@ -0,0 +1,330 @@
# Development tool - sdk-update command plugin
#
# Copyright (C) 2015-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import errno
import glob
import logging
import os
import re
import shutil
import subprocess
import sys
import tempfile

# sdk_update references bb.process.ExecutionError at module-function scope;
# without this import that except clause would raise NameError
import bb.process

from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
logger = logging.getLogger('devtool')
def parse_locked_sigs(sigfile_path):
    """Return <pn:task>:<hash> dictionary"""
    sig_dict = {}
    with open(sigfile_path) as sigfile:
        for raw_line in sigfile:
            if ':' not in raw_line:
                continue
            # Everything before the last ':' is the pn:task key; the first
            # whitespace-separated token after it is the hash
            taskkey, _, hashpart = raw_line.rpartition(':')
            sig_dict[taskkey.strip()] = hashpart.split()[0]
    return sig_dict
def generate_update_dict(sigfile_new, sigfile_old):
    """Return a dict containing <pn:task>:<hash> which indicates what need to be updated"""
    new_sigs = parse_locked_sigs(sigfile_new)
    old_sigs = parse_locked_sigs(sigfile_old)
    # A task needs updating when it is new, or when its locked hash changed
    return {
        taskkey: sig
        for taskkey, sig in new_sigs.items()
        if taskkey not in old_sigs or old_sigs[taskkey] != sig
    }
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list containing sstate objects which are to be installed"""
    sstate_objects = []
    for hashval in update_dict.values():
        # sstate archives may sit either directly under the two-character
        # hash prefix dir or one level deeper (per-arch subdirectory)
        candidates = set()
        for pattern in ('/' + hashval[:2] + '/*' + hashval + '*.tgz',
                        '/*/' + hashval[:2] + '/*' + hashval + '*.tgz'):
            candidates |= set(glob.glob(sstate_dir + pattern))
        candidates = list(candidates)
        if len(candidates) == 1:
            sstate_objects.extend(candidates)
        elif len(candidates) > 1:
            logger.error("More than one matching sstate object found for %s" % hashval)
    return sstate_objects
def mkdir(d):
    """Create directory *d* (with parents), tolerating an existing path."""
    try:
        os.makedirs(d)
    except OSError as err:
        # Only 'already exists' is benign; re-raise anything else
        if err.errno != errno.EEXIST:
            raise err
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into destination SDK.

    Copies each archive to the equivalent path under dest_sdk, creating
    directories as needed. Raises DevtoolError if dest_sdk has no
    sstate-cache directory (i.e. does not look like an extensible SDK).
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        logger.error("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
        # Fixed: this was a bare 'raise' with no active exception, which
        # itself fails with "RuntimeError: No active exception to re-raise"
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)

    for sb in sstate_objects:
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
def check_manifest(fn, basepath):
    """Return the manifest-relative paths of files whose current sha256
    checksum no longer matches the one recorded in manifest *fn*."""
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as manifest:
        for entry in manifest:
            fields = entry.split()
            if len(fields) <= 1:
                continue
            chksum = fields[0]
            fpath = fields[1]
            curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
            if curr_chksum != chksum:
                logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
                changedfiles.append(fpath)
    return changedfiles
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command.

    Synchronises an installed extensible SDK against an update server:
    fetches the sdk-conf-manifest, updates the layers git checkout and any
    changed conf files, refreshes the uninative tarballs if their checksums
    changed, and (unless --skip-prepare) re-runs bitbake in setscene-only
    mode to prepare the build system. Returns 0 on success, non-zero on
    any failure.
    """
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    # All downloads are staged in a temp dir and only moved into place once
    # everything succeeded; the temp dir is always cleaned up in the finally
    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # NOTE(review): new_locked_sig_file_path appears unused below — confirm
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        if ret != 0:
            # TODO: typo in the message below ("dowload")
            logger.error("Cannot dowload files from %s" % updateserver)
            return ret
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        # Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                # Clean checkout: fast-forward/reset to the upstream branch
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
                logger.error("Changed files:\n%s" % out);
                return -1
        else:
            ret = -1
        if ret != 0:
            # No usable local checkout - clone fresh into the temp dir
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                # Collect (buildarch, checksum) pairs from
                # UNINATIVE_CHECKSUM[<arch>] = "<sum>" lines
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            # Point the SDK at the server's sstate cache from now on
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # A dry run must show no remaining real tasks if the setscene
            # (sstate) preparation fully covered the SDK
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs the development artifacts (sysroot + package data) for the
    requested recipes into the extensible SDK, preferring sstate
    acceleration unless --allow-build is given. Returns 1 if a recipe
    cannot be parsed; raises DevtoolError on installation failure.
    """
    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed when a non-sigdata stamp (normal or
        # setscene) exists for its do_populate_sysroot task
        stampprefix = stampprefixes[recipe]
        stamps = glob.glob(stampprefix + '*')
        for stamp in stamps:
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # Native recipes produce no packages, so skip packagedata
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        # Removed dead code: the original set 'failed = True' AFTER the raise
        # below (unreachable) and then tested the flag, which could never be
        # True - the raise is the effective error path.
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                raise DevtoolError('Failed to install %s - unavailable' % recipe)

        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
            exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin"""
    # These commands only exist inside the extensible SDK
    if not context.fixed_setup:
        return

    update_parser = subparsers.add_parser('sdk-update',
                                          help='Update SDK components',
                                          description='Updates installed SDK components from a remote server',
                                          group='sdk')
    default_server = context.config.get('SDK', 'updateserver', '')
    if default_server:
        # A configured default makes the positional argument optional
        update_parser.add_argument('updateserver', help='The update server to fetch latest SDK components from (default %s)' % default_server, nargs='?')
    else:
        update_parser.add_argument('updateserver', help='The update server to fetch latest SDK components from')
    update_parser.add_argument('--skip-prepare', action="store_true", help='Skip re-preparing the build system after updating (for debugging only)')
    update_parser.set_defaults(func=sdk_update)

    install_parser = subparsers.add_parser('sdk-install',
                                           help='Install additional SDK components',
                                           description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
                                           group='sdk')
    install_parser.add_argument('recipename', help='Name of the recipe to install the development artifacts for', nargs='+')
    install_parser.add_argument('-s', '--allow-build', help='Allow building requested item(s) from source', action='store_true')
    install_parser.set_defaults(func=sdk_install)

View File

@@ -0,0 +1,109 @@
# Development tool - search command plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool search plugin"""
import os
import bb
import logging
import argparse
import re
from devtool import setup_tinfoil, parse_recipe, DevtoolError
logger = logging.getLogger('devtool')
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand.

    Matches the keyword (a regular expression) against recipe names,
    package names and selected pkgdata fields, then against all parseable
    recipes' names/provides, printing each match with its SUMMARY.
    Always returns 0.
    """
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        # Unexpanded default SUMMARY, used to suppress boilerplate summaries
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        keyword_rc = re.compile(args.keyword)

        def print_match(pn):
            # Print "name summary"; summary is blanked when it is just the
            # expanded default
            rd = parse_recipe(config, tinfoil, pn, True)
            if not rd:
                return
            summary = rd.getVar('SUMMARY')
            if summary == rd.expand(defsummary):
                summary = ''
            print("%s %s" % (pn.ljust(20), summary))

        matches = []
        if os.path.exists(pkgdata_dir):
            # Pass 1: search built recipes via their pkgdata
            for fn in os.listdir(pkgdata_dir):
                pfn = os.path.join(pkgdata_dir, fn)
                if not os.path.isfile(pfn):
                    continue
                packages = []
                match = False
                # Try the recipe (pkgdata file) name itself first
                if keyword_rc.search(fn):
                    match = True

                if not match:
                    # Then the names of the packages the recipe produces,
                    # and selected fields of each packaged package's
                    # runtime pkgdata
                    with open(pfn, 'r') as f:
                        for line in f:
                            if line.startswith('PACKAGES:'):
                                packages = line.split(':', 1)[1].strip().split()

                    for pkg in packages:
                        if keyword_rc.search(pkg):
                            match = True
                            break
                        if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                            with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                                for line in f:
                                    if ': ' in line:
                                        splitline = line.split(': ', 1)
                                        key = splitline[0]
                                        value = splitline[1].strip()
                                        # Strip the per-package suffix, e.g.
                                        # "DESCRIPTION:<pkg>" -> "DESCRIPTION"
                                        key = key.replace(":" + pkg, "")
                                        if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
                                            if keyword_rc.search(value):
                                                match = True
                                                break
                if match:
                    print_match(fn)
                    matches.append(fn)
        else:
            logger.warning('Package data is not available, results may be limited')

        # Pass 2: search all parseable recipes (covers never-built recipes)
        for recipe in tinfoil.all_recipes():
            if args.fixed_setup and 'nativesdk' in recipe.inherits():
                continue

            match = False
            if keyword_rc.search(recipe.pn):
                match = True
            else:
                for prov in recipe.provides:
                    if keyword_rc.search(prov):
                        match = True
                        break

                if not match:
                    for rprov in recipe.rprovides:
                        if keyword_rc.search(rprov):
                            match = True
                            break

            # Avoid printing recipes already reported from pkgdata
            if match and not recipe.pn in matches:
                print_match(recipe.pn)
    finally:
        tinfoil.shutdown()

    return 0
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    search_parser = subparsers.add_parser('search', help='Search available recipes',
                                          description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
                                          group='info')
    search_parser.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
    search_parser.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,693 @@
# Development tool - upgrade command plugin
#
# Copyright (C) 2014-2017 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool upgrade plugin"""
import os
import sys
import re
import shutil
import tempfile
import logging
import argparse
import scriptutils
import errno
import bb
devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
sys.path = sys.path + [devtool_path]
import oe.recipeutils
from devtool import standard
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
logger = logging.getLogger('devtool')
def _run(cmd, cwd=''):
    """Run a shell command via bb.process.run, logging it first.

    Returns the (stdout, stderr) pair from bb.process.run; callers rely on
    bb.process.ExecutionError being raised on failure.
    """
    logger.debug("Running command %s> %s" % (cwd,cmd))
    return bb.process.run('%s' % cmd, cwd=cwd)
def _get_srctree(tmpdir):
    """Return the root of the unpacked source tree beneath *tmpdir*.

    The fetched archive must have unpacked into exactly one candidate
    subdirectory; otherwise a DevtoolError is raised.
    """
    dirs = scriptutils.filter_src_subdirs(tmpdir)
    if len(dirs) != 1:
        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs))
    return os.path.join(tmpdir, dirs[0])
def _copy_source_code(orig, dest):
    """Move every tracked file under *orig* into *dest*, creating
    destination directories as needed."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
def _remove_patch_dirs(recipefolder):
for root, dirs, files in os.walk(recipefolder):
for d in dirs:
shutil.rmtree(os.path.join(root,d))
def _recipe_contains(rd, var):
    """Return True if *var* is set in a file that lives alongside the
    recipe itself (as opposed to e.g. a class or conf file)."""
    recipefile = rd.getVar('FILE')
    recipedir_prefix = os.path.dirname(recipefile) + os.sep
    varfiles = oe.recipeutils.get_var_files(recipefile, [var], rd)
    for fn in varfiles.values():
        if fn and fn.startswith(recipedir_prefix):
            return True
    return False
def _rename_recipe_dirs(oldpv, newpv, path):
    """Rename directories and .inc files under *path* whose names embed the
    old version, substituting the new version."""
    for root, dirs, files in os.walk(path):
        # Rename directories with the version in their name
        for olddir in dirs:
            if oldpv not in olddir:
                continue
            newdir = olddir.replace(oldpv, newpv)
            if newdir != olddir:
                shutil.move(os.path.join(path, olddir), os.path.join(path, newdir))
        # Rename any inc files with the version in their name (unusual, but possible)
        for oldfile in files:
            if not oldfile.endswith('.inc'):
                continue
            if oldpv not in oldfile:
                continue
            newfile = oldfile.replace(oldpv, newpv)
            if newfile != oldfile:
                bb.utils.rename(os.path.join(path, oldfile),
                                os.path.join(path, newfile))
def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
oldrecipe = os.path.basename(oldrecipe)
if oldrecipe.endswith('_%s.bb' % oldpv):
newrecipe = '%s_%s.bb' % (bpn, newpv)
if oldrecipe != newrecipe:
shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
else:
newrecipe = oldrecipe
return os.path.join(path, newrecipe)
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename version-bearing directories/includes first, then the recipe
    file itself; return the new recipe file path."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
    """Write the workspace bbappend for an upgraded recipe.

    Points the recipe at the external source tree (EXTERNALSRC), records
    per-repository initial revisions and any copied original files as
    comments, and returns the path of the append file written. Raises
    DevtoolError if the recipe file *rc* does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
        # Local files can be modified/tracked in separate subdir under srctree
        # Mostly useful for packages with S != WORKDIR
        f.write('FILESPATH:prepend := "%s:"\n' %
                os.path.join(srctreebase, 'oe-local-files'))
        f.write('# srctreebase: %s\n' % srctreebase)
        f.write('inherit externalsrc\n')
        # Fixed: the two halves of this NOTE line were concatenated without a
        # separating space, producing "affectingmultiple" in generated appends
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))

        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if revs:
            for name, rev in revs.items():
                f.write('# initial_rev %s: %s\n' % (name, rev))

        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
def _cleanup_on_error(rd, srctree):
if os.path.exists(rd):
shutil.rmtree(rd)
srctree = os.path.abspath(srctree)
if os.path.exists(srctree):
shutil.rmtree(srctree)
def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
    """Report an upgrade failure and terminate with exit status 1.

    Unless keep_failure is set, the partial recipe dir and source tree are
    removed before exiting.
    """
    if keep_failure:
        logger.error(e)
        if extramsg:
            logger.error(extramsg)
        logger.info('Preserving failed upgrade files (--keep-failure)')
    else:
        _cleanup_on_error(rd, srctree)
        logger.error(e)
        if extramsg:
            logger.error(extramsg)
    sys.exit(1)
def _get_uri(rd):
srcuris = rd.getVar('SRC_URI').split()
if not len(srcuris):
raise DevtoolError('SRC_URI not found on recipe')
# Get first non-local entry in SRC_URI - usually by convention it's
# the first entry, but not always!
srcuri = None
for entry in srcuris:
if not entry.startswith('file://'):
srcuri = entry
break
if not srcuri:
raise DevtoolError('Unable to find non-local entry in SRC_URI')
srcrev = '${AUTOREV}'
if '://' in srcuri:
# Fetch a URL
rev_re = re.compile(';rev=([^;]+)')
res = rev_re.search(srcuri)
if res:
srcrev = res.group(1)
srcuri = rev_re.sub('', srcuri)
return srcuri, srcrev
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version.

    For git recipes, checks out the new revision in place (including
    submodules) and resolves the source branch; for archive recipes,
    fetches and unpacks the new archive and commits it on a new
    devtool-<newpv> branch. Unless no_patch is set, existing devtool
    patch branches are rebased onto the new base. Returns a
    (revs, checksums, srcbranch, srcsubdir_rel) tuple.
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    # NOTE(review): pv is read but unused below — confirm
    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    # paths collects the main tree plus any submodule checkouts
    paths = [srctree]
    if uri.startswith('git://') or uri.startswith('gitsm://'):
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        __run('git submodule update --recursive')
        __run('git submodule foreach \'git tag -f devtool-base-new\'')
        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
        # git recipes carry no archive checksums
        checksums = {}
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if len(get_branch) == 1:
                # If srcrev is on only ONE branch, then use that branch
                srcbranch = get_branch[0]
            elif 'main' in get_branch:
                # If srcrev is on multiple branches, then choose 'main' if it is one of them
                srcbranch = 'main'
            elif 'master' in get_branch:
                # Otherwise choose 'master' if it is one of the branches
                srcbranch = 'master'
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Archive-based recipe: fetch/unpack the new version elsewhere and
        # swap the tree contents in on a fresh devtool branch
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout,_) = __run('git ls-files --modified --others')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Add in batches to keep the command line length bounded
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f devtool-base-%s' % newpv)

    # Record the new base revision for the main tree and each submodule
    revs = {}
    for path in paths:
        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
        revs[os.path.relpath(path, srctree)] = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        for path in paths:
            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
            branches_to_rebase = [branch] + stdout.split()
            target_branch = revs[os.path.relpath(path, srctree)]

            # There is a bug (or feature?) in git rebase where if a commit with
            # a note is fully rebased away by being part of an old commit, the
            # note is still attached to the old commit. Avoid this by making
            # sure all old devtool related commits have a note attached to them
            # (this assumes git config notes.rewriteMode is set to ignore).
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                if not oe.patch.GitApplyTree.getNotes(path, rev):
                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")

            for b in branches_to_rebase:
                logger.info("Rebasing {} onto {}".format(b, target_branch))
                _run('git checkout %s' % b, cwd=path)
                try:
                    _run('git rebase %s' % target_branch, cwd=path)
                except bb.process.ExecutionError as e:
                    # Conflicts are left for the user to resolve; abort the
                    # rebase so the tree is not left mid-rebase
                    if 'conflict' in e.stdout:
                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
                        _run('git rebase --abort', cwd=path)
                    else:
                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

            # Remove any dummy notes added above.
            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
            for rev in stdout.splitlines():
                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")

            _run('git checkout %s' % branch, cwd=path)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)
            if tmpdir != tmpsrctree:
                shutil.rmtree(tmpdir)

    return (revs, checksums, srcbranch, srcsubdir_rel)
def _add_license_diff_to_recipe(path, diff):
notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
# The following is the difference between the old and the new license text.
# Please update the LICENSE value if needed, and summarize the changes in
# the commit message via 'License-Update:' tag.
# (example: 'License-Update: copyright years updated.')
#
# The changes:
#
"""
commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
with open(path, 'rb') as f:
orig_content = f.read()
with open(path, 'wb') as f:
f.write(notice_text.encode())
f.write(commented_diff.encode())
f.write("\n#\n\n".encode())
f.write(orig_content)
def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
    """Creates the new recipe under workspace

    Copies the recipe files (all variants) to workspace/recipes/<BPN>,
    renames them for the new version, then computes the set of variable
    changes (PV, SRCREV, SRC_URI branch, checksums, S, LIC_FILES_CHKSUM)
    and applies them through oe.recipeutils.patch_recipe().
    Returns (path to the new recipe file, list of copied files).
    """
    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
    logger.debug('Copied %s to %s' % (copied, path))
    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))
    # newvalues maps variable names (or SRC_URI[...] flags) to new values;
    # a value of None means "drop the variable" (see the S handling below)
    newvalues = {}
    # Only set PV explicitly when the recipe itself references it and the version changed
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv
    if srcrev:
        newvalues['SRCREV'] = srcrev
    if srcbranch:
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            try:
                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            except bb.fetch2.MalformedUrl as e:
                raise DevtoolError("Could not decode SRC_URI: {}".format(e))
            # Only the first git/gitsm entry has its branch replaced
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)
    # Drop any PR value on upgrade
    newvalues['PR'] = None
    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    oldsums = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            for checksum in oldsums:
                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
    nameprefix = '%s.' % addnames[0] if addnames else ''
    # md5sum is deprecated, remove any traces of it. If it was the only old
    # checksum, then replace it with the default checksums.
    if 'md5sum' in oldsums:
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
        oldsums.remove('md5sum')
        if not oldsums:
            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
    # Record the new checksum values for the (possibly renamed) entries
    for checksum in oldsums:
        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')
    if license_diff:
        # Rebuild LIC_FILES_CHKSUM from the newly computed md5s, preserving
        # any beginline/endline bounds, and embed the diff as a comment
        newlicchksum = " ".join(["file://{}".format(l['path']) +
                                 (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
                                 (";endline={}".format(l['endline']) if l['endline'] else "") +
                                 (";md5={}".format(l['actual_md5'])) for l in new_licenses])
        newvalues["LIC_FILES_CHKSUM"] = newlicchksum
        _add_license_diff_to_recipe(fullpath, license_diff)
    tinfoil.modified_files()
    # Re-parse the renamed recipe so patch_recipe operates on up-to-date metadata
    try:
        rd = tinfoil.parse_recipe_file(fullpath, False)
    except bb.tinfoil.TinfoilCommandFailed as e:
        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
    return fullpath, copied
def _check_git_config():
    """Ensure git user.name and user.email are set; raise DevtoolError if not."""
    def _read_setting(option):
        # 'git config' exits with status 1 when the option is simply unset;
        # treat that as "no value" and let any other failure propagate
        try:
            return bb.process.run('git config %s' % option)[0].strip()
        except bb.process.ExecutionError as err:
            if err.exitcode != 1:
                raise
            return None
    problems = []
    if not _read_setting('user.name'):
        problems.append('Please set your name using:\n git config --global user.name')
    if not _read_setting('user.email'):
        problems.append('Please set your email using:\n git config --global user.email')
    if problems:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(problems))
def _extract_licenses(srcpath, recipe_licenses):
    """Read the license files referenced by a LIC_FILES_CHKSUM value.

    srcpath: root of the extracted source tree the license paths are relative to.
    recipe_licenses: LIC_FILES_CHKSUM value (whitespace-separated file:// URLs).

    Returns a list of dicts with keys: path, md5 (checksum recorded in the
    recipe, '' if absent), beginline/endline (0 when unbounded), text (the
    selected lines, decoded) and actual_md5 (md5 computed over those lines).
    """
    # Hoisted out of the loop - previously re-imported for every license file
    import hashlib
    licenses = []
    for url in recipe_licenses.split():
        # 'lic'/'scheme' instead of the original 'license'/'type', which
        # shadowed builtins
        lic = {}
        (scheme, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
        lic['path'] = path
        lic['md5'] = parm.get('md5', '')
        lic['beginline'], lic['endline'] = 0, 0
        if 'beginline' in parm:
            lic['beginline'] = int(parm['beginline'])
        if 'endline' in parm:
            lic['endline'] = int(parm['endline'])
        lic['text'] = []
        with open(os.path.join(srcpath, path), 'rb') as f:
            actual_md5 = hashlib.md5()
            lineno = 0
            for line in f:
                lineno += 1
                # Honour beginline/endline bounds; endline == 0 means "to EOF"
                if (lineno >= lic['beginline']) and ((lineno <= lic['endline']) or not lic['endline']):
                    lic['text'].append(line.decode(errors='ignore'))
                    actual_md5.update(line)
        lic['actual_md5'] = actual_md5.hexdigest()
        licenses.append(lic)
    return licenses
def _generate_license_diff(old_licenses, new_licenses):
need_diff = False
for l in new_licenses:
if l['md5'] != l['actual_md5']:
need_diff = True
break
if need_diff == False:
return None
import difflib
diff = ''
for old, new in zip(old_licenses, new_licenses):
for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
diff = diff + line
return diff
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand

    Extracts the current and the upgraded source trees, computes the
    license diff, writes the upgraded recipe into the workspace and
    records it for build. Returns 0 on success.
    """
    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if args.srcbranch and not args.srcrev:
        # Bug fix: a stray '% args.recipename' was applied to a format string
        # with no placeholder, raising TypeError instead of this error
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")
    # Rebasing the patch branches requires a usable git identity
    _check_git_config()
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)
        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)
        srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
        # try to automatically discover latest version and revision if not provided on command line
        if not args.version and not args.srcrev:
            version_info = oe.recipeutils.get_recipe_upstream_version(rd)
            if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
                args.version = version_info['version']
            if version_info['revision']:
                args.srcrev = version_info['revision']
        if not args.version and not args.srcrev:
            raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")
        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')
        rf = None
        license_diff = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            logger.info('Extracting upgraded version source...')
            rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                    args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                    tinfoil, rd)
            new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
            license_diff = _generate_license_diff(old_licenses, new_licenses)
            rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
        except (bb.process.CmdError, DevtoolError) as e:
            # Clean up (or preserve, with --keep-failure) the partial upgrade
            recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
            _upgrade_error(e, recipedir, srctree, args.keep_failure)
        standard._add_md5(config, pn, os.path.dirname(rf))
        af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)
        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
        if license_diff:
            logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
        if preferred_version:
            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
    finally:
        tinfoil.shutdown()
    return 0
def latest_version(args, config, basepath, workspace):
    """Entry point for the devtool 'latest_version' subcommand"""
    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1
        version_info = oe.recipeutils.get_recipe_upstream_version(rd)
        upstream_ver = version_info['version']
        upstream_rev = version_info['revision']
        # "new-commits-available" is an indication that upstream never issues version tags
        if upstream_ver.endswith("new-commits-available"):
            logger.info("Latest commit: {}".format(upstream_rev))
        else:
            logger.info("Current version: {}".format(version_info['current_version']))
            logger.info("Latest version: {}".format(upstream_ver))
            if upstream_rev:
                logger.info("Latest version's commit: {}".format(upstream_rev))
    finally:
        tinfoil.shutdown()
    return 0
def check_upgrade_status(args, config, basepath, workspace):
    """Entry point for the devtool 'check-upgrade-status' subcommand"""
    if not args.recipe:
        logger.info("Checking the upstream status for all recipes may take a few minutes")
    results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
    for pn, status, current, latest, maintainer, latest_commit, no_update_reason in results:
        # Unless --all was given, only show recipes that are not up to date
        if not args.all and status == 'MATCH':
            continue
        if status == 'UPDATE':
            # Upstream without version tags is reported as "new commits"
            shown = "new commits" if latest.endswith("new-commits-available") else latest
        else:
            shown = status
        print("{:25} {:15} {:15} {} {} {}".format(
            pn,
            current,
            shown,
            maintainer,
            latest_commit if latest_commit != 'N/A' else "",
            "cannot be updated due to: %s" % (no_update_reason) if no_update_reason else ""))
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'upgrade', 'latest-version' and 'check-upgrade-status'
    subcommands to the devtool argument parser.
    """
    defsrctree = standard.get_default_srctree(context.config)
    # upgrade
    parser_upgrade = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                           description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                           group='starting')
    parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
    parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
    parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser_upgrade.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    group = parser_upgrade.add_mutually_exclusive_group()
    group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
    parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
    # latest-version
    parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
                                                  description='Queries the upstream server for what the latest upstream release is (for git, tags are checked, for tarballs, a list of them is obtained, and one with the highest version number is reported)',
                                                  group='info')
    parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
    parser_latest_version.set_defaults(func=latest_version)
    # check-upgrade-status
    parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
                                                        description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
                                                        group='info')
    parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
    parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
    parser_check_upgrade_status.set_defaults(func=check_upgrade_status)

View File

@@ -0,0 +1,242 @@
# Development tool - utility commands plugin
#
# Copyright (C) 2015-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool utility plugins"""
import os
import sys
import shutil
import tempfile
import logging
import argparse
import subprocess
import scriptutils
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
from devtool import parse_recipe
logger = logging.getLogger('devtool')
def _find_recipe_path(args, config, basepath, workspace):
    """Locate the recipe file for args.recipename, preferring the workspace copy."""
    if args.any_recipe:
        logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
    # Workspace recipes can be resolved without loading the cache
    recipefile = None
    if args.recipename in workspace:
        recipefile = workspace[args.recipename]['recipefile']
    if recipefile:
        return recipefile
    # Fall back to a full cache lookup via tinfoil
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            raise DevtoolError("Failed to find specified recipe")
        return rd.getVar('FILE')
    finally:
        tinfoil.shutdown()
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand"""
    # Resolve and print the recipe file path for the user
    print(_find_recipe_path(args, config, basepath, workspace))
    return 0
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand"""
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    # run_editor's exit code is the subcommand's exit code
    return scriptutils.run_editor(recipefile, logger)
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand

    Builds an explanatory header describing the recipe's current configure
    arguments (autotools, cmake or a plain do_configure), then runs the
    appropriate help command (./configure --help or cmake -LH) and pipes
    the combined output through a pager.
    """
    import oe.utils
    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()
    if 'doc' in packageconfig:
        del packageconfig['doc']
    if autotools and not os.path.exists(configurescript):
        # The configure script only exists after do_configure has run
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            pass
    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:
%s
Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):
%s''' % extra_oeconf
        elif cmake:
            # Extract the (possibly line-continued) cmake invocation from
            # cmake_do_configure
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:
%s
Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:
cmake_do_configure() {
%s
}
Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)
%s''' % (cmake_do_configure.rstrip(), extra_oecmake)
        elif do_configure:
            configopttext = '''
The current implementation of do_configure:
do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''
Arguments specified through EXTRA_OECONF (which you can change or add to easily):
%s''' % extra_oeconf
        if packageconfig:
            configopttext += '''
Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''
        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'
        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)
        if cmake:
            msg += '''
The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''
The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)
        olddir = os.getcwd()
        # Fix: dropped the unused tempfile.mkdtemp() directory that was
        # created and then only ever deleted again
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
        try:
            try:
                # Header first, then the actual help command output
                cmd = 'cat %s' % tf.name
                if cmake:
                    cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                    os.chdir(b)
                elif os.path.exists(configurescript):
                    cmd += '; %s %s' % (configurescript, helpargs)
                if sys.stdout.isatty() and not args.no_pager:
                    pager = os.environ.get('PAGER', 'less')
                    cmd = '(%s) | %s' % (cmd, pager)
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                return e.returncode
        finally:
            os.chdir(olddir)
            os.remove(tf.name)
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin

    Adds the 'edit-recipe', 'find-recipe' and 'configure-help' subcommands.
    """
    # Edit-recipe
    parser_edit_recipe = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
                                               description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                               group='working')
    parser_edit_recipe.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    parser_edit_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_edit_recipe.set_defaults(func=edit_recipe)
    # Find-recipe
    parser_find_recipe = subparsers.add_parser('find-recipe', help='Find a recipe file',
                                               description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                               group='working')
    parser_find_recipe.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    parser_find_recipe.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    parser_find_recipe.set_defaults(func=find_recipe)
    # Configure-help
    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    parser_configure_help = subparsers.add_parser('configure-help', help='Get help on configure script options',
                                                  usage='devtool configure-help [options] recipename [--arg ...]',
                                                  description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                                  group='working')
    parser_configure_help.add_argument('recipename', help='Recipe to show configure help for')
    parser_configure_help.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    parser_configure_help.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    parser_configure_help.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    parser_configure_help.set_defaults(func=configure_help)

View File

@@ -0,0 +1,477 @@
# Recipe creation tool - append plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import glob
import fnmatch
import re
import subprocess
import logging
import stat
import shutil
import scriptutils
import errno
from collections import defaultdict
import difflib
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by the recipetool framework."""
    global tinfoil
    tinfoil = instance
# FIXME guessing when we don't have pkgdata?
# FIXME mode to create patch rather than directly substitute
class InvalidTargetFileError(Exception):
    """Raised by find_target_file() for target paths that must not be appended to."""
    pass
def find_target_file(targetpath, d, pkglist=None):
    """Find the recipe installing the specified target path, optionally limited to a select list of packages

    Scans the pkgdata runtime files for packages whose FILES_INFO contains
    targetpath (glob-matched). Returns a dict mapping targetpath to a list
    of package names; a name prefixed with '?' means the file is installed
    via update-alternatives, '!' means a pkg_preinst/pkg_postinst script
    references (and may modify) the file.

    Raises InvalidTargetFileError for paths that must not be appended to.
    """
    import json
    pkgdata_dir = d.getVar('PKGDATA_DIR')
    # The mix between /etc and ${sysconfdir} here may look odd, but it is just
    # being consistent with usage elsewhere
    invalidtargets = {'${sysconfdir}/version': '${sysconfdir}/version is written out at image creation time',
                      '/etc/timestamp': '/etc/timestamp is written out at image creation time',
                      '/dev/*': '/dev is handled by udev (or equivalent) and the kernel (devtmpfs)',
                      '/etc/passwd': '/etc/passwd should be managed through the useradd and extrausers classes',
                      '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
                      '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
                      '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
                      '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
    for pthspec, message in invalidtargets.items():
        if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
            raise InvalidTargetFileError(d.expand(message))
    # Matches targetpath used as an argument within a script (optionally $D-prefixed)
    targetpath_re = re.compile(r'\s+(\$D)?%s(\s|$)' % targetpath)
    recipes = defaultdict(list)
    for root, dirs, files in os.walk(os.path.join(pkgdata_dir, 'runtime')):
        if pkglist:
            filelist = pkglist
        else:
            filelist = files
        for fn in filelist:
            pkgdatafile = os.path.join(root, fn)
            if pkglist and not os.path.exists(pkgdatafile):
                continue
            with open(pkgdatafile, 'r') as f:
                pn = ''
                # This does assume that PN comes before other values, but that's a fairly safe assumption
                for line in f:
                    if line.startswith('PN:'):
                        pn = line.split(': ', 1)[1].strip()
                    elif line.startswith('FILES_INFO'):
                        val = line.split(': ', 1)[1].strip()
                        dictval = json.loads(val)
                        for fullpth in dictval.keys():
                            if fnmatch.fnmatchcase(fullpth, targetpath):
                                recipes[targetpath].append(pn)
                    elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
                        # Scripts are stored escaped in pkgdata; unescape before searching
                        scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
                        if 'update-alternatives --install %s ' % targetpath in scriptval:
                            recipes[targetpath].append('?%s' % pn)
                        elif targetpath_re.search(scriptval):
                            recipes[targetpath].append('!%s' % pn)
    return recipes
def _parse_recipe(pn, tinfoil):
    """Parse recipe *pn* via tinfoil; log and return None when nothing provides it."""
    try:
        return tinfoil.parse_recipe(pn)
    except bb.providers.NoProvider as err:
        logger.error(str(err))
        return None
def determine_file_source(targetpath, rd):
    """Assuming we know a file came from a specific recipe, figure out exactly where it came from

    Returns a tuple (srcfile, elements, modpatches): srcfile is a file://
    (or patch://) URL for the originating source file ('' if undetermined),
    elements is the do_install command line that installs the file (or
    None if not found), and modpatches lists patches modifying the source.
    """
    import oe.recipeutils
    # See if it's in do_install for the recipe
    workdir = rd.getVar('WORKDIR')
    src_uri = rd.getVar('SRC_URI')
    srcfile = ''
    modpatches = []
    elements = check_do_install(rd, targetpath)
    if elements:
        logger.debug('do_install line:\n%s' % ' '.join(elements))
        srcpath = get_source_path(elements)
        logger.debug('source path: %s' % srcpath)
        if not srcpath.startswith('/'):
            # Handle non-absolute path
            srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
        if srcpath.startswith(workdir):
            # OK, now we have the source file name, look for it in SRC_URI
            workdirfile = os.path.relpath(srcpath, workdir)
            # FIXME this is where we ought to have some code in the fetcher, because this is naive
            for item in src_uri.split():
                localpath = bb.fetch2.localpath(item, rd)
                # Source path specified in do_install might be a glob
                if fnmatch.fnmatch(os.path.basename(localpath), workdirfile):
                    srcfile = 'file://%s' % localpath
                elif '/' in workdirfile:
                    if item == 'file://%s' % workdirfile:
                        srcfile = 'file://%s' % localpath
        # Check patches
        srcpatches = []
        patchedfiles = oe.recipeutils.get_recipe_patched_files(rd)
        for patch, filelist in patchedfiles.items():
            for fileitem in filelist:
                if fileitem[0] == srcpath:
                    srcpatches.append((patch, fileitem[1]))
        if srcpatches:
            addpatch = None
            for patch in srcpatches:
                # 'A' = the patch adds the file outright; anything else modifies it
                if patch[1] == 'A':
                    addpatch = patch[0]
                else:
                    modpatches.append(patch[0])
            if addpatch:
                srcfile = 'patch://%s' % addpatch
    return (srcfile, elements, modpatches)
def get_source_path(cmdelements):
    """Find the source path specified within a command

    cmdelements: a shell command split into elements; cmdelements[0] is the
    command name (only 'install' and 'cp' are handled) and the last element
    is assumed to be the destination. Returns the first element that is
    neither an option nor an option's argument, i.e. the source path.

    Raises Exception for unhandled commands.
    """
    command = cmdelements[0]
    if command in ['install', 'cp']:
        # Learn which single-letter options take an argument by scraping
        # the command's --help output for '-X, --long-opt=' lines
        helptext = subprocess.check_output('LC_ALL=C %s --help' % command, shell=True).decode('utf-8')
        argopts = ''
        argopt_line_re = re.compile('^-([a-zA-Z0-9]), --[a-z-]+=')
        for line in helptext.splitlines():
            line = line.lstrip()
            res = argopt_line_re.search(line)
            if res:
                argopts += res.group(1)
        if not argopts:
            # Fallback
            if command == 'install':
                argopts = 'gmoSt'
            elif command == 'cp':
                argopts = 't'
            else:
                raise Exception('No fallback arguments for command %s' % command)
        skipnext = False
        for elem in cmdelements[1:-1]:
            if elem.startswith('-'):
                # Options in argopts consume the following element as their argument
                if len(elem) > 1 and elem[1] in argopts:
                    skipnext = True
                continue
            if skipnext:
                skipnext = False
                continue
            return elem
    else:
        # Bug fix: the command name was never interpolated into the message
        # (the original left a bare "%s" in the raised exception text)
        raise Exception('get_source_path: no handling for command "%s"' % command)
def get_func_deps(func, d):
    """Find the function dependencies of a shell function"""
    # Combine the parsed shell dependencies with any explicit vardeps
    parsed = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
    parsed |= set((d.getVarFlag(func, "vardeps") or "").split())
    # Only dependencies which are themselves functions are of interest
    return [dep for dep in parsed if d.getVarFlag(dep, 'func')]
def check_do_install(rd, targetpath):
    """Look at do_install for a command that installs/copies the specified target path"""
    instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
    script = rd.getVar('do_install')
    # Inline functions called by do_install (somewhat crudely, but good
    # enough for this purpose)
    for dep in get_func_deps('do_install', rd):
        script = script.replace(dep, rd.getVar(dep))
    # Walk the script bottom-up so that a later line (perhaps from a
    # bbappend) writing over the top wins
    for rawline in reversed(script.splitlines()):
        cmdline = rawline.strip()
        is_install = cmdline.startswith('install ') and ' -m' in cmdline
        if not (is_install or cmdline.startswith('cp ')):
            continue
        elements = cmdline.split()
        destpath = os.path.abspath(elements[-1])
        if destpath == instpath:
            return elements
        if destpath.rstrip('/') == os.path.dirname(instpath):
            # FIXME this doesn't take recursive copy into account; unsure if it's practical to do so
            srcpath = get_source_path(elements)
            if fnmatch.fnmatchcase(os.path.basename(instpath), os.path.basename(srcpath)):
                return elements
    return None
def appendfile(args):
    """Implement the 'appendfile' command: create/update a bbappend to replace a target file.

    Returns 0 on success, 1 on error, 3 when the recipe could not be
    selected automatically (user must pass -r/--recipe).
    """
    import oe.recipeutils

    stdout = ''
    try:
        # Use file(1) to sniff the replacement file; best-effort only
        (stdout, _) = bb.process.run('LANG=C file -b %s' % args.newfile, shell=True)
        if 'cannot open' in stdout:
            raise bb.process.ExecutionError(stdout)
    except bb.process.ExecutionError as err:
        logger.debug('file command returned error: %s' % err)
        stdout = ''
    if stdout:
        logger.debug('file command output: %s' % stdout.rstrip())
        if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
            logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')

    if args.recipe:
        # Recipe explicitly specified by the user
        recipes = {args.targetpath: [args.recipe],}
    else:
        # Find which recipe(s) package the target path (requires pkgdata)
        try:
            recipes = find_target_file(args.targetpath, tinfoil.config_data)
        except InvalidTargetFileError as e:
            logger.error('%s cannot be handled by this tool: %s' % (args.targetpath, e))
            return 1
        if not recipes:
            logger.error('Unable to find any package producing path %s - this may be because the recipe packaging it has not been built yet' % args.targetpath)
            return 1

    # Recipe names prefixed with '?' are alternatives; '!' marks postinstall writers
    alternative_pns = []
    postinst_pns = []

    selectpn = None
    for targetpath, pnlist in recipes.items():
        for pn in pnlist:
            if pn.startswith('?'):
                alternative_pns.append(pn[1:])
            elif pn.startswith('!'):
                postinst_pns.append(pn[1:])
            elif selectpn:
                # hit here with multilibs
                continue
            else:
                selectpn = pn

    if not selectpn and len(alternative_pns) == 1:
        # Sole alternative: select it, but tell the user loudly
        selectpn = alternative_pns[0]
        logger.error('File %s is an alternative possibly provided by recipe %s but seemingly no other, selecting it by default - you should double check other recipes' % (args.targetpath, selectpn))

    if selectpn:
        logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
        if postinst_pns:
            logger.warning('%s be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns)))
        rd = _parse_recipe(selectpn, tinfoil)
        if not rd:
            # Error message already shown
            return 1
        sourcefile, instelements, modpatches = determine_file_source(args.targetpath, rd)
        sourcepath = None
        if sourcefile:
            sourcetype, sourcepath = sourcefile.split('://', 1)
            logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
            if sourcetype == 'patch':
                logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
                sourcepath = None
        else:
            logger.debug('Unable to determine source file, proceeding anyway')
        if modpatches:
            logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))

        if instelements and sourcepath:
            # do_install already handles the file; no explicit install needed
            install = None
        else:
            # Auto-determine permissions
            # Check destination
            binpaths = '${bindir}:${sbindir}:${base_bindir}:${base_sbindir}:${libexecdir}:${sysconfdir}/init.d'
            perms = '0644'
            if os.path.abspath(os.path.dirname(args.targetpath)) in rd.expand(binpaths).split(':'):
                # File is going into a directory normally reserved for executables, so it should be executable
                perms = '0755'
            else:
                # Check source
                st = os.stat(args.newfile)
                if st.st_mode & stat.S_IXUSR:
                    perms = '0755'
            install = {args.newfile: (args.targetpath, perms)}
        if sourcepath:
            sourcepath = os.path.basename(sourcepath)
        oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
        tinfoil.modified_files()
        return 0
    else:
        if alternative_pns:
            logger.error('File %s is an alternative possibly provided by the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(alternative_pns)))
        elif postinst_pns:
            logger.error('File %s may be written out in a pre/postinstall script of the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(postinst_pns)))
        return 3
def appendsrc(args, files, rd, extralines=None):
    """Create/update a bbappend adding the given source files to a recipe.

    files maps new local file path -> destination path (relative to S or
    WORKDIR). Supports --dry-run (diff output only) and --update-recipe.
    """
    import oe.recipeutils

    srcdir = rd.getVar('S')
    workdir = rd.getVar('WORKDIR')

    import bb.fetch
    # Build a map of SRC_URI entries with their parameters stripped
    # NOTE(review): 'simplified' is populated but never consulted below —
    # looks like leftover from a duplicate-URI check; confirm before relying on it
    simplified = {}
    src_uri = rd.getVar('SRC_URI').split()
    for uri in src_uri:
        if uri.endswith(';'):
            uri = uri[:-1]
        simple_uri = bb.fetch.URI(uri)
        simple_uri.params = {}
        simplified[str(simple_uri)] = uri

    copyfiles = {}
    extralines = extralines or []
    params = []
    for newfile, srcfile in files.items():
        src_destdir = os.path.dirname(srcfile)
        if not args.use_workdir:
            if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
                srcdir = os.path.join(workdir, 'git')
                if not bb.data.inherits_class('kernel-yocto', rd):
                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
            # Express the destination relative to WORKDIR for the subdir= parameter
            src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
        src_destdir = os.path.normpath(src_destdir)

        if src_destdir and src_destdir != '.':
            params.append({'subdir': src_destdir})
        else:
            params.append({})

        copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}

    dry_run_output = None
    dry_run_outdir = None
    if args.dry_run:
        # Redirect the bbappend output to a temporary dir so we can diff it
        import tempfile
        dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
        dry_run_outdir = dry_run_output.name

    appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
                                                   redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
    if not appendfile:
        return
    if args.dry_run:
        output = ''
        appendfilename = os.path.basename(appendfile)
        newappendfile = appendfile
        if appendfile and os.path.exists(appendfile):
            with open(appendfile, 'r') as f:
                oldlines = f.readlines()
        else:
            # No pre-existing bbappend; diff against an empty file
            appendfile = '/dev/null'
            oldlines = []

        with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
            newlines = f.readlines()
        diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
        difflines = list(diff)
        if difflines:
            output += ''.join(difflines)
        if output:
            logger.info('Diff of changed files:\n%s' % output)
        else:
            logger.info('No changed files')
    tinfoil.modified_files()
def appendsrcfiles(parser, args):
    """Entry point for 'appendsrcfiles': add several files under one destdir."""
    recipedata = _parse_recipe(args.recipe, tinfoil)
    if not recipedata:
        parser.error('RECIPE must be a valid recipe name')

    # Each file lands in destdir under its own basename
    files = {f: os.path.join(args.destdir, os.path.basename(f))
             for f in args.files}
    return appendsrc(args, files, recipedata)
def appendsrcfile(parser, args):
    """Entry point for 'appendsrcfile': add a single file with an explicit destination."""
    recipedata = _parse_recipe(args.recipe, tinfoil)
    if not recipedata:
        parser.error('RECIPE must be a valid recipe name')

    # Default the destination to the file's basename; a trailing '/' means
    # "directory", so append the basename to it
    destfile = args.destfile
    if not destfile:
        destfile = os.path.basename(args.file)
    elif destfile.endswith('/'):
        destfile = os.path.join(destfile, os.path.basename(args.file))
    args.destfile = destfile
    return appendsrc(args, {args.file: args.destfile}, recipedata)
def layer(layerpath):
    """argparse type check: the path must be a layer (contain conf/layer.conf)."""
    conffile = os.path.join(layerpath, 'conf', 'layer.conf')
    if os.path.exists(conffile):
        return layerpath
    raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
def existing_path(filepath):
    """argparse type check: the path must exist on disk."""
    if os.path.exists(filepath):
        return filepath
    raise argparse.ArgumentTypeError('{0!r} must be an existing path'.format(filepath))
def existing_file(filepath):
    """argparse type check: the path must be an existing regular file."""
    filepath = existing_path(filepath)
    if not os.path.isdir(filepath):
        return filepath
    raise argparse.ArgumentTypeError('{0!r} must be a file, not a directory'.format(filepath))
def destination_path(destpath):
    """argparse type check: destinations are relative to S/WORKDIR, so reject absolute paths."""
    if not os.path.isabs(destpath):
        return destpath
    raise argparse.ArgumentTypeError('{0!r} must be a relative path, not absolute'.format(destpath))
def target_path(targetpath):
    """argparse type check: target paths refer to the image root, so they must be absolute."""
    if os.path.isabs(targetpath):
        return targetpath
    raise argparse.ArgumentTypeError('{0!r} must be an absolute path, not relative'.format(targetpath))
def register_commands(subparsers):
    """Register the appendfile/appendsrcfile(s) devtool subcommands.

    Fixes vs previous version: 'bbapend' typos and 'must contains' grammar in
    user-visible help text, and the reuse of one 'parser' local for both
    appendsrc parsers — the lambda for 'appendsrcfiles' captured the variable
    (late binding), so its errors were reported via the 'appendsrcfile'
    parser. Each parser now has its own name.
    """
    # Options shared by all three subcommands
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
    common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
    common.add_argument('destlayer', metavar='DESTLAYER', help='Base directory of the destination layer to write the bbappend to', type=layer)

    parser_appendfile = subparsers.add_parser('appendfile',
                                              parents=[common],
                                              help='Create/update a bbappend to replace a target file',
                                              description='Creates a bbappend (or updates an existing one) to replace the specified file that appears in the target system, determining the recipe that packages the file and the required path and name for the bbappend automatically. Note that the ability to determine the recipe packaging a particular file depends upon the recipe\'s do_packagedata task having already run prior to running this command (which it will have when the recipe has been built successfully, which in turn will have happened if one or more of the recipe\'s packages is included in an image that has been built successfully).')
    parser_appendfile.add_argument('targetpath', help='Path to the file to be replaced (as it would appear within the target image, e.g. /etc/motd)', type=target_path)
    parser_appendfile.add_argument('newfile', help='Custom file to replace the target file with', type=existing_file)
    parser_appendfile.add_argument('-r', '--recipe', help='Override recipe to apply to (default is to find which recipe already packages the file)')
    parser_appendfile.set_defaults(func=appendfile, parserecipes=True)

    # Options shared by the two appendsrc* subcommands
    common_src = argparse.ArgumentParser(add_help=False, parents=[common])
    common_src.add_argument('-W', '--workdir', help='Unpack file into WORKDIR rather than S', dest='use_workdir', action='store_true')
    common_src.add_argument('recipe', metavar='RECIPE', help='Override recipe to apply to')

    parser_appendsrcfiles = subparsers.add_parser('appendsrcfiles',
                                                  parents=[common_src],
                                                  help='Create/update a bbappend to add or replace source files',
                                                  description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
    parser_appendsrcfiles.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
    parser_appendsrcfiles.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
    parser_appendsrcfiles.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
    parser_appendsrcfiles.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
    parser_appendsrcfiles.set_defaults(func=lambda a: appendsrcfiles(parser_appendsrcfiles, a), parserecipes=True)

    parser_appendsrcfile = subparsers.add_parser('appendsrcfile',
                                                 parents=[common_src],
                                                 help='Create/update a bbappend to add or replace a source file',
                                                 description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
    parser_appendsrcfile.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
    parser_appendsrcfile.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
    parser_appendsrcfile.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
    parser_appendsrcfile.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
    parser_appendsrcfile.set_defaults(func=lambda a: appendsrcfile(parser_appendsrcfile, a), parserecipes=True)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,875 @@
# Recipe creation tool - create command build system handlers
#
# Copyright (C) 2014-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import re
import logging
from recipetool.create import RecipeHandler, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
plugins = None
def plugin_init(pluginlist):
    """Plugin initialization hook: remember the loaded plugin list.

    The list is consulted later when looking up extension handlers.
    """
    # Take a reference to the list so we can use it later
    global plugins
    plugins = pluginlist
def tinfoil_init(instance):
    """Store the tinfoil instance for use by the handlers in this module."""
    global tinfoil
    tinfoil = instance
class CmakeRecipeHandler(RecipeHandler):
    """Recipe creation handler for CMake-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Detect a CMake build system and populate the recipe accordingly.

        Returns True (and marks 'buildsystem' handled) when CMakeLists.txt is
        present; otherwise False.
        """
        if 'buildsystem' in handled:
            return False

        if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
            classes.append('cmake')
            values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
            classes.extend(values.pop('inherit', '').split())
            for var, value in values.items():
                lines_before.append('%s = "%s"' % (var, value))
            lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
            lines_after.append('EXTRA_OECMAKE = ""')
            lines_after.append('')
            handled.append('buildsystem')
            return True
        return False

    @staticmethod
    def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
        """Parse CMakeLists.txt (recursively) to determine dependencies.

        Returns a dict of variable assignments for the recipe; an 'inherit'
        entry lists classes to inherit. Also appends NOTE comments to outlines
        for dependencies that could not be mapped.
        """
        # Find all plugins that want to register handlers
        logger.debug('Loading cmake handlers')
        handlers = []
        for plugin in plugins:
            if hasattr(plugin, 'register_cmake_handlers'):
                plugin.register_cmake_handlers(handlers)

        values = {}
        inherits = []

        if cmakelistsfile:
            srcfiles = [cmakelistsfile]
        else:
            srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])

        # Map from (lowercased) CMake package name to recipe name.
        # Note that some of these are non-standard, but probably better to
        # be able to map them anyway if we see them
        cmake_pkgmap = {'alsa': 'alsa-lib',
                        'aspell': 'aspell',
                        'atk': 'atk',
                        'bison': 'bison-native',
                        'boost': 'boost',
                        'bzip2': 'bzip2',
                        'cairo': 'cairo',
                        'cups': 'cups',
                        'curl': 'curl',
                        'curses': 'ncurses',
                        'cvs': 'cvs',
                        'drm': 'libdrm',
                        'dbus': 'dbus',
                        'dbusglib': 'dbus-glib',
                        'egl': 'virtual/egl',
                        'expat': 'expat',
                        'flex': 'flex-native',
                        'fontconfig': 'fontconfig',
                        'freetype': 'freetype',
                        'gettext': '',
                        'git': '',
                        'gio': 'glib-2.0',
                        'giounix': 'glib-2.0',
                        'glew': 'glew',
                        'glib': 'glib-2.0',
                        'glib2': 'glib-2.0',
                        'glu': 'libglu',
                        'glut': 'freeglut',
                        'gobject': 'glib-2.0',
                        'gperf': 'gperf-native',
                        'gnutls': 'gnutls',
                        'gtk2': 'gtk+',
                        'gtk3': 'gtk+3',
                        'gtk': 'gtk+3',
                        'harfbuzz': 'harfbuzz',
                        'icu': 'icu',
                        'intl': 'virtual/libintl',
                        'jpeg': 'jpeg',
                        'libarchive': 'libarchive',
                        'libiconv': 'virtual/libiconv',
                        'liblzma': 'xz',
                        'libxml2': 'libxml2',
                        'libxslt': 'libxslt',
                        'opengl': 'virtual/libgl',
                        'openmp': '',
                        'openssl': 'openssl',
                        'pango': 'pango',
                        'perl': '',
                        'perllibs': '',
                        'pkgconfig': '',
                        'png': 'libpng',
                        'pthread': '',
                        'pythoninterp': '',
                        'pythonlibs': '',
                        'ruby': 'ruby-native',
                        'sdl': 'libsdl',
                        'sdl2': 'libsdl2',
                        'subversion': 'subversion-native',
                        'swig': 'swig-native',
                        'tcl': 'tcl-native',
                        'threads': '',
                        'tiff': 'tiff',
                        'wget': 'wget',
                        'x11': 'libx11',
                        'xcb': 'libxcb',
                        'xext': 'libxext',
                        'xfixes': 'libxfixes',
                        'zlib': 'zlib',
                        }

        pcdeps = []
        libdeps = []
        deps = []
        unmappedpkgs = []

        # Regexes for the CMake constructs we understand
        proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
        pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
        pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
        findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
        findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
        checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
        include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
        subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
        dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')

        def find_cmake_package(pkg):
            # Look up pkg among CMake config files recorded in pkgdata
            RecipeHandler.load_devel_filemap(tinfoil.config_data)
            for fn, pn in RecipeHandler.recipecmakefilemap.items():
                splitname = fn.split('/')
                if len(splitname) > 1:
                    if splitname[0].lower().startswith(pkg.lower()):
                        if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
                            return pn
            return None

        def interpret_value(value):
            # Strip surrounding double quotes from a CMake argument
            return value.strip('"')

        def parse_cmake_file(fn, paths=None):
            # Parse one CMake file, recursing into include()s and add_subdirectory()s
            searchpaths = (paths or []) + [os.path.dirname(fn)]
            logger.debug('Parsing file %s' % fn)
            with open(fn, 'r', errors='surrogateescape') as f:
                for line in f:
                    line = line.strip()
                    for handler in handlers:
                        if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
                            # NOTE(review): this 'continue' affects the handler loop,
                            # not the outer line loop — a handled line still falls
                            # through to the parsing below; confirm this is intended
                            continue
                    res = include_re.match(line)
                    if res:
                        includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
                        if includefn:
                            parse_cmake_file(includefn, searchpaths)
                        else:
                            logger.debug('Unable to recurse into include file %s' % res.group(1))
                        continue
                    res = subdir_re.match(line)
                    if res:
                        subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
                        if os.path.exists(subdirfn):
                            parse_cmake_file(subdirfn, searchpaths)
                        else:
                            logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
                        continue
                    res = proj_re.match(line)
                    if res:
                        # project() gives us the package name
                        extravalues['PN'] = interpret_value(res.group(1).split()[0])
                        continue
                    res = pkgcm_re.match(line)
                    if res:
                        res = dep_re.findall(res.group(2))
                        if res:
                            pcdeps.extend([interpret_value(x[0]) for x in res])
                        inherits.append('pkgconfig')
                        continue
                    res = pkgsm_re.match(line)
                    if res:
                        res = dep_re.findall(res.group(2))
                        if res:
                            # Note: appending a tuple here!
                            item = tuple((interpret_value(x[0]) for x in res))
                            if len(item) == 1:
                                item = item[0]
                            pcdeps.append(item)
                        inherits.append('pkgconfig')
                        continue
                    res = findpackage_re.match(line)
                    if res:
                        origpkg = res.group(1)
                        pkg = interpret_value(origpkg)
                        found = False
                        for handler in handlers:
                            if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
                                logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
                                found = True
                                break
                        if found:
                            continue
                        elif pkg == 'Gettext':
                            inherits.append('gettext')
                        elif pkg == 'Perl':
                            inherits.append('perlnative')
                        elif pkg == 'PkgConfig':
                            inherits.append('pkgconfig')
                        elif pkg == 'PythonInterp':
                            inherits.append('python3native')
                        elif pkg == 'PythonLibs':
                            inherits.append('python3-dir')
                        else:
                            # Try to map via looking at installed CMake packages in pkgdata
                            dep = find_cmake_package(pkg)
                            if dep:
                                logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
                                deps.append(dep)
                            else:
                                dep = cmake_pkgmap.get(pkg.lower(), None)
                                if dep:
                                    logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
                                    deps.append(dep)
                                elif dep is None:
                                    unmappedpkgs.append(origpkg)
                        continue
                    res = checklib_re.match(line)
                    if res:
                        lib = interpret_value(res.group(1))
                        if not lib.startswith('$'):
                            libdeps.append(lib)
                    res = findlibrary_re.match(line)
                    if res:
                        libs = res.group(2).split()
                        for lib in libs:
                            # Stop at option keywords following the library names
                            if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
                                break
                            lib = interpret_value(lib)
                            if not lib.startswith('$'):
                                libdeps.append(lib)
                    if line.lower().startswith('useswig'):
                        deps.append('swig-native')
                        continue

        parse_cmake_file(srcfiles[0])

        if unmappedpkgs:
            outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))

        RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

        for handler in handlers:
            handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

        if inherits:
            values['inherit'] = ' '.join(list(set(inherits)))

        return values
class CmakeExtensionHandler(object):
    '''Base class for CMake extension handlers'''
    def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
        '''
        Handle a line parsed out of an CMake file.
        Return True if you've completely handled the passed in line, otherwise return False.
        '''
        return False

    def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
        '''
        Handle a find_package package parsed out of a CMake file.
        Return True if you've completely handled the passed in package, otherwise return False.
        '''
        return False

    def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
        '''
        Apply any desired post-processing on the output.

        Note: parameter names fixed to match the positional call in
        CmakeRecipeHandler.extract_cmake_deps (previously misnamed fn/pkg,
        which actually received libdeps/pcdeps).
        '''
        return
class SconsRecipeHandler(RecipeHandler):
    """Recipe creation handler for SCons-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        # Only one build-system handler may claim a source tree
        if 'buildsystem' in handled:
            return False
        if not RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
            return False

        classes.append('scons')
        lines_after.extend([
            '# Specify any options you want to pass to scons using EXTRA_OESCONS:',
            'EXTRA_OESCONS = ""',
            '',
        ])
        handled.append('buildsystem')
        return True
class QmakeRecipeHandler(RecipeHandler):
    """Recipe creation handler for qmake-based projects."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        # Only one build-system handler may claim a source tree
        if 'buildsystem' in handled:
            return False
        if not RecipeHandler.checkfiles(srctree, ['*.pro']):
            return False

        classes.append('qmake2')
        handled.append('buildsystem')
        return True
class AutotoolsRecipeHandler(RecipeHandler):
    """Recipe creation handler for autotools-based projects.

    Fixes vs previous version:
    - keywords list: missing comma after 'AX_LIB_READLINE' caused implicit
      string concatenation ('AX_LIB_READLINEAX_LIB_SQLITE3'), so neither
      keyword was ever matched
    - process_value(): missing 'else' meant the m4_esyscmd output was always
      clobbered by the unconditional subst_defines() call
    - AC_PYTHON_DEVEL/AX_PYTHON_DEVEL/AM_PATH_PYTHON: result was assigned to
      an unused local instead of being recorded as an inherit
    """

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Detect an autotools build system and populate the recipe accordingly."""
        if 'buildsystem' in handled:
            return False

        autoconf = False
        if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
            autoconf = True
            values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
            classes.extend(values.pop('inherit', '').split())
            for var, value in values.items():
                lines_before.append('%s = "%s"' % (var, value))
        else:
            conffile = RecipeHandler.checkfiles(srctree, ['configure'])
            if conffile:
                # Check if this is just a pre-generated autoconf configure script
                with open(conffile[0], 'r', errors='surrogateescape') as f:
                    for i in range(1, 10):
                        if 'Generated by GNU Autoconf' in f.readline():
                            autoconf = True
                            break

        if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
            # Last resort: scrape name/version from the generated configure script
            conffile = RecipeHandler.checkfiles(srctree, ['configure'])
            if conffile:
                with open(conffile[0], 'r', errors='surrogateescape') as f:
                    for line in f:
                        line = line.strip()
                        if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
                            pv = line.split('=')[1].strip('"\'')
                            if pv and not 'PV' in extravalues and validate_pv(pv):
                                extravalues['PV'] = pv
                        elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
                            pn = line.split('=')[1].strip('"\'')
                            if pn and not 'PN' in extravalues:
                                extravalues['PN'] = pn

        if autoconf:
            lines_before.append('')
            lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
            lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
            lines_before.append('# inherit line')
            classes.append('autotools')
            lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
            lines_after.append('EXTRA_OECONF = ""')
            lines_after.append('')
            handled.append('buildsystem')
            return True

        return False

    @staticmethod
    def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
        """Parse configure.ac/configure.in macros to determine dependencies.

        Returns a dict of variable assignments for the recipe; an 'inherit'
        entry lists classes to inherit.
        """
        import shlex

        # Find all plugins that want to register handlers
        logger.debug('Loading autotools handlers')
        handlers = []
        for plugin in plugins:
            if hasattr(plugin, 'register_autotools_handlers'):
                plugin.register_autotools_handlers(handlers)

        values = {}
        inherits = []

        # Hardcoded map, we also use a dynamic one based on what's in the sysroot
        progmap = {'flex': 'flex-native',
                   'bison': 'bison-native',
                   'm4': 'm4-native',
                   'tar': 'tar-native',
                   'ar': 'binutils-native',
                   'ranlib': 'binutils-native',
                   'ld': 'binutils-native',
                   'strip': 'binutils-native',
                   'libtool': '',
                   'autoconf': '',
                   'autoheader': '',
                   'automake': '',
                   'uname': '',
                   'rm': '',
                   'cp': '',
                   'mv': '',
                   'find': '',
                   'awk': '',
                   'sed': '',
                   }
        # Programs whose presence implies a class to inherit
        progclassmap = {'gconftool-2': 'gconf',
                        'pkg-config': 'pkgconfig',
                        'python': 'python3native',
                        'python3': 'python3native',
                        'perl': 'perlnative',
                        'makeinfo': 'texinfo',
                        }

        pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
        pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
        lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
        libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
        progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
        dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
        ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
        am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
        define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
        version_re = re.compile(r'([0-9.]+)')

        defines = {}

        def subst_defines(value):
            # Recursively substitute previously collected m4 define()s
            newvalue = value
            for define, defval in defines.items():
                newvalue = newvalue.replace(define, defval)
            if newvalue != value:
                return subst_defines(newvalue)
            return value

        def process_value(value):
            value = value.replace('[', '').replace(']', '')
            if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
                cmd = subst_defines(value[value.index('(')+1:-1])
                try:
                    if '|' in cmd:
                        cmd = 'set -o pipefail; ' + cmd
                    stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
                    ret = stdout.rstrip()
                except bb.process.ExecutionError:
                    ret = ''
            elif value.startswith('m4_'):
                return None
            else:
                # Fix: previously unconditional, clobbering the esyscmd result above
                ret = subst_defines(value)
            if ret:
                ret = ret.strip('"\'')
            return ret

        # Since a configure.ac file is essentially a program, this is only ever going to be
        # a hack unfortunately; but it ought to be enough of an approximation
        if acfile:
            srcfiles = [acfile]
        else:
            srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])

        pcdeps = []
        libdeps = []
        deps = []
        unmapped = []

        RecipeHandler.load_binmap(tinfoil.config_data)

        def process_macro(keyword, value):
            # Give extension handlers first crack at the macro
            for handler in handlers:
                if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
                    return
            logger.debug('Found keyword %s with value "%s"' % (keyword, value))
            if keyword == 'PKG_CHECK_MODULES':
                res = pkg_re.search(value)
                if res:
                    res = dep_re.findall(res.group(1))
                    if res:
                        pcdeps.extend([x[0] for x in res])
                    inherits.append('pkgconfig')
            elif keyword == 'PKG_CHECK_EXISTS':
                res = pkgce_re.search(value)
                if res:
                    res = dep_re.findall(res.group(1))
                    if res:
                        pcdeps.extend([x[0] for x in res])
                    inherits.append('pkgconfig')
            elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
                inherits.append('gettext')
            elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
                deps.append('intltool-native')
            elif keyword == 'AM_PATH_GLIB_2_0':
                deps.append('glib-2.0')
            elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
                res = progs_re.search(value)
                if res:
                    for prog in shlex.split(res.group(1)):
                        prog = prog.split()[0]
                        for handler in handlers:
                            if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values):
                                return
                        progclass = progclassmap.get(prog, None)
                        if progclass:
                            inherits.append(progclass)
                        else:
                            progdep = RecipeHandler.recipebinmap.get(prog, None)
                            if not progdep:
                                progdep = progmap.get(prog, None)
                            if progdep:
                                deps.append(progdep)
                            elif progdep is None:
                                if not prog.startswith('$'):
                                    unmapped.append(prog)
            elif keyword == 'AC_CHECK_LIB':
                res = lib_re.search(value)
                if res:
                    lib = res.group(1)
                    if not lib.startswith('$'):
                        libdeps.append(lib)
            elif keyword == 'AX_CHECK_LIBRARY':
                res = libx_re.search(value)
                if res:
                    lib = res.group(2)
                    if not lib.startswith('$'):
                        header = res.group(1)
                        libdeps.append((lib, header))
            elif keyword == 'AC_PATH_X':
                deps.append('libx11')
            elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
                deps.append('boost')
            elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
                deps.append('flex-native')
            elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
                deps.append('bison-native')
            elif keyword == 'AX_CHECK_ZLIB':
                deps.append('zlib')
            elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
                deps.append('openssl')
            elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
                deps.append('curl')
            elif keyword == 'AX_LIB_BEECRYPT':
                deps.append('beecrypt')
            elif keyword == 'AX_LIB_EXPAT':
                deps.append('expat')
            elif keyword == 'AX_LIB_GCRYPT':
                deps.append('libgcrypt')
            elif keyword == 'AX_LIB_NETTLE':
                deps.append('nettle')
            elif keyword == 'AX_LIB_READLINE':
                deps.append('readline')
            elif keyword == 'AX_LIB_SQLITE3':
                deps.append('sqlite3')
            elif keyword == 'AX_LIB_TAGLIB':
                deps.append('taglib')
            elif keyword in ['AX_PKG_SWIG', 'AC_PROG_SWIG']:
                deps.append('swig-native')
            elif keyword == 'AX_PROG_XSLTPROC':
                deps.append('libxslt-native')
            elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
                # Fix: previously assigned to an unused local ('pythonclass');
                # record the inherit so python3native is actually pulled in
                inherits.append('python3native')
            elif keyword == 'AX_WITH_CURSES':
                deps.append('ncurses')
            elif keyword == 'AX_PATH_BDB':
                deps.append('db')
            elif keyword == 'AX_PATH_LIB_PCRE':
                deps.append('libpcre')
            elif keyword == 'AC_INIT':
                if extravalues is not None:
                    res = ac_init_re.match(value)
                    if res:
                        extravalues['PN'] = process_value(res.group(1))
                        pv = process_value(res.group(2))
                        if validate_pv(pv):
                            extravalues['PV'] = pv
            elif keyword == 'AM_INIT_AUTOMAKE':
                if extravalues is not None:
                    if 'PN' not in extravalues:
                        res = am_init_re.match(value)
                        if res:
                            if res.group(1) != 'AC_PACKAGE_NAME':
                                extravalues['PN'] = process_value(res.group(1))
                            pv = process_value(res.group(2))
                            if validate_pv(pv):
                                extravalues['PV'] = pv
            elif keyword == 'define(':
                res = define_re.match(value)
                if res:
                    key = res.group(2).strip('[]')
                    value = process_value(res.group(3))
                    if value is not None:
                        defines[key] = value

        # Fix: comma was missing after 'AX_LIB_READLINE', silently merging it
        # with 'AX_LIB_SQLITE3' via implicit string concatenation
        keywords = ['PKG_CHECK_MODULES',
                    'PKG_CHECK_EXISTS',
                    'AM_GNU_GETTEXT',
                    'AM_GLIB_GNU_GETTEXT',
                    'GETTEXT_PACKAGE',
                    'AC_PROG_INTLTOOL',
                    'IT_PROG_INTLTOOL',
                    'AM_PATH_GLIB_2_0',
                    'AC_CHECK_PROG',
                    'AC_PATH_PROG',
                    'AX_WITH_PROG',
                    'AC_CHECK_LIB',
                    'AX_CHECK_LIBRARY',
                    'AC_PATH_X',
                    'AX_BOOST',
                    'BOOST_REQUIRE',
                    'AC_PROG_LEX',
                    'AM_PROG_LEX',
                    'AX_PROG_FLEX',
                    'AC_PROG_YACC',
                    'AX_PROG_BISON',
                    'AX_CHECK_ZLIB',
                    'AX_CHECK_OPENSSL',
                    'AX_LIB_CRYPTO',
                    'AX_LIB_CURL',
                    'LIBCURL_CHECK_CONFIG',
                    'AX_LIB_BEECRYPT',
                    'AX_LIB_EXPAT',
                    'AX_LIB_GCRYPT',
                    'AX_LIB_NETTLE',
                    'AX_LIB_READLINE',
                    'AX_LIB_SQLITE3',
                    'AX_LIB_TAGLIB',
                    'AX_PKG_SWIG',
                    'AC_PROG_SWIG',
                    'AX_PROG_XSLTPROC',
                    'AC_PYTHON_DEVEL',
                    'AX_PYTHON_DEVEL',
                    'AM_PATH_PYTHON',
                    'AX_WITH_CURSES',
                    'AX_PATH_BDB',
                    'AX_PATH_LIB_PCRE',
                    'AC_INIT',
                    'AM_INIT_AUTOMAKE',
                    'define(',
                    ]

        for handler in handlers:
            handler.extend_keywords(keywords)

        # Scan the source files, accumulating multi-line macro invocations
        # (tracked via unbalanced parentheses) before processing them
        for srcfile in srcfiles:
            nesting = 0
            in_keyword = ''
            partial = ''
            with open(srcfile, 'r', errors='surrogateescape') as f:
                for line in f:
                    if in_keyword:
                        partial += ' ' + line.strip()
                        if partial.endswith('\\'):
                            partial = partial[:-1]
                        nesting = nesting + line.count('(') - line.count(')')
                        if nesting == 0:
                            process_macro(in_keyword, partial)
                            partial = ''
                            in_keyword = ''
                    else:
                        for keyword in keywords:
                            if keyword in line:
                                nesting = line.count('(') - line.count(')')
                                if nesting > 0:
                                    partial = line.strip()
                                    if partial.endswith('\\'):
                                        partial = partial[:-1]
                                    in_keyword = keyword
                                else:
                                    process_macro(keyword, line.strip())
                                break

            if in_keyword:
                process_macro(in_keyword, partial)

        if extravalues:
            for k,v in list(extravalues.items()):
                if v:
                    if v.startswith('$') or v.startswith('@') or v.startswith('%'):
                        # Unresolved substitution - drop it
                        del extravalues[k]
                    else:
                        extravalues[k] = v.strip('"\'').rstrip('()')

        if unmapped:
            outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))

        RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)

        for handler in handlers:
            handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)

        if inherits:
            values['inherit'] = ' '.join(list(set(inherits)))

        return values
class AutotoolsExtensionHandler(object):
    '''Base class for Autotools extension handlers'''
    def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
        '''
        Handle a macro parsed out of an autotools file. Note that if you want this to be called
        for any macro other than the ones AutotoolsRecipeHandler already looks for, you'll need
        to add it to the keywords list in extend_keywords().
        Return True if you've completely handled the passed in macro, otherwise return False.
        '''
        return False

    def extend_keywords(self, keywords):
        '''Adds keywords to be recognised by the parser (so that you get a call to process_macro)'''
        return

    def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values):
        '''
        Handle an AC_PATH_PROG, AC_CHECK_PROG etc. line
        Return True if you've completely handled the passed in macro, otherwise return False.
        '''
        return False

    def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
        '''
        Apply any desired post-processing on the output.

        Note: parameter names fixed to match the positional call in
        AutotoolsRecipeHandler.extract_autotools_deps (previously misnamed
        fn/pkg, which actually received libdeps/pcdeps).
        '''
        return
class MakefileRecipeHandler(RecipeHandler):
    """Fallback handler for plain Makefile-based projects (those not using
    autotools/cmake/etc.); generates skeleton do_configure/do_compile/do_install
    tasks and, where possible, a dependency list via autoscan."""
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        if 'buildsystem' in handled:
            # Another handler already claimed the build system
            return False

        makefile = RecipeHandler.checkfiles(srctree, ['Makefile', 'makefile', 'GNUmakefile'])
        if makefile:
            lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
            lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
            lines_after.append('# that the appropriate arguments are passed in.')
            lines_after.append('')

            scanfile = os.path.join(srctree, 'configure.scan')
            try:
                # autoscan (from autoconf) guesses library/program dependencies
                # and writes configure.scan; if it is unavailable or fails we
                # simply skip the dependency scan below.
                bb.process.run('autoscan', cwd=srctree, shell=True)
            except bb.process.ExecutionError:
                pass
            if os.path.exists(scanfile):
                values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
                classes.extend(values.pop('inherit', '').split())
                for var, value in values.items():
                    if var == 'DEPENDS':
                        lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
                    lines_before.append('%s = "%s"' % (var, value))
                lines_before.append('')
                # Clean up autoscan's droppings so they don't end up in the recipe
                for f in ['configure.scan', 'autoscan.log']:
                    fp = os.path.join(srctree, f)
                    if os.path.exists(fp):
                        os.remove(fp)

            self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])

            func = []
            func.append('# You will almost certainly need to add additional arguments here')
            func.append('oe_runmake')
            self.genfunction(lines_after, 'do_compile', func)

            installtarget = True
            try:
                bb.process.run('make -n install', cwd=srctree, shell=True)
            except bb.process.ExecutionError as e:
                # Exit code 1 is treated as "target exists but dry-run failed";
                # anything else means there is no usable install target
                if e.exitcode != 1:
                    installtarget = False
            func = []
            if installtarget:
                func.append('# This is a guess; additional arguments may be required')
                makeargs = ''
                with open(makefile[0], 'r', errors='surrogateescape') as f:
                    # Look for a DESTDIR reference near the top of the Makefile
                    # (first ~100 lines only)
                    for lineno, line in enumerate(f, start=1):
                        if lineno >= 100:
                            break
                        if 'DESTDIR' in line:
                            makeargs += " 'DESTDIR=${D}'"
                            break
                func.append('oe_runmake install%s' % makeargs)
            else:
                func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
                func.append('# target named "install", so you will need to define this yourself')
            self.genfunction(lines_after, 'do_install', func)

            handled.append('buildsystem')
        else:
            lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
            lines_after.append('')
            self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
            self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
            self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
class VersionFileRecipeHandler(RecipeHandler):
    """Derives PV from a VERSION/version file in the source tree that
    contains nothing but a single version string."""
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        if 'PV' in extravalues:
            return
        # Look for a VERSION or version file containing a single line consisting
        # only of a version number
        for candidate in RecipeHandler.checkfiles(srctree, ['VERSION', 'version']):
            found = None
            with open(candidate, 'r', errors='surrogateescape') as f:
                for lineno, raw in enumerate(f, start=1):
                    text = raw.rstrip().strip('"\'')
                    if not text:
                        # Trailing blank lines are tolerated
                        continue
                    if lineno > 1:
                        # Non-blank content past line 1 disqualifies the file
                        found = None
                        break
                    if validate_pv(text):
                        found = text
            if found:
                extravalues['PV'] = found
                break
class SpecFileRecipeHandler(RecipeHandler):
    """Extracts fallback metadata (PN, PV, SUMMARY, HOMEPAGE and a license
    hint) from an RPM .spec file found in the source tree."""
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        if 'PV' in extravalues and 'PN' in extravalues:
            # Nothing left for this handler to contribute
            return
        filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
        # Map of spec-file tags to the recipe variables they populate
        valuemap = {'Name': 'PN',
                    'Version': 'PV',
                    'Summary': 'SUMMARY',
                    'Url': 'HOMEPAGE',
                    'License': 'LICENSE'}
        foundvalues = {}
        for fileitem in filelist:
            with open(fileitem, 'r', errors='surrogateescape') as f:
                for line in f:
                    for value, varname in valuemap.items():
                        # First occurrence of each tag wins
                        if line.startswith(value + ':') and varname not in foundvalues:
                            foundvalues[varname] = line.split(':', 1)[1].strip()
                            break
                    if len(foundvalues) == len(valuemap):
                        break
        # Drop values containing unexpanded RPM macros
        for k in list(foundvalues.keys()):
            if '%' in foundvalues[k]:
                del foundvalues[k]
        if 'PV' in foundvalues:
            if not validate_pv(foundvalues['PV']):
                del foundvalues['PV']
        # The license is only a hint, so emit it as a comment next to
        # LICENSE rather than setting it outright
        spec_license = foundvalues.pop('LICENSE', None)
        if spec_license:
            liccomment = '# NOTE: spec file indicates the license may be "%s"' % spec_license
            for i, line in enumerate(lines_before):
                if line.startswith('LICENSE ='):
                    lines_before.insert(i, liccomment)
                    break
            else:
                lines_before.append(liccomment)
        extravalues.update(foundvalues)
def register_recipe_handlers(handlers):
    """Register this plugin's recipe handlers with recipetool."""
    # Priorities are deliberately spaced out so that other plugins can
    # insert their own handlers in between (so avoid changing these numbers)
    prioritized = (
        (CmakeRecipeHandler, 50),
        (AutotoolsRecipeHandler, 40),
        (SconsRecipeHandler, 30),
        (QmakeRecipeHandler, 20),
        (MakefileRecipeHandler, 10),
        (VersionFileRecipeHandler, -1),
        (SpecFileRecipeHandler, -1),
    )
    for handler_cls, priority in prioritized:
        handlers.append((handler_cls(), priority))

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,777 @@
# Recipe creation tool - go support plugin
#
# The code is based on golang internals. See the affected
# methods for further reference and information.
#
# Copyright (C) 2023 Weidmueller GmbH & Co KG
# Author: Lukas Funke <lukas.funke@weidmueller.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
from collections import namedtuple
from enum import Enum
from html.parser import HTMLParser
from recipetool.create import RecipeHandler, handle_license_vars
from recipetool.create import guess_license, tidy_licenses, fixup_license
from recipetool.create import determine_from_url
from urllib.error import URLError, HTTPError
import bb.utils
import json
import logging
import os
import re
import subprocess
import sys
import shutil
import tempfile
import urllib.parse
import urllib.request
# Result of resolving a module path to its hosting repository:
# root = repository root path, vcs = VCS type (e.g. 'git'),
# url = clone URL, suffix = subpath within the repository (may be None)
GoImport = namedtuple('GoImport', 'root vcs url suffix')
logger = logging.getLogger('recipetool')
# Decomposition of a module path relative to its repository (used when
# resolving versions/tags for modules living in a subdirectory)
CodeRepo = namedtuple(
    'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
# tinfoil (bitbake API) instance; populated via tinfoil_init()
tinfoil = None
# Regular expression to parse pseudo semantic version
# see https://go.dev/ref/mod#pseudo-versions
re_pseudo_semver = re.compile(
    r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
# Regular expression to parse semantic version
re_semver = re.compile(
    r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
def tinfoil_init(instance):
    """Store the tinfoil (bitbake API) instance passed in by recipetool."""
    global tinfoil
    tinfoil = instance
class GoRecipeHandler(RecipeHandler):
    """Class to handle the go recipe creation"""

    @staticmethod
    def __ensure_go():
        """Check if the 'go' command is available in the recipes"""
        recipe = "go-native"
        if not tinfoil.recipes_parsed:
            tinfoil.parse_recipes()
        try:
            rd = tinfoil.parse_recipe(recipe)
        except bb.providers.NoProvider:
            bb.error(
                "Nothing provides '%s' which is required for the build" % (recipe))
            bb.note(
                "You will likely need to add a layer that provides '%s'" % (recipe))
            return None

        bindir = rd.getVar('STAGING_BINDIR_NATIVE')
        gopath = os.path.join(bindir, 'go')

        if not os.path.exists(gopath):
            tinfoil.build_targets(recipe, 'addto_recipe_sysroot')

            if not os.path.exists(gopath):
                # Fix: the original applied '%' to 'go' alone ("... % 'go',
                # recipe"), raising TypeError when this branch was hit; both
                # format arguments belong in one tuple.
                logger.error(
                    '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
                return None

        return bindir

    def __resolve_repository_static(self, modulepath):
        """Resolve the repository in a static manner

        The method is based on the go implementation of
        `repoRootFromVCSPaths` in
        https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
        """
        url = urllib.parse.urlparse("https://" + modulepath)

        req = urllib.request.Request(url.geturl())
        try:
            resp = urllib.request.urlopen(req)
            # Some modulepath are just redirects to github (or some other vcs
            # hoster). Therefore, we check if this modulepath redirects to
            # somewhere else
            if resp.geturl() != url.geturl():
                bb.debug(1, "%s is redirectred to %s" %
                         (url.geturl(), resp.geturl()))
                url = urllib.parse.urlparse(resp.geturl())
                modulepath = url.netloc + url.path
        except URLError as url_err:
            # This is probably because the module path
            # contains the subdir and major path. Thus,
            # we ignore this error for now
            # Fix: dropped the stray '1' first argument (a bb.debug-ism);
            # logging.Logger.debug expects the message first.
            logger.debug(
                "Failed to fetch page from [%s]: %s" % (url, str(url_err)))

        host, _, _ = modulepath.partition('/')

        class vcs(Enum):
            pathprefix = "pathprefix"
            regexp = "regexp"
            type = "type"
            repo = "repo"
            check = "check"
            schemelessRepo = "schemelessRepo"

        # GitHub
        vcsGitHub = {}
        vcsGitHub[vcs.pathprefix] = "github.com"
        vcsGitHub[vcs.regexp] = re.compile(
            r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
        vcsGitHub[vcs.type] = "git"
        vcsGitHub[vcs.repo] = "https://\\g<root>"

        # Bitbucket
        vcsBitbucket = {}
        vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
        vcsBitbucket[vcs.regexp] = re.compile(
            r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
        vcsBitbucket[vcs.type] = "git"
        vcsBitbucket[vcs.repo] = "https://\\g<root>"

        # IBM DevOps Services (JazzHub)
        vcsIBMDevOps = {}
        vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
        vcsIBMDevOps[vcs.regexp] = re.compile(
            r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
        vcsIBMDevOps[vcs.type] = "git"
        vcsIBMDevOps[vcs.repo] = "https://\\g<root>"

        # Git at Apache
        vcsApacheGit = {}
        vcsApacheGit[vcs.pathprefix] = "git.apache.org"
        vcsApacheGit[vcs.regexp] = re.compile(
            r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
        vcsApacheGit[vcs.type] = "git"
        vcsApacheGit[vcs.repo] = "https://\\g<root>"

        # Git at OpenStack
        vcsOpenStackGit = {}
        vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
        vcsOpenStackGit[vcs.regexp] = re.compile(
            r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
        vcsOpenStackGit[vcs.type] = "git"
        vcsOpenStackGit[vcs.repo] = "https://\\g<root>"

        # chiselapp.com for fossil
        vcsChiselapp = {}
        vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
        vcsChiselapp[vcs.regexp] = re.compile(
            r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
        vcsChiselapp[vcs.type] = "fossil"
        vcsChiselapp[vcs.repo] = "https://\\g<root>"

        # General syntax for any server.
        # Must be last.
        vcsGeneralServer = {}
        vcsGeneralServer[vcs.regexp] = re.compile(
            "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
        vcsGeneralServer[vcs.schemelessRepo] = True

        vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
                    vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
                    vcsGeneralServer]

        if modulepath.startswith("example.net") or modulepath == "rsc.io":
            logger.warning("Suspicious module path %s" % modulepath)
            return None
        if modulepath.startswith("http:") or modulepath.startswith("https:"):
            logger.warning("Import path should not start with %s %s" %
                           ("http", "https"))
            return None

        rootpath = None
        vcstype = None
        repourl = None
        suffix = None

        for srv in vcsPaths:
            m = srv[vcs.regexp].match(modulepath)
            if vcs.pathprefix in srv:
                # Fix: also require the regexp to have matched; previously a
                # matching host with a non-matching path crashed on m.group()
                if m and host == srv[vcs.pathprefix]:
                    rootpath = m.group('root')
                    vcstype = srv[vcs.type]
                    repourl = m.expand(srv[vcs.repo])
                    suffix = m.group('suffix')
                    break
            elif m and srv[vcs.schemelessRepo]:
                rootpath = m.group('root')
                # Fix: the original indexed the match with the enum members
                # (m[vcs.type] / m[vcs.repo]), which are not valid group
                # names; the general-server regexp's named groups are meant.
                vcstype = m.group('vcs')
                repourl = m.group('repo')
                suffix = m.group('suffix')
                break

        return GoImport(rootpath, vcstype, repourl, suffix)

    def __resolve_repository_dynamic(self, modulepath):
        """Resolve the repository root in a dynamic manner.

        The method is based on the go implementation of
        `repoRootForImportDynamic` in
        https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
        """
        url = urllib.parse.urlparse("https://" + modulepath)

        class GoImportHTMLParser(HTMLParser):
            """Collects 'go-import' meta tags from the fetched page"""

            def __init__(self):
                super().__init__()
                self.__srv = {}

            def handle_starttag(self, tag, attrs):
                if tag == 'meta' and list(
                        filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
                    content = list(
                        filter(lambda a: (a[0] == 'content'), attrs))
                    if content:
                        # content is "<import-prefix> <vcs> <repo-root>"
                        srv = content[0][1].split()
                        self.__srv[srv[0]] = srv

            def go_import(self, modulepath):
                if modulepath in self.__srv:
                    srv = self.__srv[modulepath]
                    return GoImport(srv[0], srv[1], srv[2], None)
                return None

        url = url.geturl() + "?go-get=1"
        req = urllib.request.Request(url)

        try:
            body = urllib.request.urlopen(req).read()
        except HTTPError as http_err:
            logger.warning(
                "Unclean status when fetching page from [%s]: %s", url, str(http_err))
            body = http_err.fp.read()
        except URLError as url_err:
            logger.warning(
                "Failed to fetch page from [%s]: %s", url, str(url_err))
            return None

        parser = GoImportHTMLParser()
        parser.feed(body.decode('utf-8'))
        parser.close()

        return parser.go_import(modulepath)

    def __resolve_from_golang_proxy(self, modulepath, version):
        """
        Resolves repository data from golang proxy
        """
        url = urllib.parse.urlparse("https://proxy.golang.org/"
                                    + modulepath
                                    + "/@v/"
                                    + version
                                    + ".info")

        # Transform url to lower case, golang proxy doesn't like mixed case
        req = urllib.request.Request(url.geturl().lower())

        try:
            resp = urllib.request.urlopen(req)
        except URLError as url_err:
            logger.warning(
                "Failed to fetch page from [%s]: %s", url, str(url_err))
            return None

        golang_proxy_res = resp.read().decode('utf-8')
        modinfo = json.loads(golang_proxy_res)

        if modinfo and 'Origin' in modinfo:
            origin = modinfo['Origin']
            _root_url = urllib.parse.urlparse(origin['URL'])

            # We normalize the repo URL since we don't want the scheme in it
            _subdir = origin['Subdir'] if 'Subdir' in origin else None
            _root, _, _ = self.__split_path_version(modulepath)
            if _subdir:
                _root = _root[:-len(_subdir)].strip('/')

            _commit = origin['Hash']
            _vcs = origin['VCS']
            return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)

        return None

    def __resolve_repository(self, modulepath):
        """
        Resolves src uri from go module-path
        """
        repodata = self.__resolve_repository_static(modulepath)
        if not repodata or not repodata.url:
            repodata = self.__resolve_repository_dynamic(modulepath)
        if not repodata or not repodata.url:
            logger.error(
                "Could not resolve repository for module path '%s'" % modulepath)
            # There is no way to recover from this
            sys.exit(14)
        if repodata:
            # Fix: dropped the stray '1' first argument; logging.Logger.debug
            # expects the message first (the int broke %-formatting).
            logger.debug("Resolved download path for import '%s' => %s" % (
                modulepath, repodata.url))
        return repodata

    def __split_path_version(self, path):
        """Split a module path into (prefix, '/vN' major suffix, ok flag),
        mirroring golang's module.SplitPathVersion."""
        i = len(path)
        dot = False
        for j in range(i, 0, -1):
            if path[j - 1] < '0' or path[j - 1] > '9':
                break
            if path[j - 1] == '.':
                dot = True
                break
            i = j - 1

        if i <= 1 or i == len(
                path) or path[i - 1] != 'v' or path[i - 2] != '/':
            return path, "", True

        prefix, pathMajor = path[:i - 2], path[i - 2:]
        if dot or len(
                pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
            return path, "", False

        return prefix, pathMajor, True

    def __get_path_major(self, pathMajor):
        """Strip the leading separator (and any '-unstable' marker) from a
        major-version suffix, e.g. '/v2' -> 'v2'."""
        if not pathMajor:
            return ""

        if pathMajor[0] != '/' and pathMajor[0] != '.':
            logger.error(
                "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)

        if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
            # Fix: trim the '-unstable' suffix (cf. TrimSuffix in golang's
            # module.PathMajorPrefix); the original slice
            # [:len("-unstable") - 2] kept a mangled prefix instead.
            pathMajor = pathMajor[:-len("-unstable")]

        return pathMajor[1:]

    def __build_coderepo(self, repo, path):
        """Combine a resolved repository and a module path into a CodeRepo"""
        codedir = ""
        pathprefix, pathMajor, _ = self.__split_path_version(path)
        if repo.root == path:
            pathprefix = path
        elif path.startswith(repo.root):
            # Module lives in a subdirectory of the repository
            codedir = pathprefix[len(repo.root):].strip('/')

        pseudoMajor = self.__get_path_major(pathMajor)

        logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
                     repo.root, codedir, pathprefix, pathMajor, pseudoMajor)

        return CodeRepo(path, repo.root, codedir,
                        pathMajor, pathprefix, pseudoMajor)

    def __resolve_version(self, repo, path, version):
        """Resolve a module version string (semver or pseudo-version) to a
        concrete git commit hash; returns None if it cannot be resolved."""
        hash = None
        coderoot = self.__build_coderepo(repo, path)

        def vcs_fetch_all():
            """Bare-clone the repository and list all commits with their refs"""
            tmpdir = tempfile.mkdtemp()
            clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
            bb.process.run(clone_cmd)
            log_cmd = "git log --all --pretty='%H %d' --decorate=short"
            output, _ = bb.process.run(
                log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
            bb.utils.prunedir(tmpdir)
            return output.strip().split('\n')

        def vcs_fetch_remote(tag):
            """Probe a single tag via 'git ls-remote'; returns None if absent"""
            # add * to grab ^{}
            refs = {}
            ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
                repo.url, tag)
            output, _ = bb.process.run(ls_remote_cmd)
            output = output.strip().split('\n')
            for line in output:
                f = line.split(maxsplit=1)
                if len(f) != 2:
                    continue
                for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
                    if f[1].startswith(prefix):
                        refs[f[1][len(prefix):]] = f[0]
            # Fix: iterate over a snapshot - inserting the dereferenced
            # ('^{}') entries while iterating the dict raised RuntimeError
            for key, sha in list(refs.items()):
                if key.endswith(r"^{}"):
                    refs[key.strip(r"^{}")] = sha
            # Fix: use .get() so a missing tag yields None and the caller's
            # explicit full-history fallback runs (refs[tag] raised KeyError)
            return refs.get(tag)

        m_pseudo_semver = re_pseudo_semver.match(version)

        if m_pseudo_semver:
            remote_refs = vcs_fetch_all()
            short_commit = m_pseudo_semver.group('commithash')
            for l in remote_refs:
                r = l.split(maxsplit=1)
                sha1 = r[0] if len(r) else None
                if not sha1:
                    logger.error(
                        "Ups: could not resolve abbref commit for %s" % short_commit)

                elif sha1.startswith(short_commit):
                    hash = sha1
                    break
        else:
            m_semver = re_semver.match(version)
            if m_semver:

                def get_sha1_remote(re):
                    """Scan the full ref listing for a ref matching 're'"""
                    rsha1 = None
                    for line in remote_refs:
                        # Split lines of the following format:
                        # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
                        lineparts = line.split(maxsplit=1)
                        sha1 = lineparts[0] if len(lineparts) else None
                        refstring = lineparts[1] if len(
                            lineparts) == 2 else None
                        if refstring:
                            # Normalize tag string and split in case of multiple
                            # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
                            refs = refstring.strip('(), ').split(',')
                            for ref in refs:
                                if re.match(ref.strip()):
                                    rsha1 = sha1
                    return rsha1

                semver = "v" + m_semver.group('major') + "."\
                    + m_semver.group('minor') + "."\
                    + m_semver.group('patch') \
                    + (("-" + m_semver.group('prerelease'))
                       if m_semver.group('prerelease') else "")

                tag = os.path.join(
                    coderoot.codeDir, semver) if coderoot.codeDir else semver

                # probe tag using 'ls-remote', which is faster than fetching
                # complete history
                hash = vcs_fetch_remote(tag)
                if not hash:
                    # backup: fetch complete history
                    remote_refs = vcs_fetch_all()
                    hash = get_sha1_remote(
                        re.compile(fr"(tag:|HEAD ->) ({tag})"))

                logger.debug(
                    "Resolving commit for tag '%s' -> '%s'", tag, hash)
        return hash

    def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
        """Generate SRC_URI functions for go imports"""

        logger.info("Resolving repository for module %s", path)
        # First try to resolve repo and commit from golang proxy
        # Most info is already there and we don't have to go through the
        # repository or even perform the version resolve magic
        golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
        if golang_proxy_info:
            repo = golang_proxy_info[0]
            commit = golang_proxy_info[1]
        else:
            # Fallback
            # Resolve repository by 'hand'
            repo = self.__resolve_repository(path)
            commit = self.__resolve_version(repo, path, version)

        url = urllib.parse.urlparse(repo.url)
        repo_url = url.netloc + url.path

        coderoot = self.__build_coderepo(repo, path)

        inline_fcn = "${@go_src_uri("
        inline_fcn += f"'{repo_url}','{version}'"
        if repo_url != path:
            inline_fcn += f",path='{path}'"
        if coderoot.codeDir:
            inline_fcn += f",subdir='{coderoot.codeDir}'"
        if repo.vcs != 'git':
            inline_fcn += f",vcs='{repo.vcs}'"
        if replaces:
            inline_fcn += f",replaces='{replaces}'"
        if coderoot.pathMajor:
            inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
        inline_fcn += ")}"

        return inline_fcn, commit

    def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
        """Generate the $BPN-modules.inc file with SRC_URI entries and
        SRCREVs for every (possibly replaced) module dependency."""
        import re
        src_uris = []
        src_revs = []

        def generate_src_rev(path, version, commithash):
            src_rev = f"# {path}@{version} => {commithash}\n"
            # Ups...maybe someone manipulated the source repository and the
            # version or commit could not be resolved. This is a sign of
            # a) the supply chain was manipulated (bad)
            # b) the implementation for the version resolving didn't work
            #    anymore (less bad)
            if not commithash:
                src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
                src_rev += f"#!!! Could not resolve version !!!\n"
                src_rev += f"#!!! Possible supply chain attack !!!\n"
                src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
            src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""

            return src_rev

        # we first go over replacement list, because we are essentially
        # interested only in the replaced path
        if go_mod['Replace']:
            for replacement in go_mod['Replace']:
                oldpath = replacement['Old']['Path']
                path = replacement['New']['Path']
                version = ''
                if 'Version' in replacement['New']:
                    version = replacement['New']['Version']

                if os.path.exists(os.path.join(srctree, path)):
                    # the module refers to the local path, remove it from requirement list
                    # because it's a local module
                    go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
                else:
                    # Replace the path and the version, so we don't iterate replacement list anymore
                    for require in go_mod['Require']:
                        if require['Path'] == oldpath:
                            require.update({'Path': path, 'Version': version})
                            break

        for require in go_mod['Require']:
            path = require['Path']
            version = require['Version']

            inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
                path, version)
            src_uris.append(inline_fcn)
            src_revs.append(generate_src_rev(path, version, commithash))

        # strip version part from module URL /vXX
        baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
        pn, _ = determine_from_url(baseurl)
        go_mods_basename = "%s-modules.inc" % pn

        go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
        with open(go_mods_filename, "w") as f:
            # We introduce this indirection to make the tests a little easier
            f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
            f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
            for uri in src_uris:
                f.write(" " + uri + " \\\n")
            f.write("\"\n\n")
            for rev in src_revs:
                f.write(rev + "\n")

        extravalues['extrafiles'][go_mods_basename] = go_mods_filename

    def __go_run_cmd(self, cmd, cwd, d):
        """Run a (go) command with the datastore's PATH in the environment"""
        return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
                              shell=True, cwd=cwd)

    def __go_native_version(self, d):
        """Return the (major, minor, patch) version of the native go tool"""
        stdout, _ = self.__go_run_cmd("go version", None, d)
        m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
        major = int(m.group(2))
        minor = int(m.group(3))
        patch = int(m.group(4))

        return major, minor, patch

    def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
        """Upgrade go.mod to the native go version and capture the change as
        a patch file; returns the upgraded module data and the patch name."""
        patchfilename = "go.mod.patch"
        go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
            d)
        self.__go_run_cmd("go mod tidy -go=%d.%d" %
                          (go_native_version_major, go_native_version_minor), srctree, d)
        stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)

        # Create patch in order to upgrade go version
        self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
        # Restore original state
        self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)

        go_mod = json.loads(stdout)
        tmpfile = os.path.join(localfilesdir, patchfilename)
        shutil.move(os.path.join(srctree, patchfilename), tmpfile)

        extravalues['extrafiles'][patchfilename] = tmpfile

        return go_mod, patchfilename

    def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
        """Vendor the module to obtain modules.txt and collect the licenses
        of all vendored dependencies into a $BPN-licenses.inc file."""
        # Perform vendoring to retrieve the correct modules.txt
        tmp_vendor_dir = tempfile.mkdtemp()

        # -v causes to go to print modules.txt to stderr
        _, stderr = self.__go_run_cmd(
            "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)

        modules_txt_basename = "modules.txt"
        modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
        with open(modules_txt_filename, "w") as f:
            f.write(stderr)

        extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename

        licenses = []
        lic_files_chksum = []
        licvalues = guess_license(tmp_vendor_dir, d)
        shutil.rmtree(tmp_vendor_dir)

        if licvalues:
            for licvalue in licvalues:
                license = licvalue[0]
                lics = tidy_licenses(fixup_license(license))
                lics = [lic for lic in lics if lic not in licenses]
                if len(lics):
                    licenses.extend(lics)
                lic_files_chksum.append(
                    'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))

        # strip version part from module URL /vXX
        baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
        pn, _ = determine_from_url(baseurl)
        licenses_basename = "%s-licenses.inc" % pn

        licenses_filename = os.path.join(localfilesdir, licenses_basename)
        with open(licenses_filename, "w") as f:
            f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
                    ' & '.join(sorted(licenses, key=str.casefold)))
            # We introduce this indirection to make the tests a little easier
            f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
            f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
            for lic in lic_files_chksum:
                f.write(" " + lic + " \\\n")
            f.write("\"\n")

        extravalues['extrafiles'][licenses_basename] = licenses_filename

    def process(self, srctree, classes, lines_before,
                lines_after, handled, extravalues):
        """Entry point: detect a go.mod based project and populate the
        recipe with dependency, license and SRC_URI information."""

        if 'buildsystem' in handled:
            return False

        files = RecipeHandler.checkfiles(srctree, ['go.mod'])
        if not files:
            return False

        d = bb.data.createCopy(tinfoil.config_data)
        go_bindir = self.__ensure_go()
        if not go_bindir:
            sys.exit(14)

        d.prependVar('PATH', '%s:' % go_bindir)
        handled.append('buildsystem')
        classes.append("go-vendor")

        stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)

        go_mod = json.loads(stdout)
        go_import = go_mod['Module']['Path']
        go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go'])
        go_version_major = int(go_version_match.group(1))
        go_version_minor = int(go_version_match.group(2))
        src_uris = []

        localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
        extravalues.setdefault('extrafiles', {})

        # Use an explicit name determined from the module name because it
        # might differ from the actual URL for replaced modules
        # strip version part from module URL /vXX
        baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
        pn, _ = determine_from_url(baseurl)

        # go.mod files with version < 1.17 may not include all indirect
        # dependencies. Thus, we have to upgrade the go version.
        if go_version_major == 1 and go_version_minor < 17:
            logger.warning(
                "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
            go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
                                                        extravalues, d)
            # NOTE(review): src_uris is appended to here but never passed to
            # __rewrite_src_uri below, so for vendored projects the patch may
            # not end up in SRC_URI - verify against the intended recipe flow
            src_uris.append(
                "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))

        # Check whether the module is vendored. If so, we have nothing to do.
        # Otherwise we gather all dependencies and add them to the recipe
        if not os.path.exists(os.path.join(srctree, "vendor")):

            # Write additional $BPN-modules.inc file
            self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
            lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
            lines_before.append("require %s-licenses.inc" % (pn))
            self.__rewrite_src_uri(lines_before, ["file://modules.txt"])

            self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
            lines_before.append("require %s-modules.inc" % (pn))

        # Do generic license handling
        handle_license_vars(srctree, lines_before, handled, extravalues, d)
        self.__rewrite_lic_uri(lines_before)

        lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
        lines_before.append("SRCREV_FORMAT = \"${BPN}\"")

    def __update_lines_before(self, updated, newlines, lines_before):
        """Replace lines_before in place with the edited lines (if changed)"""
        if updated:
            del lines_before[:]
            for line in newlines:
                # Hack to avoid newlines that edit_metadata inserts
                if line.endswith('\n'):
                    line = line[:-1]
                lines_before.append(line)
        return updated

    def __rewrite_lic_uri(self, lines_before):
        """Rewrite LIC_FILES_CHKSUM entries to point below src/${GO_IMPORT}"""

        def varfunc(varname, origvalue, op, newlines):
            if varname == 'LIC_FILES_CHKSUM':
                new_licenses = []
                licenses = origvalue.split('\\')
                for license in licenses:
                    if not license:
                        logger.warning("No license file was detected for the main module!")
                        # the license list of the main recipe must be empty
                        # this can happen for example in case of CLOSED license
                        # Fall through to complete recipe generation
                        continue
                    license = license.strip()
                    uri, chksum = license.split(';', 1)
                    url = urllib.parse.urlparse(uri)
                    new_uri = os.path.join(
                        url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
                    new_licenses.append(new_uri)

                return new_licenses, None, -1, True
            return origvalue, None, 0, True

        updated, newlines = bb.utils.edit_metadata(
            lines_before, ['LIC_FILES_CHKSUM'], varfunc)
        return self.__update_lines_before(updated, newlines, lines_before)

    def __rewrite_src_uri(self, lines_before, additional_uris = []):
        """Replace SRC_URI with the git fetch of ${GO_IMPORT} plus extras"""

        def varfunc(varname, origvalue, op, newlines):
            if varname == 'SRC_URI':
                src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
                src_uri.extend(additional_uris)
                return src_uri, None, -1, True
            return origvalue, None, 0, True

        updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
        return self.__update_lines_before(updated, newlines, lines_before)
def register_recipe_handlers(handlers):
    """Register the Go recipe handler with recipetool."""
    go_handler = GoRecipeHandler()
    # Priority 60: takes precedence over the generic build-system handlers
    handlers.append((go_handler, 60))

View File

@@ -0,0 +1,89 @@
# Recipe creation tool - kernel support plugin
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import logging
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
# Module-wide logger shared with the rest of recipetool
logger = logging.getLogger('recipetool')
# tinfoil (bitbake API) instance; populated via tinfoil_init()
tinfoil = None
def tinfoil_init(instance):
    """Store the tinfoil (bitbake API) instance passed in by recipetool."""
    global tinfoil
    tinfoil = instance
class KernelRecipeHandler(RecipeHandler):
    """Generates a kernel recipe from a kernel source tree by filling in the
    meta-skeleton linux-yocto-custom.bb template."""
    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        import bb.process
        if 'buildsystem' in handled:
            # Another handler already claimed the build system
            return False
        # Only treat the tree as a kernel if all of these telltale
        # files/directories are present
        for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']:
            if not os.path.exists(os.path.join(srctree, tell)):
                return False
        handled.append('buildsystem')
        # The template fully determines the recipe content, so discard
        # anything other handlers may have queued up
        del lines_after[:]
        del classes[:]
        template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
        def handle_var(varname, origvalue, op, newlines):
            # Callback for bb.utils.edit_metadata: substitutes concrete
            # values for selected template variables; returning origvalue
            # unchanged leaves the template line as-is.
            if varname in ['SRCREV', 'SRCREV_machine']:
                # Drop the template's explanatory comment lines above the
                # variable, then substitute the current HEAD commit (if any)
                while newlines[-1].startswith('#'):
                    del newlines[-1]
                try:
                    stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True)
                except bb.process.ExecutionError as e:
                    # Not a git tree (or git failed) - keep the template value
                    stdout = None
                if stdout:
                    return stdout.strip(), op, 0, True
            elif varname == 'LINUX_VERSION':
                # Derive the version from the VERSION/PATCHLEVEL/SUBLEVEL/
                # EXTRAVERSION assignments at the top of the kernel Makefile
                makefile = os.path.join(srctree, 'Makefile')
                if os.path.exists(makefile):
                    kversion = -1
                    kpatchlevel = -1
                    ksublevel = -1
                    kextraversion = ''
                    with open(makefile, 'r', errors='surrogateescape') as f:
                        # These assignments appear in the first few lines only
                        for i, line in enumerate(f):
                            if i > 10:
                                break
                            if line.startswith('VERSION ='):
                                kversion = int(line.split('=')[1].strip())
                            elif line.startswith('PATCHLEVEL ='):
                                kpatchlevel = int(line.split('=')[1].strip())
                            elif line.startswith('SUBLEVEL ='):
                                ksublevel = int(line.split('=')[1].strip())
                            elif line.startswith('EXTRAVERSION ='):
                                kextraversion = line.split('=')[1].strip()
                    version = ''
                    if kversion > -1 and kpatchlevel > -1:
                        version = '%d.%d' % (kversion, kpatchlevel)
                        if ksublevel > -1:
                            version += '.%d' % ksublevel
                        version += kextraversion
                    if version:
                        return version, op, 0, True
            elif varname == 'SRC_URI':
                # Strip the template's comment lines above SRC_URI
                while newlines[-1].startswith('#'):
                    del newlines[-1]
            elif varname == 'COMPATIBLE_MACHINE':
                # Strip template comments and pin to the current MACHINE
                while newlines[-1].startswith('#'):
                    del newlines[-1]
                machine = tinfoil.config_data.getVar('MACHINE')
                return machine, op, 0, True
            return origvalue, op, 0, True
        with open(template, 'r') as f:
            varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE']
            (_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var)
        # The edited template becomes the entire recipe content
        lines_before[:] = [line.rstrip('\n') for line in newlines]
        return True
def register_recipe_handlers(handlers):
    """Plugin hook: register this module's recipe handlers.

    The kernel handler is registered at priority 100 so that it is
    consulted ahead of the generic buildsystem handlers.
    """
    entry = (KernelRecipeHandler(), 100)
    handlers.append(entry)

View File

@@ -0,0 +1,142 @@
# Recipe creation tool - kernel module support plugin
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import logging
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
    """Record the shared tinfoil instance at module level.

    Called by recipetool's plugin loader so that handlers in this module
    can query BitBake configuration data later on.
    """
    global tinfoil
    tinfoil = instance
class KernelModuleRecipeHandler(RecipeHandler):
    """Recipe handler that detects out-of-tree Linux kernel module sources."""

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Set up a recipe for an out-of-tree kernel module source tree.

        A tree is treated as a kernel module when some C/header file
        contains '#include <linux/module.h>' and has a Makefile in the
        same or parent directory.  On a match, 'module' is added to
        classes, 'buildsystem' to handled, and the Makefile's install /
        compile targets are inspected to work out how the kernel source
        path must be passed to make (appending tweaks to lines_after).

        Returns True if the source tree was handled, False otherwise.
        """
        import bb.process
        # Fix: os is used below (os.path.join/exists) but was never
        # imported at file level in this module
        import os

        if 'buildsystem' in handled:
            # Another handler already claimed the build system
            return False

        # Fix: escape the '.' so the regex only matches the literal header
        # name rather than e.g. 'moduleXh'
        module_inc_re = re.compile(r'^#include\s+<linux/module\.h>$')
        is_module = False
        makefiles = []  # fix: was initialized twice in the original

        files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples'])
        if files:
            for cfile in files:
                # Look in same dir or parent for Makefile
                for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
                    if makefile in makefiles:
                        # Already seen this Makefile for an earlier file
                        break
                    if os.path.exists(makefile):
                        makefiles.append(makefile)
                        break
                else:
                    # No Makefile next to this file - it can't be built as
                    # a module, so don't bother scanning its contents
                    continue
                with open(cfile, 'r', errors='surrogateescape') as f:
                    for line in f:
                        if module_inc_re.match(line.strip()):
                            is_module = True
                            break
                if is_module:
                    break

        if is_module:
            classes.append('module')
            handled.append('buildsystem')
            # module.bbclass and the classes it inherits do most of the hard
            # work, but we need to tweak it slightly depending on what the
            # Makefile does (and there is a range of those)
            # Check the makefile for the appropriate install target
            install_lines = []
            compile_lines = []
            in_install = False
            in_compile = False
            install_target = None
            # 'makefile' is the last Makefile found above (guaranteed to be
            # set whenever is_module is True)
            with open(makefile, 'r', errors='surrogateescape') as f:
                for line in f:
                    if line.startswith('install:'):
                        # Prefer modules_install over a plain install target
                        if not install_lines:
                            in_install = True
                            install_target = 'install'
                    elif line.startswith('modules_install:'):
                        install_lines = []
                        in_install = True
                        install_target = 'modules_install'
                    elif line.startswith('modules:'):
                        compile_lines = []
                        in_compile = True
                    elif line.startswith(('all:', 'default:')):
                        if not compile_lines:
                            in_compile = True
                    elif line:
                        if line[0] == '\t':
                            # Tab-indented lines are the current target's recipe
                            if in_install:
                                install_lines.append(line)
                            elif in_compile:
                                compile_lines.append(line)
                        elif ':' in line:
                            # A new (unrelated) target ends the recipe we
                            # were collecting
                            in_install = False
                            in_compile = False

            def check_target(lines, install):
                """Scan target recipe lines for the kernel dir make argument.

                Returns (kdirpath, manual_install): kdirpath is the value
                passed via 'make -C' ('' if none found); manual_install is
                True when .ko files are installed with plain install(1).
                """
                kdirpath = ''
                manual_install = False
                for line in lines:
                    splitline = line.split()
                    if not splitline:
                        # Fix: a recipe line that is just a lone tab would
                        # otherwise raise IndexError on splitline[0]
                        continue
                    if splitline[0] in ['make', 'gmake', '$(MAKE)']:
                        if '-C' in splitline:
                            idx = splitline.index('-C') + 1
                            if idx < len(splitline):
                                kdirpath = splitline[idx]
                                break
                    elif install and splitline[0] == 'install':
                        if '.ko' in line:
                            manual_install = True
                return kdirpath, manual_install

            kdirpath = None
            manual_install = False
            if install_lines:
                kdirpath, manual_install = check_target(install_lines, install=True)
            if compile_lines and not kdirpath:
                kdirpath, _ = check_target(compile_lines, install=False)

            if manual_install or not install_lines:
                # No usable install target - drive the kernel build system
                # directly during do_install
                lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
            elif install_target and install_target != 'modules_install':
                lines_after.append('MODULES_INSTALL_TARGET = "install"')

            warnmsg = None
            kdirvar = None
            if kdirpath:
                res = re.match(r'\$\(([^$)]+)\)', kdirpath)
                if res:
                    kdirvar = res.group(1)
                    if kdirvar != 'KERNEL_SRC':
                        # Makefile uses its own variable for the kernel dir;
                        # pass the staged kernel source through it
                        lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
                elif kdirpath.startswith('/lib/'):
                    warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
            if not kdirvar and not warnmsg:
                warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
            if warnmsg:
                warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
                logger.warning(warnmsg)
                lines_after.append('# %s' % warnmsg)
            return True

        return False
def register_recipe_handlers(handlers):
    """Plugin hook: register this module's recipe handlers.

    The kernel module handler runs at priority 15, after the more
    specific handlers have had a chance to claim the source tree.
    """
    entry = (KernelModuleRecipeHandler(), 15)
    handlers.append(entry)

Some files were not shown because too many files have changed in this diff Show More