Complete Yocto mirror with license table for TQMa6UL (2038-compliance)

- 264 license table entries with exact download URLs (224/264 resolved)
- Complete sources/ directory with all BitBake recipes
- Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl)
- Full traceability for Softwarefreigabeantrag
- GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4
- License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
This commit is contained in:
Siggi (OpenClaw Agent)
2026-03-01 20:58:18 +00:00
commit 16accb6b24
15086 changed files with 1292356 additions and 0 deletions

View File

@@ -0,0 +1,124 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script operates on the .dat file generated by bb-matrix.sh. It tolerates
# the header by skipping the first line, but error messages and bad data records
# need to be removed first. It will generate three views of the plot, and leave
# an interactive view open for further analysis.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#
# Setup the defaults
# Input .dat file produced by bb-matrix.sh (override with -d)
DATFILE="bb-matrix.dat"
# Axis labels; underscores are escaped so gnuplot does not treat them as subscripts
XLABEL="BB\\\\_NUMBER\\\\_THREADS"
YLABEL="PARALLEL\\\\_MAKE"
# 1-based column of the .dat file plotted on the Z axis (override with -f)
FIELD=3
DEF_TITLE="Elapsed Time (seconds)"
# gnuplot fragment selecting the surface style; replaced in wireframe (-w) mode
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
# PNG and interactive window size in pixels, "W,H" (override with -s)
SIZE="640,480"
# Print usage/help text to stdout, interpolating the current default values.
function usage {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-d datfile] [-f field] [-h] [-t title] [-w]
-d datfile The data file generated by bb-matrix.sh (default: $DATFILE)
-f field The field index to plot as the Z axis from the data file
(default: $FIELD, "$DEF_TITLE")
-h Display this help message
-s W,H PNG and window size in pixels (default: $SIZE)
-t title The title to display, should describe the field (-f) and units
(default: "$DEF_TITLE")
-w Render the plot as wireframe with a 2D colormap projected on the
XY plane rather than as the texture for the surface
EOM
}
# Parse and validate arguments
while getopts "d:f:hs:t:w" OPT; do
case $OPT in
d)
DATFILE="$OPTARG"
;;
f)
FIELD="$OPTARG"
;;
h)
usage
exit 0
;;
s)
SIZE="$OPTARG"
;;
t)
TITLE="$OPTARG"
;;
w)
# Wireframe mode: draw the colormap at the base of the plot and
# tag the output file names with "-w" via $W
PM3D_FRAGMENT="set pm3d at b"
W="-w"
;;
*)
usage
exit 1
;;
esac
done
# Ensure the data file exists
if [ ! -f "$DATFILE" ]; then
echo "ERROR: $DATFILE does not exist" >&2
usage
exit 1
fi
# Output basename: <datfile stem>-f<field>, with "-w" appended in wireframe mode
PLOT_BASENAME=${DATFILE%.*}-f$FIELD$W
# Set a sane title
# TODO: parse the header and define titles for each format parameter for TIME(1)
if [ -z "$TITLE" ]; then
if [ ! "$FIELD" == "3" ]; then
TITLE="Field $FIELD"
else
TITLE="$DEF_TITLE"
fi
fi
# Determine the dgrid3d mesh dimensions size. Skip the header line, then take
# the min and max of each coordinate column. Leading zeros are stripped so the
# values are not misread as octal by the arithmetic below.
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | tail -n1)
BB_CNT=$(( MAX - MIN + 1 ))
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | tail -n1)
PM_CNT=$(( MAX - MIN + 1 ))
# Emit the gnuplot program and pipe it straight to gnuplot. Three PNGs are
# written (the default 3D view plus the two axis-aligned profile views), then
# a final interactive wxt window is left open via --persist for analysis.
(cat <<EOF
set title "$TITLE"
set xlabel "$XLABEL"
set ylabel "$YLABEL"
set style line 100 lt 5 lw 1.5
$PM3D_FRAGMENT
set dgrid3d $PM_CNT,$BB_CNT splines
set ticslevel 0.2
set term png size $SIZE
set output "$PLOT_BASENAME.png"
splot "$DATFILE" every ::1 using 1:2:$FIELD with lines ls 100
set view 90,0
set output "$PLOT_BASENAME-bb.png"
replot
set view 90,90
set output "$PLOT_BASENAME-pm.png"
replot
set view 60,30
set term wxt size $SIZE
replot
EOF
) | gnuplot --persist

View File

@@ -0,0 +1,66 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script runs BB_CMD (typically building core-image-sato) for all
# combinations of BB_RANGE and PM_RANGE values. It saves off all the console
# logs, the buildstats directories, and creates a bb-pm-runtime.dat file which
# can be used to postprocess the results with a plotting tool, spreadsheet, etc.
# Before running this script, it is recommended that you pre-download all the
# necessary sources by performing the BB_CMD once manually. It is also a good
# idea to disable cron to avoid runtime variations caused by things like the
# locate process. Be sure to sanitize the dat file prior to post-processing as
# it may contain error messages or bad runs that should be removed.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#
# The following ranges are appropriate for a 4 core system with 8 logical units
# Use leading 0s to ensure all digits are the same string length, this results
# in nice log file names and columnar dat files.
BB_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
PM_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
# Per-invocation results directory, made unique with the shell PID ($$)
DATADIR="bb-matrix-$$"
BB_CMD="bitbake core-image-minimal"
RUNTIME_LOG="$DATADIR/bb-matrix.dat"
# See TIME(1) for a description of the time format parameters
# The following all report 0: W K r s t w
TIME_STR="%e %S %U %P %c %w %R %F %M %x"
# Prepare the DATADIR; abort if it cannot be created (test mkdir directly
# instead of the fragile "$?" check)
if ! mkdir "$DATADIR"; then
echo "Failed to create $DATADIR."
exit 1
fi
# Add a simple header
echo "BB PM $TIME_STR" > "$RUNTIME_LOG"
# Run BB_CMD once for every (BB_NUMBER_THREADS, PARALLEL_MAKE) combination
for BB in $BB_RANGE; do
for PM in $PM_RANGE; do
RUNDIR="$DATADIR/$BB-$PM-build"
mkdir "$RUNDIR"
BB_LOG="$RUNDIR/$BB-$PM-bitbake.log"
date
echo "BB=$BB PM=$PM Logging to $BB_LOG"
echo -n " Preparing the work directory... "
# Wipe all build state so each combination starts from a clean tree
rm -rf pseudodone tmp sstate-cache tmp-eglibc &> /dev/null
echo "done"
# Export the variables under test and run the bitbake command
# Strip any leading zeroes before passing to bitbake
export BB_NUMBER_THREADS=$(echo $BB | sed 's/^0*//')
export PARALLEL_MAKE="-j $(echo $PM | sed 's/^0*//')"
# Time the build, appending "BB PM <stats>" to the runtime log.
# $BB_CMD stays unquoted deliberately: it must word-split into
# the command and its arguments.
/usr/bin/time -f "$BB $PM $TIME_STR" -a -o "$RUNTIME_LOG" $BB_CMD &> "$BB_LOG"
echo " $(tail -n1 "$RUNTIME_LOG")"
cp -a tmp/buildstats "$RUNDIR/$BB-$PM-buildstats"
done
done

View File

@@ -0,0 +1,160 @@
#!/usr/bin/env bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
#
# Produces script data to be consumed by gnuplot. There are two possible plots
# depending if either the -S parameter is present or not:
#
# * without -S: Produces a histogram listing top N recipes/tasks versus
# stats. The first stat defined in the -s parameter is the one taken
# into account for ranking
# * -S: Produces a histogram listing tasks versus stats. In this case,
# the value of each stat is the sum for that particular stat in all recipes found.
# Stats values are in descending order defined by the first stat defined on -s
#
# EXAMPLES
#
# 1. Top recipes' tasks taking into account utime
#
# $ buildstats-plot.sh -s utime | gnuplot -p
#
# 2. Tasks versus utime:stime
#
# $ buildstats-plot.sh -s utime:stime -S | gnuplot -p
#
# 3. Tasks versus IO write_bytes:IO read_bytes
#
# $ buildstats-plot.sh -s 'IO write_bytes:IO read_bytes' -S | gnuplot -p
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#
# Abort on unset variables and on any unhandled command failure
set -o nounset
set -o errexit
# Defaults, each overridable from the command line
BS_DIR="tmp/buildstats"
N=10
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="utime"
ACCUMULATE=""
SUM=""
OUTDATA_FILE="$PWD/buildstats-plot.out"
# Print usage/help text to stdout, interpolating the current defaults.
function usage {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
-b buildstats The path where the folder resides
(default: "$BS_DIR")
-n N Top N recipes to display. Ignored if -S is present
(default: "$N")
-r recipe The recipe mask to be searched
-t tasks The tasks to be computed
(default: "$TASKS")
-s stats The stats to be matched. If more that one stat, units
should be the same because data is plot as histogram.
(see buildstats.sh -h for all options) or any other defined
(build)stat separated by colons, i.e. stime:utime
(default: "$STATS")
-a Accumulate all stats values for found recipes
-S Sum values for a particular stat for found recipes
-o Output data file.
(default: "$OUTDATA_FILE")
-h Display this help message
EOM
}
# Parse and validate arguments
while getopts "b:n:r:t:s:o:aSh" OPT; do
case $OPT in
b)
BS_DIR="$OPTARG"
;;
n)
N="$OPTARG"
;;
r)
# Stored with its flag so it can be passed straight through to buildstats.sh
RECIPE="-r $OPTARG"
;;
t)
TASKS="$OPTARG"
;;
s)
STATS="$OPTARG"
;;
a)
# Passed through to buildstats.sh verbatim
ACCUMULATE="-a"
;;
S)
SUM="y"
;;
o)
OUTDATA_FILE="$OPTARG"
;;
h)
usage
exit 0
;;
*)
usage
exit 1
;;
esac
done
# Get number of stats (split STATS on ':' into an array)
IFS=':'; statsarray=(${STATS}); unset IFS
nstats=${#statsarray[@]}
# Get script folder, use to run buildstats.sh
CD=$(dirname $0)
# Parse buildstats recipes to produce a single table
OUTBUILDSTATS="$PWD/buildstats.log"
$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS
# Get headers: quote each line for gnuplot and escape spaces/underscores
HEADERS=$(sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp' $OUTBUILDSTATS)
echo -e "set boxwidth 0.9 relative"
echo -e "set style data histograms"
echo -e "set style fill solid 1.0 border lt -1"
echo -e "set xtics rotate by 45 right"
# Get output data
if [ -z "$SUM" ]; then
# Rank rows by the first stat (column 3) and keep the top N
sed -e '1d' -e 's/_/\\\\_/g' $OUTBUILDSTATS | sort -k3 -n -r | head -n $N > $OUTDATA_FILE
# include task at recipe column
sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE
echo -e "set title \"Top task/recipes\""
echo -e "plot for [COL=3:$(( 3 + nstats - 1 ))] '${OUTDATA_FILE}' using COL:xtic(stringcolumn(1).' '.stringcolumn(2)) title columnheader(COL)"
else
# Construct datamash sum argument (sum 3 sum 4 ...)
declare -a sumargs
j=0
for i in $(seq $nstats); do
sumargs[j]=sum; j=$(( j + 1 ))
sumargs[j]=$(( 3 + i - 1 )); j=$(( j + 1 ))
done
# Do the processing with datamash
sed -e '1d' $OUTBUILDSTATS | datamash -t ' ' -g1 ${sumargs[*]} | sort -k2 -n -r > $OUTDATA_FILE
# Include headers into resulted file, so we can include gnuplot xtics
HEADERS=$(echo $HEADERS | sed -e 's/recipe//1')
sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE
# Plot
echo -e "set title \"Sum stats values per task for all recipes\""
echo -e "plot for [COL=2:$(( 2 + nstats - 1 ))] '${OUTDATA_FILE}' using COL:xtic(1) title columnheader(COL)"
fi

View File

@@ -0,0 +1,167 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# Given 'buildstats' data (generate by bitbake when setting
# USER_CLASSES ?= "buildstats" on local.conf), task names and a stats values
# (these are the ones preset on the buildstats files), outputs
# '<task> <recipe> <value_1> <value_2> ... <value_n>'. The units are the ones
# defined at buildstats, which in turn takes data from /proc/[pid] files
#
# Some useful pipelines
#
# 1. Tasks with largest stime (Amount of time that this process has been scheduled
# in kernel mode) values
# $ buildstats.sh -b <buildstats> -s stime | sort -k3 -n -r | head
#
# 2. Min, max, sum utime (Amount of time that this process has been scheduled
# in user mode) per task (in needs GNU datamash)
# $ buildstats.sh -b <buildstats> -s utime | datamash -t' ' -g1 min 3 max 3 sum 3 | sort -k4 -n -r
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#
# Stats, by type. Each set is a colon-separated list of stat names exactly as
# they appear in the buildstats files.
TIME="utime:stime:cutime:cstime"
IO="IO wchar:IO write_bytes:IO syscr:IO read_bytes:IO rchar:IO syscw:IO cancelled_write_bytes"
RUSAGE="rusage ru_utime:rusage ru_stime:rusage ru_maxrss:rusage ru_minflt:rusage ru_majflt:\
rusage ru_inblock:rusage ru_oublock:rusage ru_nvcsw:rusage ru_nivcsw"
CHILD_RUSAGE="Child rusage ru_utime:Child rusage ru_stime:Child rusage ru_maxrss:Child rusage ru_minflt:\
Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rusage ru_nvcsw:\
Child rusage ru_nivcsw"
# Defaults, each overridable from the command line
BS_DIR="tmp/buildstats"
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="$TIME"
ACCUMULATE=""
HEADER="" # No header by default
# Print usage/help text to stdout, interpolating the current defaults.
function usage {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
-b buildstats The path where the folder resides
(default: "$BS_DIR")
-r recipe The recipe to be computed
-t tasks The tasks to be computed
(default: "$TASKS")
-s stats The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
or any other defined buildstat separated by colons, i.e. stime:utime
(default: "$STATS")
Default stat sets:
TIME=$TIME
IO=$IO
RUSAGE=$RUSAGE
CHILD_RUSAGE=$CHILD_RUSAGE
-a Accumulate all stats values for found recipes
-h Display this help message
EOM
}
# Parse and validate arguments
while getopts "b:r:t:s:aHh" OPT; do
case $OPT in
b)
BS_DIR="$OPTARG"
;;
r)
RECIPE="$OPTARG"
;;
t)
TASKS="$OPTARG"
;;
s)
STATS="$OPTARG"
;;
a)
ACCUMULATE="y"
;;
H)
# Emit a column-header line before the data rows
HEADER="y"
;;
h)
usage
exit 0
;;
*)
usage
exit 1
;;
esac
done
# Ensure the buildstats folder exists
if [ ! -d "$BS_DIR" ]; then
echo "ERROR: $BS_DIR does not exist"
usage
exit 1
fi
stats=""
IFS=":"
for stat in ${STATS}; do
case $stat in
TIME)
stats="${stats}:${TIME}"
;;
IO)
stats="${stats}:${IO}"
;;
RUSAGE)
stats="${stats}:${RUSAGE}"
;;
CHILD_RUSAGE)
stats="${stats}:${CHILD_RUSAGE}"
;;
*)
stats="${STATS}"
;;
esac
done
# remove possible colon at the beginning
stats="$(echo "$stats" | sed -e 's/^://1')"
# Provide a header if required by the user
if [ -n "$HEADER" ] ; then
if [ -n "$ACCUMULATE" ]; then
echo "task:recipe:accumulated(${stats//:/;})"
else
echo "task:recipe:$stats"
fi
fi
# Iterate the requested tasks (IFS is still ':' here, so TASKS colon-splits)
for task in ${TASKS}; do
task="do_${task}"
# Find every buildstats file for this task. The -path pattern is quoted so
# the shell cannot glob-expand it before find sees it. awk joins the paths
# with ':' so the colon-IFS for-loop splits them back out correctly even
# when paths contain spaces.
for file in $(find ${BS_DIR} -type f -path "*${RECIPE}*/${task}" | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
# The recipe name is the parent directory of the task file
recipe="$(basename $(dirname $file))"
times=""
for stat in ${stats}; do
[ -z "$stat" ] && { echo "empty stats"; }
# Extract this stat's value from the buildstats file
time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
# in case the stat is not present, set the value as NA
[ -z "$time" ] && { time="NA"; }
# Append it to times
if [ -z "$times" ]; then
times="${time}"
else
times="${times} ${time}"
fi
done
if [ -n "$ACCUMULATE" ]; then
# Temporarily split on spaces to sum the per-stat values
IFS=' '; valuesarray=(${times}); IFS=':'
times=0
for value in "${valuesarray[@]}"; do
[ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
times=$(( $times + $value ))
done
fi
echo "${task} ${recipe} ${times}"
done
done

View File

@@ -0,0 +1,168 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010
import sys
import getopt
import os
import os.path
import re
# Set up sys.path to let us import tinfoil
scripts_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
lib_path = scripts_path + '/lib'
sys.path.insert(0, lib_path)
import scriptpath
scriptpath.add_bitbake_lib_path()
import bb.tinfoil
def usage():
    """Print command-line usage information for this script to stdout."""
    prog = os.path.basename(sys.argv[0])
    for text in ('Usage: %s -d FILENAME [-d FILENAME]*' % prog,
                 ' -d FILENAME documentation file to search',
                 ' -h, --help display this help and exit',
                 ' -t FILENAME documentation config file (for doc tags)',
                 ' -T Only display variables with doc tags (requires -t)'):
        print(text)
def bbvar_is_documented(var, documented_vars):
    '''Return True if variable name `var` is in the list `documented_vars`.'''
    # The membership test already yields the boolean; no if/else needed.
    return var in documented_vars
def collect_documented_vars(docfiles):
    '''Walk the docfiles and collect the documented variables.

    Returns a list of the variable names found in <glossentry id='var-NAME'>
    tags across all files in docfiles (duplicates are preserved).
    '''
    documented_vars = []
    # Removed the unused `prog` pattern the original compiled but never used.
    var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
    for d in docfiles:
        with open(d) as f:
            documented_vars += var_prog.findall(f.read())
    return documented_vars
def bbvar_doctag(var, docconf):
    '''Look up the doc-tag text for `var` in the docconf file.

    Returns "?" when no docconf was given, the tag text of the first line
    matching VAR[doc] = "...", an error-message string when the file cannot
    be opened, or "" when the variable has no doc tag.
    '''
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
    if docconf == "":
        return "?"
    try:
        # Context manager guarantees the file is closed even when we return
        # from inside the loop (the original leaked the handle in that case).
        with open(docconf) as f:
            for line in f:
                m = prog.search(line)
                if m:
                    return m.group(1)
    except IOError as err:
        # Report the strerror text (e.g. "No such file or directory")
        return err.args[1]
    return ""
def main():
    '''Collect all bitbake variable names via tinfoil and report the ones
    not found in the given documentation files.'''
    docfiles = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False
    # Collect and validate input
    try:
        # NOTE(review): the optstring accepts "-m <arg>" but no branch below
        # handles it -- confirm whether -m is vestigial.
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)
    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"
    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)
    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)
    # Variable names contain no lowercase letters; this filters out
    # function names and similar noise
    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data
        def bbvars_update(data):
            # Record the variable itself plus every variable it references
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                # Python function: collect referenced variables via the parser
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                # Regular variable: expand it and collect the references
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass
        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)
        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files(variants=False):
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)
    documented_vars = collect_documented_vars(docfiles)
    # Check each var for documentation
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1
    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))

if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,247 @@
#!/bin/bash
#
# Build performance test script wrapper
#
# Copyright (c) 2016, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script is a simple wrapper around the actual build performance tester
# script. This script initializes the build environment, runs
# oe-build-perf-test and archives the results.
# Wrapper identity and default archive location
script=`basename $0`
script_dir=$(realpath $(dirname $0))
archive_dir=~/perf-results/archives
# Print usage/help text to stdout
usage () {
cat << EOF
Usage: $script [-h] [-c COMMITISH] [-C GIT_REPO]
Optional arguments:
-h show this help and exit.
-a ARCHIVE_DIR archive results tarball here, give an empty string to
disable tarball archiving (default: $archive_dir)
-c COMMITISH test (checkout) this commit, <branch>:<commit> can be
specified to test specific commit of certain branch
-C GIT_REPO commit results into Git
-d DOWNLOAD_DIR directory to store downloaded sources in
-E EMAIL_ADDR send email report
-g GLOBALRES_DIR where to place the globalres file
-P GIT_REMOTE push results to a remote Git repository
-R DEST rsync reports to a remote destination
-w WORK_DIR work dir for this script
(default: GIT_TOP_DIR/build-perf-test)
-x create xml report (instead of json)
EOF
}
# Read a single variable (e.g. PRETTY_NAME) from /etc/os-release; the
# subshell keeps the sourced file from polluting this script's environment
get_os_release_var () {
( source /etc/os-release; eval echo '$'$1 )
}
# Parse command line arguments
commitish=""
# Extra options forwarded verbatim to oe-build-perf-test / oe-git-archive
oe_build_perf_test_extra_opts=()
oe_git_archive_extra_opts=()
while getopts "ha:c:C:d:E:g:P:R:w:x" opt; do
case $opt in
h) usage
exit 0
;;
a) mkdir -p "$OPTARG"
archive_dir=`realpath -s "$OPTARG"`
;;
c) commitish=$OPTARG
;;
C) mkdir -p "$OPTARG"
results_repo=`realpath -s "$OPTARG"`
;;
d) download_dir=`realpath -s "$OPTARG"`
;;
E) email_to="$OPTARG"
;;
g) mkdir -p "$OPTARG"
globalres_dir=`realpath -s "$OPTARG"`
;;
P) oe_git_archive_extra_opts+=("--push" "$OPTARG")
;;
R) rsync_dst="$OPTARG"
;;
w) base_dir=`realpath -s "$OPTARG"`
;;
x) oe_build_perf_test_extra_opts+=("--xml")
;;
*) usage
exit 1
;;
esac
done
# Check positional args
shift "$((OPTIND - 1))"
if [ $# -ne 0 ]; then
echo "ERROR: No positional args are accepted."
usage
exit 1
fi
# Open a file descriptor for flock and acquire lock
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
echo "ERROR: Unable to open lock file"
exit 1
fi
# Non-blocking: bail out instead of queueing behind a concurrent run
if ! flock -n 3; then
echo "ERROR: Another instance of this script is running"
exit 1
fi
echo "Running on $(uname -n)"
# All subsequent paths are relative to the top of the git clone
if ! git_topdir=$(git rev-parse --show-toplevel); then
echo "The current working dir doesn't seem to be a git clone. Please cd there before running $(basename $0)"
exit 1
fi
cd "$git_topdir"
if [ -n "$commitish" ]; then
echo "Running git fetch"
git fetch &> /dev/null
git checkout HEAD^0 &> /dev/null
# Handle <branch>:<commit> format
if echo "$commitish" | grep -q ":"; then
commit=`echo "$commitish" | cut -d":" -f2`
branch=`echo "$commitish" | cut -d":" -f1`
else
commit="$commitish"
branch="$commitish"
fi
echo "Checking out $commitish"
git branch -D $branch &> /dev/null
if ! git checkout -f $branch &> /dev/null; then
echo "ERROR: Git checkout failed"
exit 1
fi
# Check that the specified branch really contains the commit
commit_hash=`git rev-parse --revs-only $commit --`
if [ -z "$commit_hash" -o "`git merge-base $branch $commit`" != "$commit_hash" ]; then
echo "ERROR: branch $branch does not contain commit $commit"
exit 1
fi
git reset --hard $commit > /dev/null
fi
# Determine name of the current branch
branch=`git symbolic-ref HEAD 2> /dev/null`
# Strip refs/heads/
branch=${branch:11}
# Setup build environment
if [ -z "$base_dir" ]; then
base_dir="$git_topdir/build-perf-test"
fi
echo "Using working dir $base_dir"
if [ -z "$download_dir" ]; then
download_dir="$base_dir/downloads"
fi
if [ -z "$globalres_dir" ]; then
globalres_dir="$base_dir"
fi
timestamp=`date "+%Y%m%d%H%M%S"`
git_rev=$(git rev-parse --short HEAD) || exit 1
build_dir="$base_dir/build-$git_rev-$timestamp"
results_dir="$base_dir/results-$git_rev-$timestamp"
globalres_log="$globalres_dir/globalres.log"
machine="qemux86"
mkdir -p "$base_dir"
source ./oe-init-build-env $build_dir >/dev/null || exit 1
# Additional config
auto_conf="$build_dir/conf/auto.conf"
echo "MACHINE = \"$machine\"" > "$auto_conf"
echo 'BB_NUMBER_THREADS = "8"' >> "$auto_conf"
echo 'PARALLEL_MAKE = "-j 8"' >> "$auto_conf"
echo "DL_DIR = \"$download_dir\"" >> "$auto_conf"
# Disabling network sanity check slightly reduces the variance of timing results
echo 'CONNECTIVITY_CHECK_URIS = ""' >> "$auto_conf"
# Possibility to define extra settings
if [ -f "$base_dir/auto.conf.extra" ]; then
cat "$base_dir/auto.conf.extra" >> "$auto_conf"
fi
# Run actual test script
oe-build-perf-test --out-dir "$results_dir" \
--globalres-file "$globalres_log" \
"${oe_build_perf_test_extra_opts[@]}" \
--lock-file "$base_dir/oe-build-perf.lock"
# Exit code 1 is fatal; 2 means individual tests failed but results exist
case $? in
1) echo "ERROR: oe-build-perf-test script failed!"
exit 1
;;
2) echo "NOTE: some tests failed!"
;;
esac
# Commit results to git
if [ -n "$results_repo" ]; then
echo -e "\nArchiving results in $results_repo"
oe-git-archive \
--git-dir "$results_repo" \
--branch-name "{hostname}/{branch}/{machine}" \
--tag-name "{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}" \
--exclude "buildstats.json" \
--notes "buildstats/{branch_name}" "$results_dir/buildstats.json" \
"${oe_git_archive_extra_opts[@]}" \
"$results_dir"
# Generate test reports
sanitized_branch=`echo $branch | tr / _`
report_txt=`hostname`_${sanitized_branch}_${machine}.txt
report_html=`hostname`_${sanitized_branch}_${machine}.html
echo -e "\nGenerating test report"
oe-build-perf-report -r "$results_repo" > $report_txt
oe-build-perf-report -r "$results_repo" --html > $report_html
# Send email report
if [ -n "$email_to" ]; then
echo "Emailing test report"
os_name=`get_os_release_var PRETTY_NAME`
"$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
fi
# Upload report files, unless we're on detached head
if [ -n "$rsync_dst" -a -n "$branch" ]; then
echo "Uploading test report"
rsync $report_txt $report_html $rsync_dst
fi
fi
echo -ne "\n\n-----------------\n"
echo "Global results file:"
echo -ne "\n"
cat "$globalres_log"
if [ -n "$archive_dir" ]; then
echo -ne "\n\n-----------------\n"
echo "Archiving results in $archive_dir"
mkdir -p "$archive_dir"
results_basename=`basename "$results_dir"`
results_dirname=`dirname "$results_dir"`
tar -czf "$archive_dir/`uname -n`-${results_basename}.tar.gz" -C "$results_dirname" "$results_basename"
fi
# Build and results trees are disposable once archived
rm -rf "$build_dir"
rm -rf "$results_dir"
echo "DONE"

View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python3
#
# Conversion script to add new override syntax to existing bitbake metadata
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
#
# To use this script on a new layer you need to list the overrides the
# layer is known to use in the list below.
#
# Known constraint: Matching is 'loose' and in particular will find variable
# and function names with "_append" and "_remove" in them. Those need to be
# filtered out manually or in the skip list below.
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
import argparse
# Command-line interface: the --override/--skip/... options extend the
# built-in tables defined below.
parser = argparse.ArgumentParser(description="Convert override syntax")
parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
parser.add_argument("path", nargs="+", help="Paths to convert")
args = parser.parse_args()
# List of strings to treat as overrides
vars = args.override
vars += ["append", "prepend", "remove"]
vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
vars += ["tune-", "pn-", "forcevariable"]
vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
vars += ["linux-gnueabi", "eabi"]
vars += ["virtclass-multilib", "virtclass-mcextend"]
# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
# Expressions to skip if encountered, these are not overrides
# NOTE(review): a few entries are duplicated below ("test_remove",
# "test_rpm_remove", "test_bitbakelayers_add_remove") -- harmless, since the
# list is only used for substring-membership tests.
skips = args.skip
skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
packagevars += imagevars
skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
# Precompile one (pattern, replacement) pair per override string
vars_re = {}
for exp in vars:
    vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
shortvars_re = {}
for exp in shortvars:
    shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
package_re = {}
for exp in packagevars:
    package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
# Other substitutions to make
subs = {
    'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
}
def processfile(fn):
    '''Rewrite one file in place, converting old override syntax (VAR_override)
    to the new colon syntax (VAR:override) using the module-level tables
    (skips, subs, packagevars/package_re, vars/vars_re, shortvars/shortvars_re).
    '''
    print("processing file '%s'" % fn)
    try:
        # Write the converted content to a temp file, then swap it into place
        fh, abs_path = tempfile.mkstemp()
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    skip = False
                    for s in skips:
                        if s in line:
                            skip = True
                    # ptest_* really are overrides despite matching skip strings
                    if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
                        skip = False
                    # Whole-line literal substitutions; a match also suppresses
                    # the regex passes below for this line
                    for sub in subs:
                        if sub in line:
                            line = line.replace(sub, subs[sub])
                            skip = True
                    if not skip:
                        for pvar in packagevars:
                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
                        for var in vars:
                            line = vars_re[var][0].sub(vars_re[var][1], line)
                        for shortvar in shortvars:
                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
                        # pkg_postinst_ontarget is a real function name, not an
                        # override -- undo the conversion the passes above made
                        if "pkg_postinst:ontarget" in line:
                            line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
                    new_file.write(line)
        shutil.copymode(fn, abs_path)
        os.remove(fn)
        shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        # Binary file: deliberately leave it untouched
        pass
# Never convert this script itself while walking a tree
ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"
for p in args.path:
    if os.path.isfile(p):
        processfile(p)
    else:
        print("processing directory '%s'" % p)
        for root, dirs, files in os.walk(p):
            for name in files:
                if name == ourname:
                    continue
                fn = os.path.join(root, name)
                # Skip symlinks, git metadata and excluded file extensions
                if os.path.islink(fn):
                    continue
                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
                    continue
                processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,145 @@
#!/usr/bin/env python3
#
# Conversion script to change LICENSE entries to SPDX identifiers
#
# Copyright (C) 2021-2022 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# Require at least one file or directory argument to operate on.
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
# Map of legacy/ambiguous OE license names to canonical SPDX identifiers.
# Bare "GPL-N"/"GPLvN" style names are mapped to the "-only" variant and a
# trailing '+' to "-or-later".  processfile() substitutes longest names
# first, so entries here may safely be prefixes of one another.
license_map = {
    "AGPL-3" : "AGPL-3.0-only",
    "AGPL-3+" : "AGPL-3.0-or-later",
    "AGPLv3" : "AGPL-3.0-only",
    "AGPLv3+" : "AGPL-3.0-or-later",
    "AGPLv3.0" : "AGPL-3.0-only",
    "AGPLv3.0+" : "AGPL-3.0-or-later",
    "AGPL-3.0" : "AGPL-3.0-only",
    "AGPL-3.0+" : "AGPL-3.0-or-later",
    "BSD-0-Clause" : "0BSD",
    "GPL-1" : "GPL-1.0-only",
    "GPL-1+" : "GPL-1.0-or-later",
    "GPLv1" : "GPL-1.0-only",
    "GPLv1+" : "GPL-1.0-or-later",
    "GPLv1.0" : "GPL-1.0-only",
    "GPLv1.0+" : "GPL-1.0-or-later",
    "GPL-1.0" : "GPL-1.0-only",
    "GPL-1.0+" : "GPL-1.0-or-later",
    "GPL-2" : "GPL-2.0-only",
    "GPL-2+" : "GPL-2.0-or-later",
    "GPLv2" : "GPL-2.0-only",
    "GPLv2+" : "GPL-2.0-or-later",
    "GPLv2.0" : "GPL-2.0-only",
    "GPLv2.0+" : "GPL-2.0-or-later",
    "GPL-2.0" : "GPL-2.0-only",
    "GPL-2.0+" : "GPL-2.0-or-later",
    "GPL-3" : "GPL-3.0-only",
    "GPL-3+" : "GPL-3.0-or-later",
    "GPLv3" : "GPL-3.0-only",
    "GPLv3+" : "GPL-3.0-or-later",
    "GPLv3.0" : "GPL-3.0-only",
    "GPLv3.0+" : "GPL-3.0-or-later",
    "GPL-3.0" : "GPL-3.0-only",
    "GPL-3.0+" : "GPL-3.0-or-later",
    "LGPLv2" : "LGPL-2.0-only",
    "LGPLv2+" : "LGPL-2.0-or-later",
    "LGPLv2.0" : "LGPL-2.0-only",
    "LGPLv2.0+" : "LGPL-2.0-or-later",
    "LGPL-2.0" : "LGPL-2.0-only",
    "LGPL-2.0+" : "LGPL-2.0-or-later",
    "LGPL2.1" : "LGPL-2.1-only",
    "LGPL2.1+" : "LGPL-2.1-or-later",
    "LGPLv2.1" : "LGPL-2.1-only",
    "LGPLv2.1+" : "LGPL-2.1-or-later",
    "LGPL-2.1" : "LGPL-2.1-only",
    "LGPL-2.1+" : "LGPL-2.1-or-later",
    "LGPLv3" : "LGPL-3.0-only",
    "LGPLv3+" : "LGPL-3.0-or-later",
    "LGPL-3.0" : "LGPL-3.0-only",
    "LGPL-3.0+" : "LGPL-3.0-or-later",
    "MPL-1" : "MPL-1.0",
    "MPLv1" : "MPL-1.0",
    "MPLv1.1" : "MPL-1.1",
    "MPLv2" : "MPL-2.0",
    "MIT-X" : "MIT",
    "MIT-style" : "MIT",
    "openssl" : "OpenSSL",
    "PSF" : "PSF-2.0",
    "PSFv2" : "PSF-2.0",
    "Python-2" : "Python-2.0",
    "Apachev2" : "Apache-2.0",
    "Apache-2" : "Apache-2.0",
    "Artisticv1" : "Artistic-1.0",
    "Artistic-1" : "Artistic-1.0",
    "AFL-2" : "AFL-2.0",
    "AFL-1" : "AFL-1.2",
    "AFLv2" : "AFL-2.0",
    "AFLv1" : "AFL-1.2",
    "CDDLv1" : "CDDL-1.0",
    "CDDL-1" : "CDDL-1.0",
    "EPLv1.0" : "EPL-1.0",
    "FreeType" : "FTL",
    # "Nauman" appears to be a historical typo for "Naumen" -
    # NOTE(review): confirm this mapping is intentional.
    "Nauman" : "Naumen",
    "tcl" : "TCL",
    "vim" : "Vim",
    "SGIv1" : "SGI-1",
}
def processfile(fn):
    """Rewrite LICENSE lines in 'fn', replacing legacy license names with
    SPDX identifiers per license_map.

    Only lines starting with "LICENSE" are touched.  Longer names are
    substituted first so e.g. "GPLv3+" is not partially matched by "GPLv3",
    and a name is only replaced when followed by a quote, space or ')',
    keeping substrings of longer identifiers intact.  Binary files are
    skipped via the UnicodeDecodeError handler.
    """
    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if not line.startswith("LICENSE"):
                        new_file.write(line)
                        continue
                    orig = line
                    for license in sorted(license_map, key=len, reverse=True):
                        for ending in ['"', "'", " ", ")"]:
                            line = line.replace(license + ending, license_map[license] + ending)
                    if orig != line:
                        modified = True
                    new_file.write(line)
        # (The redundant new_file.close() after the 'with' block was removed;
        # the context manager already closed the file.)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Nothing changed: discard the temporary copy instead of leaking
            # one mkstemp file per unmodified input.
            os.remove(abs_path)
    except UnicodeDecodeError:
        # Binary file - leave it untouched and clean up the temp copy.
        os.remove(abs_path)
# Identity of this script: skip converting ourselves when walking trees.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.01"

# Single-file mode: if the first argument is a file, convert only it.
# (Any further arguments are ignored in this mode.)
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

# Directory mode: walk each argument and convert every eligible file.
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            # Skip symlinks; their targets are processed when walked.
            if os.path.islink(fn):
                continue
            # Skip git metadata and file types that must not be rewritten.
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
#
# Conversion script to update SRC_URI to add branch to git urls
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# Require at least one file or directory argument to operate on.
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
def processfile(fn):
    """Add ';branch=master' to git/gitsm SRC_URI entries lacking an explicit
    branch, and force ';protocol=https' for github.com URLs.

    Lines mentioning MIRROR, GNOME_GIT or containing '.*' are left alone, as
    are distro_alias.inc and linux-yocto-custom.bb.  Binary files are skipped
    via the UnicodeDecodeError handler.
    """
    def matchline(line):
        # Mirror/regex style entries must not gain branch/protocol params.
        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
            return False
        return True
    print("processing file '%s'" % fn)
    try:
        if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
            return
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
                        if line.endswith('"\n'):
                            line = line.replace('"\n', ';branch=master"\n')
                        elif re.search(r'\s*\\$', line):
                            # Continuation line: insert before the trailing backslash.
                            # (Raw strings: the previous '\s*\\\\$' literals raised
                            # invalid-escape warnings on newer Python.)
                            line = re.sub(r'\s*\\$', r';branch=master \\', line)
                        modified = True
                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
                        if "protocol=git" in line:
                            line = line.replace('protocol=git', 'protocol=https')
                        elif line.endswith('"\n'):
                            line = line.replace('"\n', ';protocol=https"\n')
                        elif re.search(r'\s*\\$', line):
                            line = re.sub(r'\s*\\$', r';protocol=https \\', line)
                        modified = True
                    new_file.write(line)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Avoid leaking the mkstemp file when nothing changed.
            os.remove(abs_path)
    except UnicodeDecodeError:
        os.remove(abs_path)
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"
if os.path.isfile(sys.argv[1]):
processfile(sys.argv[1])
sys.exit(0)
for targetdir in sys.argv[1:]:
print("processing directory '%s'" % targetdir)
for root, dirs, files in os.walk(targetdir):
for name in files:
if name == ourname:
continue
fn = os.path.join(root, name)
if os.path.islink(fn):
continue
if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
continue
processfile(fn)
print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
#
# Conversion script to rename variables to versions with improved terminology.
# Also highlights potentially problematic language and removed variables.
#
# Copyright (C) 2021 Richard Purdie
# Copyright (C) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import os
import sys
import tempfile
import shutil
import mimetypes
# Require at least one file or directory argument to operate on.
if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)
# Deprecated variable name -> inclusive-terminology replacement.
# Note that several ICECC_* variables intentionally map onto a single new
# name (e.g. both *_CLASS_BL entries become ICECC_CLASS_DISABLE).
renames = {
    "BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
    "BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
    "BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
    "BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
    "BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
    "BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
    "CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
    "CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
    "MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
    "PNBLACKLIST" : "SKIP_RECIPE",
    "SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
    "SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
    "SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
    "SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
    "SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
    "UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
    "ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
    "ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
    "ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
    "ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
    "ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
    "LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
}
# Variables that were removed outright; occurrences are reported so they can
# be fixed up manually (there is no mechanical replacement).
removed_list = [
    "BB_STAMP_WHITELIST",
    "BB_STAMP_POLICY",
    "INHERIT_BLACKLIST",
    "TUNEABI_WHITELIST",
]

# Words that may indicate remaining non-inclusive or deprecated terminology;
# matches are reported (case-insensitively) but never changed automatically.
context_check_list = [
    "blacklist",
    "whitelist",
    "abort",
]
def processfile(fn):
    """Rename deprecated variables in 'fn' per the 'renames' table and warn
    about removed variables and flagged terminology.

    Lines containing BB_RENAMED_VARIABLE (i.e. rename declarations
    themselves) are copied through unmodified.  Binary files are skipped via
    the UnicodeDecodeError handler.
    """
    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                lineno = 0
                for line in old_file:
                    lineno += 1
                    if not line or "BB_RENAMED_VARIABLE" in line:
                        # Copy through untouched.  Previously these lines were
                        # skipped WITHOUT being written, silently deleting them
                        # from any file that was otherwise modified.
                        new_file.write(line)
                        continue
                    # Do the renames
                    for old_name, new_name in renames.items():
                        if old_name in line:
                            line = line.replace(old_name, new_name)
                            modified = True
                    # Find removed names
                    for removed_name in removed_list:
                        if removed_name in line:
                            print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
                    for check_word in context_check_list:
                        if re.search(check_word, line, re.IGNORECASE):
                            print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
                    new_file.write(line)
        # (Redundant new_file.close() after the 'with' block removed.)
        if modified:
            print("*** Modified file '%s'" % (fn))
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
        else:
            # Discard the temp copy when nothing changed, avoiding one leaked
            # mkstemp file per unmodified input.
            os.remove(abs_path)
    except UnicodeDecodeError:
        os.remove(abs_path)
# Identity of this script: skip converting ourselves when walking trees.
ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"

# Single-file mode: if the first argument is a file, convert only it.
# (Any further arguments are ignored in this mode.)
if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

# Directory mode: walk each argument and convert every eligible file.
for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            # Skip symlinks; their targets are processed when walked.
            if os.path.islink(fn):
                continue
            # Skip ChangeLogs, git metadata and file types that must not be
            # rewritten.
            if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)

View File

@@ -0,0 +1,172 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Images are written to the device in 1 MB chunks.
BLOCKSIZE=1048576

# Print a one-line usage summary for this script.
usage() {
    printf 'Usage: %s IMAGE DEVICE\n' "$(basename "$0")"
}
# Print name, size, mtime and type of the image file given as $1.
# Symlinks are followed (-L) for both stat and file.
# Fix: all expansions of $IMG are now quoted, so paths containing spaces
# no longer break the stat/file invocations.
image_details() {
    IMG=$1
    echo "Image details"
    echo "============="
    echo " image: $(basename "$IMG")"
    # stat format is different on Mac OS and Linux
    if [ "$(uname)" = "Darwin" ]; then
        echo " size: $(stat -L -f '%z bytes' "$IMG")"
        echo " modified: $(stat -L -f '%Sm' "$IMG")"
    else
        echo " size: $(stat -L -c '%s bytes' "$IMG")"
        echo " modified: $(stat -L -c '%y' "$IMG")"
    fi
    echo " type: $(file -L -b "$IMG")"
    echo ""
}
# Print vendor/model/size details for the device named by the global
# $DEVICE.  Uses diskutil on Mac OS and sysfs on Linux.
# Fixes: expansions quoted, backticks replaced with $( ), and deprecated
# 'egrep' replaced by 'grep -E' (same behavior, no deprecation warning).
device_details() {
    BLOCK_SIZE=512

    echo "Device details"
    echo "=============="

    # Collect disk info using diskutil on Mac OS
    if [ "$(uname)" = "Darwin" ]; then
        diskutil info "$DEVICE" | grep -E "(Device Node|Media Name|Total Size)"
        return
    fi

    # Default / Linux information collection
    ACTUAL_DEVICE=$(readlink -f "$DEVICE")
    DEV=$(basename "$ACTUAL_DEVICE")
    if [ "$ACTUAL_DEVICE" != "$DEVICE" ] ; then
        echo " device: $DEVICE -> $ACTUAL_DEVICE"
    else
        echo " device: $DEVICE"
    fi
    if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
        echo " vendor: $(cat "/sys/class/block/$DEV/device/vendor")"
    else
        echo " vendor: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/device/model" ]; then
        echo " model: $(cat "/sys/class/block/$DEV/device/model")"
    else
        echo " model: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/size" ]; then
        # sysfs reports the size in 512-byte sectors; convert to bytes.
        echo " size: $(($(cat "/sys/class/block/$DEV/size") * $BLOCK_SIZE)) bytes"
    else
        echo " size: UNKNOWN"
    fi
    echo ""
}
# Return 0 if $1 exactly matches a /dev/* mount source in the current mount
# table, 1 otherwise.  (The useless 'cat | awk' was replaced by reading
# /proc/self/mounts directly with awk.)
check_mount_device() {
    if awk '{ print $1 }' /proc/self/mounts | grep /dev/ | grep -q -E "^$1$" ; then
        return 0
    fi
    return 1
}
# Return 0 if the given device node (or, on Linux, its parent disk or any
# of its partitions) is currently mounted, 1 otherwise.
is_mounted() {
    if [ "$(uname)" = "Darwin" ]; then
        # On Mac OS, match the whole disk or any of its slices (/dev/diskNsM).
        if df | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1(s[0-9]+)?$" ; then
            return 0
        fi
    else
        # Exact match against the mount table first.
        if check_mount_device $1 ; then
            return 0
        fi
        DEV=`basename $1`
        if [ -d /sys/class/block/$DEV/ ] ; then
            # Check the parent block device (e.g. sdb when given sdb1)...
            PARENT_BLKDEV=`basename $(readlink -f "/sys/class/block/$DEV/..")`
            if [ "$PARENT_BLKDEV" != "block" ] ; then
                # NOTE(review): this passes a bare device name (no /dev/
                # prefix) to check_mount_device, which matches against
                # /dev/* mount sources - confirm this path works as intended.
                if check_mount_device $PARENT_BLKDEV ; then
                    return 0
                fi
            fi
            # ...and every partition of the given device.
            for CHILD_BLKDEV in `find /sys/class/block/$DEV/ -mindepth 1 -maxdepth 1 -name "$DEV*" -type d`
            do
                if check_mount_device /dev/`basename $CHILD_BLKDEV` ; then
                    return 0
                fi
            done
        fi
    fi
    return 1
}
# Return 0 if block device $1 has holders in sysfs (e.g. is part of an LVM
# volume or RAID array), 1 otherwise.
# Fix: the original tested `[ $(ls -A $HOLDERS_DIR) ]` unquoted, which is a
# test(1) syntax error (status 2, treated as "not in use") whenever there is
# more than one holder entry; use -n with a quoted substitution instead.
is_inuse() {
    HOLDERS_DIR="/sys/class/block/$(basename "$1")/holders"
    if [ -d "$HOLDERS_DIR" ] && [ -n "$(ls -A "$HOLDERS_DIR")" ] ; then
        return 0
    fi
    return 1
}
# ---- main ----
# Validate arguments, refuse to touch mounted/in-use devices, confirm with
# the user, then stream the image onto the device.

if [ $# -ne 2 ]; then
    usage
    exit 1
fi

IMAGE=$1
DEVICE=$2

if [ ! -e "$IMAGE" ]; then
    echo "ERROR: Image $IMAGE does not exist"
    usage
    exit 1
fi

if [ ! -e "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE does not exist"
    usage
    exit 1
fi

if [ "$(uname)" = "Darwin" ]; then
    # readlink doesn't support -f on MacOS, just assume it isn't a symlink
    ACTUAL_DEVICE=$DEVICE
else
    ACTUAL_DEVICE=`readlink -f $DEVICE`
fi

# Safety checks: never write to something that is mounted or held by
# another subsystem (e.g. LVM/RAID).
if is_mounted $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently mounted - check if this is the right device, and unmount it first if so"
    device_details
    exit 1
fi
if is_inuse $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently in use (possibly part of LVM) - check if this is the right device!"
    device_details
    exit 1
fi

if [ ! -w "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE is not writable - possibly use sudo?"
    usage
    exit 1
fi

# Show what is about to happen and require explicit confirmation.
image_details $IMAGE
device_details

printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
if [ "$RESPONSE" != "y" ]; then
    echo "Write aborted"
    exit 0
fi

# Use pv for a progress bar when available, otherwise plain dd.
echo "Writing image..."
if which pv >/dev/null 2>&1; then
    pv "$IMAGE" | dd of="$DEVICE" bs="$BLOCKSIZE"
else
    dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
fi
sync
View File

@@ -0,0 +1,245 @@
#!/usr/bin/env python3
# devtool stress tester
#
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
#
# Copyright 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import os.path
import subprocess
import re
import argparse
import logging
import tempfile
import shutil
import signal
import fnmatch
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
sys.path.insert(0, scripts_lib_path)
import scriptutils
import argparse_oe
logger = scriptutils.logger_create('devtool-stress')
def select_recipes(args):
    # Build the list of recipe names (PNs) to test, honouring the
    # --skip-classes, --only, --skip and --resume-from options.
    #
    # NOTE(review): the error paths below 'return 1' while the success path
    # returns a list; callers (stress_extract/stress_modify) iterate the
    # result, so an error value would raise TypeError rather than exit
    # cleanly - confirm whether this is intended.
    # NOTE(review): bb.providers is used without a visible import here;
    # presumably it is pulled in transitively via bb.tinfoil - verify.
    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)

    pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
    (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecaches[''], pkg_pn)

    skip_classes = args.skip_classes.split(',')

    recipelist = []
    for pn in sorted(pkg_pn):
        pref = preferred_versions[pn]
        inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecaches[''].inherits[pref[1]]]
        # for/else: keep only recipes inheriting none of the skip classes.
        for cls in skip_classes:
            if cls in inherits:
                break
        else:
            recipelist.append(pn)

    tinfoil.shutdown()

    resume_from = args.resume_from
    if resume_from:
        if not resume_from in recipelist:
            print('%s is not a testable recipe' % resume_from)
            return 1

    if args.only:
        only = args.only.split(',')
        # Each --only pattern must match at least one testable recipe.
        for onlyitem in only:
            for pn in recipelist:
                if fnmatch.fnmatch(pn, onlyitem):
                    break
            else:
                print('%s does not match any testable recipe' % onlyitem)
                return 1
    else:
        only = None

    if args.skip:
        skip = args.skip.split(',')
    else:
        skip = []

    recipes = []
    for pn in recipelist:
        # Drop everything before the --resume-from recipe.
        if resume_from:
            if pn == resume_from:
                resume_from = None
            else:
                continue

        # Keep only recipes matching an --only pattern, if any were given.
        if args.only:
            for item in only:
                if fnmatch.fnmatch(pn, item):
                    break
            else:
                continue

        # Drop recipes matching a --skip pattern.
        skipit = False
        for item in skip:
            if fnmatch.fnmatch(pn, item):
                skipit = True
        if skipit:
            continue

        recipes.append(pn)

    return recipes
def stress_extract(args):
    # Run 'devtool extract' for every selected recipe, logging failures to
    # stress_<pn>_extract.log.  devtool exit status 4 means the recipe is
    # incompatible with extraction and is reported as skipped.
    # Returns 1 if any recipe failed, 0 otherwise.
    import bb.process
    recipes = select_recipes(args)
    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Become a process group leader so Ctrl+C can kill all children below.
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            skipped = None
            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool extract %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                if exc.exitcode == 4:
                    skipped = 'incompatible'
                else:
                    failed = True
                    with open('stress_%s_extract.log' % pn, 'w') as f:
                        f.write(str(exc))
            # Always clean up the extracted source tree between recipes.
            if os.path.exists(srctree):
                shutil.rmtree(srctree)
            if failed:
                print('failed')
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)

    if failures:
        return 1
    else:
        return 0
def stress_modify(args):
    # Run 'devtool modify -x' then 'bitbake -c install' for every selected
    # recipe, followed by 'devtool reset'.  Failures are logged to
    # stress_<pn>_modify.log / stress_<pn>_install.log; devtool exit status
    # 4 marks a recipe as incompatible (skipped).
    # Returns 1 if any recipe failed, 0 otherwise.
    import bb.process
    recipes = select_recipes(args)
    failures = 0
    tmpdir = tempfile.mkdtemp()
    # Become a process group leader so Ctrl+C can kill all children below.
    os.setpgrp()
    try:
        for pn in recipes:
            sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
            sys.stdout.flush()
            failed = False
            reset = True
            skipped = None
            srctree = os.path.join(tmpdir, pn)
            try:
                bb.process.run('devtool modify -x %s %s' % (pn, srctree))
            except bb.process.ExecutionError as exc:
                if exc.exitcode == 4:
                    skipped = 'incompatible'
                else:
                    with open('stress_%s_modify.log' % pn, 'w') as f:
                        f.write(str(exc))
                    failed = 'modify'
                    # modify never took effect, so there is nothing to reset.
                    reset = False
            if not skipped:
                if not failed:
                    try:
                        bb.process.run('bitbake -c install %s' % pn)
                    except bb.process.CmdError as exc:
                        with open('stress_%s_install.log' % pn, 'w') as f:
                            f.write(str(exc))
                        failed = 'build'
                if reset:
                    try:
                        bb.process.run('devtool reset %s' % pn)
                    except bb.process.CmdError as exc:
                        # A failed reset leaves the workspace dirty; stop here
                        # rather than contaminating subsequent recipes.
                        print('devtool reset failed: %s' % str(exc))
                        break
            if os.path.exists(srctree):
                shutil.rmtree(srctree)
            if failed:
                print('failed (%s)' % failed)
                failures += 1
            elif skipped:
                print('skipped (%s)' % skipped)
            else:
                print('ok')
    except KeyboardInterrupt:
        # We want any child processes killed. This is crude, but effective.
        os.killpg(0, signal.SIGTERM)

    if failures:
        return 1
    else:
        return 0
def main():
    """Parse arguments and dispatch to the selected stress subcommand.

    Returns the subcommand's status (0 on success, non-zero on failure) so
    the caller can propagate it as the process exit code.
    """
    parser = argparse_oe.ArgumentParser(description="devtool stress tester",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
    parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
    parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST', default='gcc-source-*,kernel-devsrc,package-index,perf,meta-world-pkgdata,glibc-locale,glibc-mtrace,glibc-scripts,os-release')
    parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    parser_modify = subparsers.add_parser('modify',
                                          help='Run "devtool modify" followed by a build with bitbake on matching recipes',
                                          description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
    parser_modify.set_defaults(func=stress_modify)

    parser_extract = subparsers.add_parser('extract',
                                           help='Run "devtool extract" on matching recipes',
                                           description='Runs "devtool extract" on matching recipes')
    parser_extract.set_defaults(func=stress_extract)

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)

    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        return 1
    logger.debug('Found bitbake path: %s' % bitbakepath)

    ret = args.func(args)
    # Previously the subcommand's status was computed and then discarded, so
    # failures never reached the shell's exit code.  Return it instead.
    return ret

if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,57 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Simple script to show a manual power prompt for when you want to use
# automated hardware testing with testimage.bbclass but you don't have a
# web-enabled power strip or similar to do the power on/off/cycle.
#
# You can enable it by enabling testimage (see the Yocto Project
# Development manual "Performing Automated Runtime Testing" section)
# and setting the following in your local.conf:
#
# TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
#
PROMPT=""
# Map each power-action argument to a human-readable prompt; unrecognized
# words are silently ignored, and an empty argument ends the loop.
while true; do
    case "$1" in
        on)
            PROMPT="Please turn device power on";;
        off)
            PROMPT="Please turn device power off";;
        cycle)
            PROMPT="Please click Done, then turn the device power off then on";;
        "")
            break;;
    esac
    shift
done

if [ "$PROMPT" = "" ] ; then
    echo "ERROR: no power action specified on command line"
    exit 2
fi

# Pick a dialog tool: prefer kdialog, fall back to zenity.
# ('command -v' replaces the non-POSIX 'which' probe.)
if command -v kdialog >/dev/null 2>&1 ; then
    DIALOGUTIL="kdialog"
elif command -v zenity >/dev/null 2>&1 ; then
    DIALOGUTIL="zenity"
else
    echo "ERROR: couldn't find program to display a message, install kdialog or zenity"
    exit 3
fi

# Show the prompt; the dialog returns non-zero if the user cancels.
if [ "$DIALOGUTIL" = "kdialog" ] ; then
    kdialog --yesno "$PROMPT" --title "TestImage Power Control" --yes-label "Done" --no-label "Cancel test"
elif [ "$DIALOGUTIL" = "zenity" ] ; then
    zenity --question --text="$PROMPT" --title="TestImage Power Control" --ok-label="Done" --cancel-label="Cancel test"
fi
if [ "$?" != "0" ] ; then
    echo "User cancelled test at power prompt"
    exit 1
fi

View File

@@ -0,0 +1,97 @@
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Perform an audit of which packages provide documentation and which
# are missing -doc packages.
#
# Setup requirements: be sure to be building for MACHINE=qemux86. Run
# this script after source'ing the build environment script, so you're
# running it from build/ directory.
#
# Report files produced by this audit run.
REPORT_DOC_SIMPLE="documentation_exists.txt"
REPORT_DOC_DETAIL="documentation_exists_detail.txt"
REPORT_MISSING_SIMPLE="documentation_missing.txt"
REPORT_MISSING_DETAIL="documentation_missing_detail.txt"
REPORT_BUILD_ERRORS="build_errors.txt"

# Start from a clean slate.  NOTE(review): $REPORT_BUILD_ERRORS is not
# removed here, so build errors accumulate across runs - confirm intended.
rm -rf $REPORT_DOC_SIMPLE $REPORT_DOC_DETAIL $REPORT_MISSING_SIMPLE $REPORT_MISSING_DETAIL

# Sanity check: bitbake must be on PATH (i.e. the build env was sourced).
BITBAKE=`which bitbake`
if [ -z "$BITBAKE" ]; then
    echo "Error: bitbake command not found."
    echo "Did you forget to source the build environment script?"
    exit 1
fi

echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""

# Iterate over every recipe name printed by 'bitbake -s' (first column).
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
    if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
        "$pkg" == "Recipe" ||
        "$pkg" == "Parsing" || "$pkg" == "Package" ||
        "$pkg" == "NOTE:" || "$pkg" == "WARNING:" ||
        "$pkg" == "done." || "$pkg" == "===========" ]]
    then
        # Skip initial bitbake output
        continue
    fi
    if [[ "$pkg" =~ -native$ || "$pkg" =~ -nativesdk$ ||
        "$pkg" =~ -cross-canadian ]]; then
        # Skip native/nativesdk/cross-canadian recipes
        continue
    fi
    if [[ "$pkg" =~ ^meta- || "$pkg" =~ ^packagegroup- || "$pkg" =~ -image ]]; then
        # Skip meta, task and image recipes
        continue
    fi
    if [[ "$pkg" =~ ^glibc- || "$pkg" =~ ^libiconv$ ||
        "$pkg" =~ -toolchain$ || "$pkg" =~ ^package-index$ ||
        "$pkg" =~ ^linux- || "$pkg" =~ ^adt-installer$ ||
        "$pkg" =~ ^eds-tools$ || "$pkg" =~ ^external-python-tarball$ ||
        "$pkg" =~ ^qt4-embedded$ || "$pkg" =~ ^qt-mobility ]]; then
        # Skip glibc, libiconv, -toolchain, and other recipes known
        # to cause build conflicts or trigger false positives.
        continue
    fi

    echo "Building package $pkg..."
    bitbake $pkg > /dev/null
    if [ $? -ne 0 ]; then
        echo "There was an error building package $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_BUILD_ERRORS

        # Do not skip the remaining tests, as sometimes the
        # exit status is 1 due to QA errors, and we can still
        # perform the -doc checks.
    fi

    echo "$pkg built successfully, checking for a documentation package..."

    # Extract the recipe's WORKDIR from 'bitbake -e' to find packages-split.
    WORKDIR=`bitbake -e $pkg | grep ^WORKDIR | awk -F '=' '{ print \$2 }' | awk -F '"' '{ print \$2 }'`
    FIND_DOC_PKG=`find $WORKDIR/packages-split/*-doc -maxdepth 0 -type d`
    if [ -z "$FIND_DOC_PKG" ]; then
        # No -doc package was generated:
        echo "No -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi

    FIND_DOC_FILES=`find $FIND_DOC_PKG -type f`
    if [ -z "$FIND_DOC_FILES" ]; then
        # No files shipped with the -doc package:
        echo "No files shipped with the -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
        echo "$pkg" >> $REPORT_MISSING_SIMPLE
        continue
    fi

    echo "Documentation shipped with $pkg:" >> "$REPORT_DOC_DETAIL"
    echo "$FIND_DOC_FILES" >> "$REPORT_DOC_DETAIL"
    echo "" >> "$REPORT_DOC_DETAIL"

    echo "$pkg" >> "$REPORT_DOC_SIMPLE"
done

View File

@@ -0,0 +1,118 @@
#!/usr/bin/env python3
# Simple graph query utility
# useful for getting answers from .dot files produced by bitbake -g
#
# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
#
# Copyright 2013 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
sys.path.insert(0, scripts_lib_path)
import argparse_oe
def get_path_networkx(dotfile, fromnode, tonode):
    """Return an iterator over all simple paths from 'fromnode' to 'tonode'
    in the .dot graph, using the (third-party) networkx module.

    Exits the process with an error (suggesting close name matches) if
    either node is not present in the graph.
    """
    try:
        import networkx
    except ImportError:
        print('ERROR: Please install the networkx python module')
        sys.exit(1)

    graph = networkx.DiGraph(networkx.nx_pydot.read_dot(dotfile))

    def node_missing(node):
        # Suggest close matches to help with typos in node names.
        import difflib
        close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
        if close_matches:
            print('ERROR: no node "%s" in graph. Close matches:\n %s' % (node, '\n '.join(close_matches)))
        sys.exit(1)

    if not fromnode in graph:
        node_missing(fromnode)
    if not tonode in graph:
        node_missing(tonode)
    return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
def find_paths(args):
    """Print every simple path from args.fromnode to args.tonode in the dot
    graph; return 1 (error) when no path exists."""
    last = None
    for last in get_path_networkx(args.dotfile, args.fromnode, args.tonode):
        print(" -> ".join(str(node) for node in last))
    if not last:
        print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode))
        return 1
def filter_graph(args):
    """Filter a task-depends.dot graph down to the references in args.ref.

    Prints the 'digraph'/'}' wrapper lines, any edge whose both endpoints
    match a reference, and (unless --no-nodes) matching node lines.  Tasks
    named in --exclude-tasks (wildcards allowed) are filtered out.
    """
    import fnmatch

    exclude_tasks = []
    if args.exclude_tasks:
        for task in args.exclude_tasks.split(','):
            if not task.startswith('do_'):
                task = 'do_%s' % task
            exclude_tasks.append(task)

    def checkref(strval):
        # A reference looks like (optionally quoted) "target.do_task".
        strval = strval.strip().strip('"')
        target, taskname = strval.rsplit('.', 1)
        if exclude_tasks:
            for extask in exclude_tasks:
                if fnmatch.fnmatch(taskname, extask):
                    return False
        if strval in args.ref or target in args.ref:
            return True
        return False

    with open(args.infile, 'r') as f:
        for line in f:
            line = line.rstrip()
            if not line:
                # Guard: the original fell through to line.split()[0] below,
                # which raised IndexError on blank lines.
                continue
            if line.startswith(('digraph', '}')):
                print(line)
            elif '->' in line:
                linesplit = line.split('->')
                if checkref(linesplit[0]) and checkref(linesplit[1]):
                    print(line)
            elif (not args.no_nodes) and checkref(line.split()[0]):
                print(line)
def main():
    # Command-line entry point: build the find-paths/filter subcommand
    # parsers, dispatch to the selected handler and propagate its return
    # value as the process exit status.
    parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    parser_find_paths = subparsers.add_parser('find-paths',
                                              help='Find all of the paths between two nodes in a dot graph',
                                              description='Finds all of the paths between two nodes in a dot graph')
    parser_find_paths.add_argument('dotfile', help='.dot graph to search in')
    parser_find_paths.add_argument('fromnode', help='starting node name')
    parser_find_paths.add_argument('tonode', help='ending node name')
    parser_find_paths.set_defaults(func=find_paths)

    parser_filter = subparsers.add_parser('filter',
                                          help='Pare down a task graph to contain only the specified references',
                                          description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references')
    parser_filter.add_argument('infile', help='Input file')
    parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)')
    parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines')
    parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)')
    parser_filter.set_defaults(func=filter_graph)

    args = parser.parse_args()

    ret = args.func(args)
    return ret

if __name__ == "__main__":
    ret = main()
    sys.exit(ret)
View File

@@ -0,0 +1,523 @@
#!/usr/bin/env python3
# Script to extract information from image manifests
#
# Copyright (C) 2018 Intel Corporation
# Copyright (C) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import logging
import json
import shutil
import tempfile
import tarfile
from collections import OrderedDict
# Make scripts/lib and bitbake/lib importable, and bail out early if bitbake
# cannot be located - everything below depends on bb.* modules.
scripts_path = os.path.dirname(__file__)
lib_path = scripts_path + '/../lib'
sys.path = sys.path + [lib_path]
import scriptutils
logger = scriptutils.logger_create(os.path.basename(__file__))

import argparse_oe
import scriptpath
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
    logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
    sys.exit(1)
logger.debug('Using standard bitbake path %s' % bitbakepath)
scriptpath.add_oe_lib_path()

import bb.tinfoil
import bb.utils
import oe.utils
import oe.recipeutils
def get_pkg_list(manifest):
    """Return the sorted list of package names found in 'manifest'.

    Accepts either an image manifest (three fields per line: name, arch,
    version) or a build dependency file (one field per line); lines with
    any other field count are ignored.
    """
    packages = []
    with open(manifest, 'r') as mf:
        for entry in mf:
            fields = entry.split()
            if len(fields) == 3:
                # manifest file: "name arch version"
                packages.append(fields[0])
            elif len(fields) == 1:
                # build dependency file: one package per line
                packages.append(fields[0])
    return sorted(packages)
def list_packages(args):
    """Print each package named in the manifest, one per line."""
    for entry in get_pkg_list(args.manifest):
        print('%s' % entry)
def pkg2recipe(tinfoil, pkg):
    """Map a runtime package name to the recipe (PN) that builds it, using
    the pkgdata runtime-reverse files.

    Returns None for -native packages, when the pkgdata file cannot be read,
    or when it contains no PN: line.
    """
    if "-native" in pkg:
        logger.info('skipping %s' % pkg)
        return None

    pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
    logger.debug('pkgdatafile %s' % pkgdatafile)
    try:
        # Context manager ensures the pkgdata file is always closed; the
        # previous bare open() leaked the file handle.
        with open(pkgdatafile, 'r') as f:
            for line in f:
                if line.startswith('PN:'):
                    recipe = line.split(':', 1)[1].strip()
                    return recipe
    except Exception:
        logger.warning('%s is missing' % pkgdatafile)
        return None
    return None
def get_recipe_list(manifest, tinfoil):
    """Return the sorted, de-duplicated list of recipes providing the
    packages named in the manifest."""
    recipes = []
    for entry in get_pkg_list(manifest):
        recipe = pkg2recipe(tinfoil, entry)
        if recipe and recipe not in recipes:
            recipes.append(recipe)
    return sorted(recipes)
def list_recipes(args):
    """Print the recipe (PN) for every package in the manifest."""
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        for recipe in sorted(get_recipe_list(args.manifest, tinfoil)):
            print('%s' % recipe)
def list_layers(args):
    # Write a JSON map (to args.output) describing each layer in BBLAYERS:
    # its git remote URL, tracked branch, subdirectory within the repo and
    # HEAD commit, where these can be determined.

    def find_git_repo(pth):
        # Walk upwards from 'pth' until a directory containing .git is found.
        checkpth = pth
        while checkpth != os.sep:
            if os.path.exists(os.path.join(checkpth, '.git')):
                return checkpth
            checkpth = os.path.dirname(checkpth)
        return None

    def get_git_remote_branch(repodir):
        # Upstream tracking ref as 'remote/branch', or None if none is set.
        # NOTE(review): bb.process is used here without a local import;
        # presumably it is pulled in transitively via bb.tinfoil - verify.
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_head_commit(repodir):
        # SHA of HEAD, or None if git fails.
        try:
            stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            return stdout.strip()
        else:
            return None

    def get_git_repo_url(repodir, remote='origin'):
        import bb.process
        # Try to get upstream repo location from origin remote
        try:
            stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
        except bb.process.ExecutionError as e:
            stdout = None
        if stdout:
            for line in stdout.splitlines():
                splitline = line.split()
                if len(splitline) > 1:
                    if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
                        return splitline[1]
        return None

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)
        layers = OrderedDict()
        for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
            layerdata = OrderedDict()
            layername = os.path.basename(layerdir)
            logger.debug('layername %s, layerdir %s' % (layername, layerdir))
            if layername in layers:
                # Disambiguate duplicate basenames with the parent directory;
                # a second collision is fatal.
                logger.warning('layername %s is not unique in configuration' % layername)
                layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
                logger.debug('trying layername %s' % layername)
                if layername in layers:
                    logger.error('Layer name %s is not unique in configuration' % layername)
                    sys.exit(2)
            repodir = find_git_repo(layerdir)
            if repodir:
                remotebranch = get_git_remote_branch(repodir)
                remote = 'origin'
                if remotebranch and '/' in remotebranch:
                    rbsplit = remotebranch.split('/', 1)
                    layerdata['actual_branch'] = rbsplit[1]
                    remote = rbsplit[0]
                layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
                if os.path.abspath(repodir) != os.path.abspath(layerdir):
                    # Layer lives in a subdirectory of its repository.
                    layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
                commit = get_git_head_commit(repodir)
                if commit:
                    layerdata['vcs_commit'] = commit
            layers[layername] = layerdata

    json.dump(layers, args.output, indent=2)
def get_recipe(args):
    """Print which recipe provides args.package."""
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=True)
        provider = pkg2recipe(tinfoil, args.package)
        print(' %s package provided by %s' % (args.package, provider))
def pkg_dependencies(args):
    """List the recipes (DEPENDS closure) required to build args.package.

    Resolves the package to its providing recipe, then walks DEPENDS
    recursively, printing target recipes and - with --native - native and
    cross recipes as well.
    """

    def get_recipe_info(tinfoil, recipe):
        # Fetch and parse a recipe (with its bbappends); exits the whole
        # program on failure rather than propagating the error.
        try:
            info = tinfoil.get_recipe_info(recipe)
        except Exception:
            logger.error('Failed to get recipe info for: %s' % recipe)
            sys.exit(1)
        if not info:
            logger.warning('No recipe info found for: %s' % recipe)
            sys.exit(1)
        append_files = tinfoil.get_file_appends(info.fn)
        appends = True
        data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
        # Cache name/version on the datastore object for later printing.
        data.pn = info.pn
        data.pv = info.pv
        return data

    def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
        # Depth-first walk of DEPENDS; 'order' controls debug indentation.
        spaces = ' ' * order
        data = recipe_info[rn]
        if args.native:
            logger.debug('%s- %s' % (spaces, data.pn))
        elif "-native" not in data.pn:
            if "cross" not in data.pn:
                logger.debug('%s- %s' % (spaces, data.pn))
        # Filter out dependencies the build system assumes are provided.
        depends = []
        for dep in data.depends:
            if dep not in assume_provided:
                depends.append(dep)
        # First find all dependencies not in package list.
        for dep in depends:
            if dep not in packages:
                packages.append(dep)
                dep_data = get_recipe_info(tinfoil, dep)
                # Do this once now to reduce the number of bitbake calls.
                dep_data.depends = dep_data.getVar('DEPENDS').split()
                recipe_info[dep] = dep_data
        # Then recursively analyze all of the dependencies for the current recipe.
        for dep in depends:
            find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare()

        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        logger.debug('assumed provided:')
        for ap in sorted(assume_provided):
            logger.debug(' - %s' % ap)

        recipe = pkg2recipe(tinfoil, args.package)
        data = get_recipe_info(tinfoil, recipe)
        # Seed the root recipe's filtered dependency list.
        data.depends = []
        depends = data.getVar('DEPENDS').split()
        for dep in depends:
            if dep not in assume_provided:
                data.depends.append(dep)
        recipe_info = dict([(recipe, data)])

        packages = []
        find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)

        print('\nThe following packages are required to build %s' % recipe)
        for p in sorted(packages):
            data = recipe_info[p]
            if "-native" not in data.pn:
                if "cross" not in data.pn:
                    print(" %s (%s)" % (data.pn,p))

        if args.native:
            print('\nThe following native packages are required to build %s' % recipe)
            for p in sorted(packages):
                data = recipe_info[p]
                if "-native" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
                # NOTE(review): a recipe whose name contains both "-native"
                # and "cross" would be printed twice here - confirm intended.
                if "cross" in data.pn:
                    print(" %s(%s)" % (data.pn,p))
def default_config():
    """Return the default export configuration.

    'variables' maps recipe variable names to 'yes'/'no' flags controlling
    whether each variable is exported; the remaining keys toggle extra
    per-recipe metadata sections. Insertion order is preserved so the
    dumped JSON is stable.
    """
    variables = OrderedDict([
        ('PV', 'yes'),
        ('SUMMARY', 'no'),
        ('DESCRIPTION', 'no'),
        ('SECTION', 'no'),
        ('LICENSE', 'yes'),
        ('HOMEPAGE', 'no'),
        ('BUGTRACKER', 'no'),
        ('PROVIDES', 'no'),
        ('BBCLASSEXTEND', 'no'),
        ('DEPENDS', 'no'),
        ('PACKAGECONFIG', 'no'),
        ('SRC_URI', 'yes'),
        ('SRCREV', 'yes'),
        ('EXTRA_OECONF', 'no'),
        ('EXTRA_OESCONS', 'no'),
        ('EXTRA_OECMAKE', 'no'),
        ('EXTRA_OEMESON', 'no'),
    ])
    return OrderedDict([
        ('variables', variables),
        ('filepath', 'no'),
        ('sha256sum', 'no'),
        ('layerdir', 'no'),
        ('layer', 'no'),
        ('inherits', 'no'),
        ('source_urls', 'no'),
        ('packageconfig_opts', 'no'),
        ('patches', 'no'),
        ('packagedir', 'no'),
    ])
def dump_config(args):
    """Write the default export configuration to default_config.json.

    The dumped file can be edited and passed back via 'manifest-info -c'
    to customize which fields get exported.
    """
    config = default_config()
    # Use a context manager so the file is flushed and closed even on
    # error (the original left the handle open).
    with open('default_config.json', 'w') as f:
        json.dump(config, f, indent=2)
    logger.info('Default config list dumped to default_config.json')
def export_manifest_info(args):
    """Export recipe metadata for every package listed in the manifest.

    For each recipe providing a package in args.manifest, collects the
    variables/sections enabled in the config (default, or loaded from the
    --config JSON file) and writes everything into a gzipped tarball.
    """

    def handle_value(value):
        # Collapse runs of whitespace; pass None/empty through unchanged.
        if value:
            return oe.utils.squashspaces(value)
        else:
            return value

    if args.config:
        logger.debug('config: %s' % args.config)
        f = open(args.config, 'r')
        config = json.load(f, object_pairs_hook=OrderedDict)
    else:
        config = default_config()
    if logger.isEnabledFor(logging.DEBUG):
        print('Configuration:')
        json.dump(config, sys.stdout, indent=2)
        print('')

    # All output is staged under a temp dir, then packed into the tarball.
    tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
    logger.debug('tmp dir: %s' % tmpoutdir)

    # export manifest
    shutil.copy2(args.manifest,os.path.join(tmpoutdir, "manifest"))

    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.logger.setLevel(logger.getEffectiveLevel())
        tinfoil.prepare(config_only=False)

        pkglist = get_pkg_list(args.manifest)
        # export pkg list
        f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
        for pkg in pkglist:
            f.write('%s\n' % pkg)
        f.close()

        # Map packages back to the recipes providing them (deduplicated).
        recipelist = []
        for pkg in pkglist:
            recipe = pkg2recipe(tinfoil,pkg)
            if recipe:
                if not recipe in recipelist:
                    recipelist.append(recipe)
        recipelist.sort()
        # export recipe list
        f = open(os.path.join(tmpoutdir, "recipes"), 'w')
        for recipe in recipelist:
            f.write('%s\n' % recipe)
        f.close()

        try:
            rvalues = OrderedDict()
            for pn in sorted(recipelist):
                logger.debug('Package: %s' % pn)
                rd = tinfoil.parse_recipe(pn)

                rvalues[pn] = OrderedDict()

                # Export every variable enabled ('yes') in the config.
                for varname in config['variables']:
                    if config['variables'][varname] == 'yes':
                        rvalues[pn][varname] = handle_value(rd.getVar(varname))

                fpth = rd.getVar('FILE')
                layerdir = oe.recipeutils.find_layerdir(fpth)
                if config['filepath'] == 'yes':
                    rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
                if config['sha256sum'] == 'yes':
                    rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
                if config['layerdir'] == 'yes':
                    rvalues[pn]['layerdir'] = layerdir
                if config['layer'] == 'yes':
                    rvalues[pn]['layer'] = os.path.basename(layerdir)

                if config['inherits'] == 'yes':
                    # Report only classes inherited by this recipe itself -
                    # subtract the globally-inherited set.
                    gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
                    lr = set(rd.getVar("__inherit_cache") or [])
                    rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})

                if config['source_urls'] == 'yes':
                    # Remote fetch URLs only; local file:// entries are skipped
                    # and URL parameters (";name=..." etc.) are stripped.
                    rvalues[pn]['source_urls'] = []
                    for url in (rd.getVar('SRC_URI') or '').split():
                        if not url.startswith('file://'):
                            url = url.split(';')[0]
                            rvalues[pn]['source_urls'].append(url)

                if config['packageconfig_opts'] == 'yes':
                    rvalues[pn]['packageconfig_opts'] = OrderedDict()
                    for key in rd.getVarFlags('PACKAGECONFIG').keys():
                        if key == 'doc':
                            continue
                        rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)

                if config['patches'] == 'yes':
                    patches = oe.recipeutils.get_recipe_patches(rd)
                    rvalues[pn]['patches'] = []
                    if patches:
                        recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
                        bb.utils.mkdirhier(recipeoutdir)
                        for patch in patches:
                            # Patches may be in other layers too
                            patchlayerdir = oe.recipeutils.find_layerdir(patch)
                            # patchlayerdir will be None for remote patches, which we ignore
                            # (since currently they are considered as part of sources)
                            if patchlayerdir:
                                rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
                                shutil.copy(patch, recipeoutdir)

                if config['packagedir'] == 'yes':
                    pn_dir = os.path.join(tmpoutdir, pn)
                    bb.utils.mkdirhier(pn_dir)
                    f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
                    json.dump(rvalues[pn], f, indent=2)
                    f.close()

            with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
                json.dump(rvalues, f, indent=2)

            # Derive the output tarball name from -o, or from the manifest
            # file name, normalizing any .tar.gz/.tgz suffix.
            if args.output:
                outname = os.path.basename(args.output)
            else:
                outname = os.path.splitext(os.path.basename(args.manifest))[0]
            if outname.endswith('.tar.gz'):
                outname = outname[:-7]
            elif outname.endswith('.tgz'):
                outname = outname[:-4]
            tarfn = outname
            if tarfn.endswith(os.sep):
                tarfn = tarfn[:-1]
            if not tarfn.endswith(('.tar.gz', '.tgz')):
                tarfn += '.tar.gz'
            with open(tarfn, 'wb') as f:
                with tarfile.open(None, "w:gz", f) as tar:
                    tar.add(tmpoutdir, outname)
        finally:
            # Always clean up the staging directory, even on error.
            shutil.rmtree(tmpoutdir)
def main():
    """Parse the command line and dispatch to the selected subcommand.

    Each subparser registers its handler via set_defaults(func=...), so
    dispatch is simply args.func(args). Returns the handler's return
    value (used as the process exit code by the __main__ guard).
    """
    parser = argparse_oe.ArgumentParser(description="Image manifest utility",
                                        epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
    subparsers.required = True

    # get recipe info
    parser_get_recipes = subparsers.add_parser('recipe-info',
                                               help='Get recipe info',
                                               description='Get recipe information for a package')
    parser_get_recipes.add_argument('package', help='Package name')
    parser_get_recipes.set_defaults(func=get_recipe)

    # list runtime dependencies
    parser_pkg_dep = subparsers.add_parser('list-depends',
                                           help='List dependencies',
                                           description='List dependencies required to build the package')
    parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
    parser_pkg_dep.add_argument('package', help='Package name')
    parser_pkg_dep.set_defaults(func=pkg_dependencies)

    # list recipes
    parser_recipes = subparsers.add_parser('list-recipes',
                                           help='List recipes producing packages within an image',
                                           description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
    parser_recipes.add_argument('manifest', help='Manifest file')
    parser_recipes.set_defaults(func=list_recipes)

    # list packages
    parser_packages = subparsers.add_parser('list-packages',
                                            help='List packages within an image',
                                            description='Lists packages that went into an image, using the manifest')
    parser_packages.add_argument('manifest', help='Manifest file')
    parser_packages.set_defaults(func=list_packages)

    # list layers
    parser_layers = subparsers.add_parser('list-layers',
                                          help='List included layers',
                                          description='Lists included layers')
    parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
                               default=sys.stdout, type=argparse.FileType('w'))
    parser_layers.set_defaults(func=list_layers)

    # dump default configuration file
    parser_dconfig = subparsers.add_parser('dump-config',
                                           help='Dump default config',
                                           description='Dump default config to default_config.json')
    parser_dconfig.set_defaults(func=dump_config)

    # export recipe info for packages in manifest
    parser_export = subparsers.add_parser('manifest-info',
                                          help='Export recipe info for a manifest',
                                          description='Export recipe information using the manifest')
    parser_export.add_argument('-c', '--config', help='load config from json file')
    parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
    parser_export.add_argument('manifest', help='Manifest file')
    parser_export.set_defaults(func=export_manifest_info)

    args = parser.parse_args()

    # -d wins over -q when both are supplied.
    if args.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug("Debug Enabled")
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    ret = args.func(args)

    return ret
# Script entry point: run main(); on any uncaught exception print the
# traceback but still exit with a non-zero status.
if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
    sys.exit(ret)

View File

@@ -0,0 +1,167 @@
#!/usr/bin/env python3
# Copyright (C) 2013 Wind River Systems, Inc.
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# - list available recipes which have PACKAGECONFIG flags
# - list available PACKAGECONFIG flags and all affected recipes
# - list all recipes and PACKAGECONFIG information
import sys
import optparse
import os
scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
lib_path = os.path.abspath(scripts_path + '/../lib')
sys.path = sys.path + [lib_path]
import scriptpath
# For importing the following modules
bitbakepath = scriptpath.add_bitbake_lib_path()
if not bitbakepath:
sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
sys.exit(1)
import bb.cooker
import bb.providers
import bb.tinfoil
def get_fnlist(bbhandler, pkg_pn, preferred):
    ''' Get all recipe file names '''
    preferred_versions = None
    if preferred:
        # Resolve the preferred version of each provider up front.
        _, preferred_versions, _ = bb.providers.findProviders(
            bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)

    filenames = []
    for pn in sorted(pkg_pn):
        if preferred:
            filenames.append(preferred_versions[pn][1])
        else:
            filenames.extend(pkg_pn[pn])
    return filenames
def get_recipesdata(bbhandler, preferred):
    ''' Get data of all available recipes which have PACKAGECONFIG flags '''
    recipe_files = get_fnlist(bbhandler, bbhandler.cooker.recipecaches[''].pkg_pn, preferred)
    result = {}
    for fn in recipe_files:
        parsed = bbhandler.parse_recipe_file(fn)
        flags = parsed.getVarFlags("PACKAGECONFIG")
        # 'doc' is documentation metadata, not a real flag.
        flags.pop('doc', None)
        if flags:
            result[fn] = parsed
    return result
def collect_pkgs(data_dict):
    ''' Collect available pkgs in which have PACKAGECONFIG flags '''
    # Maps package name -> sorted list of its PACKAGECONFIG flag names.
    pkg_dict = {}
    for fn, data in data_dict.items():
        flags = data.getVarFlags("PACKAGECONFIG")
        # Drop the 'doc' pseudo-flag; it is documentation only.
        flags.pop('doc', None)
        pkg_dict[data.getVar("PN")] = sorted(flags)
    return pkg_dict
def collect_flags(pkg_dict):
    ''' Collect available PACKAGECONFIG flags and all affected pkgs '''
    # Maps flag name -> list of package names that define it.
    flag_dict = {}
    for pkgname, flaglist in pkg_dict.items():
        for flag in flaglist:
            flag_dict.setdefault(flag, []).append(pkgname)
    return flag_dict
def display_pkgs(pkg_dict):
    ''' Display available pkgs which have PACKAGECONFIG flags '''
    # First-column width: the widest recipe name (or the header label plus
    # one), plus one space of padding.
    pkgname_len = max([len("RECIPE NAME") + 1] + [len(p) for p in pkg_dict]) + 1

    header = '%-*s%s' % (pkgname_len, "RECIPE NAME", "PACKAGECONFIG FLAGS")
    print(header)
    print('=' * len(header))
    for pkgname in sorted(pkg_dict):
        print('%-*s%s' % (pkgname_len, pkgname, ' '.join(pkg_dict[pkgname])))
def display_flags(flag_dict):
    ''' Display available PACKAGECONFIG flags and all affected pkgs '''
    # Fixed first-column width: the header label plus five padding chars.
    width = len("PACKAGECONFIG FLAG") + 5
    header = "PACKAGECONFIG FLAG".ljust(width) + "RECIPE NAMES"
    print(header)
    print('=' * len(header))
    for flag in sorted(flag_dict):
        print(flag.ljust(width) + ' '.join(sorted(flag_dict[flag])))
def display_all(data_dict):
    ''' Display all pkgs and PACKAGECONFIG information '''
    print('=' * 50)
    for fn, data in data_dict.items():
        print(data.getVar("P"))
        print(fn)
        value = data.getVar("PACKAGECONFIG") or ''
        if not value.strip():
            value = 'None'
        print('PACKAGECONFIG %s' % value)
        for flag, flag_val in data.getVarFlags("PACKAGECONFIG").items():
            # 'doc' is documentation metadata, not a real flag.
            if flag == "doc":
                continue
            print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
        print('')
def main():
    """Parse the command line, gather recipe data via tinfoil and display
    the requested view (recipes, flags, or everything)."""
    pkg_dict = {}
    flag_dict = {}

    # Collect and validate input
    parser = optparse.OptionParser(
        description = "Lists recipes and PACKAGECONFIG flags. Without -a or -f, recipes and their available PACKAGECONFIG flags are listed.",
        usage = """
%prog [options]""")

    parser.add_option("-f", "--flags",
            help = "list available PACKAGECONFIG flags and affected recipes",
            action="store_const", dest="listtype", const="flags", default="recipes")
    parser.add_option("-a", "--all",
            help = "list all recipes and PACKAGECONFIG information",
            action="store_const", dest="listtype", const="all")
    parser.add_option("-p", "--preferred-only",
            help = "where multiple recipe versions are available, list only the preferred version",
            action="store_true", dest="preferred", default=False)

    options, args = parser.parse_args(sys.argv)

    with bb.tinfoil.Tinfoil() as bbhandler:
        bbhandler.prepare()
        print("Gathering recipe data...")
        # data_dict: recipe file name -> parsed datastore, restricted to
        # recipes that define PACKAGECONFIG flags.
        data_dict = get_recipesdata(bbhandler, options.preferred)

        # Dispatch on the selected view ('recipes' is the default).
        if options.listtype == 'flags':
            pkg_dict = collect_pkgs(data_dict)
            flag_dict = collect_flags(pkg_dict)
            display_flags(flag_dict)
        elif options.listtype == 'recipes':
            pkg_dict = collect_pkgs(data_dict)
            display_pkgs(pkg_dict)
        elif options.listtype == 'all':
            display_all(data_dict)
# Script entry point.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,121 @@
#!/usr/bin/python3
#
# Send build performance test report emails
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import base64
import logging
import os
import pwd
import re
import shutil
import smtplib
import socket
import subprocess
import sys
import tempfile
from email.mime.text import MIMEText
# Setup logging
# All diagnostics go through this logger; its level is adjusted in main()
# based on the --debug/--quiet options.
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger('oe-build-perf-report')
def parse_args(argv):
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="""Email build perf test report""")
    # Option specs in registration order (order matters for --help output).
    option_specs = [
        (('--debug', '-d'), dict(action='store_true',
                                 help="Verbose logging")),
        (('--quiet', '-q'), dict(action='store_true',
                                 help="Only print errors")),
        (('--to',), dict(action='append',
                         help="Recipients of the email")),
        (('--cc',), dict(action='append',
                         help="Carbon copy recipients of the email")),
        (('--bcc',), dict(action='append',
                          help="Blind carbon copy recipients of the email")),
        (('--subject',), dict(default="Yocto build perf test report",
                              help="Email subject")),
        (('--outdir', '-o'), dict(help="Store files in OUTDIR. Can be used to preserve "
                                       "the email parts")),
        (('--text',), dict(help="Plain text message")),
    ]
    for flags, kwargs in option_specs:
        parser.add_argument(*flags, **kwargs)

    parsed = parser.parse_args(argv)
    # --text is effectively mandatory: without a body there is nothing to send.
    if not parsed.text:
        parser.error("Please specify --text")
    return parsed
def send_email(text_fn, subject, recipients, copy=None, blind_copy=None):
    """Compose and send the report email via the local SMTP server.

    Args:
        text_fn: path to a file containing the plain-text report body
        subject: email subject line
        recipients: list of 'To' addresses
        copy: optional list of 'Cc' addresses
        blind_copy: optional list of 'Bcc' addresses

    Note: the defaults were changed from mutable [] to None (a Python
    anti-pattern); callers passing no value see identical behavior since
    both are falsy.
    """
    # Generate email message
    with open(text_fn) as f:
        msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')

    # Derive the sender identity from the current user; the EMAIL
    # environment variable overrides the user@fqdn fallback.
    pw_data = pwd.getpwuid(os.getuid())
    full_name = pw_data.pw_gecos.split(',')[0]
    email = os.environ.get('EMAIL',
                           '{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
    msg['From'] = "{} <{}>".format(full_name, email)
    msg['To'] = ', '.join(recipients)
    if copy:
        msg['Cc'] = ', '.join(copy)
    if blind_copy:
        msg['Bcc'] = ', '.join(blind_copy)
    msg['Subject'] = subject

    # Send email
    with smtplib.SMTP('localhost') as smtp:
        smtp.send_message(msg)
def main(argv=None):
    """Script entry point.

    Returns 0 on success, 1 on failure (missing recipients or a failed
    subprocess while preparing the report).
    """
    args = parse_args(argv)
    if args.quiet:
        log.setLevel(logging.ERROR)
    if args.debug:
        log.setLevel(logging.DEBUG)

    # Fail early with a clear message: without this check, a missing --to
    # crashed later inside send_email() with "', '.join(None)" TypeError.
    if not args.to:
        log.error("No recipients specified, please use --to")
        return 1

    if args.outdir:
        outdir = args.outdir
        if not os.path.exists(outdir):
            os.mkdir(outdir)
    else:
        outdir = tempfile.mkdtemp(dir='.')

    try:
        log.debug("Storing email parts in %s", outdir)
        log.info("Sending email to %s", ', '.join(args.to))
        if args.cc:
            log.info("Copying to %s", ', '.join(args.cc))
        if args.bcc:
            log.info("Blind copying to %s", ', '.join(args.bcc))
        send_email(args.text, args.subject, args.to, args.cc, args.bcc)
    except subprocess.CalledProcessError as err:
        log.error("%s, with output:\n%s", str(err), err.output.decode())
        return 1
    finally:
        # A user-specified outdir is preserved; temporary dirs are wiped.
        if not args.outdir:
            log.debug("Wiping %s", outdir)
            shutil.rmtree(outdir)

    return 0
# Script entry point; propagate main()'s return value as the exit code.
if __name__ == "__main__":
    sys.exit(main())

View File

@@ -0,0 +1,277 @@
#! /usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import collections
import json
import os
import os.path
import pathlib
import re
import subprocess
# TODO
# - option to just list all broken files
# - test suite
# - validate signed-off-by
# Valid (lower-cased) values for the Upstream-Status patch tag.
status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")

class Result:
    """Per-patch review findings; fields are filled in by patchreview()."""
    # Whether the patch has an Upstream-Status or not
    missing_upstream_status = False
    # If the Upstream-Status tag is malformed in some way (string for bad bit)
    malformed_upstream_status = None
    # If the Upstream-Status value is unknown (boolean)
    unknown_upstream_status = False
    # The upstream status value (Pending, etc)
    upstream_status = None
    # Whether the patch has a Signed-off-by or not
    missing_sob = False
    # Whether the Signed-off-by tag is malformed in some way
    malformed_sob = False
    # The Signed-off-by tag value
    sob = None
    # Whether a patch looks like a CVE but doesn't have a CVE tag
    missing_cve = False
def blame_patch(patch):
    """
    From a patch filename, return a list of "commit summary (author name <author
    email>)" strings representing the history.
    """
    # --follow/--find-renames tracks the file across renames; --diff-filter=A
    # limits output to commits that (re)added the file.
    cmd = ("git", "log",
           "--follow", "--find-renames", "--diff-filter=A",
           "--format=%s (%aN <%aE>)",
           "--", patch)
    return subprocess.check_output(cmd).decode("utf-8").splitlines()
def patchreview(patches):
    """Scan each patch file for the required tags and return {path: Result}."""
    # General pattern: start of line, optional whitespace, tag with optional
    # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
    # insensitive.
    sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
    status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
    cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
    cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)

    results = {}

    for patch in patches:

        result = Result()
        results[patch] = result

        # NOTE(review): the file handle is never closed explicitly;
        # relies on garbage collection to release it.
        content = open(patch, encoding='ascii', errors='ignore').read()

        # Find the Signed-off-by tag
        match = sob_re.search(content)
        if match:
            value = match.group(1)
            # Anything other than the canonical spelling is flagged malformed.
            if value != "Signed-off-by:":
                result.malformed_sob = value
            result.sob = match.group(2)
        else:
            result.missing_sob = True

        # Find the Upstream-Status tag
        match = status_re.search(content)
        if match:
            value = match.group(1)
            if value != "Upstream-Status:":
                result.malformed_upstream_status = value
            value = match.group(2).lower()
            # TODO: check case
            if value not in status_values:
                result.unknown_upstream_status = True
            result.upstream_status = value
        else:
            result.missing_upstream_status = True

        # Check that patches which looks like CVEs have CVE tags
        # (matched against both the file name and its contents).
        if cve_re.search(patch) or cve_re.search(content):
            if not cve_tag_re.search(content):
                result.missing_cve = True
        # TODO: extract CVE list

    return results
def analyse(results, want_blame=False, verbose=True):
    """
    want_blame: display blame data for each malformed patch
    verbose: display per-file results instead of just summary
    """
    # want_blame requires verbose, so disable blame if we're not verbose
    if want_blame and not verbose:
        want_blame = False

    # Summary counters accumulated over all patches.
    total_patches = 0
    missing_sob = 0
    malformed_sob = 0
    missing_status = 0
    malformed_status = 0
    missing_cve = 0
    pending_patches = 0

    for patch in sorted(results):
        r = results[patch]
        total_patches += 1
        need_blame = False

        # Build statistics
        if r.missing_sob:
            missing_sob += 1
        if r.malformed_sob:
            malformed_sob += 1
        if r.missing_upstream_status:
            missing_status += 1
        if r.malformed_upstream_status or r.unknown_upstream_status:
            malformed_status += 1
            # Count patches with no status as pending
            # NOTE(review): this branch counts malformed/unknown statuses
            # as pending, while truly *missing* statuses (branch above) are
            # not counted - the comment and the condition disagree; confirm
            # which is intended.
            pending_patches +=1
        if r.missing_cve:
            missing_cve += 1
        if r.upstream_status == "pending":
            pending_patches += 1

        # Output warnings
        if r.missing_sob:
            need_blame = True
            if verbose:
                print("Missing Signed-off-by tag (%s)" % patch)
        if r.malformed_sob:
            need_blame = True
            if verbose:
                print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
        if r.missing_cve:
            need_blame = True
            if verbose:
                print("Missing CVE tag (%s)" % patch)
        if r.missing_upstream_status:
            need_blame = True
            if verbose:
                print("Missing Upstream-Status tag (%s)" % patch)
        if r.malformed_upstream_status:
            need_blame = True
            if verbose:
                print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
        if r.unknown_upstream_status:
            need_blame = True
            if verbose:
                print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))

        if want_blame and need_blame:
            print("\n".join(blame_patch(patch)) + "\n")

    def percent(num):
        # Render "count (pct%)"; "N/A" when there are no patches at all.
        try:
            return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
        except ZeroDivisionError:
            return "N/A"

    if verbose:
        print()

    print("""Total patches found: %d
Patches missing Signed-off-by: %s
Patches with malformed Signed-off-by: %s
Patches missing CVE: %s
Patches missing Upstream-Status: %s
Patches with malformed Upstream-Status: %s
Patches in Pending state: %s""" % (total_patches,
                                   percent(missing_sob),
                                   percent(malformed_sob),
                                   percent(missing_cve),
                                   percent(missing_status),
                                   percent(malformed_status),
                                   percent(pending_patches)))
def histogram(results):
    """Print a text histogram of Upstream-Status values across results.

    One line per distinct status: the status name, a bar of '#' scaled to
    the percentage of patches with that status, and the raw count.
    Rewritten to use the standard library (collections.Counter) instead of
    the third-party 'toolz' package, which was this script's only external
    dependency; counting order (first-seen) is preserved.
    """
    import collections
    import math
    counts = collections.Counter(r.upstream_status for r in results.values())
    bars = {status: "#" * int(math.ceil(float(n) / len(results) * 100))
            for status, n in counts.items()}
    for k in bars:
        # A patch with no status at all has upstream_status None.
        print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
def find_layers(candidate):
    # candidate can either be the path to a layer directly (eg meta-intel), or a
    # repository that contains other layers (meta-arm). We can determine what by
    # looking for a conf/layer.conf file. If that file exists then it's a layer,
    # otherwise its a repository of layers and we can assume they're called
    # meta-*.
    if (candidate / "conf" / "layer.conf").exists():
        return [candidate.absolute()]
    layers = []
    for entry in candidate.iterdir():
        if entry.is_dir() and (entry.name == "meta" or entry.name.startswith("meta-")):
            layers.append(entry.absolute())
    return layers
# TODO these don't actually handle dynamic-layers/
def gather_patches(layers):
    """Return absolute paths of all git-tracked .patch/.diff files in the layers."""
    patches = []
    for layer in layers:
        tracked = subprocess.check_output(
            ("git", "-C", layer, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"),
            universal_newlines=True)
        patches.extend(os.path.join(layer, name) for name in tracked.split())
    return patches
def count_recipes(layers):
    """Count git-tracked .bb recipe files across the given layers."""
    total = 0
    for layer in layers:
        listing = subprocess.check_output(
            ["git", "-C", layer, "ls-files", "recipes-*/**/*.bb"],
            universal_newlines=True)
        total += len(listing.splitlines())
    return total
# Command-line driver: scan a layer (or repository of layers) for patches,
# review the tags, print the analysis and optionally append a summary row
# to a JSON history file and/or show a histogram.
if __name__ == "__main__":
    args = argparse.ArgumentParser(description="Patch Review Tool")
    args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
    args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
    args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
    args.add_argument("-j", "--json", help="update JSON")
    args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
    args = args.parse_args()

    layers = find_layers(args.directory)
    print(f"Found layers {' '.join((d.name for d in layers))}")
    patches = gather_patches(layers)

    results = patchreview(patches)
    analyse(results, want_blame=args.blame, verbose=args.verbose)

    if args.json:
        # Load existing history (a JSON list), or start fresh.
        if os.path.isfile(args.json):
            data = json.load(open(args.json))
        else:
            data = []

        # Counter lets the per-status keys below default to zero.
        row = collections.Counter()
        row["total"] = len(results)
        # Commit date (epoch), hash, and count come from the scanned repo.
        row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
        row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
        row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
        row['recipe_count'] = count_recipes(layers)

        for r in results.values():
            if r.upstream_status in status_values:
                row[r.upstream_status] += 1
            if r.malformed_upstream_status or r.missing_upstream_status:
                row['malformed-upstream-status'] += 1
            if r.malformed_sob or r.missing_sob:
                row['malformed-sob'] += 1

        data.append(row)
        json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")

    if args.histogram:
        print()
        histogram(results)

View File

@@ -0,0 +1,104 @@
#!/bin/bash
#
# patchtest: Run patchtest on commits starting at master
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
set -o errexit
# Default values
pokydir=''
usage() {
    # Print usage to stderr and exit non-zero.
    # Fix: the heredoc itself is redirected to stderr. Previously a stray
    # ">&2" line *after* EOM was a separate no-op command, so the usage
    # text actually went to stdout.
    local CMD
    CMD=$(basename "$0")
    cat >&2 <<EOM
Usage: $CMD [-h] [-p pokydir]
-p pokydir Defaults to current directory
EOM
    exit 1
}
function clone() {
    # Clone REPOREMOTE into REPODIR, or update an existing checkout with
    # 'git pull'.
    local REPOREMOTE=$1
    local REPODIR=$2
    # Quote expansions so paths containing spaces do not word-split, and
    # guard the cd with && so 'git pull' never runs in the wrong directory.
    if [ ! -d "$REPODIR" ]; then
        git clone "$REPOREMOTE" "$REPODIR" --quiet
    else
        ( cd "$REPODIR" && git pull --quiet )
    fi
}
# Parse command-line options: -p overrides the poky directory passed to
# patchtest, -h prints usage; anything else is an error.
while getopts ":p:h" opt; do
    case $opt in
    p)
        pokydir=$OPTARG
        ;;
    h)
        usage
        ;;
    \?)
        echo "Invalid option: -$OPTARG" >&2
        usage
        ;;
    :)
        echo "Option -$OPTARG requires an argument." >&2
        usage
        ;;
    esac
done
shift $((OPTIND-1))

CDIR="$PWD"

# default pokydir to current directory if user did not specify one
if [ -z "$pokydir" ]; then
    pokydir="$CDIR"
fi

# Virtualenv root and the two checkouts it will contain.
PTENV="$PWD/patchtest"
PT="$PTENV/patchtest"
PTOE="$PTENV/patchtest-oe"

# NOTE(review): 'command -v' is the portable alternative to 'which'.
if ! which virtualenv > /dev/null; then
    echo "Install virtualenv before proceeding"
    exit 1;
fi

# activate the virtual env
# NOTE(review): $PTENV and friends are unquoted below; paths with spaces
# would break - confirm acceptable for this helper.
virtualenv $PTENV --quiet
source $PTENV/bin/activate

cd $PTENV

# clone or pull
clone git://git.yoctoproject.org/patchtest $PT
clone git://git.yoctoproject.org/patchtest-oe $PTOE

# install requirements
pip install -r $PT/requirements.txt --quiet
pip install -r $PTOE/requirements.txt --quiet

# Make the patchtest tools available for the loop below.
PATH="$PT:$PT/scripts:$PATH"

# loop through parent to HEAD and execute patchtest on each commit
for commit in $(git rev-list master..HEAD --reverse)
do
    shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
    # Run patchtest on the single-commit patch; an empty summary means no
    # failures were reported.
    log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
    if [ -z "$log" ]; then
        shortlog="$shortlog: OK"
    else
        shortlog="$shortlog: FAIL"
    fi
    echo "$shortlog"
    echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
    echo ""
done

deactivate

cd $CDIR

View File

@@ -0,0 +1,61 @@
#!/bin/sh
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# Show usage when called with no arguments or with --help.
# Fix: the test(1) "-o" operator is marked obsolescent/ambiguous by POSIX;
# use two separate tests combined with || instead.
if [ -z "$1" ] || [ "$1" = "--help" ] ; then
    echo "Usage: $0 <serial terminal command>"
    echo
    echo "Simple script to handle maintaining a terminal for serial devices that"
    echo "disappear when a device is powered down or reset, such as the USB"
    echo "serial console on the original BeagleBone (white version)."
    echo
    echo "e.g. $0 picocom -b 115200 /dev/ttyUSB0"
    echo
    exit
fi
# NOTE(review): flattening "$@" into a single string loses the original
# word boundaries if any argument contains whitespace; an array would be
# safer, but the flat string is re-executed as $args later in the script.
args="$@"
DEVICE=""
# Scan the arguments for the first /dev/* path - assumed to be the serial
# device node the terminal command will open.
while [ "$1" != "" ]; do
    case "$1" in
        /dev/*)
            DEVICE=$1
            break;;
    esac
    shift
done
# Main loop: wait for the device node, run the terminal command, and
# restart it whenever the device vanished (power-down/reset) while it ran.
if [ "$DEVICE" != "" ] ; then
    while true; do
        # Wait for the device node to (re)appear.
        if [ ! -e $DEVICE ] ; then
            echo "serdevtry: waiting for $DEVICE to exist..."
            while [ ! -e $DEVICE ]; do
                sleep 0.1
            done
        fi
        if [ ! -w $DEVICE ] ; then
            # Sometimes (presumably because of a race with udev) we get to
            # the device before its permissions have been set up
            RETRYNUM=0
            while [ ! -w $DEVICE ]; do
                # Give up after three 0.1s attempts (RETRYNUM 0, 1, 2).
                if [ "$RETRYNUM" = "2" ] ; then
                    echo "Device $DEVICE exists but is not writable!"
                    exit 1
                fi
                RETRYNUM=$((RETRYNUM+1))
                sleep 0.1
            done
        fi
        # Run the user's terminal command; when it exits, check whether the
        # device still exists. If it does, the user quit normally - stop.
        # If not, the device disappeared - loop and wait for it again.
        $args
        if [ -e $DEVICE ] ; then
            break
        fi
    done
else
    echo "Unable to determine device node from command: $args"
    exit 1
fi

View File

@@ -0,0 +1,223 @@
#!/bin/bash
# Build performance regression test script
#
# Copyright 2011 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script is intended to be used in conjunction with "git bisect run"
# in order to find regressions in build time, however it can also be used
# independently. It cleans out the build output directories, runs a
# specified worker script (an example is test_build_time_worker.sh) under
# TIME(1), logs the results to TEST_LOGDIR (default /tmp) and returns a
# value telling "git bisect run" whether the build time is good (under
# the specified threshold) or bad (over it). There is also a tolerance
# option but it is not particularly useful as it only subtracts the
# tolerance from the given threshold and uses it as the actual threshold.
#
# It is also capable of taking a file listing git revision hashes to be
# test-applied to the repository in order to get past build failures that
# would otherwise cause certain revisions to have to be skipped; if a
# revision does not apply cleanly then the script assumes it does not
# need to be applied and ignores it.
#
# Please see the help output (syntax below) for some important setup
# instructions.
#
# AUTHORS
# Paul Eggleton <paul.eggleton@linux.intel.com>

# Print usage and setup instructions.
syntax() {
    echo "syntax: $0 <script> <time> <tolerance> [patchrevlist]"
    echo ""
    echo " script - worker script file (if in current dir, prefix with ./)"
    echo " time - time threshold (in seconds, suffix m for minutes)"
    echo " tolerance - tolerance (in seconds, suffix m for minutes or % for"
    echo " percentage, can be 0)"
    echo " patchrevlist - optional file listing revisions to apply as patches on top"
    echo ""
    echo "You must set TEST_BUILDDIR to point to a previously created build directory,"
    echo "however please note that this script will wipe out the TMPDIR defined in"
    echo "TEST_BUILDDIR/conf/local.conf as part of its initial setup (as well as your"
    echo "~/.ccache)"
    echo ""
    echo "To get rid of the sudo prompt, please add the following line to /etc/sudoers"
    echo "(use 'visudo' to edit this; also it is assumed that the user you are running"
    echo "as is a member of the 'wheel' group):"
    echo ""
    echo "%wheel ALL=(ALL) NOPASSWD: /sbin/sysctl -w vm.drop_caches=[1-3]"
    echo ""
    echo "Note: it is recommended that you disable crond and any other process that"
    echo "may cause significant CPU or I/O usage during build performance tests."
}

# Note - we exit with 250 here because that will tell git bisect run that
# something bad happened and stop
if [ "$1" = "" ] ; then
    syntax
    exit 250
fi

if [ "$2" = "" ] ; then
    syntax
    exit 250
fi

if [ "$3" = "" ] ; then
    syntax
    exit 250
fi

# Validate the threshold/tolerance formats before doing anything destructive
if ! [[ "$2" =~ ^[0-9][0-9m.]*$ ]] ; then
    echo "'$2' is not a valid number for threshold"
    exit 250
fi

if ! [[ "$3" =~ ^[0-9][0-9m.%]*$ ]] ; then
    echo "'$3' is not a valid number for tolerance"
    exit 250
fi

if [ "$TEST_BUILDDIR" = "" ] ; then
    echo "Please set TEST_BUILDDIR to a previously created build directory"
    exit 250
fi

if [ ! -d "$TEST_BUILDDIR" ] ; then
    echo "TEST_BUILDDIR $TEST_BUILDDIR not found"
    exit 250
fi

# Refuse to run on a dirty working tree: we "git reset --hard" at the end
git diff --quiet
if [ $? != 0 ] ; then
    echo "Working tree is dirty, cannot proceed"
    exit 251
fi

# BUGFIX: this previously tested the literal string
# "BB_ENV_PASSTHROUGH_ADDITIONS" (missing $), so the warning always fired.
if [ "$BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
    echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
fi

runscript=$1
timethreshold=$2
tolerance=$3

if [ "$4" != "" ] ; then
    patchrevlist=`cat $4`
else
    patchrevlist=""
fi

# Convert an "Nm" (minutes) suffix to seconds.
# BUGFIX: was "[[ timethreshold == *m* ]]" (missing $), so a minutes suffix
# on the threshold was silently never converted.
if [[ $timethreshold == *m* ]] ; then
    timethreshold=`echo $timethreshold | sed s/m/*60/ | bc`
fi

if [[ $tolerance == *m* ]] ; then
    tolerance=`echo $tolerance | sed s/m/*60/ | bc`
elif [[ $tolerance == *%* ]] ; then
    tolerance=`echo $tolerance | sed s/%//`
    tolerance=`echo "scale = 2; (($tolerance * $timethreshold) / 100)" | bc`
fi

# Extract TMPDIR and SSTATE_DIR from the build configuration
tmpdir=`grep "^TMPDIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/TMPDIR[ \t]*=[ \t\?]*"//' -e 's/"//'`
if [ "x$tmpdir" = "x" ]; then
    echo "Unable to determine TMPDIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi
sstatedir=`grep "^SSTATE_DIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/SSTATE_DIR[ \t\?]*=[ \t]*"//' -e 's/"//'`
if [ "x$sstatedir" = "x" ]; then
    echo "Unable to determine SSTATE_DIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
    exit 250
fi

# Paranoia before rm -rf: refuse suspiciously short paths like "/" or "/a"
if [ `expr length $tmpdir` -lt 4 ] ; then
    echo "TMPDIR $tmpdir is less than 4 characters, bailing out"
    exit 250
fi
if [ `expr length $sstatedir` -lt 4 ] ; then
    echo "SSTATE_DIR $sstatedir is less than 4 characters, bailing out"
    exit 250
fi

# Countdown grace period before wiping the build output
echo -n "About to wipe out TMPDIR $tmpdir, press Ctrl+C to break out... "
for i in 9 8 7 6 5 4 3 2 1
do
    echo -ne "\x08$i"
    sleep 1
done
echo

pushd . > /dev/null

rm -f pseudodone
echo "Removing TMPDIR $tmpdir..."
rm -rf $tmpdir
echo "Removing TMPDIR $tmpdir-*libc..."
rm -rf $tmpdir-*libc
echo "Removing SSTATE_DIR $sstatedir..."
rm -rf $sstatedir
echo "Removing ~/.ccache..."
rm -rf ~/.ccache

echo "Syncing..."
sync
sync
echo "Dropping VM cache..."
#echo 3 > /proc/sys/vm/drop_caches
sudo /sbin/sysctl -w vm.drop_caches=3 > /dev/null

if [ "$TEST_LOGDIR" = "" ] ; then
    logdir="/tmp"
else
    logdir="$TEST_LOGDIR"
fi
rev=`git rev-parse HEAD`
logfile="$logdir/timelog_$rev.log"
echo -n > $logfile
gitroot=`git rev-parse --show-toplevel`
cd $gitroot

# Test-apply each requested revision; ignore ones that do not apply cleanly
for patchrev in $patchrevlist ; do
    echo "Applying $patchrev"
    patchfile=`mktemp`
    git show $patchrev > $patchfile
    git apply --check $patchfile &> /dev/null
    if [ $? != 0 ] ; then
        echo " ... patch does not apply without errors, ignoring"
    else
        echo "Applied $patchrev" >> $logfile
        git apply $patchfile &> /dev/null
    fi
    rm $patchfile
done

sync
echo "Quiescing for 5s..."
sleep 5

echo "Running $runscript at $rev..."
timeoutfile=`mktemp`
/usr/bin/time -o $timeoutfile -f "%e\nreal\t%E\nuser\t%Us\nsys\t%Ss\nmaxm\t%Mk" $runscript 2>&1 | tee -a $logfile
# First pipeline element = exit status of /usr/bin/time (i.e. the worker),
# not of tee; made explicit with [0] (bare $PIPESTATUS meant the same thing)
exitstatus=${PIPESTATUS[0]}

git reset --hard HEAD > /dev/null
popd > /dev/null

timeresult=`head -n1 $timeoutfile`
cat $timeoutfile | tee -a $logfile
rm $timeoutfile

if [ $exitstatus != 0 ] ; then
    # Build failed, exit with 125 to tell git bisect run to skip this rev
    echo "*** Build failed (exit code $exitstatus), skipping..." | tee -a $logfile
    exit 125
fi

# bc prints 1 when elapsed time exceeds (threshold - tolerance): "bad" commit
ret=`echo "scale = 2; $timeresult > $timethreshold - $tolerance" | bc`
echo "Returning $ret" | tee -a $logfile
exit $ret

View File

@@ -0,0 +1,41 @@
#!/bin/bash
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This is an example script to be used in conjunction with test_build_time.sh

if [ "$TEST_BUILDDIR" = "" ] ; then
    echo "TEST_BUILDDIR is not set"
    exit 1
fi

# $( ) instead of backticks; quoted so paths containing spaces work
buildsubdir=$(basename "$TEST_BUILDDIR")
if [ ! -d "$buildsubdir" ] ; then
    echo "Unable to find build subdir $buildsubdir in current directory"
    exit 1
fi

# Source whichever build environment setup script this tree provides
if [ -f oe-init-build-env ] ; then
    . ./oe-init-build-env "$buildsubdir"
elif [ -f poky-init-build-env ] ; then
    . ./poky-init-build-env "$buildsubdir"
else
    echo "Unable to find build environment setup script"
    exit 1
fi

# Pick the image target that exists in this checkout
if [ -f ../meta/recipes-sato/images/core-image-sato.bb ] ; then
    target="core-image-sato"
else
    target="poky-image-sato"
fi

echo "Build started at $(date "+%Y-%m-%d %H:%M:%S")"
echo "bitbake $target"
bitbake $target
ret=$?
echo "Build finished at $(date "+%Y-%m-%d %H:%M:%S")"
exit $ret

View File

@@ -0,0 +1,26 @@
#!/bin/bash
#
# Find python modules uncovered by oe-selftest
#
# Copyright (c) 2016, Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Author: Ed Bartosh <ed.bartosh@linux.intel.com>
#

# Set shell options here rather than on the shebang line so they still
# apply when the script is invoked as "bash <script>" (shebang options
# are ignored in that case).
set -eur

# Stdin must be a pipe (the coverage report), and exactly one dir argument
if [ ! "$#" -eq 1 -o -t 0 ] ; then
    echo 'Usage: coverage report | ./scripts/contrib/uncovered <dir>' 1>&2
    exit 1
fi

# Canonicalise the argument; readlink -e fails (aborting under set -e)
# if the path does not exist
path=$(readlink -ev "$1")

if [ ! -d "$path" ] ; then
    echo "directory $1 doesn't exist" 1>&2
    exit 1
fi

# Left side: python files mentioned in the coverage report (stdin) with
# non-zero coverage. Right side: all python scripts under $path. Lines
# only on the right (+-prefixed) are the uncovered modules.
diff -u <(grep "$path" | grep -v '0%$' | cut -f1 -d: | sort) \
        <(find "$path" | xargs file | grep 'Python script' | cut -f1 -d: | sort) | \
grep "^+$path" | cut -c2-

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script can be used to verify HOMEPAGE values for all recipes in
# the current configuration.
# The result is influenced by network environment, since the timeout of connect url is 5 seconds as default.
import sys
import os
import subprocess
import urllib.request
# Allow importing scripts/lib modules
scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
lib_path = scripts_path + '/lib'
sys.path = sys.path + [lib_path]
import scriptpath
import scriptutils
# Allow importing bitbake modules (location discovered via the scriptpath helper)
bitbakepath = scriptpath.add_bitbake_lib_path()
import bb.tinfoil
# Shared logger used by all functions in this script
logger = scriptutils.logger_create('verify_homepage')
def wgetHomepage(pn, homepage):
    """Fallback check of a recipe HOMEPAGE URL using wget --spider.

    Used when urllib fails; some servers respond differently to wget.

    Arguments:
        pn -- recipe name (used only in the warning message)
        homepage -- the HOMEPAGE URL to probe (5s timeout, 1 try)

    Returns 1 (after logging a warning) if wget cannot reach the URL,
    otherwise 0, so callers can sum the results into a failure count.
    """
    import shlex  # local import: only needed on this fallback path
    # Quote the URL: HOMEPAGE values are recipe-supplied strings, and the
    # old unquoted interpolation let shell metacharacters in a URL be
    # interpreted by the shell (breakage or command injection).
    result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + shlex.quote(homepage), shell = True)
    if result:
        logger.warning("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
        return 1
    else:
        return 0
def verifyHomepage(bbhandler):
    """Verify the HOMEPAGE of every recipe known to the given tinfoil handle.

    Tries urllib first (5s timeout) and falls back to wgetHomepage() on any
    error. Returns the number of HOMEPAGE URLs that could not be reached.
    """
    recipes_by_pn = bbhandler.cooker.recipecaches[''].pkg_pn
    failures = 0
    seen = []
    for pn in sorted(recipes_by_pn):
        for virtual_fn in recipes_by_pn[pn]:
            # Multiple BBCLASSEXTENDed variants map to one real recipe file;
            # there's no point checking the same file more than once.
            real_fn, _, _ = bb.cache.virtualfn2realfn(virtual_fn)
            if real_fn in seen:
                continue
            recipe_data = bbhandler.parse_recipe_file(real_fn)
            homepage = recipe_data.getVar("HOMEPAGE")
            if homepage:
                try:
                    urllib.request.urlopen(homepage, timeout=5)
                except Exception:
                    failures = failures + wgetHomepage(os.path.basename(real_fn), homepage)
            seen.append(real_fn)
    return failures
if __name__=='__main__':
    # Tinfoil provides a parsed recipe cache without running a full build UI
    with bb.tinfoil.Tinfoil() as bbhandler:
        bbhandler.prepare()
        logger.info("Start verifying HOMEPAGE:")
        failcount = verifyHomepage(bbhandler)
        logger.info("Finished verifying HOMEPAGE.")
        logger.info("Summary: %s failed" % failcount)