Complete Yocto mirror with license table for TQMa6UL (2038-compliance)
- 264 license table entries with exact download URLs (224/264 resolved)
- Complete sources/ directory with all BitBake recipes
- Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl)
- Full traceability for Softwarefreigabeantrag
- GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4
- License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
This commit is contained in:
@@ -0,0 +1,98 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
|
||||
from oeqa.selftest.cases.sstatetests import SStateBase
|
||||
|
||||
|
||||
class RebuildFromSState(SStateBase):
    """Verify that targets can be rebuilt purely from a pre-populated
    sstate cache, optionally from a relocated build directory.

    Fixes over the previous revision:
    - ``setUpClass`` now uses the conventional ``cls`` parameter name.
    - the bare ``except:`` around the optional auto.conf copy now catches
      only ``FileNotFoundError`` instead of swallowing every error.
    - ``not status == 0`` comparisons replaced with ``status != 0``.
    """

    @classmethod
    def setUpClass(cls):
        super(RebuildFromSState, cls).setUpClass()
        # BUILDDIR is exported by oe-init-build-env; buildA/buildB trees
        # are created underneath it.  (os.path.join with a single argument
        # was a no-op and has been dropped.)
        cls.builddir = os.environ.get('BUILDDIR')

    def get_dep_targets(self, primary_targets):
        """Return the full list of recipes (dependencies included) needed
        to build *primary_targets*, as computed by ``bitbake -g``."""
        bitbake("-g " + ' '.join(map(str, primary_targets)))
        # bitbake -g writes the flattened recipe list to pn-buildlist
        with open(os.path.join(self.builddir, 'pn-buildlist'), 'r') as pnfile:
            found_targets = pnfile.read().splitlines()
        return found_targets

    def configure_builddir(self, builddir):
        """Create a fresh build directory seeded with the current build's
        configuration, plus a selftest.inc that isolates SSTATE_DIR/TMPDIR
        under the new TOPDIR and disables sstate mirrors."""
        os.mkdir(builddir)
        self.track_for_cleanup(builddir)
        os.mkdir(os.path.join(builddir, 'conf'))
        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(builddir, 'conf/local.conf'))
        config = {}
        config['default_sstate_dir'] = "SSTATE_DIR ?= \"${TOPDIR}/sstate-cache\""
        config['null_sstate_mirrors'] = "SSTATE_MIRRORS = \"\""
        config['default_tmp_dir'] = "TMPDIR = \"${TOPDIR}/tmp\""
        for key in config:
            ftools.append_file(os.path.join(builddir, 'conf/selftest.inc'), config[key])
        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/bblayers.conf'), os.path.join(builddir, 'conf/bblayers.conf'))
        try:
            shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/auto.conf'), os.path.join(builddir, 'conf/auto.conf'))
        except FileNotFoundError:
            # auto.conf is optional; only copy it when the source build has one
            pass

    def hardlink_tree(self, src, dst):
        """Mirror the directory tree *src* into *dst* using hard links, so
        the sstate cache can be 'copied' cheaply between build dirs."""
        os.mkdir(dst)
        self.track_for_cleanup(dst)
        for root, dirs, files in os.walk(src):
            if root == src:
                continue
            # root.split(src)[1][1:] is the path of root relative to src
            os.mkdir(os.path.join(dst, root.split(src)[1][1:]))
            for sstate_file in files:
                os.link(os.path.join(root, sstate_file), os.path.join(dst, root.split(src)[1][1:], sstate_file))

    def run_test_sstate_rebuild(self, primary_targets, relocate=False, rebuild_dependencies=False):
        """Build *primary_targets* in buildA to populate an sstate cache,
        then rebuild each target (or, with *rebuild_dependencies*, every
        dependency) from that cache alone.  With *relocate* the rebuild
        happens in a differently-named directory (buildB) to exercise
        sstate relocation.  Raises an assertion listing any recipe that
        failed cleansstate or the rebuild."""
        buildA = os.path.join(self.builddir, 'buildA')
        if relocate:
            buildB = os.path.join(self.builddir, 'buildB')
        else:
            buildB = buildA

        if rebuild_dependencies:
            rebuild_targets = self.get_dep_targets(primary_targets)
        else:
            rebuild_targets = primary_targets

        # Phase 1: populate the sstate cache, then keep only the cache.
        self.configure_builddir(buildA)
        runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildA)) + 'bitbake ' + ' '.join(map(str, primary_targets)), shell=True, executable='/bin/bash')
        self.hardlink_tree(os.path.join(buildA, 'sstate-cache'), os.path.join(self.builddir, 'sstate-cache-buildA'))
        shutil.rmtree(buildA)

        # Phase 2: rebuild every target from the preserved cache, one
        # fresh build directory per target.
        failed_rebuild = []
        failed_cleansstate = []
        for target in rebuild_targets:
            self.configure_builddir(buildB)
            self.hardlink_tree(os.path.join(self.builddir, 'sstate-cache-buildA'), os.path.join(buildB, 'sstate-cache'))

            result_cleansstate = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake -ccleansstate ' + target, ignore_status=True, shell=True, executable='/bin/bash')
            if result_cleansstate.status != 0:
                failed_cleansstate.append(target)
                shutil.rmtree(buildB)
                continue

            result_build = runCmd((". %s/oe-init-build-env %s && " % (get_bb_var('COREBASE'), buildB)) + 'bitbake ' + target, ignore_status=True, shell=True, executable='/bin/bash')
            if result_build.status != 0:
                failed_rebuild.append(target)

            shutil.rmtree(buildB)

        self.assertFalse(failed_rebuild, msg="The following recipes have failed to rebuild: %s" % ' '.join(map(str, failed_rebuild)))
        self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))

    def test_sstate_relocation(self):
        self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True)

    def test_sstate_rebuild(self):
        self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True)
|
||||
345
sources/poky/meta/lib/oeqa/selftest/cases/archiver.py
Normal file
345
sources/poky/meta/lib/oeqa/selftest/cases/archiver.py
Normal file
@@ -0,0 +1,345 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import re
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class Archiver(OESelftestTestCase):
    """Tests for the archiver class (source archiving for license
    compliance).

    Fixes over the previous revision:
    - the locked-sigs file in the multiconfig test is now read via a
      ``with`` block instead of a leaked ``open()`` handle;
    - the signature-uniqueness check uses ``assertEqual`` on the set size
      instead of the obscure ``assertFalse(len(...) - 1)``.
    """

    def test_archiver_allows_to_filter_on_recipe_name(self):
        """
        Summary:     The archiver should offer the possibility to filter on the recipe. (#6929)
        Expected:    1. Included recipe (busybox) should be included
                     2. Excluded recipe (zlib) should be excluded
        Product:     oe-core
        Author:      Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        """

        include_recipe = 'selftest-ed'
        exclude_recipe = 'initscripts'

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "original"\n'
        features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % include_recipe
        features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % exclude_recipe
        self.write_config(features)

        bitbake('-c clean %s %s' % (include_recipe, exclude_recipe))
        bitbake("-c deploy_archives %s %s" % (include_recipe, exclude_recipe))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
        src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])

        # Check that include_recipe was included
        included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
        self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)

        # Check that exclude_recipe was excluded
        excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
        self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)

    def test_archiver_filters_by_type(self):
        """
        Summary:     The archiver is documented to filter on the recipe type.
        Expected:    1. included recipe type (target) should be included
                     2. other types should be excluded
        Product:     oe-core
        Author:      André Draszik <adraszik@tycoint.com>
        """

        target_recipe = 'selftest-ed'
        native_recipe = 'selftest-ed-native'

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "original"\n'
        features += 'COPYLEFT_RECIPE_TYPES = "target"\n'
        self.write_config(features)

        bitbake('-c clean %s %s' % (target_recipe, native_recipe))
        bitbake("%s -c deploy_archives %s" % (target_recipe, native_recipe))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS'])
        src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
        src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])

        # Check that target_recipe was included
        included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
        self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)

        # Check that native_recipe was excluded
        excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
        self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)

    def test_archiver_filters_by_type_and_name(self):
        """
        Summary:     Test that the archiver archives by recipe type, taking the
                     recipe name into account.
        Expected:    1. included recipe type (target) should be included
                     2. other types should be excluded
                     3. recipe by name should be included / excluded,
                        overriding previous decision by type
        Product:     oe-core
        Author:      André Draszik <adraszik@tycoint.com>
        """

        target_recipes = [ 'initscripts', 'selftest-ed' ]
        native_recipes = [ 'update-rc.d-native', 'selftest-ed-native' ]

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "original"\n'
        features += 'COPYLEFT_RECIPE_TYPES = "target"\n'
        # name-based include/exclude must override the type-based decision
        features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % native_recipes[1]
        features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % target_recipes[1]
        self.write_config(features)

        bitbake('-c clean %s %s' % (' '.join(target_recipes), ' '.join(native_recipes)))
        bitbake('-c deploy_archives %s %s' % (' '.join(target_recipes), ' '.join(native_recipes)))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS'])
        src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
        src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])

        # Check that target_recipe[0] and native_recipes[1] were included
        included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
        self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])

        included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
        self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])

        # Check that native_recipes[0] and target_recipes[1] were excluded
        excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
        self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])

        excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
        self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])

    def test_archiver_multiconfig_shared_unpack_and_patch(self):
        """
        Test that shared recipes in original mode with diff enabled works in multiconfig,
        otherwise it will not build when using the same TMP dir.
        """

        features = 'BBMULTICONFIG = "mc1 mc2"\n'
        features += 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "original"\n'
        features += 'ARCHIVER_MODE[diff] = "1"\n'
        self.write_config(features)

        # We can use any machine in multiconfig as long as they are different
        self.write_config('MACHINE = "qemuarm"\n', 'mc1')
        self.write_config('MACHINE = "qemux86"\n', 'mc2')

        task = 'do_unpack_and_patch'
        # Use gcc-source as it is a shared recipe (appends the pv to the pn)
        pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']

        # Generate the tasks signatures
        bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))

        # Check the tasks signatures
        # To be machine agnostic the tasks needs to generate the same signature for each machine
        locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
        # Read via a context manager so the file handle is not leaked
        with open(locked_sigs_inc) as sigfile:
            locked_sigs = sigfile.read()
        task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
        uniq_sigs = set(task_sigs)
        self.assertEqual(len(uniq_sigs), 1, \
            'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
            % (task, pn, locked_sigs_inc))

    def test_archiver_srpm_mode(self):
        """
        Test that in srpm mode, the added recipe dependencies at least exist/work [YOCTO #11121]
        """

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[srpm] = "1"\n'
        features += 'PACKAGE_CLASSES = "package_rpm"\n'
        self.write_config(features)

        # -n (dry run) is enough: we only check the dependency graph resolves
        bitbake('-n selftest-nopackages selftest-ed')

    def _test_archiver_mode(self, mode, target_file_name, extra_config=None):
        """Deploy archives for selftest-ed-native with ARCHIVER_MODE[src]
        set to *mode* (plus *extra_config* lines, if any) and assert that
        *target_file_name* was produced in the deploy directory."""
        target = 'selftest-ed-native'

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "%s"\n' % (mode)
        if extra_config:
            features += extra_config
        self.write_config(features)

        bitbake('-c clean %s' % (target))
        bitbake('-c deploy_archives %s' % (target))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'BUILD_SYS'])
        glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'], '%s-*' % (target))
        glob_result = glob.glob(glob_str)
        self.assertTrue(glob_result, 'Missing archiver directory for %s' % (target))

        archive_path = os.path.join(glob_result[0], target_file_name)
        self.assertTrue(os.path.exists(archive_path), 'Missing archive file %s' % (target_file_name))

    def test_archiver_mode_original(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "original"`.
        """

        self._test_archiver_mode('original', 'ed-1.14.1.tar.lz')

    def test_archiver_mode_patched(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
        """

        self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.xz')

    def test_archiver_mode_configured(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
        """

        self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.xz')

    def test_archiver_mode_recipe(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
        """

        self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.xz',
                                 'ARCHIVER_MODE[recipe] = "1"\n')

    def test_archiver_mode_diff(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[diff] = "1"`.
        Exclusions controlled by `ARCHIVER_MODE[diff-exclude]` are not yet tested.
        """

        self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-diff.gz',
                                 'ARCHIVER_MODE[diff] = "1"\n')

    def test_archiver_mode_dumpdata(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[dumpdata] = "1"`.
        """

        self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-showdata.dump',
                                 'ARCHIVER_MODE[dumpdata] = "1"\n')

    def test_archiver_mode_mirror(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`.
        """

        self._test_archiver_mode('mirror', 'ed-1.14.1.tar.lz',
                                 'BB_GENERATE_MIRROR_TARBALLS = "1"\n')

    def test_archiver_mode_mirror_excludes(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"` and
        correctly excludes an archive when its URL matches
        `ARCHIVER_MIRROR_EXCLUDE`.
        """

        target = 'selftest-ed'
        target_file_name = 'ed-1.14.1.tar.lz'

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "mirror"\n'
        features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
        features += 'ARCHIVER_MIRROR_EXCLUDE = "${GNU_MIRROR}"\n'
        self.write_config(features)

        bitbake('-c clean %s' % (target))
        bitbake('-c deploy_archives %s' % (target))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS'])
        glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'], '%s-*' % (target))
        glob_result = glob.glob(glob_str)
        self.assertTrue(glob_result, 'Missing archiver directory for %s' % (target))

        # The archive itself must NOT have been deployed (its URL is excluded)
        archive_path = os.path.join(glob_result[0], target_file_name)
        self.assertFalse(os.path.exists(archive_path), 'Failed to exclude archive file %s' % (target_file_name))

    def test_archiver_mode_mirror_combined(self):
        """
        Test that the archiver works with `ARCHIVER_MODE[src] = "mirror"`
        and `ARCHIVER_MODE[mirror] = "combined"`. Archives for multiple recipes
        should all end up in the 'mirror' directory.
        """

        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "mirror"\n'
        features += 'ARCHIVER_MODE[mirror] = "combined"\n'
        features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
        features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
        self.write_config(features)

        for target in ['selftest-ed', 'selftest-hardlink']:
            bitbake('-c clean %s' % (target))
            bitbake('-c deploy_archives %s' % (target))

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
        for target_file_name in ['ed-1.14.1.tar.lz', 'hello.c']:
            glob_str = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
            glob_result = glob.glob(glob_str)
            self.assertTrue(glob_result, 'Missing archive file %s' % (target_file_name))

    def test_archiver_mode_mirror_gitsm(self):
        """
        Test that the archiver correctly handles git submodules with
        `ARCHIVER_MODE[src] = "mirror"`.
        """
        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "mirror"\n'
        features += 'ARCHIVER_MODE[mirror] = "combined"\n'
        features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
        features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
        self.write_config(features)

        bitbake('-c clean git-submodule-test')
        bitbake('-c deploy_archives -f git-submodule-test')

        # Every submodule repository must have its own mirror tarball
        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
        for target_file_name in [
            'git2_git.yoctoproject.org.git-submodule-test.tar.gz',
            'git2_git.yoctoproject.org.bitbake-gitsm-test1.tar.gz',
            'git2_git.yoctoproject.org.bitbake-gitsm-test2.tar.gz',
            'git2_git.openembedded.org.bitbake.tar.gz'
        ]:
            target_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
            self.assertTrue(os.path.exists(target_path))

    def test_archiver_mode_mirror_gitsm_shallow(self):
        """
        Test that the archiver correctly handles git submodules with
        `ARCHIVER_MODE[src] = "mirror"`.
        """
        features = 'INHERIT += "archiver"\n'
        features += 'ARCHIVER_MODE[src] = "mirror"\n'
        features += 'ARCHIVER_MODE[mirror] = "combined"\n'
        features += 'BB_GENERATE_MIRROR_TARBALLS = "1"\n'
        features += 'COPYLEFT_LICENSE_INCLUDE = "*"\n'
        features += 'BB_GIT_SHALLOW = "1"\n'
        features += 'BB_GENERATE_SHALLOW_TARBALLS = "1"\n'
        # Use a dedicated DL_DIR so shallow tarballs do not clash with
        # full clones from other tests
        features += 'DL_DIR = "${TOPDIR}/downloads-shallow"\n'
        self.write_config(features)

        bitbake('-c clean git-submodule-test')
        bitbake('-c deploy_archives -f git-submodule-test')

        bb_vars = get_bb_vars(['DEPLOY_DIR_SRC'])
        for target_file_name in [
            'gitsmshallow_git.yoctoproject.org.git-submodule-test_a2885dd-1_master.tar.gz',
            'gitsmshallow_git.yoctoproject.org.bitbake-gitsm-test1_bare_120f4c7-1.tar.gz',
            'gitsmshallow_git.yoctoproject.org.bitbake-gitsm-test2_bare_f66699e-1.tar.gz',
            'gitsmshallow_git.openembedded.org.bitbake_bare_52a144a-1.tar.gz',
            'gitsmshallow_git.openembedded.org.bitbake_bare_c39b997-1.tar.gz'
        ]:
            target_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], 'mirror', target_file_name)
            self.assertTrue(os.path.exists(target_path))
|
||||
14
sources/poky/meta/lib/oeqa/selftest/cases/baremetal.py
Normal file
14
sources/poky/meta/lib/oeqa/selftest/cases/baremetal.py
Normal file
@@ -0,0 +1,14 @@
|
||||
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class BaremetalTest(OESelftestTestCase):
    """Smoke test for the bare-metal toolchain configuration."""

    def test_baremetal(self):
        """A hello-world image must build with TCLIBC set to baremetal."""
        self.write_config('TCLIBC = "baremetal"')
        bitbake('baremetal-helloworld')
|
||||
242
sources/poky/meta/lib/oeqa/selftest/cases/bblayers.py
Normal file
242
sources/poky/meta/lib/oeqa/selftest/cases/bblayers.py
Normal file
@@ -0,0 +1,242 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class BitbakeLayers(OESelftestTestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(BitbakeLayers, cls).setUpClass()
|
||||
bitbake("python3-jsonschema-native")
|
||||
bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
|
||||
|
||||
def test_bitbakelayers_layerindexshowdepends(self):
|
||||
result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
|
||||
find_in_contents = re.search("openembedded-core", result.output)
|
||||
self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output)
|
||||
|
||||
def test_bitbakelayers_showcrossdepends(self):
|
||||
result = runCmd('bitbake-layers show-cross-depends')
|
||||
self.assertIn('aspell', result.output)
|
||||
|
||||
def test_bitbakelayers_showlayers(self):
|
||||
result = runCmd('bitbake-layers show-layers')
|
||||
self.assertIn('meta-selftest', result.output)
|
||||
|
||||
def test_bitbakelayers_showappends(self):
|
||||
recipe = "xcursor-transparent-theme"
|
||||
bb_file = self.get_recipe_basename(recipe)
|
||||
result = runCmd('bitbake-layers show-appends')
|
||||
self.assertIn(bb_file, result.output)
|
||||
|
||||
def test_bitbakelayers_showoverlayed(self):
|
||||
result = runCmd('bitbake-layers show-overlayed')
|
||||
self.assertIn('aspell', result.output)
|
||||
|
||||
    def test_bitbakelayers_flatten(self):
        """Flatten all configured layers into a single tree and verify that
        the meta-selftest bbappend was folded into the flattened recipe."""
        recipe = "xcursor-transparent-theme"
        recipe_path = "recipes-graphics/xcursor-transparent-theme"
        recipe_file = self.get_recipe_basename(recipe)
        testoutdir = os.path.join(self.builddir, 'test_bitbakelayers_flatten')
        # The output dir must not pre-exist, or flatten would mix old content in
        self.assertFalse(os.path.isdir(testoutdir), msg = "test_bitbakelayers_flatten should not exist at this point in time")
        self.track_for_cleanup(testoutdir)
        result = runCmd('bitbake-layers flatten %s' % testoutdir)
        bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
        self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
        contents = ftools.read_file(bb_file)
        # flatten marks merged appends with a '##### bbappended from ... #####'
        # banner; the appended 'include test_recipe.inc' must follow it
        find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
        self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
|
||||
|
||||
    def test_bitbakelayers_add_remove(self):
        """Add and remove meta-skeleton via bitbake-layers, both by path and
        by wildcard, checking show-layers after every step."""
        test_layer = os.path.join(get_bb_var('COREBASE'), 'meta-skeleton')
        # Precondition: meta-skeleton must not already be configured
        result = runCmd('bitbake-layers show-layers')
        self.assertNotIn('meta-skeleton', result.output, "This test cannot run with meta-skeleton in bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
        # Round 1: add by full path, then remove by full path
        result = runCmd('bitbake-layers add-layer %s' % test_layer)
        result = runCmd('bitbake-layers show-layers')
        self.assertIn('meta-skeleton', result.output, msg = "Something wrong happened. meta-skeleton layer was not added to conf/bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
        result = runCmd('bitbake-layers remove-layer %s' % test_layer)
        result = runCmd('bitbake-layers show-layers')
        self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step. bitbake-layers show-layers output: %s" % result.output)
        # Round 2: add again, then remove using a wildcard pattern
        result = runCmd('bitbake-layers add-layer %s' % test_layer)
        result = runCmd('bitbake-layers show-layers')
        self.assertIn('meta-skeleton', result.output, msg = "Something wrong happened. meta-skeleton layer was not added to conf/bblayers.conf. bitbake-layers show-layers output: %s" % result.output)
        result = runCmd('bitbake-layers remove-layer */meta-skeleton')
        result = runCmd('bitbake-layers show-layers')
        self.assertNotIn('meta-skeleton', result.output, msg = "meta-skeleton should have been removed at this step. bitbake-layers show-layers output: %s" % result.output)
|
||||
|
||||
    def test_bitbakelayers_showrecipes(self):
        """Exercise show-recipes: unfiltered, filtered by recipe name, and
        filtered by inherited class(es) via -i, including the error path."""
        # Unfiltered listing contains recipes and images alike
        result = runCmd('bitbake-layers show-recipes')
        self.assertIn('aspell:', result.output)
        self.assertIn('mtd-utils:', result.output)
        self.assertIn('core-image-minimal:', result.output)
        # Filtering by a recipe name shows only that recipe
        result = runCmd('bitbake-layers show-recipes mtd-utils')
        self.assertIn('mtd-utils:', result.output)
        self.assertNotIn('aspell:', result.output)
        # -i filters on inherited classes
        result = runCmd('bitbake-layers show-recipes -i image')
        self.assertIn('core-image-minimal', result.output)
        self.assertNotIn('mtd-utils:', result.output)
        # -i with several classes requires all of them to be inherited
        result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
        self.assertIn('libproxy:', result.output)
        result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
        self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
        self.assertNotIn('wget:', result.output) # doesn't inherit cmake
        self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
        # An unknown class must produce a non-zero exit and an ERROR message
        result = runCmd('bitbake-layers show-recipes -i nonexistentclass', ignore_status=True)
        self.assertNotEqual(result.status, 0, 'bitbake-layers show-recipes -i nonexistentclass should have failed')
        self.assertIn('ERROR:', result.output)
|
||||
|
||||
    def test_bitbakelayers_createlayer(self):
        """Create a layer with create-layer, add it to the build, and verify
        its metadata (priority, collections vars) and save-build-conf output."""
        priority = 10
        layername = 'test-bitbakelayer-layercreate'
        layerpath = os.path.join(self.builddir, layername)
        self.assertFalse(os.path.exists(layerpath), '%s should not exist at this point in time' % layerpath)
        result = runCmd('bitbake-layers create-layer --priority=%d %s' % (priority, layerpath))
        self.track_for_cleanup(layerpath)
        result = runCmd('bitbake-layers add-layer %s' % layerpath)
        self.add_command_to_tearDown('bitbake-layers remove-layer %s' % layerpath)
        # show-layers should list the new layer with its path and priority
        result = runCmd('bitbake-layers show-layers')
        find_in_contents = re.search(re.escape(layername) + r'\s+' + re.escape(layerpath) + r'\s+' + re.escape(str(priority)), result.output)
        self.assertTrue(find_in_contents, "%s not found in layers\n%s" % (layername, result.output))

        # The generated layer.conf must define the standard per-layer variables
        layervars = ['BBFILE_PRIORITY', 'BBFILE_PATTERN', 'LAYERDEPENDS', 'LAYERSERIES_COMPAT']
        bb_vars = get_bb_vars(['BBFILE_COLLECTIONS'] + ['%s_%s' % (v, layername) for v in layervars])

        for v in layervars:
            varname = '%s_%s' % (v, layername)
            self.assertIsNotNone(bb_vars[varname], "%s not found" % varname)

        # The layer name must appear as its own word in BBFILE_COLLECTIONS
        find_in_contents = re.search(r'(^|\s)' + re.escape(layername) + r'($|\s)', bb_vars['BBFILE_COLLECTIONS'])
        self.assertTrue(find_in_contents, "%s not in BBFILE_COLLECTIONS" % layername)

        self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))

        # save-build-conf must snapshot the current config as a named template
        result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
        for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
            fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
            self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
|
||||
|
||||
def get_recipe_basename(self, recipe):
|
||||
recipe_file = ""
|
||||
result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
|
||||
for line in result.output.splitlines():
|
||||
if recipe in line:
|
||||
recipe_file = line
|
||||
break
|
||||
|
||||
self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
|
||||
return os.path.basename(recipe_file)
|
||||
|
||||
    def validate_layersjson(self, json):
        """Validate the layers json file *json* against the schema shipped
        in meta/files, using the staged python3-jsonschema-native tools.

        NOTE(review): the parameter name shadows the stdlib 'json' module;
        kept for interface compatibility with existing callers."""
        python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
        jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
        jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
        # runCmd raises on failure, which is the actual validation check
        result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
|
||||
|
||||
def test_validate_examplelayersjson(self):
|
||||
json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
|
||||
self.validate_layersjson(json)
|
||||
|
||||
    def test_bitbakelayers_setup(self):
        """End-to-end test of create-layers-setup: generate setup-layers.json,
        run the produced setup-layers script to check out the layers, then use
        oe-setup-build from the current tree to set up a build from them."""
        result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
        jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
        self.validate_layersjson(jsonfile)

        # The revision-under-test may not necessarily be available on the remote server,
        # so replace it with a revision that has a yocto-4.1 tag.
        import json
        with open(jsonfile) as f:
            data = json.load(f)
        for s in data['sources']:
            data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
        with open(jsonfile, 'w') as f:
            json.dump(data, f)

        # Run the generated script and check the checkout produced a layer list
        testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
        result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
        layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
        self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))

        # As setup-layers checkout out an old revision of poky, there is no setup-build symlink,
        # and we need to run oe-setup-build directly from the current poky tree under test
        oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
        oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
        os.symlink(oe_setup_build,oe_setup_build_l)

        cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
        result = runCmd(cmd)
        cond = "conf/templates/default" in result.output
        self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))

        # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
        conf = None
        if 'poky-default' in result.output:
            conf = 'poky-default'
        elif 'meta-default' in result.output:
            conf = 'meta-default'
        self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))

        cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
        result = runCmd(cmd)
|
||||
|
||||
def test_bitbakelayers_updatelayer(self):
    """Verify 'bitbake-layers create-layers-setup --update --use-custom-reference'
    rewrites the rev/describe of only the selected repository in setup-layers.json."""
    import json

    runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
    jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
    self.validate_layersjson(jsonfile)

    def git_remote(repo):
        # Re-read the freshly written layers json and return repo's git-remote record.
        with open(jsonfile) as f:
            return json.load(f)['sources'][repo]['git-remote']

    with open(jsonfile) as f:
        repos = [s for s in json.load(f)['sources']]

    self.assertTrue(len(repos) > 1, "Not enough repositories available")
    self.validate_layersjson(jsonfile)

    test_ref_1 = 'ref_1'
    test_ref_2 = 'ref_2'

    # Create a new layers setup using custom references
    runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
           .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
    self.validate_layersjson(jsonfile)

    first_before = git_remote(repos[0])
    second_before = git_remote(repos[1])

    self.assertEqual(first_before['rev'], test_ref_1, "Revision not set correctly: '{}'".format(first_before['rev']))
    self.assertEqual(first_before['describe'], '', "Describe not cleared: '{}'".format(first_before['describe']))
    self.assertEqual(second_before['rev'], test_ref_1, "Revision not set correctly: '{}'".format(second_before['rev']))
    self.assertEqual(second_before['describe'], '', "Describe not cleared: '{}'".format(second_before['describe']))

    # Update one of the repositories in the layers setup using a different custom reference
    # This should only update the selected repository, everything else should remain as is
    runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
           .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
    self.validate_layersjson(jsonfile)

    first_after = git_remote(repos[0])
    second_after = git_remote(repos[1])

    self.assertEqual(first_after['rev'], test_ref_2, "Revision not set correctly: '{}'".format(first_after['rev']))
    self.assertEqual(first_after['describe'], '', "Describe not cleared: '{}'".format(first_after['describe']))
    self.assertEqual(second_after['rev'], second_before['rev'], "Revision should not be updated: '{}'".format(second_after['rev']))
    self.assertEqual(second_after['describe'], second_before['describe'], "Describe should not be updated: '{}'".format(second_after['describe']))
|
||||
203
sources/poky/meta/lib/oeqa/selftest/cases/bblock.py
Normal file
203
sources/poky/meta/lib/oeqa/selftest/cases/bblock.py
Normal file
@@ -0,0 +1,203 @@
|
||||
#
|
||||
# Copyright (c) 2023 BayLibre, SAS
|
||||
# Author: Julien Stepahn <jstephan@baylibre.com>
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-2.0-only
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import bb.tinfoil
|
||||
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
|
||||
class BBLock(OESelftestTestCase):
    """Selftests for the 'bblock' tool.

    bblock locks/unlocks task signatures by writing SIGGEN_LOCKEDSIGS_*
    entries to <builddir>/conf/bblock.conf. These tests drive the tool and
    check the lockfile contents plus bitbake's Note/Warning output.
    """

    @classmethod
    def setUpClass(cls):
        super(BBLock, cls).setUpClass()
        # Path where bblock stores the locked signatures.
        cls.lockfile = cls.builddir + "/conf/bblock.conf"

    def unlock_recipes(self, recipes=None, tasks=None):
        """Run 'bblock -r' and verify the lockfile state.

        With recipes given, their task signatures must be gone from the
        lockfile (which must still exist); with no recipes at all, the whole
        lockfile must have been removed.
        """
        cmd = "bblock -r "
        if recipes:
            cmd += " ".join(recipes)
        if tasks:
            cmd += " -t " + ",".join(tasks)
        result = runCmd(cmd)

        if recipes:
            # ensure all signatures are removed from lockfile
            contents = ftools.read_file(self.lockfile)
            for recipe in recipes:
                # Fix: iterate an empty list when no tasks were passed;
                # the original 'for task in tasks' raised TypeError on tasks=None.
                for task in tasks or []:
                    find_in_contents = re.search(
                        # Fix: raw string literal; '\s' in a plain string is an
                        # invalid escape sequence (SyntaxWarning on Python 3.12+).
                        r'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
                        contents,
                    )
                    self.assertFalse(
                        find_in_contents,
                        msg="%s:%s should not be present into bblock.conf anymore"
                        % (recipe, task),
                    )
            self.assertExists(self.lockfile)
        else:
            self.assertNotExists(self.lockfile)

    def lock_recipes(self, recipes, tasks=None):
        """Run 'bblock' for recipes (optionally restricted to tasks) and verify
        the expected SIGGEN_LOCKEDSIGS entries landed in the lockfile."""
        cmd = "bblock " + " ".join(recipes)
        if tasks:
            cmd += " -t " + ",".join(tasks)

        result = runCmd(cmd)

        self.assertExists(self.lockfile)

        # ensure all signatures are added to lockfile
        contents = ftools.read_file(self.lockfile)
        for recipe in recipes:
            if tasks:
                for task in tasks:
                    find_in_contents = re.search(
                        # Fix: raw string, see unlock_recipes above.
                        r'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
                        contents,
                    )
                    self.assertTrue(
                        find_in_contents,
                        msg="%s:%s was not added into bblock.conf. bblock output: %s"
                        % (recipe, task, result.output),
                    )

    def modify_tasks(self, recipes, tasks):
        """Write a bbappend per recipe that appends to each given task,
        thereby changing the task's signature; registers cleanup."""
        for recipe in recipes:
            # Fix: reset the append text for every recipe; the original
            # accumulated it across recipes, so later recipes' bbappends
            # contained duplicated append blocks.
            task_append = ""
            bb_vars = get_bb_vars(["PV"], recipe)
            recipe_pv = bb_vars["PV"]
            recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"

            os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
            recipe_append_path = os.path.join(
                self.testlayer_path, "recipes-test", recipe, recipe_append_file
            )

            for task in tasks:
                task_append += "%s:append() {\n#modify task hash \n}\n" % task
            ftools.write_file(recipe_append_path, task_append)
            self.add_command_to_tearDown(
                "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
            )

    def test_lock_single_recipe_single_task(self):
        recipes = ["quilt"]
        tasks = ["do_compile"]
        self._run_test(recipes, tasks)

    def test_lock_single_recipe_multiple_tasks(self):
        recipes = ["quilt"]
        tasks = ["do_compile", "do_install"]
        self._run_test(recipes, tasks)

    def test_lock_single_recipe_all_tasks(self):
        recipes = ["quilt"]
        self._run_test(recipes, None)

    def test_lock_multiple_recipe_single_task(self):
        recipes = ["quilt", "bc"]
        tasks = ["do_compile"]
        self._run_test(recipes, tasks)

    def test_lock_architecture_specific(self):
        """Signatures locked for another MACHINE must not affect the current one."""
        # unlock all recipes and ensure no bblock.conf file exist
        self.unlock_recipes()

        recipes = ["quilt"]
        tasks = ["do_compile"]

        # lock quilt's do_compile task for another machine
        if self.td["MACHINE"] == "qemux86-64":
            machine = "qemuarm"
        else:
            machine = "qemux86-64"

        self.write_config('MACHINE = "%s"\n' % machine)

        self.lock_recipes(recipes, tasks)

        self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
        # modify quilt's do_compile task
        self.modify_tasks(recipes, tasks)

        # build quilt using the default machine
        # No Note/Warning should be emitted since sig is locked for another machine
        # (quilt package is architecture dependant)
        info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
        warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
        result = bitbake(recipes[0] + " -n")
        self.assertNotIn(info_message, result.output)
        self.assertNotIn(warn_message, result.output)

        # unlock all recipes
        self.unlock_recipes()

    def _run_test(self, recipes, tasks=None):
        """Lock, build, modify, rebuild, unlock; verify the Note/Warning
        output bitbake emits at every stage."""
        # unlock all recipes and ensure no bblock.conf file exist
        self.unlock_recipes()

        self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')

        # lock tasks for recipes
        result = self.lock_recipes(recipes, tasks)

        if not tasks:
            # No explicit task list: lock covered every task, so collect all
            # real (non-setscene, stamped) tasks of the recipe for checking.
            tasks = []
            result = bitbake("-c listtasks " + recipes[0])
            with bb.tinfoil.Tinfoil() as tinfoil:
                tinfoil.prepare(config_only=False, quiet=2)
                d = tinfoil.parse_recipe(recipes[0])

                for line in result.output.splitlines():
                    if line.startswith("do_"):
                        task = line.split()[0]
                        if "setscene" in task:
                            continue
                        if d.getVarFlag(task, "nostamp"):
                            continue
                        tasks.append(task)

        # build recipes. At this stage we should have a Note about recipes
        # having locked task's sig, but no warning since sig still match
        info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
            recipes
        )
        for recipe in recipes:
            result = bitbake(recipe + " -n")
            self.assertIn(info_message, result.output)
            for task in tasks:
                warn_message = "The %s:%s sig is computed to be" % (recipe, task)
                self.assertNotIn(warn_message, result.output)

        # modify all tasks that are locked to trigger a sig change then build the recipes
        # at this stage we should have a Note as before, but also a Warning for all
        # locked tasks indicating the sig mismatch
        self.modify_tasks(recipes, tasks)
        for recipe in recipes:
            result = bitbake(recipe + " -n")
            self.assertIn(info_message, result.output)
            for task in tasks:
                warn_message = "The %s:%s sig is computed to be" % (recipe, task)
                self.assertIn(warn_message, result.output)

        # unlock all tasks and rebuild, no more Note/Warning should remain
        self.unlock_recipes(recipes, tasks)
        for recipe in recipes:
            result = bitbake(recipe + " -n")
            self.assertNotIn(info_message, result.output)
            for task in tasks:
                warn_message = "The %s:%s sig is computed to be" % (recipe, task)
                self.assertNotIn(warn_message, result.output)

        # unlock all recipes
        self.unlock_recipes()
|
||||
182
sources/poky/meta/lib/oeqa/selftest/cases/bblogging.py
Normal file
182
sources/poky/meta/lib/oeqa/selftest/cases/bblogging.py
Normal file
@@ -0,0 +1,182 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class BitBakeLogging(OESelftestTestCase):
    """Check what a failing task prints to the console depending on
    BBINCLUDELOGS (include the task log in the error report or not) and the
    -v (verbose) bitbake switch.

    Naming convention for the A/B/C/D variants of each scenario:
      A: BBINCLUDELOGS off, not verbose
      B: BBINCLUDELOGS on,  not verbose
      C: BBINCLUDELOGS off, verbose
      D: BBINCLUDELOGS on,  verbose
    All tests force-run a deliberately failing task of the 'logging-test'
    recipe and inspect the captured console output.
    """

    def assertCount(self, item, entry, count):
        """Assert that the string *entry* occurs exactly *count* times in *item*."""
        self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))

    def test_shell_loggingA(self):
        # no logs, no verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c shelltest -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertNotIn("This is shell stdout", result.output)
        self.assertNotIn("This is shell stderr", result.output)

    def test_shell_loggingB(self):
        # logs, no verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c shelltest -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is shell stdout", 1)
        self.assertCount(result.output, "This is shell stderr", 1)

    def test_shell_loggingC(self):
        # no logs, verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_shell_loggingD(self):
        # logs, verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exec_func_shell_loggingA(self):
        # no logs, no verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exec_func_shell -f",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertNotIn("This is shell stdout", result.output)
        self.assertNotIn("This is shell stderr", result.output)

    def test_python_exec_func_shell_loggingB(self):
        # logs, no verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exec_func_shell -f",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is shell stdout", 1)
        self.assertCount(result.output, "This is shell stderr", 1)

    def test_python_exec_func_shell_loggingC(self):
        # no logs, verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exec_func_shell_loggingD(self):
        # logs, verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # two copies due to set +x
        self.assertCount(result.output, "This is shell stdout", 2)
        self.assertCount(result.output, "This is shell stderr", 2)

    def test_python_exit_loggingA(self):
        # no logs, no verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertNotIn("This is python stdout", result.output)

    def test_python_exit_loggingB(self):
        # logs, no verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # A sys.exit() should include the output
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exit_loggingC(self):
        # no logs, verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exit_loggingD(self):
        # logs, verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingA(self):
        # no logs, no verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exec_func_python -f",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertNotIn("This is python stdout", result.output)

    def test_python_exec_func_python_loggingB(self):
        # logs, no verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exec_func_python -f",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # A sys.exit() should include the output
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingC(self):
        # no logs, verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_exec_func_python_loggingD(self):
        # logs, verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
                         ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python stdout", 1)

    def test_python_fatal_loggingA(self):
        # no logs, no verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertNotIn("This is python fatal test stdout", result.output)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingB(self):
        # logs, no verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        # A bb.fatal() should not include the output
        self.assertNotIn("This is python fatal test stdout", result.output)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingC(self):
        # no logs, verbose
        self.write_config('BBINCLUDELOGS = ""')
        result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python fatal test stdout", 1)
        self.assertCount(result.output, "This is a fatal error", 1)

    def test_python_fatal_loggingD(self):
        # logs, verbose
        self.write_config('BBINCLUDELOGS = "yes"')
        result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertCount(result.output, "This is python fatal test stdout", 1)
        self.assertCount(result.output, "This is a fatal error", 1)
|
||||
|
||||
377
sources/poky/meta/lib/oeqa/selftest/cases/bbtests.py
Normal file
377
sources/poky/meta/lib/oeqa/selftest/cases/bbtests.py
Normal file
@@ -0,0 +1,377 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class BitbakeTests(OESelftestTestCase):
|
||||
|
||||
def getline(self, res, line):
    """Return the first line of *res.output* containing *line*, or None."""
    matches = (candidate for candidate in res.output.split('\n') if line in candidate)
    return next(matches, None)
|
||||
|
||||
# Test bitbake can run from the <builddir>/conf directory
def test_run_bitbake_from_dir_1(self):
    """'bitbake -e' must succeed when invoked from <builddir>/conf."""
    os.chdir(os.path.join(self.builddir, 'conf'))
    self.assertEqual(bitbake('-e').status, 0, msg = "bitbake couldn't run from \"conf\" dir")
|
||||
|
||||
# Test bitbake can run from the <builddir>'s parent directory
def test_run_bitbake_from_dir_2(self):
    """'bitbake -e' must succeed from BUILDDIR's parent when BBPATH points at BUILDDIR."""
    my_env = os.environ.copy()
    my_env['BBPATH'] = my_env['BUILDDIR']
    os.chdir(os.path.dirname(os.environ['BUILDDIR']))
    self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from builddir's parent directory")
|
||||
|
||||
# Test bitbake can run from some other random system location (we use /tmp/)
def test_run_bitbake_from_dir_3(self):
    """'bitbake -e' must succeed from an unrelated directory when BBPATH is set."""
    my_env = os.environ.copy()
    my_env['BBPATH'] = my_env['BUILDDIR']
    os.chdir("/tmp/")
    self.assertEqual(bitbake('-e', env=my_env).status, 0, msg = "bitbake couldn't run from /tmp/")
|
||||
|
||||
|
||||
def test_event_handler(self):
    """INHERIT += test_events must fire BuildStarted/BuildCompleted handlers,
    and the InvalidEvent handler must never trigger."""
    self.write_config("INHERIT += \"test_events\"")
    result = bitbake('selftest-hello-native')
    # BuildStarted note must appear before task execution, BuildCompleted after the summary.
    find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
    find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
    self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
    self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
    self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
|
||||
|
||||
def test_local_sstate(self):
    """After a clean, a rebuild must restore the recipe from local sstate
    (i.e. at least one *_setscene task must run)."""
    bitbake('selftest-hello-native')
    bitbake('selftest-hello-native -cclean')
    result = bitbake('selftest-hello-native')
    find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
    self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
|
||||
|
||||
def test_bitbake_invalid_recipe(self):
    """'bitbake -b' with a non-existent recipe must print a clear error message."""
    result = bitbake('-b asdf', ignore_status=True)
    self.assertTrue("ERROR: Unable to find any recipe file matching 'asdf'" in result.output, msg = "Though asdf recipe doesn't exist, bitbake didn't output any err. message. bitbake output: %s" % result.output)
|
||||
|
||||
def test_bitbake_invalid_target(self):
    """bitbake with an unknown target must fail with a 'Nothing PROVIDES' error."""
    result = bitbake('asdf', ignore_status=True)
    self.assertIn("ERROR: Nothing PROVIDES 'asdf'", result.output)
|
||||
|
||||
def test_warnings_errors(self):
    """A failing build must report non-zero WARNING and ERROR counts in the
    final 'Summary:' lines of bitbake's output."""
    result = bitbake('-b asdf', ignore_status=True)
    # "w.{2,3}?" matches both "was" and "were" (singular/plural summaries).
    find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output)
    find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output)
    # Fixed typo in the failure messages: "mumber" -> "number".
    self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output)
    self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output)
|
||||
|
||||
def test_invalid_patch(self):
    """A patch that cannot be applied must make do_patch fail with an ERROR line."""
    # This patch should fail to apply.
    self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
    self.write_config("INHERIT:remove = \"report-error\"")
    result = bitbake('man-db -c patch', ignore_status=True)
    self.delete_recipeinc('man-db')
    bitbake('-cclean man-db')
    # Keep the last matching ERROR line (same selection as a scan-and-overwrite loop).
    error_lines = [candidate for candidate in result.output.split('\n')
                   if candidate.startswith("ERROR:") and "failed" in candidate and "do_patch" in candidate]
    found = error_lines[-1] if error_lines else False
    self.assertTrue(found and found.startswith("ERROR:"), msg = "Incorrectly formed patch application didn't fail. bitbake output: %s" % result.output)
|
||||
|
||||
def test_force_task_1(self):
    """Forcing do_package must re-split packages and re-run do_package_write_rpm
    (test 1 from bug 5875)."""
    # test 1 from bug 5875
    import uuid
    test_recipe = 'zlib'
    # Need to use uuid otherwise hash equivalence would change the workflow
    test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
    bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
    image_dir = bb_vars['D']
    pkgsplit_dir = bb_vars['PKGDEST']
    man_dir = bb_vars['mandir']
    self.write_config("PACKAGE_CLASSES = \"package_rpm\"")

    bitbake('-c clean %s' % test_recipe)
    bitbake('-c package -f %s' % test_recipe)
    self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)

    # Tamper with an installed man page, then force do_package again.
    man_file = os.path.join(image_dir + man_dir, 'man3/zlib.3')
    ftools.append_file(man_file, test_data)
    bitbake('-c package -f %s' % test_recipe)

    # The modification must be visible in the re-split zlib-doc package.
    man_split_file = os.path.join(pkgsplit_dir, 'zlib-doc' + man_dir, 'man3/zlib.3')
    man_split_content = ftools.read_file(man_split_file)
    self.assertIn(test_data, man_split_content, 'The man file has not changed in packages-split.')

    # A plain rebuild must now re-run the rpm packaging task.
    ret = bitbake(test_recipe)
    self.assertIn('task do_package_write_rpm:', ret.output, 'Task do_package_write_rpm did not re-executed.')
|
||||
|
||||
def test_force_task_2(self):
    """'bitbake -C compile' must force do_compile and re-run every downstream
    task (test 2 from bug 5875)."""
    test_recipe = 'zlib'

    bitbake(test_recipe)
    self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)

    result = bitbake('-C compile %s' % test_recipe)
    look_for_tasks = ['do_compile:', 'do_install:', 'do_populate_sysroot:', 'do_package:']
    for task in look_for_tasks:
        # Fixed: the %s placeholder was never filled in, so a failure would
        # have printed a literal "Couldn't find %s task.".
        self.assertIn(task, result.output, msg="Couldn't find %s task." % task)
|
||||
|
||||
def test_bitbake_g(self):
    """'bitbake -g' must produce task-depends.dot mentioning the target recipe."""
    recipe = 'base-files'
    result = bitbake('-g %s' % recipe)
    # -g writes its graph files into the current directory; clean them up.
    for f in ['pn-buildlist', 'task-depends.dot']:
        self.addCleanup(os.remove, f)
    self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output)
    self.assertIn(recipe, ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')))
|
||||
|
||||
def test_image_manifest(self):
    """Building an image must create the <IMAGE_LINK_NAME>.manifest symlink in the deploy dir."""
    bitbake('core-image-minimal')
    bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal")
    deploydir = bb_vars["DEPLOY_DIR_IMAGE"]
    imagename = bb_vars["IMAGE_LINK_NAME"]
    manifest = os.path.join(deploydir, imagename + ".manifest")
    # IMAGE_LINK_NAME artifacts are symlinks to the versioned files.
    self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest)
|
||||
|
||||
def test_invalid_recipe_src_uri(self):
    """do_fetch must fail with a checksum error when SRC_URI points at a
    non-existent local file."""
    data = 'SRC_URI = "file://invalid"'
    self.write_recipeinc('man-db', data)
    self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT:remove = \"report-error\"
""")
    self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))

    result = bitbake('-c fetch man-db', ignore_status=True)
    self.delete_recipeinc('man-db')
    # Fixed typo in the failure message: "succeded" -> "succeeded".
    self.assertEqual(result.status, 1, msg="Command succeeded when it should have failed. bitbake output: %s" % result.output)
    self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
|
||||
|
||||
def test_rename_downloaded_file(self):
    """The 'downloadfilename=' SRC_URI parameter must rename the fetched file
    (and its .done stamp) in DL_DIR."""
    # TODO unique dldir instead of using cleanall
    # TODO: need to set sstatedir?
    self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
""")
    self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))

    data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
    self.write_recipeinc('aspell', data)
    result = bitbake('-f -c fetch aspell', ignore_status=True)
    self.delete_recipeinc('aspell')
    self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output)
    dl_dir = get_bb_var("DL_DIR")
    self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir)
    self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir)
|
||||
|
||||
def test_environment(self):
    """A variable set in local.conf must show up in 'bitbake -e' output."""
    self.write_config("TEST_ENV=\"localconf\"")
    result = runCmd('bitbake -e | grep TEST_ENV=')
    self.assertIn('localconf', result.output)
|
||||
|
||||
def test_dry_run(self):
    """'bitbake -n' (dry run) must complete successfully."""
    result = runCmd('bitbake -n selftest-hello-native')
    self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
|
||||
|
||||
def test_just_parse(self):
    """'bitbake -p' (parse only) must complete without recipe parse errors."""
    result = runCmd('bitbake -p')
    self.assertEqual(0, result.status, "errors encountered when parsing recipes. %s" % result.output)
|
||||
|
||||
def test_version(self):
    """'bitbake -s' must list a version for a known recipe (wget)."""
    result = runCmd('bitbake -s | grep wget')
    find = re.search(r"wget *:([0-9a-zA-Z\.\-]+)", result.output)
    self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
|
||||
|
||||
def test_prefile(self):
    """'bitbake -r <prefile>' must fail for a missing prefile, apply the
    prefile's variables when it exists, and let local.conf override them."""
    # Test when the prefile does not exist
    result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
    self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
    # Test when the prefile exists
    preconf = os.path.join(self.builddir, 'conf/prefile.conf')
    self.track_for_cleanup(preconf)
    ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
    result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
    self.assertIn('prefile', result.output)
    # local.conf is parsed after the prefile, so it wins.
    self.write_config("TEST_PREFILE=\"localconf\"")
    result = runCmd('bitbake -r conf/prefile.conf -e | grep TEST_PREFILE=')
    self.assertIn('localconf', result.output)
|
||||
|
||||
def test_postfile(self):
    """'bitbake -R <postfile>' must fail for a missing postfile and, when it
    exists, its variables must override local.conf."""
    # Test when the postfile does not exist
    result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
    self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
    # Test when the postfile exists
    postconf = os.path.join(self.builddir, 'conf/postfile.conf')
    self.track_for_cleanup(postconf)
    ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
    self.write_config("TEST_POSTFILE=\"localconf\"")
    # The postfile is parsed after local.conf, so it wins.
    result = runCmd('bitbake -R conf/postfile.conf -e | grep TEST_POSTFILE=')
    self.assertIn('postfile', result.output)
|
||||
|
||||
def test_checkuri(self):
    """'bitbake -c checkuri' must run successfully for a recipe with valid SRC_URI."""
    result = runCmd('bitbake -c checkuri m4')
    self.assertEqual(0, result.status, msg = "\"checkuri\" task was not executed. bitbake output: %s" % result.output)
|
||||
|
||||
def test_continue(self):
    """With -k (continue), bitbake must keep building other targets after a
    task failure: the failing man-db task must not stop xcursor-transparent-theme."""
    self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
SSTATE_DIR = \"${TOPDIR}/download-selftest\"
INHERIT:remove = \"report-error\"
""")
    self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
    # Inject a task into man-db that always fails before do_fetch.
    self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
    runCmd('bitbake -c cleanall man-db xcursor-transparent-theme')
    result = runCmd('bitbake -c unpack -k man-db xcursor-transparent-theme', ignore_status=True)
    # The other target's do_unpack must start *after* the failure was reported.
    errorpos = result.output.find('ERROR: Function failed: do_fail_task')
    manver = re.search("NOTE: recipe xcursor-transparent-theme-(.*?): task do_unpack: Started", result.output)
    continuepos = result.output.find('NOTE: recipe xcursor-transparent-theme-%s: task do_unpack: Started' % manver.group(1))
    self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
|
||||
|
||||
def test_non_gplv3(self):
    """With INCOMPATIBLE_LICENSE = GPL-3.0-or-later, selftest-ed must build
    under its GPL-2.0 licensing and only the GPL-2.0 generic license file
    may be deployed."""
    self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
require conf/distro/include/no-gplv3.inc
''')
    result = bitbake('selftest-ed', ignore_status=True)
    self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
    lic_dir = get_bb_var('LICENSE_DIRECTORY')
    arch = get_bb_var('SSTATE_PKGARCH')
    # The GPL-3 generic license must NOT have been deployed...
    filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
    self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
    # ...but the GPL-2 one must be present.
    filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later')
    self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
|
||||
|
||||
def test_setscene_only(self):
    """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
    test_recipe = 'selftest-hello-native'

    # Populate sstate, wipe the build output, then rebuild from sstate only.
    bitbake(test_recipe)
    bitbake('-c clean %s' % test_recipe)
    build_result = bitbake('--setscene-only %s' % test_recipe)

    executed_tasks = re.findall(r'task\s+(do_\S+):', build_result.output)

    # Everything that ran must have been a setscene variant.
    for task in executed_tasks:
        self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
                      'Executed tasks were: %s' % (task, str(executed_tasks)))
|
||||
|
||||
def test_skip_setscene(self):
    """Verify bitbake --skip-setscene: after a setscene-only restore it should
    do nothing, and after a clean it should run only real (non-setscene) tasks."""
    test_recipe = 'selftest-hello-native'

    # Populate the sstate cache, then clean and restore from sstate only.
    bitbake(test_recipe)
    bitbake('-c clean %s' % test_recipe)

    ret = bitbake('--setscene-only %s' % test_recipe)
    tasks = re.findall(r'task\s+(do_\S+):', ret.output)

    # Sanity: the restore must have used only setscene tasks.
    for task in tasks:
        self.assertIn('_setscene', task, 'A task different from _setscene ran: %s.\n'
                      'Executed tasks were: %s' % (task, str(tasks)))

    # Run without setscene. Should do nothing
    ret = bitbake('--skip-setscene %s' % test_recipe)
    tasks = re.findall(r'task\s+(do_\S+):', ret.output)

    self.assertFalse(tasks, 'Tasks %s ran when they should not have' % (str(tasks)))

    # Clean (leave sstate cache) and run with --skip-setscene. No setscene
    # tasks should run
    bitbake('-c clean %s' % test_recipe)

    ret = bitbake('--skip-setscene %s' % test_recipe)
    tasks = re.findall(r'task\s+(do_\S+):', ret.output)

    for task in tasks:
        self.assertNotIn('_setscene', task, 'A _setscene task ran: %s.\n'
                         'Executed tasks were: %s' % (task, str(tasks)))
|
||||
|
||||
def test_bbappend_order(self):
    """ Bitbake should bbappend to recipe in a predictable order """
    test_recipe = 'ed'
    bb_vars = get_bb_vars(['SUMMARY', 'PV'], test_recipe)
    test_recipe_summary_before = bb_vars['SUMMARY']
    test_recipe_pv = bb_vars['PV']
    recipe_append_file = test_recipe + '_' + test_recipe_pv + '.bbappend'
    expected_recipe_summary = test_recipe_summary_before

    # Create five bbappends, each in its own directory, each appending its
    # index to SUMMARY; if appends apply in order, SUMMARY ends " 0 1 2 3 4".
    for i in range(5):
        recipe_append_dir = test_recipe + '_test_' + str(i)
        recipe_append_path = os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir, recipe_append_file)
        os.mkdir(os.path.join(self.testlayer_path, 'recipes-test', recipe_append_dir))
        feature = 'SUMMARY += "%s"\n' % i
        ftools.write_file(recipe_append_path, feature)
        expected_recipe_summary += ' %s' % i

    self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, 'recipes-test',
                                                            test_recipe + '_test_*'))

    # Re-evaluate SUMMARY with all appends in place and compare.
    test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
    self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
|
||||
|
||||
def test_git_patchtool(self):
    """ PATCHTOOL=git should work with non-git sources like tarballs
    test recipe for the test must NOT contain git:// repository in SRC_URI
    """
    test_recipe = "man-db"
    self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"')
    src = get_bb_var("SRC_URI",test_recipe)
    gitscm = re.search("git://", src)
    self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe))
    result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
    # The parentheses in git's error message must be escaped, otherwise they
    # form a regex group and the literal message can never be matched,
    # leaving the assertion below vacuously true.
    fatal = re.search(r"fatal: not a git repository \(or any of the parent directories\)", result.output)
    self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"")
    self.delete_recipeinc(test_recipe)
    bitbake('-cclean {}'.format(test_recipe))
|
||||
|
||||
def test_git_patchtool2(self):
    """ Test if PATCHTOOL=git works with git repo and doesn't reinitialize it
    """
    test_recipe = "gitrepotest"
    # Pre-condition: the recipe must fetch from a git:// URL.
    src = get_bb_var("SRC_URI",test_recipe)
    gitscm = re.search("git://", src)
    self.assertTrue(gitscm, "test_git_patchtool pre-condition failed: {} test recipe doesn't contains git repo!".format(test_recipe))
    result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
    # If patch.py had reinitialized the repo, a "bitbake_patching_started"
    # marker commit would appear in the source tree's git log.
    srcdir = get_bb_var('S', test_recipe)
    result = runCmd("git log", cwd = srcdir)
    self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir))
    self.delete_recipeinc(test_recipe)
    bitbake('-cclean {}'.format(test_recipe))
|
||||
|
||||
|
||||
def test_git_unpack_nonetwork(self):
    """
    Test that a recipe with a floating tag that needs to be resolved upstream doesn't
    access the network in a patch task run in a separate build invocation
    """
    # Enable the recipe to float using a distro override
    self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")

    # Fetch first, then patch in a second invocation; the patch run
    # must succeed without re-resolving the tag over the network.
    bitbake('gitunpackoffline -c fetch')
    bitbake('gitunpackoffline -c patch')
|
||||
|
||||
def test_git_unpack_nonetwork_fail(self):
    """
    Test that a recipe with a floating tag which doesn't call get_srcrev() in the fetcher
    raises an error when the fetcher is called.
    """

    # Enable the recipe to float using a distro override
    self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")

    # The fetch must fail with the specific diagnostic about the missing
    # bb.fetch2.get_srcrev() call.
    result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
    self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
|
||||
|
||||
def test_unexpanded_variable_in_path(self):
    """
    Test that bitbake fails if directory contains unexpanded bitbake variable in the name
    """
    recipe_name = "gitunpackoffline"
    # Append an undefined variable to PV so "${UNDEFVAL}" leaks into WORKDIR.
    self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
    result = bitbake('{}'.format(recipe_name), ignore_status=True)
    self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
    # NOTE: "polution" matches bitbake's actual error text; do not "fix" it here.
    self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
                              result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
|
||||
56
sources/poky/meta/lib/oeqa/selftest/cases/binutils.py
Normal file
56
sources/poky/meta/lib/oeqa/selftest/cases/binutils.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import time
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.case import OEPTestResultTestCase
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars
|
||||
|
||||
def parse_values(content):
    """Yield (test name, status) tuples from DejaGnu ``.sum`` report lines.

    Lines not starting with a known status prefix are ignored.
    """
    statuses = ("PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED",
                "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING")
    for line in content:
        for status in statuses:
            prefix = status + ": "
            if line.startswith(prefix):
                yield line[len(prefix):].strip(), status
                break
|
||||
|
||||
@OETestTag("toolchain-user", "toolchain-system")
class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
    """Run the binutils/gas/ld DejaGnu testsuites via the
    binutils-cross-testsuite recipe and record results as ptest sections."""

    def test_binutils(self):
        self.run_binutils("binutils")

    def test_gas(self):
        self.run_binutils("gas")

    def test_ld(self):
        self.run_binutils("ld")

    def run_binutils(self, suite):
        """Build and run one testsuite, then harvest its .sum results.

        suite: one of "binutils", "gas" or "ld".
        """
        # Restrict the recipe's check task to the requested suite.
        features = []
        features.append('CHECK_TARGETS = "{0}"'.format(suite))
        self.write_config("\n".join(features))

        recipe = "binutils-cross-testsuite"
        bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
        builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]

        start_time = time.time()

        bitbake("{0} -c check".format(recipe))

        end_time = time.time()

        # The .sum file location differs between suites; try both layouts.
        sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
        if not os.path.exists(sumspath):
            sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
        logpath = os.path.splitext(sumspath)[0] + ".log"

        # The binutils suite keeps its plain name; others get a prefix.
        ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
        self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
        with open(sumspath, "r") as f:
            for test, result in parse_values(f):
                self.ptest_result(ptestsuite, test, result)
|
||||
|
||||
52
sources/poky/meta/lib/oeqa/selftest/cases/buildhistory.py
Normal file
52
sources/poky/meta/lib/oeqa/selftest/cases/buildhistory.py
Normal file
@@ -0,0 +1,52 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import datetime
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars
|
||||
|
||||
|
||||
class BuildhistoryBase(OESelftestTestCase):
    """Shared helpers for buildhistory tests: enable the buildhistory class
    and run a build, optionally expecting a buildhistory-detected error."""

    def config_buildhistory(self, tmp_bh_location=False):
        """Enable buildhistory if not already active; optionally redirect
        BUILDHISTORY_DIR to a throwaway, cleanup-tracked location."""
        bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT'])
        if ('buildhistory' not in bb_vars['USER_CLASSES']) and ('buildhistory' not in bb_vars['INHERIT']):
            add_buildhistory_config = 'INHERIT += "buildhistory"\nBUILDHISTORY_COMMIT = "1"'
            self.append_config(add_buildhistory_config)

        if tmp_bh_location:
            # Using a temporary buildhistory location for testing
            tmp_bh_dir = os.path.join(self.builddir, "tmp_buildhistory_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
            buildhistory_dir_config = "BUILDHISTORY_DIR = \"%s\"" % tmp_bh_dir
            self.append_config(buildhistory_dir_config)
            self.track_for_cleanup(tmp_bh_dir)

    def run_buildhistory_operation(self, target, global_config='', target_config='', change_bh_location=False, expect_error=False, error_regex=''):
        """Build `target` with buildhistory enabled and the given extra config.

        change_bh_location: use a fresh temporary BUILDHISTORY_DIR.
        expect_error/error_regex: when set, the build must fail and its
        output must match error_regex; otherwise it must succeed.
        """
        # bool() replaces the original redundant if/else assignment.
        self.config_buildhistory(bool(change_bh_location))

        self.append_config(global_config)
        self.append_recipeinc(target, target_config)
        bitbake("-cclean %s" % target)
        result = bitbake(target, ignore_status=True)
        # Undo the per-run configuration regardless of the outcome.
        self.remove_config(global_config)
        self.remove_recipeinc(target, target_config)

        if expect_error:
            self.assertEqual(result.status, 1, msg="Error expected for global config '%s' and target config '%s'" % (global_config, target_config))
            search_for_error = re.search(error_regex, result.output)
            self.assertTrue(search_for_error, msg="Could not find desired error in output: %s (%s)" % (error_regex, result.output))
        else:
            self.assertEqual(result.status, 0, msg="Command 'bitbake %s' has failed unexpectedly: %s" % (target, result.output))
|
||||
|
||||
# No tests should be added to the base class.
|
||||
# Please create a new class that inherit this one, or use one of those already available for adding tests.
|
||||
239
sources/poky/meta/lib/oeqa/selftest/cases/buildoptions.py
Normal file
239
sources/poky/meta/lib/oeqa/selftest/cases/buildoptions.py
Normal file
@@ -0,0 +1,239 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import glob as g
|
||||
import shutil
|
||||
import tempfile
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
|
||||
from oeqa.core.decorator.data import skipIfMachine
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.core.decorator import OETestTag
|
||||
|
||||
class ImageOptionsTests(OESelftestTestCase):
    """Tests for image-build configuration options (incremental RPM image
    generation, ccache, read-only rootfs)."""

    def test_incremental_image_generation(self):
        """INC_RPM_IMAGE_GEN=1: adding/removing an IMAGE_FEATURE should show
        up as an incremental install/erase in log.do_rootfs."""
        image_pkgtype = get_bb_var("IMAGE_PKGTYPE")
        if image_pkgtype != 'rpm':
            self.skipTest('Not using RPM as main package format')
        bitbake("-c clean core-image-minimal")
        self.write_config('INC_RPM_IMAGE_GEN = "1"')
        self.append_config('IMAGE_FEATURES += "ssh-server-openssh"')
        bitbake("core-image-minimal")
        log_data_file = os.path.join(get_bb_var("WORKDIR", "core-image-minimal"), "temp/log.do_rootfs")
        log_data_created = ftools.read_file(log_data_file)
        incremental_created = re.search(r"Installing\s*:\s*packagegroup-core-ssh-openssh", log_data_created)
        self.remove_config('IMAGE_FEATURES += "ssh-server-openssh"')
        self.assertTrue(incremental_created, msg = "Match failed in:\n%s" % log_data_created)
        # Rebuild without the feature; the package must be erased incrementally.
        bitbake("core-image-minimal")
        log_data_removed = ftools.read_file(log_data_file)
        incremental_removed = re.search(r"Erasing\s*:\s*packagegroup-core-ssh-openssh", log_data_removed)
        self.assertTrue(incremental_removed, msg = "Match failed in:\n%s" % log_data_removed)

    def test_ccache_tool(self):
        """With INHERIT += "ccache", a compile log should mention ccache."""
        bitbake("ccache-native")
        bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'ccache-native')
        p = bb_vars['SYSROOT_DESTDIR'] + bb_vars['bindir'] + "/" + "ccache"
        self.assertTrue(os.path.isfile(p), msg = "No ccache found (%s)" % p)
        self.write_config('INHERIT += "ccache"')
        recipe = "libgcc-initial"
        self.add_command_to_tearDown('bitbake -c clean %s' % recipe)
        bitbake("%s -c clean" % recipe)
        # -f forces a real recompile so ccache appears in the log.
        bitbake("%s -f -c compile" % recipe)
        log_compile = os.path.join(get_bb_var("WORKDIR", recipe), "temp/log.do_compile")
        with open(log_compile, "r") as f:
            loglines = "".join(f.readlines())
        self.assertIn("ccache", loglines, msg="No match for ccache in %s log.do_compile. For further details: %s" % (recipe , log_compile))

    def test_read_only_image(self):
        """Build images with read-only-rootfs; success implies no pending postinsts."""
        distro_features = get_bb_var('DISTRO_FEATURES')
        if not ('x11' in distro_features and 'opengl' in distro_features):
            self.skipTest('core-image-sato/weston requires x11 and opengl in distro features')
        self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
        bitbake("core-image-sato core-image-weston")
        # do_image will fail if there are any pending postinsts
|
||||
|
||||
class DiskMonTest(OESelftestTestCase):
    """Exercise BB_DISKMON_DIRS actions (STOPTASKS, HALT, WARN) by setting an
    impossibly large free-space threshold so the monitor always triggers."""

    def test_stoptask_behavior(self):
        # STOPTASKS: build must stop scheduling new tasks and exit non-zero.
        self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
        res = bitbake("delay -c delay", ignore_status = True)
        self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
        # HALT: build must abort immediately and exit non-zero.
        self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
        res = bitbake("delay -c delay", ignore_status = True)
        self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output)
        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
        # WARN: build succeeds but a free-space warning must be printed.
        self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
        res = bitbake("delay -c delay")
        self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output)
|
||||
|
||||
class SanityOptionsTest(OESelftestTestCase):
    """Tests around QA error/warning switching and layer sanity behavior."""

    def getline(self, res, line):
        # Return the first output line containing `line`, or None if absent.
        for l in res.output.split('\n'):
            if line in l:
                return l

    def test_options_warnqa_errorqa_switch(self):
        """A QA issue listed in ERROR_QA must fail the build with ERROR:,
        and the same issue moved to WARN_QA must only emit WARNING:."""

        self.write_config("INHERIT:remove = \"report-error\"")
        if "packages-list" not in get_bb_var("ERROR_QA"):
            self.append_config("ERROR_QA:append = \" packages-list\"")

        # Duplicate ${PN}-dbg in PACKAGES triggers the packages-list QA issue.
        self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
        self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
        res = bitbake("xcursor-transparent-theme -f -c package", ignore_status=True)
        self.delete_recipeinc('xcursor-transparent-theme')
        line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
        self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
        self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
        # Now demote the issue to WARN_QA and repeat; build must succeed.
        self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
        self.append_config('ERROR_QA:remove = "packages-list"')
        self.append_config('WARN_QA:append = " packages-list"')
        res = bitbake("xcursor-transparent-theme -f -c package")
        self.delete_recipeinc('xcursor-transparent-theme')
        line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
        self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)

    def test_layer_without_git_dir(self):
        """
        Summary: Test that layer git revisions are displayed and do not fail without git repository
        Expected: The build to be successful and without "fatal" errors
        Product: oe-core
        Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        """

        dirpath = tempfile.mkdtemp()

        # Create a minimal dummy layer with only a layer.conf, no .git dir.
        dummy_layer_name = 'meta-dummy'
        dummy_layer_path = os.path.join(dirpath, dummy_layer_name)
        dummy_layer_conf_dir = os.path.join(dummy_layer_path, 'conf')
        os.makedirs(dummy_layer_conf_dir)
        dummy_layer_conf_path = os.path.join(dummy_layer_conf_dir, 'layer.conf')

        dummy_layer_content = 'BBPATH .= ":${LAYERDIR}"\n' \
                              'BBFILES += "${LAYERDIR}/recipes-*/*/*.bb ${LAYERDIR}/recipes-*/*/*.bbappend"\n' \
                              'BBFILE_COLLECTIONS += "%s"\n' \
                              'BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' \
                              'BBFILE_PRIORITY_%s = "6"\n' % (dummy_layer_name, dummy_layer_name, dummy_layer_name)

        ftools.write_file(dummy_layer_conf_path, dummy_layer_content)

        bblayers_conf = 'BBLAYERS += "%s"\n' % dummy_layer_path
        self.write_bblayers_config(bblayers_conf)

        test_recipe = 'ed'

        # Dry-run parse; must not emit git "fatal" errors for the dummy layer.
        ret = bitbake('-n %s' % test_recipe)

        err = 'fatal: Not a git repository'

        shutil.rmtree(dirpath)

        self.assertNotIn(err, ret.output)
|
||||
|
||||
|
||||
class BuildhistoryTests(BuildhistoryBase):
    """End-to-end buildhistory checks: directory creation, PR-going-backwards
    detection, and per-package file information."""

    def test_buildhistory_basic(self):
        self.run_buildhistory_operation('xcursor-transparent-theme')
        self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')), "buildhistory dir was not created.")

    def test_buildhistory_buildtime_pr_backwards(self):
        """Building with PR=r1 then PR=r0 must trigger the version-went-backwards
        QA error."""
        target = 'xcursor-transparent-theme'
        # Raw string: the escaped parentheses are regex escapes, not string
        # escapes, and would raise a SyntaxWarning in a plain string.
        error = r"ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds \(from .*-r1.* to .*-r0.*\)" % target
        self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
        self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)

    def test_fileinfo(self):
        """Verify FILELIST/PKGSIZE recorded in buildhistory 'latest' files."""
        self.config_buildhistory()
        bitbake('hicolor-icon-theme')
        history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme')
        self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.')

        def load_bh(f):
            # Parse "KEY = value" lines of a buildhistory file into a dict.
            d = {}
            for line in open(f):
                split = [s.strip() for s in line.split('=', 1)]
                if len(split) > 1:
                    d[split[0]] = split[1]
            return d

        data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest'))
        self.assertIn('FILELIST', data)
        self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme')
        self.assertGreater(int(data['PKGSIZE']), 0)

        # The -dev package is empty; if FILELIST is recorded it must be blank.
        data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
        if 'FILELIST' in data:
            self.assertEqual(data['FILELIST'], '')
        self.assertEqual(int(data['PKGSIZE']), 0)
|
||||
|
||||
class ArchiverTest(OESelftestTestCase):
    def test_arch_work_dir_and_export_source(self):
        """
        Test for archiving the work directory and exporting the source files.
        """
        # Enable the archiver in "original" mode with srpm packaging.
        self.write_config("""
INHERIT += "archiver"
PACKAGE_CLASSES = "package_rpm"
ARCHIVER_MODE[src] = "original"
ARCHIVER_MODE[srpm] = "1"
""")
        res = bitbake("xcursor-transparent-theme", ignore_status=True)
        self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
        # Both the .src.rpm and the original .tar.xz must land in DEPLOY_DIR_SRC.
        deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
        pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
        src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
        tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz"
        self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src)
|
||||
|
||||
class ToolchainOptions(OESelftestTestCase):
    """Checks for optional toolchain languages."""

    def test_toolchain_fortran(self):
        """
        Test that Fortran works by building a Hello, World binary.
        """
        # Force-enable the Fortran frontend, then build the sample program.
        self.write_config('FORTRAN:forcevariable = ",fortran"\n')
        bitbake('fortran-helloworld')
|
||||
|
||||
@OETestTag("yocto-mirrors")
class SourceMirroring(OESelftestTestCase):
    # Can we download everything from the Yocto Sources Mirror over http only
    def test_yocto_source_mirror(self):
        # Redirect every fetcher scheme through the Yocto source mirror and
        # restrict network access to it, then fetch the whole world.
        self.write_config("""
BB_ALLOWED_NETWORKS = "downloads.yoctoproject.org"
MIRRORS = ""
DL_DIR = "${TMPDIR}/test_downloads"
STAMPS_DIR = "${TMPDIR}/test_stamps"
SSTATE_DIR = "${TMPDIR}/test_sstate-cache"
PREMIRRORS = "\\
    bzr://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    cvs://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    gitsm://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    hg://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    osc://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    p4://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    svn://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    http://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n \\
    https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \\n"
""")

        bitbake("world --runall fetch")
|
||||
|
||||
|
||||
class Poisoning(OESelftestTestCase):
    """Verify that toolchain "poisoning" of host include paths is detected."""

    def test_poisoning(self):
        res = bitbake("poison", ignore_status=True)
        self.assertNotEqual(res.status, 0)
        # assertIn instead of assertTrue("..." in ...): same pass/fail
        # behavior, but the failure message shows the actual output.
        self.assertIn("is unsafe for cross-compilation", res.output)
|
||||
60
sources/poky/meta/lib/oeqa/selftest/cases/c_cpp.py
Normal file
60
sources/poky/meta/lib/oeqa/selftest/cases/c_cpp.py
Normal file
@@ -0,0 +1,60 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.core.decorator.data import skipIfNotQemuUsermode
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
|
||||
class CCppTests(OESelftestTestCase):
    """Shared helpers for running recipe-provided test binaries under
    qemu-user (used by the CMake and Meson subclasses)."""

    @skipIfNotQemuUsermode()
    def _qemu_usermode(self, recipe_name):
        # Good case: the recipe's run_tests task must succeed.
        self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
        bitbake("%s -c run_tests" % recipe_name)

    @skipIfNotQemuUsermode()
    def _qemu_usermode_failing(self, recipe_name):
        # Bad case: enable the recipe's deliberately failing test via
        # PACKAGECONFIG and expect run_tests to fail.
        config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
        self.write_config(config)
        self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
        result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
        self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
            result.command, result.status, result.output, result.error))
|
||||
|
||||
|
||||
class CMakeTests(CCppTests):
    """cmake-qemu.bbclass coverage via the cmake-example recipe."""

    def test_cmake_qemu(self):
        """Good case for cmake-qemu.bbclass.

        Build cmake-example and check that its CTests pass in qemu-user,
        which is wired up through CMAKE_CROSSCOMPILING_EMULATOR.
        """
        self._qemu_usermode("cmake-example")

    def test_cmake_qemu_failing(self):
        """Bad case for cmake-qemu.bbclass.

        With the intentionally broken comparison enabled, the CTests must fail.
        """
        self._qemu_usermode_failing("cmake-example")
|
||||
|
||||
|
||||
class MesonTests(CCppTests):
    """meson.bbclass qemu-user coverage via the meson-example recipe."""

    def test_meson_qemu(self):
        """Good case for meson.bbclass qemu-user support.

        Build meson-example and check that its unit test passes in qemu-user,
        configured through meson's exe_wrapper option.
        """
        self._qemu_usermode("meson-example")

    def test_meson_qemu_failing(self):
        """Bad case for meson.bbclass qemu-user support.

        With the intentionally broken comparison enabled, the unit test
        must fail under qemu-user.
        """
        self._qemu_usermode_failing("meson-example")
|
||||
89
sources/poky/meta/lib/oeqa/selftest/cases/containerimage.py
Normal file
89
sources/poky/meta/lib/oeqa/selftest/cases/containerimage.py
Normal file
@@ -0,0 +1,89 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
|
||||
|
||||
# This test builds an image with using the "container" IMAGE_FSTYPE, and
|
||||
# ensures that then files in the image are only the ones expected.
|
||||
#
|
||||
# The only package added to the image is container_image_testpkg, which
|
||||
# contains one file. However, due to some other things not cleaning up during
|
||||
# rootfs creation, there is some cruft. Ideally bugs will be filed and the
|
||||
# cruft removed, but for now we ignore some known set.
|
||||
#
|
||||
# Also for performance reasons we're only checking the cruft when using ipk.
|
||||
# When using deb, and rpm it is a bit different and we could test all
|
||||
# of them, but this test is more to catch if other packages get added by
|
||||
# default other than what is in ROOTFS_BOOTSTRAP_INSTALL.
|
||||
#
|
||||
class ContainerImageTests(OESelftestTestCase):
    """Build a "container" IMAGE_FSTYPE and assert the tarball contains
    exactly the expected file set (see module comment above for the known
    cruft that is tolerated)."""

    # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
    # the conversion type bar gets added as a dep as well
    def test_expected_files(self):

        def get_each_path_part(path):
            # Expand "/usr/bin" into ['./usr/bin/', './usr/'] so every
            # intermediate directory entry tar lists is accounted for.
            if path:
                part = [ '.' + path + '/' ]
                result = get_each_path_part(path.rsplit('/', 1)[0])
                if result:
                    return part + result
                else:
                    return part
            else:
                return None

        # Minimal configuration: ipk packaging, no extra features, sysvinit,
        # and no pre-generated ssh host keys (would add files to the image).
        self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy"
IMAGE_FSTYPES = "container"
PACKAGE_CLASSES = "package_ipk"
IMAGE_FEATURES = ""
IMAGE_BUILDINFO_FILE = ""
INIT_MANAGER = "sysvinit"
IMAGE_INSTALL:remove = "ssh-pregen-hostkeys"

""")

        bbvars = get_bb_vars(['bindir', 'sysconfdir', 'localstatedir',
                              'DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'],
                             target='container-test-image')
        # The one test payload file plus known rootfs-creation cruft.
        expected_files = [
            './',
            '.{bindir}/theapp',
            '.{sysconfdir}/default/',
            '.{sysconfdir}/default/postinst',
            '.{sysconfdir}/ld.so.cache',
            '.{sysconfdir}/timestamp',
            '.{sysconfdir}/version',
            './run/',
            '.{localstatedir}/cache/',
            '.{localstatedir}/lib/'
        ]

        expected_files = [ x.format(bindir=bbvars['bindir'],
                                    sysconfdir=bbvars['sysconfdir'],
                                    localstatedir=bbvars['localstatedir'])
                           for x in expected_files ]

        # Since tar lists all directories individually, make sure each element
        # from bindir, sysconfdir, etc is added
        expected_files += get_each_path_part(bbvars['bindir'])
        expected_files += get_each_path_part(bbvars['sysconfdir'])
        expected_files += get_each_path_part(bbvars['localstatedir'])

        expected_files = sorted(expected_files)

        # Build the image of course
        bitbake('container-test-image')

        image = os.path.join(bbvars['DEPLOY_DIR_IMAGE'],
                             bbvars['IMAGE_LINK_NAME'] + '.tar.bz2')

        # Ensure the files in the image are what we expect
        result = runCmd("tar tf {} | sort".format(image), shell=True)
        self.assertEqual(result.output.split('\n'), expected_files)
|
||||
242
sources/poky/meta/lib/oeqa/selftest/cases/cve_check.py
Normal file
242
sources/poky/meta/lib/oeqa/selftest/cases/cve_check.py
Normal file
@@ -0,0 +1,242 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import json
|
||||
import os
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars
|
||||
|
||||
class CVECheck(OESelftestTestCase):
    """Tests for the cve-check bbclass: version-comparison helpers,
    CVE version-string conversion, and the JSON report output of
    ``bitbake <recipe> -c cve_check``."""

    def _remove_reports(self, *paths):
        # Delete stale report files so the assertions only see output
        # produced by this test run; missing files are fine.
        for path in paths:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass

    def test_version_compare(self):
        """Exercise oe.cve_check.Version ordering: numeric and date-style
        versions, pre-release suffixes, and the optional 'alphabetical'
        and 'patch' suffix-handling modes."""
        from oe.cve_check import Version

        result = Version("100") > Version("99")
        self.assertTrue(result, msg="Failed to compare version '100' > '99'")
        result = Version("2.3.1") > Version("2.2.3")
        self.assertTrue(result, msg="Failed to compare version '2.3.1' > '2.2.3'")
        result = Version("2021-01-21") > Version("2020-12-25")
        self.assertTrue(result, msg="Failed to compare version '2021-01-21' > '2020-12-25'")
        result = Version("1.2-20200910") < Version("1.2-20200920")
        self.assertTrue(result, msg="Failed to compare version '1.2-20200910' < '1.2-20200920'")

        result = Version("1.0") >= Version("1.0beta")
        self.assertTrue(result, msg="Failed to compare version '1.0' >= '1.0beta'")
        result = Version("1.0-rc2") > Version("1.0-rc1")
        self.assertTrue(result, msg="Failed to compare version '1.0-rc2' > '1.0-rc1'")
        result = Version("1.0.alpha1") < Version("1.0")
        self.assertTrue(result, msg="Failed to compare version '1.0.alpha1' < '1.0'")
        result = Version("1.0_dev") <= Version("1.0")
        self.assertTrue(result, msg="Failed to compare version '1.0_dev' <= '1.0'")

        # ignore "p1" and "p2", so these should be equal
        result = Version("1.0p2") == Version("1.0p1")
        self.assertTrue(result, msg="Failed to compare version '1.0p2' to '1.0p1'")
        # ignore the "b" and "r"
        result = Version("1.0b") == Version("1.0r")
        self.assertTrue(result, msg="Failed to compare version '1.0b' to '1.0r'")

        # consider the trailing alphabet as patched level when comparing
        result = Version("1.0b", "alphabetical") < Version("1.0r", "alphabetical")
        self.assertTrue(result, msg="Failed to compare version with suffix '1.0b' < '1.0r'")
        result = Version("1.0b", "alphabetical") > Version("1.0", "alphabetical")
        self.assertTrue(result, msg="Failed to compare version with suffix '1.0b' > '1.0'")

        # consider the trailing "p" and "patch" as patched released when comparing
        result = Version("1.0", "patch") < Version("1.0p1", "patch")
        self.assertTrue(result, msg="Failed to compare version with suffix '1.0' < '1.0p1'")
        result = Version("1.0p2", "patch") > Version("1.0p1", "patch")
        self.assertTrue(result, msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
        result = Version("1.0_patch2", "patch") < Version("1.0_patch3", "patch")
        self.assertTrue(result, msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")

    def test_convert_cve_version(self):
        """Check oe.cve_check.convert_cve_version against the known
        upstream version-formatting quirks (OpenSSL, OpenSSH, kernel)."""
        from oe.cve_check import convert_cve_version

        # Default format
        self.assertEqual(convert_cve_version("8.3"), "8.3")
        self.assertEqual(convert_cve_version(""), "")

        # OpenSSL format version
        self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")

        # OpenSSH format
        self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
        self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")

        # Linux kernel format
        self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
        self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")

    def test_recipe_report_json(self):
        """Run cve_check for m4-native and validate both the summary and
        the per-recipe JSON report contents."""
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")

        self._remove_reports(summary_json, recipe_json)

        bitbake("m4-native -c cve_check")

        def check_m4_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "m4-native")
            found_cves = {issue["id"]: issue["status"] for issue in package["issue"]}
            # CVE-2008-1687 is known to be patched in the m4 recipe
            self.assertIn("CVE-2008-1687", found_cves)
            self.assertEqual(found_cves["CVE-2008-1687"], "Patched")

        self.assertExists(summary_json)
        check_m4_json(summary_json)
        self.assertExists(recipe_json)
        check_m4_json(recipe_json)

    def test_image_json(self):
        """Build an image with cve-check enabled and verify that a summary
        report and per-recipe reports are written."""
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        # (stray debug print of report_json removed)
        self._remove_reports(report_json)

        bitbake("core-image-minimal-initramfs")
        self.assertExists(report_json)

        # Check that the summary report lists at least one package
        with open(report_json) as f:
            report = json.load(f)
        self.assertEqual(report["version"], "1")
        self.assertGreater(len(report["package"]), 1)

        # Check that a random recipe wrote a recipe report to deploy/cve/
        recipename = report["package"][0]["name"]
        recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
        self.assertExists(recipe_report)
        with open(recipe_report) as f:
            report = json.load(f)
        self.assertEqual(report["version"], "1")
        self.assertEqual(len(report["package"]), 1)
        self.assertEqual(report["package"][0]["name"], recipename)

    def test_recipe_report_json_unpatched(self):
        """With CVE_CHECK_REPORT_PATCHED disabled, patched CVEs must be
        omitted from the report."""
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
CVE_CHECK_REPORT_PATCHED = "0"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")

        self._remove_reports(summary_json, recipe_json)

        bitbake("m4-native -c cve_check")

        def check_m4_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "m4-native")
            # m4 had only Patched CVEs, so the issues array will be empty
            self.assertEqual(package["issue"], [])

        self.assertExists(summary_json)
        check_m4_json(summary_json)
        self.assertExists(recipe_json)
        check_m4_json(recipe_json)

    def test_recipe_report_json_ignored(self):
        """Check that both Patched and Ignored CVEs (with detail and
        description fields) appear in the logrotate report."""
        config = """
INHERIT += "cve-check"
CVE_CHECK_FORMAT_JSON = "1"
CVE_CHECK_REPORT_PATCHED = "1"
"""
        self.write_config(config)

        vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
        recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")

        self._remove_reports(summary_json, recipe_json)

        bitbake("logrotate -c cve_check")

        # renamed from the copy-pasted "check_m4_json": this validates logrotate
        def check_logrotate_json(filename):
            with open(filename) as f:
                report = json.load(f)
            self.assertEqual(report["version"], "1")
            self.assertEqual(len(report["package"]), 1)
            package = report["package"][0]
            self.assertEqual(package["name"], "logrotate")
            found_cves = {}
            for issue in package["issue"]:
                found_cves[issue["id"]] = {
                    "status" : issue["status"],
                    "detail" : issue["detail"] if "detail" in issue else "",
                    "description" : issue["description"] if "description" in issue else ""
                }
            # m4 CVE should not be in logrotate
            self.assertNotIn("CVE-2008-1687", found_cves)
            # logrotate has both Patched and Ignored CVEs
            self.assertIn("CVE-2011-1098", found_cves)
            self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
            self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0)
            self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
            detail = "not-applicable-platform"
            description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
            self.assertIn("CVE-2011-1548", found_cves)
            self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
            self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
            self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
            self.assertIn("CVE-2011-1549", found_cves)
            self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
            self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
            self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
            self.assertIn("CVE-2011-1550", found_cves)
            self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
            self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
            self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)

        self.assertExists(summary_json)
        check_logrotate_json(summary_json)
        self.assertExists(recipe_json)
        check_logrotate_json(recipe_json)
|
||||
160
sources/poky/meta/lib/oeqa/selftest/cases/debuginfod.py
Normal file
160
sources/poky/meta/lib/oeqa/selftest/cases/debuginfod.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import socketserver
|
||||
import subprocess
|
||||
import time
|
||||
import urllib
|
||||
import pathlib
|
||||
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, runqemu
|
||||
|
||||
|
||||
class Debuginfod(OESelftestTestCase):
    """Tests that a native debuginfod server can serve debug info for
    packages built by bitbake, both to the host and to a qemu guest."""

    def wait_for_debuginfod(self, port):
        """
        debuginfod takes time to scan the packages and requesting too early may
        result in a test failure if the right packages haven't been scanned yet.

        Request the metrics endpoint periodically and wait for there to be no
        busy scanning threads.

        Returns if debuginfod is ready, raises an exception if not within the
        timeout.
        """

        # Wait two minutes (24 polls x 5 seconds)
        countdown = 24
        delay = 5
        latest = None

        while countdown:
            self.logger.info("waiting...")
            time.sleep(delay)

            self.logger.info("polling server")
            # BUG FIX: poll() returns the exit code once the process dies,
            # which may legitimately be 0 (falsy).  Compare against None so
            # a clean exit is also detected as "server dead".
            if self.debuginfod.poll() is not None:
                self.logger.info("server dead")
                self.debuginfod.communicate()
                self.fail("debuginfod terminated unexpectedly")
            self.logger.info("server alive")

            try:
                with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
                    for line in f.read().decode("ascii").splitlines():
                        key, value = line.rsplit(" ", 1)
                        if key == "thread_busy{role=\"scan\"}":
                            latest = int(value)
                            self.logger.info("Waiting for %d scan jobs to finish" % latest)
                            if latest == 0:
                                return
            except urllib.error.URLError as e:
                # TODO: how to catch just timeouts?
                self.logger.error(e)

            countdown -= 1

        raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)

    def start_debuginfod(self, feed_dir):
        """Launch a native debuginfod scanning feed_dir on a free port and
        wait until the initial scan has finished.

        Sets self.debuginfod (the Popen handle), self.debuginfod_find (path
        to the client binary) and self.port.
        """
        # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot

        # Save some useful paths for later
        native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
        native_bindir = native_sysroot / "usr" / "bin"
        # Use a local for the binary path; self.debuginfod is reserved for
        # the running process handle (previously it confusingly held both).
        debuginfod_bin = native_bindir / "debuginfod"
        self.debuginfod_find = native_bindir / "debuginfod-find"

        cmd = [
            debuginfod_bin,
            "--verbose",
            # In-memory database, this is a one-shot test
            "--database=:memory:",
            # Don't use all the host cores
            "--concurrency=8",
            "--connection-pool=8",
            # Disable rescanning, this is a one-shot test
            "--rescan-time=0",
            "--groom-time=0",
            feed_dir,
        ]

        format = get_bb_var("PACKAGE_CLASSES").split()[0]
        if format == "package_deb":
            cmd.append("--scan-deb-dir")
        elif format == "package_ipk":
            # NOTE(review): debuginfod has no dedicated ipk scanner; the deb
            # scanner is used here, presumably because ipk uses the same
            # ar-based container format — confirm against elfutils docs.
            cmd.append("--scan-deb-dir")
        elif format == "package_rpm":
            cmd.append("--scan-rpm-dir")
        else:
            self.fail("Unknown package class %s" % format)

        # Find a free port. Racey but the window is small.
        with socketserver.TCPServer(("localhost", 0), None) as s:
            self.port = s.server_address[1]
            cmd.append("--port=%d" % self.port)

        self.logger.info(f"Starting server {cmd}")
        self.debuginfod = subprocess.Popen(cmd, env={})
        self.wait_for_debuginfod(self.port)

    def test_debuginfod_native(self):
        """
        Test debuginfod outside of qemu, by building a package and looking up a
        binary's debuginfo using elfutils-native.
        """

        self.write_config("""
TMPDIR = "${TOPDIR}/tmp-debuginfod"
DISTRO_FEATURES:append = " debuginfod"
INHERIT += "localpkgfeed"
""")
        bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed")

        try:
            self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))

            env = os.environ.copy()
            env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port

            pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
            cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
            self.logger.info(f"Starting client {cmd}")
            output = subprocess.check_output(cmd, env=env, text=True)
            # This should be more comprehensive
            self.assertIn("/.cache/debuginfod_client/", output)
        finally:
            self.debuginfod.kill()

    @OETestTag("runqemu")
    def test_debuginfod_qemu(self):
        """
        Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
        """

        self.write_config("""
TMPDIR = "${TOPDIR}/tmp-debuginfod"
DISTRO_FEATURES:append = " debuginfod"
INHERIT += "localpkgfeed"
CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
""")
        bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed")

        try:
            self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))

            with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
                cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
                self.logger.info(f"Starting client {cmd}")
                status, output = qemu.run_serial(cmd)
                # This should be more comprehensive
                self.assertIn("/.cache/debuginfod_client/", output)
        finally:
            self.debuginfod.kill()
|
||||
2969
sources/poky/meta/lib/oeqa/selftest/cases/devtool.py
Normal file
2969
sources/poky/meta/lib/oeqa/selftest/cases/devtool.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1 @@
|
||||
A
|
||||
@@ -0,0 +1 @@
|
||||
B
|
||||
117
sources/poky/meta/lib/oeqa/selftest/cases/distrodata.py
Normal file
117
sources/poky/meta/lib/oeqa/selftest/cases/distrodata.py
Normal file
@@ -0,0 +1,117 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
import oe.recipeutils
|
||||
|
||||
class Distrodata(OESelftestTestCase):
    """Distro-wide metadata sanity checks: upstream version-check status
    and maintainer coverage for oe-core recipes."""

    def test_checkpkg(self):
        """
        Summary:     Test that upstream version checks do not regress
        Expected:    Upstream version checks should succeed except for the recipes listed in the exception list.
        Product:     oe-core
        Author:      Alexander Kanavin <alex.kanavin@gmail.com>
        """
        feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
        self.write_config(feature)

        # Each pkg entry is a tuple; pkg[0] is the recipe name, pkg[1] the
        # upstream-check status string.
        pkgs = oe.recipeutils.get_recipe_upgrade_status()

        # UNKNOWN_BROKEN: the check failed without the recipe admitting it;
        # KNOWN_BROKEN: the check succeeded but the recipe still sets
        # UPSTREAM_VERSION_UNKNOWN.
        regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN']
        regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN']
        msg = ""
        if len(regressed_failures) > 0:
            msg = msg + """
The following packages failed upstream version checks. Please fix them using UPSTREAM_CHECK_URI/UPSTREAM_CHECK_REGEX
(when using tarballs) or UPSTREAM_CHECK_GITTAGREGEX (when using git). If an upstream version check cannot be performed
(for example, if upstream does not use git tags), you can set UPSTREAM_VERSION_UNKNOWN to '1' in the recipe to acknowledge
that the check cannot be performed.
""" + "\n".join(regressed_failures)
        if len(regressed_successes) > 0:
            msg = msg + """
The following packages have been checked successfully for upstream versions,
but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please remove that line from the recipes.
""" + "\n".join(regressed_successes)
        self.assertTrue(len(regressed_failures) == 0 and len(regressed_successes) == 0, msg)

    def test_maintainers(self):
        """
        Summary:     Test that oe-core recipes have a maintainer and entries in maintainers list have a recipe
        Expected:    All oe-core recipes (except a few special static/testing ones) should have a maintainer listed in maintainers.inc file.
        Expected:    All entries in maintainers list should have a recipe file that matches them
        Product:     oe-core
        Author:      Alexander Kanavin <alex.kanavin@gmail.com>
        """
        def is_exception(pkg):
            # Recipes that deliberately have no maintainer entry.
            exceptions = ["packagegroup-",]
            for i in exceptions:
                if i in pkg:
                    return True
            return False

        def is_maintainer_exception(entry):
            # Maintainer entries that may have no matching recipe file
            # (variants resolved at parse time, non-default providers, ...).
            exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
                          "cve-update-nvd2-native",]
            for i in exceptions:
                if i in entry:
                    return True
            return False

        feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n'
        self.write_config(feature)

        # NOTE(review): `bb` is not imported in the visible part of this
        # file chunk — presumably imported at the top of the original file;
        # verify before moving this code.
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False)

            with_maintainer_list = []
            no_maintainer_list = []

            missing_recipes = []
            recipes = []
            prefix = "RECIPE_MAINTAINER:pn-"

            # We could have used all_recipes() here, but this method will find
            # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
            # instead of maintainers.inc
            for fn in tinfoil.all_recipe_files(variants=False):
                if not '/meta/recipes-' in fn:
                    # We are only interested in OE-Core
                    continue
                rd = tinfoil.parse_recipe_file(fn, appends=False)
                pn = rd.getVar('PN')
                recipes.append(pn)
                if is_exception(pn):
                    continue
                if rd.getVar('RECIPE_MAINTAINER'):
                    with_maintainer_list.append((pn, fn))
                else:
                    no_maintainer_list.append((pn, fn))

            # Reverse direction: every RECIPE_MAINTAINER:pn-<recipe> key in
            # the config data must correspond to a parsed recipe.
            maintainers = tinfoil.config_data.keys()
            for key in maintainers:
                if key.startswith(prefix):
                    recipe = tinfoil.config_data.expand(key[len(prefix):])
                    if is_maintainer_exception(recipe):
                        continue
                    if recipe not in recipes:
                        missing_recipes.append(recipe)

            if no_maintainer_list:
                self.fail("""
The following recipes do not have a maintainer assigned to them. Please add an entry to meta/conf/distro/include/maintainers.inc file.
""" + "\n".join(['%s (%s)' % i for i in no_maintainer_list]))

            if not with_maintainer_list:
                self.fail("""
The list of oe-core recipes with maintainers is empty. This may indicate that the test has regressed and needs fixing.
""")

            if missing_recipes:
                self.fail("""
Unable to find recipes for the following entries in maintainers.inc:
""" + "\n".join(['%s' % i for i in missing_recipes]))
|
||||
@@ -0,0 +1,33 @@
|
||||
# Based on runqemu.py test file
|
||||
#
|
||||
# Copyright (c) 2017 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, runqemu
|
||||
from oeqa.core.decorator.data import skipIfNotMachine
|
||||
import oe.types
|
||||
|
||||
class GenericEFITest(OESelftestTestCase):
    """EFI booting test class"""

    @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
    def test_boot_efi(self):
        # Build a wic image configured for systemd-boot/EFI and check that
        # booting it under OVMF produces qemu boot output.
        image = "core-image-minimal"

        cmd = "runqemu nographic serial wic ovmf"
        use_kvm = oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"])
        if use_kvm:
            cmd += " kvm"

        self.write_config("""
EFI_PROVIDER = "systemd-boot"
IMAGE_FSTYPES:pn-%s:append = " wic"
MACHINE_FEATURES:append = " efi"
WKS_FILE = "efi-bootdisk.wks.in"
IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage"
"""
% (image))

        # The OVMF firmware is needed alongside the image itself.
        bitbake(image + " ovmf")
        with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
            self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
|
||||
122
sources/poky/meta/lib/oeqa/selftest/cases/esdk.py
Normal file
122
sources/poky/meta/lib/oeqa/selftest/cases/esdk.py
Normal file
@@ -0,0 +1,122 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import tempfile
|
||||
import shutil
|
||||
import os
|
||||
import glob
|
||||
import time
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
|
||||
|
||||
class oeSDKExtSelfTest(OESelftestTestCase):
    """
    # Bugzilla Test Plan: 6033
    # This code is planned to be part of the automation for eSDK containig
    # Install libraries and headers, image generation binary feeds, sdk-update.
    """

    @staticmethod
    def get_esdk_environment(env_eSDK, tmpdir_eSDKQA):
        # Return the first environment-setup-* script found in the installed
        # eSDK directory.  env_eSDK is currently unused.
        # XXX: at this time use the first env need to investigate
        # what environment load oe-selftest, i586, x86_64
        pattern = os.path.join(tmpdir_eSDKQA, 'environment-setup-*')
        return glob.glob(pattern)[0]

    @staticmethod
    def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options):
        # Run `cmd` inside the installed eSDK: optionally append postconfig
        # to the eSDK's local.conf, then source the environment script and
        # execute the command in a shell.
        if postconfig:
            esdk_conf_file = os.path.join(tmpdir_eSDKQA, 'conf', 'local.conf')
            with open(esdk_conf_file, 'a+') as f:
                f.write(postconfig)
        if not options:
            options = {}
        if not 'shell' in options:
            # The command string uses `cd`/`.`/`unset`, so a shell is required.
            options['shell'] = True

        runCmd("cd %s; unset BBPATH; unset BUILDDIR; . %s; %s" % (tmpdir_eSDKQA, env_eSDK, cmd), **options)

    @staticmethod
    def generate_eSDK(image):
        # Build the extensible SDK installer for the given image.
        pn_task = '%s -c populate_sdk_ext' % image
        bitbake(pn_task)

    @staticmethod
    def get_eSDK_toolchain(image):
        # Return the path to the generated eSDK installer (.sh) for image.
        pn_task = '%s -c populate_sdk_ext' % image

        bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAINEXT_OUTPUTNAME'], pn_task)
        sdk_deploy = bb_vars['SDK_DEPLOY']
        toolchain_name = bb_vars['TOOLCHAINEXT_OUTPUTNAME']
        return os.path.join(sdk_deploy, toolchain_name + '.sh')

    @staticmethod
    def update_configuration(cls, image, tmpdir_eSDKQA, env_eSDK, ext_sdk_path):
        # NOTE(review): declared @staticmethod yet takes `cls` and ignores
        # the remaining parameters, operating on cls.* attributes instead —
        # looks like it was meant to be a @classmethod; confirm callers
        # before changing.  Regenerates and reinstalls the eSDK, then appends
        # an sstate-mirror configuration to its local.conf.
        sstate_dir = os.path.join(os.environ['BUILDDIR'], 'sstate-cache')

        oeSDKExtSelfTest.generate_eSDK(cls.image)

        cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
        runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))

        cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)

        sstate_config="""
ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
SSTATE_MIRRORS = "file://.* file://%s/PATH"
CORE_IMAGE_EXTRA_INSTALL = "perl"
""" % sstate_dir

        with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
            f.write(sstate_config)

    @classmethod
    def setUpClass(cls):
        # Build the eSDK once for the whole test class, install it into a
        # temporary directory under WORKDIR, and point it at the build's
        # sstate cache so the tests run quickly.
        super(oeSDKExtSelfTest, cls).setUpClass()
        cls.image = 'core-image-minimal'

        bb_vars = get_bb_vars(['SSTATE_DIR', 'WORKDIR'], cls.image)
        bb.utils.mkdirhier(bb_vars["WORKDIR"])
        cls.tmpdirobj = tempfile.TemporaryDirectory(prefix="selftest-esdk-", dir=bb_vars["WORKDIR"])
        cls.tmpdir_eSDKQA = cls.tmpdirobj.name

        oeSDKExtSelfTest.generate_eSDK(cls.image)

        # Install eSDK
        cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image)
        runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA))

        cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)

        # Configure eSDK to use sstate mirror from poky
        sstate_config="""
ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
SSTATE_MIRRORS = "file://.* file://%s/PATH"
""" % bb_vars["SSTATE_DIR"]
        with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
            f.write(sstate_config)

    @classmethod
    def tearDownClass(cls):
        # Give a still-running eSDK bitbake up to 10 seconds to release its
        # lock/database files before removing the temporary directory.
        for i in range(0, 10):
            if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
                time.sleep(1)
            else:
                break
        cls.tmpdirobj.cleanup()
        super().tearDownClass()

    def test_install_libraries_headers(self):
        # devtool sdk-install of a recipe already present in sstate.
        pn_sstate = 'bc'
        bitbake(pn_sstate)
        cmd = "devtool sdk-install %s " % pn_sstate
        oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)

    def test_image_generation_binary_feeds(self):
        # devtool build-image from within the installed eSDK.
        image = 'core-image-minimal'
        cmd = "devtool build-image %s" % image
        oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd)
|
||||
|
||||
44
sources/poky/meta/lib/oeqa/selftest/cases/externalsrc.py
Normal file
44
sources/poky/meta/lib/oeqa/selftest/cases/externalsrc.py
Normal file
@@ -0,0 +1,44 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import get_bb_var, runCmd
|
||||
|
||||
class ExternalSrc(OESelftestTestCase):
    # test that srctree_hash_files does not crash
    # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
    # so we check only that a recipe with externalsrc can be parsed
    def test_externalsrc_srctree_hash_files(self):
        """Parse git-submodule-test via externalsrc with the source tree in
        three states: plain git clone, git with submodules initialized, and
        with the .git directory removed."""
        test_recipe = "git-submodule-test"
        git_url = "git://git.yoctoproject.org/git-submodule-test"
        # BUG FIX: the previous TemporaryDirectory(...).name dropped the only
        # reference to the TemporaryDirectory object, so the directory was
        # deleted whenever the GC ran its finalizer (the test only worked
        # because `git clone` recreated it).  mkdtemp has no such finalizer;
        # clean-up is handled explicitly.
        externalsrc_dir = tempfile.mkdtemp(prefix="externalsrc")
        self.track_for_cleanup(externalsrc_dir)

        self.write_config(
            """
INHERIT += "externalsrc"
EXTERNALSRC:pn-%s = "%s"
""" % (test_recipe, externalsrc_dir)
        )

        # test with git without submodules
        runCmd('git clone %s %s' % (git_url, externalsrc_dir))
        os.unlink(externalsrc_dir + "/.gitmodules")
        open(".gitmodules", 'w').close()  # local file .gitmodules in cwd should not affect externalsrc parsing
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
        os.unlink(".gitmodules")

        # test with git with submodules
        runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
        runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")

        # test without git
        shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
        self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal to EXTERNALSRC")
|
||||
110
sources/poky/meta/lib/oeqa/selftest/cases/fetch.py
Normal file
110
sources/poky/meta/lib/oeqa/selftest/cases/fetch.py
Normal file
@@ -0,0 +1,110 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import tempfile
|
||||
import textwrap
|
||||
import bb.tinfoil
|
||||
import oe.path
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class Fetch(OESelftestTestCase):
    def test_git_mirrors(self):
        """
        Verify that the git fetcher will fall back to the HTTP mirrors. The
        recipe needs to be one that we have on the Yocto Project source mirror
        and is hosted in git.
        """

        # TODO: mktempd instead of hardcoding
        dldir = os.path.join(self.builddir, "download-git-mirrors")
        self.track_for_cleanup(dldir)

        def prepare(config):
            # Apply the given configuration and start from an empty
            # download directory so every fetch is exercised from scratch.
            self.write_config(config)
            oe.path.remove(dldir, recurse=True)

        # No mirrors, should use git to fetch successfully
        prepare("""
DL_DIR = "%s"
MIRRORS:forcevariable = ""
PREMIRRORS:forcevariable = ""
""" % dldir)
        bitbake("dbus-wait -c fetch -f")

        # No mirrors and broken git, should fail
        prepare("""
DL_DIR = "%s"
SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = ""
PREMIRRORS:forcevariable = ""
""" % dldir)
        with self.assertRaises(AssertionError):
            bitbake("dbus-wait -c fetch -f")

        # Broken git but a specific mirror
        prepare("""
DL_DIR = "%s"
SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
GIT_PROXY_COMMAND = "false"
MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
""" % dldir)
        bitbake("dbus-wait -c fetch -f")
|
||||
|
||||
|
||||
class Dependencies(OESelftestTestCase):
    def write_recipe(self, content, tempdir):
        """Write content to tempdir/test.bb and return its path."""
        recipe_path = os.path.join(tempdir, "test.bb")
        with open(recipe_path, "w") as handle:
            handle.write(content)
        return recipe_path

    def test_dependencies(self):
        """
        Verify that the correct dependencies are generated for specific SRC_URI entries.
        """

        with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
            tinfoil.prepare(config_only=False, quiet=2)

            def parse(recipe_text):
                # Write a throwaway recipe and return its parsed datastore.
                path = self.write_recipe(textwrap.dedent(recipe_text), tempdir)
                return tinfoil.parse_recipe_file(path)

            # A plain http zip: wget to fetch, unzip to unpack.
            d = parse("""
            LICENSE="CLOSED"
            SRC_URI="http://example.com/tarball.zip"
            """)
            self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
            self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))

            # Verify that the downloadfilename overrides the URI
            d = parse("""
            LICENSE="CLOSED"
            SRC_URI="https://example.com/tarball;downloadfilename=something.zip"
            """)
            self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
            self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")

            # An lzip-compressed ftp download needs lzip-native to unpack.
            d = parse("""
            LICENSE="CLOSED"
            SRC_URI="ftp://example.com/tarball.lz"
            """)
            self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
            self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))

            # A git URI pulls in git-native for fetching.
            d = parse("""
            LICENSE="CLOSED"
            SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
            """)
            self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))
|
||||
846
sources/poky/meta/lib/oeqa/selftest/cases/fitimage.py
Normal file
846
sources/poky/meta/lib/oeqa/selftest/cases/fitimage.py
Normal file
@@ -0,0 +1,846 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
|
||||
import os
|
||||
import re
|
||||
|
||||
class FitImageTests(OESelftestTestCase):
|
||||
|
||||
def test_fit_image(self):
|
||||
"""
|
||||
Summary: Check if FIT image and Image Tree Source (its) are built
|
||||
and the Image Tree Source has the correct fields.
|
||||
Expected: 1. fitImage and fitImage-its can be built
|
||||
2. The type, load address, entrypoint address and
|
||||
default values of kernel and ramdisk are as expected
|
||||
in the Image Tree Source. Not all the fields are tested,
|
||||
only the key fields that wont vary between different
|
||||
architectures.
|
||||
Product: oe-core
|
||||
Author: Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# Enable creation of fitImage
|
||||
KERNEL_IMAGETYPE = "Image"
|
||||
KERNEL_IMAGETYPES += " fitImage "
|
||||
KERNEL_CLASSES = " kernel-fitimage "
|
||||
|
||||
# RAM disk variables including load address and entrypoint for kernel and RAM disk
|
||||
IMAGE_FSTYPES += "cpio.gz"
|
||||
INITRAMFS_IMAGE = "core-image-minimal"
|
||||
# core-image-minimal is used as initramfs here, drop the rootfs suffix
|
||||
IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
|
||||
UBOOT_RD_LOADADDRESS = "0x88000000"
|
||||
UBOOT_RD_ENTRYPOINT = "0x88000000"
|
||||
UBOOT_LOADADDRESS = "0x80080000"
|
||||
UBOOT_ENTRYPOINT = "0x80080000"
|
||||
FIT_DESC = "A model description"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# fitImage is created as part of linux recipe
|
||||
image = "virtual/kernel"
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image)
|
||||
|
||||
fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
|
||||
"fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
|
||||
fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
|
||||
"fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
# Check that the type, load address, entrypoint address and default
|
||||
# values for kernel and ramdisk in Image Tree Source are as expected.
|
||||
# The order of fields in the below array is important. Not all the
|
||||
# fields are tested, only the key fields that wont vary between
|
||||
# different architectures.
|
||||
its_field_check = [
|
||||
'description = "A model description";',
|
||||
'type = "kernel";',
|
||||
'load = <0x80080000>;',
|
||||
'entry = <0x80080000>;',
|
||||
'type = "ramdisk";',
|
||||
'load = <0x88000000>;',
|
||||
'entry = <0x88000000>;',
|
||||
'default = "conf-1";',
|
||||
'kernel = "kernel-1";',
|
||||
'ramdisk = "ramdisk-1";'
|
||||
]
|
||||
|
||||
with open(fitimage_its_path) as its_file:
|
||||
field_index = 0
|
||||
for line in its_file:
|
||||
if field_index == len(its_field_check):
|
||||
break
|
||||
if its_field_check[field_index] in line:
|
||||
field_index +=1
|
||||
|
||||
if field_index != len(its_field_check): # if its equal, the test passed
|
||||
self.assertTrue(field_index == len(its_field_check),
|
||||
"Fields in Image Tree Source File %s did not match, error in finding %s"
|
||||
% (fitimage_its_path, its_field_check[field_index]))
|
||||
|
||||
|
||||
def test_sign_fit_image(self):
|
||||
"""
|
||||
Summary: Check if FIT image and Image Tree Source (its) are created
|
||||
and signed correctly.
|
||||
Expected: 1) its and FIT image are built successfully
|
||||
2) Scanning the its file indicates signing is enabled
|
||||
as requested by UBOOT_SIGN_ENABLE (using keys generated
|
||||
via FIT_GENERATE_KEYS)
|
||||
3) Dumping the FIT image indicates signature values
|
||||
are present (including for images as enabled via
|
||||
FIT_SIGN_INDIVIDUAL)
|
||||
4) Examination of the do_assemble_fitimage runfile/logfile
|
||||
indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN and
|
||||
UBOOT_MKIMAGE_SIGN_ARGS are working as expected.
|
||||
Product: oe-core
|
||||
Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
|
||||
work by Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# Enable creation of fitImage
|
||||
MACHINE = "beaglebone-yocto"
|
||||
KERNEL_IMAGETYPES += " fitImage "
|
||||
KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
|
||||
UBOOT_SIGN_ENABLE = "1"
|
||||
FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
|
||||
UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
|
||||
FIT_SIGN_INDIVIDUAL = "1"
|
||||
UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# fitImage is created as part of linux recipe
|
||||
image = "virtual/kernel"
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image)
|
||||
|
||||
fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
|
||||
"fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME']))
|
||||
fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
|
||||
"fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
req_itspaths = [
|
||||
['/', 'images', 'kernel-1'],
|
||||
['/', 'images', 'kernel-1', 'signature-1'],
|
||||
['/', 'images', 'fdt-am335x-boneblack.dtb'],
|
||||
['/', 'images', 'fdt-am335x-boneblack.dtb', 'signature-1'],
|
||||
['/', 'configurations', 'conf-am335x-boneblack.dtb'],
|
||||
['/', 'configurations', 'conf-am335x-boneblack.dtb', 'signature-1'],
|
||||
]
|
||||
|
||||
itspath = []
|
||||
itspaths = []
|
||||
linect = 0
|
||||
sigs = {}
|
||||
with open(fitimage_its_path) as its_file:
|
||||
linect += 1
|
||||
for line in its_file:
|
||||
line = line.strip()
|
||||
if line.endswith('};'):
|
||||
itspath.pop()
|
||||
elif line.endswith('{'):
|
||||
itspath.append(line[:-1].strip())
|
||||
itspaths.append(itspath[:])
|
||||
elif itspath and itspath[-1] == 'signature-1':
|
||||
itsdotpath = '.'.join(itspath)
|
||||
if not itsdotpath in sigs:
|
||||
sigs[itsdotpath] = {}
|
||||
if not '=' in line or not line.endswith(';'):
|
||||
self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
|
||||
key, value = line.split('=', 1)
|
||||
sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
|
||||
|
||||
for reqpath in req_itspaths:
|
||||
if not reqpath in itspaths:
|
||||
self.fail('Missing section in its file: %s' % reqpath)
|
||||
|
||||
reqsigvalues_image = {
|
||||
'algo': '"sha256,rsa2048"',
|
||||
'key-name-hint': '"img-oe-selftest"',
|
||||
}
|
||||
reqsigvalues_config = {
|
||||
'algo': '"sha256,rsa2048"',
|
||||
'key-name-hint': '"cfg-oe-selftest"',
|
||||
'sign-images': '"kernel", "fdt"',
|
||||
}
|
||||
|
||||
for itspath, values in sigs.items():
|
||||
if 'conf-' in itspath:
|
||||
reqsigvalues = reqsigvalues_config
|
||||
else:
|
||||
reqsigvalues = reqsigvalues_image
|
||||
for reqkey, reqvalue in reqsigvalues.items():
|
||||
value = values.get(reqkey, None)
|
||||
if value is None:
|
||||
self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
|
||||
self.assertEqual(value, reqvalue)
|
||||
|
||||
# Dump the image to see if it really got signed
|
||||
bitbake("u-boot-tools-native -c addto_recipe_sysroot")
|
||||
result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
|
||||
recipe_sysroot_native = result.output.split('=')[1].strip('"')
|
||||
dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
|
||||
result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
|
||||
in_signed = None
|
||||
signed_sections = {}
|
||||
for line in result.output.splitlines():
|
||||
if line.startswith((' Configuration', ' Image')):
|
||||
in_signed = re.search(r'\((.*)\)', line).groups()[0]
|
||||
elif re.match('^ *', line) in (' ', ''):
|
||||
in_signed = None
|
||||
elif in_signed:
|
||||
if not in_signed in signed_sections:
|
||||
signed_sections[in_signed] = {}
|
||||
key, value = line.split(':', 1)
|
||||
signed_sections[in_signed][key.strip()] = value.strip()
|
||||
self.assertIn('kernel-1', signed_sections)
|
||||
self.assertIn('fdt-am335x-boneblack.dtb', signed_sections)
|
||||
self.assertIn('conf-am335x-boneblack.dtb', signed_sections)
|
||||
for signed_section, values in signed_sections.items():
|
||||
value = values.get('Sign algo', None)
|
||||
if signed_section.startswith("conf"):
|
||||
self.assertEqual(value, 'sha256,rsa2048:cfg-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
|
||||
else:
|
||||
self.assertEqual(value, 'sha256,rsa2048:img-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
|
||||
value = values.get('Sign value', None)
|
||||
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
|
||||
|
||||
# Check for UBOOT_MKIMAGE_SIGN_ARGS
|
||||
result = runCmd('bitbake -e virtual/kernel | grep ^T=')
|
||||
tempdir = result.output.split('=', 1)[1].strip().strip('')
|
||||
result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
|
||||
|
||||
# Check for evidence of test-mkimage-wrapper class
|
||||
result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
|
||||
result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
|
||||
|
||||
def test_uboot_fit_image(self):
|
||||
"""
|
||||
Summary: Check if Uboot FIT image and Image Tree Source
|
||||
(its) are built and the Image Tree Source has the
|
||||
correct fields.
|
||||
Expected: 1. u-boot-fitImage and u-boot-its can be built
|
||||
2. The type, load address, entrypoint address and
|
||||
default values of U-boot image are correct in the
|
||||
Image Tree Source. Not all the fields are tested,
|
||||
only the key fields that wont vary between
|
||||
different architectures.
|
||||
Product: oe-core
|
||||
Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
|
||||
based on work by Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
|
||||
MACHINE = "qemuarm"
|
||||
UBOOT_MACHINE = "am57xx_evm_defconfig"
|
||||
SPL_BINARY = "MLO"
|
||||
|
||||
# Enable creation of the U-Boot fitImage
|
||||
UBOOT_FITIMAGE_ENABLE = "1"
|
||||
|
||||
# (U-boot) fitImage properties
|
||||
UBOOT_LOADADDRESS = "0x80080000"
|
||||
UBOOT_ENTRYPOINT = "0x80080000"
|
||||
UBOOT_FIT_DESC = "A model description"
|
||||
|
||||
# Enable creation of Kernel fitImage
|
||||
KERNEL_IMAGETYPES += " fitImage "
|
||||
KERNEL_CLASSES = " kernel-fitimage"
|
||||
UBOOT_SIGN_ENABLE = "1"
|
||||
FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
|
||||
UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
|
||||
FIT_SIGN_INDIVIDUAL = "1"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# The U-Boot fitImage is created as part of the U-Boot recipe
|
||||
bitbake("virtual/bootloader")
|
||||
|
||||
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
machine = get_bb_var('MACHINE')
|
||||
fitimage_its_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-its-%s" % (machine,))
|
||||
fitimage_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-fitImage-%s" % (machine,))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
# Check that the type, load address, entrypoint address and default
|
||||
# values for kernel and ramdisk in Image Tree Source are as expected.
|
||||
# The order of fields in the below array is important. Not all the
|
||||
# fields are tested, only the key fields that wont vary between
|
||||
# different architectures.
|
||||
its_field_check = [
|
||||
'description = "A model description";',
|
||||
'type = "standalone";',
|
||||
'load = <0x80080000>;',
|
||||
'entry = <0x80080000>;',
|
||||
'default = "conf";',
|
||||
'loadables = "uboot";',
|
||||
'fdt = "fdt";'
|
||||
]
|
||||
|
||||
with open(fitimage_its_path) as its_file:
|
||||
field_index = 0
|
||||
for line in its_file:
|
||||
if field_index == len(its_field_check):
|
||||
break
|
||||
if its_field_check[field_index] in line:
|
||||
field_index +=1
|
||||
|
||||
if field_index != len(its_field_check): # if its equal, the test passed
|
||||
self.assertTrue(field_index == len(its_field_check),
|
||||
"Fields in Image Tree Source File %s did not match, error in finding %s"
|
||||
% (fitimage_its_path, its_field_check[field_index]))
|
||||
|
||||
def test_uboot_sign_fit_image(self):
|
||||
"""
|
||||
Summary: Check if Uboot FIT image and Image Tree Source
|
||||
(its) are built and the Image Tree Source has the
|
||||
correct fields, in the scenario where the Kernel
|
||||
is also creating/signing it's fitImage.
|
||||
Expected: 1. u-boot-fitImage and u-boot-its can be built
|
||||
2. The type, load address, entrypoint address and
|
||||
default values of U-boot image are correct in the
|
||||
Image Tree Source. Not all the fields are tested,
|
||||
only the key fields that wont vary between
|
||||
different architectures.
|
||||
Product: oe-core
|
||||
Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
|
||||
based on work by Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
|
||||
MACHINE = "qemuarm"
|
||||
UBOOT_MACHINE = "am57xx_evm_defconfig"
|
||||
SPL_BINARY = "MLO"
|
||||
|
||||
# Enable creation of the U-Boot fitImage
|
||||
UBOOT_FITIMAGE_ENABLE = "1"
|
||||
|
||||
# (U-boot) fitImage properties
|
||||
UBOOT_LOADADDRESS = "0x80080000"
|
||||
UBOOT_ENTRYPOINT = "0x80080000"
|
||||
UBOOT_FIT_DESC = "A model description"
|
||||
KERNEL_IMAGETYPES += " fitImage "
|
||||
KERNEL_CLASSES = " kernel-fitimage "
|
||||
INHERIT += "test-mkimage-wrapper"
|
||||
UBOOT_SIGN_ENABLE = "1"
|
||||
FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
|
||||
UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
|
||||
FIT_SIGN_INDIVIDUAL = "1"
|
||||
UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# The U-Boot fitImage is created as part of the U-Boot recipe
|
||||
bitbake("virtual/bootloader")
|
||||
|
||||
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
machine = get_bb_var('MACHINE')
|
||||
fitimage_its_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-its-%s" % (machine,))
|
||||
fitimage_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-fitImage-%s" % (machine,))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
# Check that the type, load address, entrypoint address and default
|
||||
# values for kernel and ramdisk in Image Tree Source are as expected.
|
||||
# The order of fields in the below array is important. Not all the
|
||||
# fields are tested, only the key fields that wont vary between
|
||||
# different architectures.
|
||||
its_field_check = [
|
||||
'description = "A model description";',
|
||||
'type = "standalone";',
|
||||
'load = <0x80080000>;',
|
||||
'entry = <0x80080000>;',
|
||||
'default = "conf";',
|
||||
'loadables = "uboot";',
|
||||
'fdt = "fdt";'
|
||||
]
|
||||
|
||||
with open(fitimage_its_path) as its_file:
|
||||
field_index = 0
|
||||
for line in its_file:
|
||||
if field_index == len(its_field_check):
|
||||
break
|
||||
if its_field_check[field_index] in line:
|
||||
field_index +=1
|
||||
|
||||
if field_index != len(its_field_check): # if its equal, the test passed
|
||||
self.assertTrue(field_index == len(its_field_check),
|
||||
"Fields in Image Tree Source File %s did not match, error in finding %s"
|
||||
% (fitimage_its_path, its_field_check[field_index]))
|
||||
|
||||
|
||||
def test_sign_standalone_uboot_fit_image(self):
|
||||
"""
|
||||
Summary: Check if U-Boot FIT image and Image Tree Source (its) are
|
||||
created and signed correctly for the scenario where only
|
||||
the U-Boot proper fitImage is being created and signed.
|
||||
Expected: 1) U-Boot its and FIT image are built successfully
|
||||
2) Scanning the its file indicates signing is enabled
|
||||
as requested by SPL_SIGN_ENABLE (using keys generated
|
||||
via UBOOT_FIT_GENERATE_KEYS)
|
||||
3) Dumping the FIT image indicates signature values
|
||||
are present
|
||||
4) Examination of the do_uboot_assemble_fitimage
|
||||
runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
|
||||
and SPL_MKIMAGE_SIGN_ARGS are working as expected.
|
||||
Product: oe-core
|
||||
Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
|
||||
work by Paul Eggleton <paul.eggleton@microsoft.com> and
|
||||
Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at
|
||||
# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
|
||||
MACHINE = "qemuarm"
|
||||
UBOOT_MACHINE = "am57xx_evm_defconfig"
|
||||
SPL_BINARY = "MLO"
|
||||
# The kernel-fitimage class is a dependency even if we're only
|
||||
# creating/signing the U-Boot fitImage
|
||||
KERNEL_CLASSES = " kernel-fitimage"
|
||||
INHERIT += "test-mkimage-wrapper"
|
||||
# Enable creation and signing of the U-Boot fitImage
|
||||
UBOOT_FITIMAGE_ENABLE = "1"
|
||||
SPL_SIGN_ENABLE = "1"
|
||||
SPL_SIGN_KEYNAME = "spl-oe-selftest"
|
||||
SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_DTB_BINARY = "u-boot.dtb"
|
||||
UBOOT_ENTRYPOINT = "0x80000000"
|
||||
UBOOT_LOADADDRESS = "0x80000000"
|
||||
UBOOT_DTB_LOADADDRESS = "0x82000000"
|
||||
UBOOT_ARCH = "arm"
|
||||
SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
|
||||
SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
|
||||
UBOOT_EXTLINUX = "0"
|
||||
UBOOT_FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_FIT_HASH_ALG = "sha256"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# The U-Boot fitImage is created as part of the U-Boot recipe
|
||||
bitbake("virtual/bootloader")
|
||||
|
||||
image_type = "core-image-minimal"
|
||||
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
machine = get_bb_var('MACHINE')
|
||||
fitimage_its_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-its-%s" % (machine,))
|
||||
fitimage_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-fitImage-%s" % (machine,))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
req_itspaths = [
|
||||
['/', 'images', 'uboot'],
|
||||
['/', 'images', 'uboot', 'signature'],
|
||||
['/', 'images', 'fdt'],
|
||||
['/', 'images', 'fdt', 'signature'],
|
||||
]
|
||||
|
||||
itspath = []
|
||||
itspaths = []
|
||||
linect = 0
|
||||
sigs = {}
|
||||
with open(fitimage_its_path) as its_file:
|
||||
linect += 1
|
||||
for line in its_file:
|
||||
line = line.strip()
|
||||
if line.endswith('};'):
|
||||
itspath.pop()
|
||||
elif line.endswith('{'):
|
||||
itspath.append(line[:-1].strip())
|
||||
itspaths.append(itspath[:])
|
||||
elif itspath and itspath[-1] == 'signature':
|
||||
itsdotpath = '.'.join(itspath)
|
||||
if not itsdotpath in sigs:
|
||||
sigs[itsdotpath] = {}
|
||||
if not '=' in line or not line.endswith(';'):
|
||||
self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
|
||||
key, value = line.split('=', 1)
|
||||
sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
|
||||
|
||||
for reqpath in req_itspaths:
|
||||
if not reqpath in itspaths:
|
||||
self.fail('Missing section in its file: %s' % reqpath)
|
||||
|
||||
reqsigvalues_image = {
|
||||
'algo': '"sha256,rsa2048"',
|
||||
'key-name-hint': '"spl-oe-selftest"',
|
||||
}
|
||||
|
||||
for itspath, values in sigs.items():
|
||||
reqsigvalues = reqsigvalues_image
|
||||
for reqkey, reqvalue in reqsigvalues.items():
|
||||
value = values.get(reqkey, None)
|
||||
if value is None:
|
||||
self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
|
||||
self.assertEqual(value, reqvalue)
|
||||
|
||||
# Dump the image to see if it really got signed
|
||||
bitbake("u-boot-tools-native -c addto_recipe_sysroot")
|
||||
result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
|
||||
recipe_sysroot_native = result.output.split('=')[1].strip('"')
|
||||
dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
|
||||
result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
|
||||
in_signed = None
|
||||
signed_sections = {}
|
||||
for line in result.output.splitlines():
|
||||
if line.startswith((' Image')):
|
||||
in_signed = re.search(r'\((.*)\)', line).groups()[0]
|
||||
elif re.match(' \w', line):
|
||||
in_signed = None
|
||||
elif in_signed:
|
||||
if not in_signed in signed_sections:
|
||||
signed_sections[in_signed] = {}
|
||||
key, value = line.split(':', 1)
|
||||
signed_sections[in_signed][key.strip()] = value.strip()
|
||||
self.assertIn('uboot', signed_sections)
|
||||
self.assertIn('fdt', signed_sections)
|
||||
for signed_section, values in signed_sections.items():
|
||||
value = values.get('Sign algo', None)
|
||||
self.assertEqual(value, 'sha256,rsa2048:spl-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
|
||||
value = values.get('Sign value', None)
|
||||
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
|
||||
|
||||
# Check for SPL_MKIMAGE_SIGN_ARGS
|
||||
result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
|
||||
tempdir = result.output.split('=', 1)[1].strip().strip('')
|
||||
result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
|
||||
|
||||
# Check for evidence of test-mkimage-wrapper class
|
||||
result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
|
||||
result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
|
||||
|
||||
def test_sign_cascaded_uboot_fit_image(self):
|
||||
"""
|
||||
Summary: Check if U-Boot FIT image and Image Tree Source (its) are
|
||||
created and signed correctly for the scenario where both
|
||||
U-Boot proper and Kernel fitImages are being created and
|
||||
signed.
|
||||
Expected: 1) U-Boot its and FIT image are built successfully
|
||||
2) Scanning the its file indicates signing is enabled
|
||||
as requested by SPL_SIGN_ENABLE (using keys generated
|
||||
via UBOOT_FIT_GENERATE_KEYS)
|
||||
3) Dumping the FIT image indicates signature values
|
||||
are present
|
||||
4) Examination of the do_uboot_assemble_fitimage
|
||||
runfile/logfile indicate that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
|
||||
and SPL_MKIMAGE_SIGN_ARGS are working as expected.
|
||||
Product: oe-core
|
||||
Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
|
||||
work by Paul Eggleton <paul.eggleton@microsoft.com> and
|
||||
Usama Arif <usama.arif@arm.com>
|
||||
"""
|
||||
config = """
|
||||
# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at
|
||||
# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
|
||||
MACHINE = "qemuarm"
|
||||
UBOOT_MACHINE = "am57xx_evm_defconfig"
|
||||
SPL_BINARY = "MLO"
|
||||
# Enable creation and signing of the U-Boot fitImage
|
||||
UBOOT_FITIMAGE_ENABLE = "1"
|
||||
SPL_SIGN_ENABLE = "1"
|
||||
SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest"
|
||||
SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_DTB_BINARY = "u-boot.dtb"
|
||||
UBOOT_ENTRYPOINT = "0x80000000"
|
||||
UBOOT_LOADADDRESS = "0x80000000"
|
||||
UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
|
||||
UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'"
|
||||
UBOOT_DTB_LOADADDRESS = "0x82000000"
|
||||
UBOOT_ARCH = "arm"
|
||||
SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
|
||||
SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
|
||||
UBOOT_EXTLINUX = "0"
|
||||
UBOOT_FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_FIT_HASH_ALG = "sha256"
|
||||
KERNEL_IMAGETYPES += " fitImage "
|
||||
KERNEL_CLASSES = " kernel-fitimage "
|
||||
INHERIT += "test-mkimage-wrapper"
|
||||
UBOOT_SIGN_ENABLE = "1"
|
||||
FIT_GENERATE_KEYS = "1"
|
||||
UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
|
||||
UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
|
||||
UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
|
||||
FIT_SIGN_INDIVIDUAL = "1"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# The U-Boot fitImage is created as part of the U-Boot recipe
|
||||
bitbake("virtual/bootloader")
|
||||
|
||||
image_type = "core-image-minimal"
|
||||
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
machine = get_bb_var('MACHINE')
|
||||
fitimage_its_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-its-%s" % (machine,))
|
||||
fitimage_path = os.path.join(deploy_dir_image,
|
||||
"u-boot-fitImage-%s" % (machine,))
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
req_itspaths = [
|
||||
['/', 'images', 'uboot'],
|
||||
['/', 'images', 'uboot', 'signature'],
|
||||
['/', 'images', 'fdt'],
|
||||
['/', 'images', 'fdt', 'signature'],
|
||||
]
|
||||
|
||||
itspath = []
|
||||
itspaths = []
|
||||
linect = 0
|
||||
sigs = {}
|
||||
with open(fitimage_its_path) as its_file:
|
||||
linect += 1
|
||||
for line in its_file:
|
||||
line = line.strip()
|
||||
if line.endswith('};'):
|
||||
itspath.pop()
|
||||
elif line.endswith('{'):
|
||||
itspath.append(line[:-1].strip())
|
||||
itspaths.append(itspath[:])
|
||||
elif itspath and itspath[-1] == 'signature':
|
||||
itsdotpath = '.'.join(itspath)
|
||||
if not itsdotpath in sigs:
|
||||
sigs[itsdotpath] = {}
|
||||
if not '=' in line or not line.endswith(';'):
|
||||
self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
|
||||
key, value = line.split('=', 1)
|
||||
sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
|
||||
|
||||
for reqpath in req_itspaths:
|
||||
if not reqpath in itspaths:
|
||||
self.fail('Missing section in its file: %s' % reqpath)
|
||||
|
||||
reqsigvalues_image = {
|
||||
'algo': '"sha256,rsa2048"',
|
||||
'key-name-hint': '"spl-cascaded-oe-selftest"',
|
||||
}
|
||||
|
||||
for itspath, values in sigs.items():
|
||||
reqsigvalues = reqsigvalues_image
|
||||
for reqkey, reqvalue in reqsigvalues.items():
|
||||
value = values.get(reqkey, None)
|
||||
if value is None:
|
||||
self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
|
||||
self.assertEqual(value, reqvalue)
|
||||
|
||||
# Dump the image to see if it really got signed
|
||||
bitbake("u-boot-tools-native -c addto_recipe_sysroot")
|
||||
result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
|
||||
recipe_sysroot_native = result.output.split('=')[1].strip('"')
|
||||
dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
|
||||
result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
|
||||
in_signed = None
|
||||
signed_sections = {}
|
||||
for line in result.output.splitlines():
|
||||
if line.startswith((' Image')):
|
||||
in_signed = re.search(r'\((.*)\)', line).groups()[0]
|
||||
elif re.match(' \w', line):
|
||||
in_signed = None
|
||||
elif in_signed:
|
||||
if not in_signed in signed_sections:
|
||||
signed_sections[in_signed] = {}
|
||||
key, value = line.split(':', 1)
|
||||
signed_sections[in_signed][key.strip()] = value.strip()
|
||||
self.assertIn('uboot', signed_sections)
|
||||
self.assertIn('fdt', signed_sections)
|
||||
for signed_section, values in signed_sections.items():
|
||||
value = values.get('Sign algo', None)
|
||||
self.assertEqual(value, 'sha256,rsa2048:spl-cascaded-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
|
||||
value = values.get('Sign value', None)
|
||||
self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
|
||||
|
||||
# Check for SPL_MKIMAGE_SIGN_ARGS
|
||||
result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
|
||||
tempdir = result.output.split('=', 1)[1].strip().strip('')
|
||||
result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
|
||||
|
||||
# Check for evidence of test-mkimage-wrapper class
|
||||
result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
|
||||
result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
|
||||
self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
|
||||
|
||||
|
||||
|
||||
def test_initramfs_bundle(self):
|
||||
"""
|
||||
Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its)
|
||||
The FIT settings are set by the test case.
|
||||
The machine used is beaglebone-yocto.
|
||||
Expected: 1. The ITS is generated with initramfs bundle support
|
||||
2. All the fields in the kernel node are as expected (matching the
|
||||
conf settings)
|
||||
3. The kernel is included in all the available configurations and
|
||||
its hash is included in the configuration signature
|
||||
|
||||
Product: oe-core
|
||||
Author: Abdellatif El Khlifi <abdellatif.elkhlifi@arm.com>
|
||||
"""
|
||||
|
||||
config = """
|
||||
DISTRO="poky"
|
||||
MACHINE = "beaglebone-yocto"
|
||||
INITRAMFS_IMAGE_BUNDLE = "1"
|
||||
INITRAMFS_IMAGE = "core-image-minimal-initramfs"
|
||||
INITRAMFS_SCRIPTS = ""
|
||||
UBOOT_MACHINE = "am335x_evm_defconfig"
|
||||
KERNEL_CLASSES = " kernel-fitimage "
|
||||
KERNEL_IMAGETYPES = "fitImage"
|
||||
UBOOT_SIGN_ENABLE = "1"
|
||||
UBOOT_SIGN_KEYNAME = "beaglebonekey"
|
||||
UBOOT_SIGN_KEYDIR ?= "${DEPLOY_DIR_IMAGE}"
|
||||
UBOOT_DTB_BINARY = "u-boot.dtb"
|
||||
UBOOT_ENTRYPOINT = "0x80000000"
|
||||
UBOOT_LOADADDRESS = "0x80000000"
|
||||
UBOOT_DTB_LOADADDRESS = "0x82000000"
|
||||
UBOOT_ARCH = "arm"
|
||||
UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
|
||||
UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
|
||||
UBOOT_EXTLINUX = "0"
|
||||
FIT_GENERATE_KEYS = "1"
|
||||
KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
|
||||
FIT_KERNEL_COMP_ALG = "none"
|
||||
FIT_HASH_ALG = "sha256"
|
||||
"""
|
||||
self.write_config(config)
|
||||
|
||||
# fitImage is created as part of linux recipe
|
||||
bitbake("virtual/kernel")
|
||||
|
||||
image_type = get_bb_var('INITRAMFS_IMAGE')
|
||||
deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
machine = get_bb_var('MACHINE')
|
||||
fitimage_its_path = os.path.join(deploy_dir_image,
|
||||
"fitImage-its-%s-%s-%s" % (image_type, machine, machine))
|
||||
fitimage_path = os.path.join(deploy_dir_image,"fitImage")
|
||||
|
||||
self.assertTrue(os.path.exists(fitimage_its_path),
|
||||
"%s image tree source doesn't exist" % (fitimage_its_path))
|
||||
self.assertTrue(os.path.exists(fitimage_path),
|
||||
"%s FIT image doesn't exist" % (fitimage_path))
|
||||
|
||||
kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
|
||||
kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
|
||||
kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
|
||||
kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
|
||||
uboot_arch = str(get_bb_var('UBOOT_ARCH'))
|
||||
fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
|
||||
|
||||
its_file = open(fitimage_its_path)
|
||||
|
||||
its_lines = [line.strip() for line in its_file.readlines()]
|
||||
|
||||
exp_node_lines = [
|
||||
'kernel-1 {',
|
||||
'description = "Linux kernel";',
|
||||
'data = /incbin/("linux.bin");',
|
||||
'type = "' + kernel_type + '";',
|
||||
'arch = "' + uboot_arch + '";',
|
||||
'os = "linux";',
|
||||
'compression = "' + kernel_compression + '";',
|
||||
'load = <' + kernel_load + '>;',
|
||||
'entry = <' + kernel_entry + '>;',
|
||||
'hash-1 {',
|
||||
'algo = "' + fit_hash_alg +'";',
|
||||
'};',
|
||||
'};'
|
||||
]
|
||||
|
||||
node_str = exp_node_lines[0]
|
||||
|
||||
test_passed = False
|
||||
|
||||
print ("checking kernel node\n")
|
||||
|
||||
if node_str in its_lines:
|
||||
node_start_idx = its_lines.index(node_str)
|
||||
node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))]
|
||||
if node == exp_node_lines:
|
||||
print("kernel node verified")
|
||||
else:
|
||||
self.assertTrue(test_passed == True,"kernel node does not match expectation")
|
||||
|
||||
rx_configs = re.compile("^conf-.*")
|
||||
its_configs = list(filter(rx_configs.match, its_lines))
|
||||
|
||||
for cfg_str in its_configs:
|
||||
cfg_start_idx = its_lines.index(cfg_str)
|
||||
line_idx = cfg_start_idx + 2
|
||||
node_end = False
|
||||
while node_end == False:
|
||||
if its_lines[line_idx] == "};" and its_lines[line_idx-1] == "};" :
|
||||
node_end = True
|
||||
line_idx = line_idx + 1
|
||||
|
||||
node = its_lines[cfg_start_idx:line_idx]
|
||||
print("checking configuration " + cfg_str.rstrip(" {"))
|
||||
rx_desc_line = re.compile("^description.*1 Linux kernel.*")
|
||||
if len(list(filter(rx_desc_line.match, node))) != 1:
|
||||
self.assertTrue(test_passed == True,"kernel keyword not found in the description line")
|
||||
break
|
||||
else:
|
||||
print("kernel keyword found in the description line")
|
||||
|
||||
if 'kernel = "kernel-1";' not in node:
|
||||
self.assertTrue(test_passed == True,"kernel line not found")
|
||||
break
|
||||
else:
|
||||
print("kernel line found")
|
||||
|
||||
rx_sign_line = re.compile("^sign-images.*kernel.*")
|
||||
if len(list(filter(rx_sign_line.match, node))) != 1:
|
||||
self.assertTrue(test_passed == True,"kernel hash not signed")
|
||||
break
|
||||
else:
|
||||
print("kernel hash signed")
|
||||
|
||||
test_passed = True
|
||||
self.assertTrue(test_passed == True,"Initramfs bundle test success")
|
||||
169
sources/poky/meta/lib/oeqa/selftest/cases/gcc.py
Normal file
169
sources/poky/meta/lib/oeqa/selftest/cases/gcc.py
Normal file
@@ -0,0 +1,169 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import time
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.case import OEPTestResultTestCase
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu
|
||||
|
||||
def parse_values(content):
    """Extract DejaGnu result lines from *content*.

    Yields (test_name, status) for every line that begins with one of the
    known DejaGnu status keywords followed by ": "; all other lines are
    ignored.
    """
    statuses = ("PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED",
                "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING")
    for line in content:
        for status in statuses:
            prefix = status + ": "
            if line.startswith(prefix):
                yield line[len(prefix):].strip(), status
                break
|
||||
|
||||
class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
    """Shared driver for running the GCC DejaGnu testsuites via bitbake.

    Subclasses pick a suite ("gcc", "g++", "libatomic", ...) and run it
    either on the build host (user emulation) or on an ssh-reachable target.
    Results are exported through the ptest result API.
    """

    def check_skip(self, suite):
        """Skip the current test if *suite* is not in gcc-runtime's RUNTIMETARGET."""
        targets = get_bb_var("RUNTIMETARGET", "gcc-runtime").split()
        if suite not in targets:
            self.skipTest("Target does not use {0}".format(suite))

    def run_check(self, *suites, ssh = None):
        """Run the named testsuites with `bitbake gcc-runtime -c check`.

        If *ssh* is given, tests are executed on that host; otherwise they run
        locally ("-user" suffixed ptest section). Parses the generated .sum
        files and records each result.
        """
        targets = set()
        for s in suites:
            # map the public suite names to gcc's make check targets
            if s == "gcc":
                targets.add("check-gcc-c")
            elif s == "g++":
                targets.add("check-gcc-c++")
            else:
                targets.add("check-target-{}".format(s))

        # configure ssh target
        features = []
        features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
        if ssh is not None:
            features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
            features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
            features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
            features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
        self.write_config("\n".join(features))

        recipe = "gcc-runtime"

        start_time = time.time()

        bitbake("{} -c check".format(recipe))

        end_time = time.time()

        bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
        builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]

        for suite in suites:
            # the .sum summary can live in a few different locations
            # depending on whether the suite is host- or target-side
            sumspath = os.path.join(builddir, "gcc", "testsuite", suite, "{0}.sum".format(suite))
            if not os.path.exists(sumspath): # check in target dirs
                sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite))
            if not os.path.exists(sumspath): # handle libstdc++-v3 -> libstdc++
                sumspath = os.path.join(builddir, target_sys, suite, "testsuite", "{0}.sum".format(suite.split("-")[0]))
            logpath = os.path.splitext(sumspath)[0] + ".log"

            ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
            ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
            self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
            with open(sumspath, "r") as f:
                for test, result in parse_values(f):
                    self.ptest_result(ptestsuite, test, result)

    def run_check_emulated(self, *args, **kwargs):
        """Boot core-image-minimal in qemu and run run_check() against it over ssh."""
        # build core-image-minimal with required packages
        default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
        features = []
        features.append('IMAGE_FEATURES += "ssh-server-openssh"')
        features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
        self.write_config("\n".join(features))
        bitbake("core-image-minimal")

        # wrap the execution with a qemu instance
        with runqemu("core-image-minimal", runqemuparams = "nographic") as qemu:
            # validate that SSH is working
            status, _ = qemu.run("uname")
            self.assertEqual(status, 0)

            return self.run_check(*args, ssh=qemu.ip, **kwargs)
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccCrossSelfTest(GccSelfTestBase):
|
||||
def test_cross_gcc(self):
|
||||
self.run_check("gcc")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GxxCrossSelfTest(GccSelfTestBase):
|
||||
def test_cross_gxx(self):
|
||||
self.run_check("g++")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccLibAtomicSelfTest(GccSelfTestBase):
|
||||
def test_libatomic(self):
|
||||
self.run_check("libatomic")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccLibGompSelfTest(GccSelfTestBase):
|
||||
def test_libgomp(self):
|
||||
self.run_check("libgomp")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccLibStdCxxSelfTest(GccSelfTestBase):
|
||||
def test_libstdcxx(self):
|
||||
self.run_check("libstdc++-v3")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccLibSspSelfTest(GccSelfTestBase):
|
||||
def test_libssp(self):
|
||||
self.check_skip("libssp")
|
||||
self.run_check("libssp")
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GccLibItmSelfTest(GccSelfTestBase):
|
||||
def test_libitm(self):
|
||||
self.check_skip("libitm")
|
||||
self.run_check("libitm")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_cross_gcc(self):
|
||||
self.run_check_emulated("gcc")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_cross_gxx(self):
|
||||
self.run_check_emulated("g++")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_libatomic(self):
|
||||
self.run_check_emulated("libatomic")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_libgomp(self):
|
||||
self.run_check_emulated("libgomp")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_libstdcxx(self):
|
||||
self.run_check_emulated("libstdc++-v3")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_libssp(self):
|
||||
self.check_skip("libssp")
|
||||
self.run_check_emulated("libssp")
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
|
||||
def test_libitm(self):
|
||||
self.check_skip("libitm")
|
||||
self.run_check_emulated("libitm")
|
||||
|
||||
67
sources/poky/meta/lib/oeqa/selftest/cases/gdbserver.py
Normal file
67
sources/poky/meta/lib/oeqa/selftest/cases/gdbserver.py
Normal file
@@ -0,0 +1,67 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import time
|
||||
import tempfile
|
||||
import shutil
|
||||
import concurrent.futures
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars , runqemu, runCmd
|
||||
|
||||
class GdbServerTest(OESelftestTestCase):
    """End-to-end test of cross-gdb against gdbserver on a qemu target."""

    def test_gdb_server(self):
        """Build an image plus debugfs, boot it, attach cross-gdb to a
        gdbserver-wrapped `kmod` process and verify source-level symbol
        resolution works through the unpacked debugfs sysroot.
        """
        target_arch = self.td["TARGET_ARCH"]
        target_sys = self.td["TARGET_SYS"]

        # debugfs tarball is needed as the gdb sysroot; gdbserver goes
        # into the image itself
        features = """
IMAGE_GEN_DEBUGFS = "1"
IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
"""
        self.write_config(features)

        gdb_recipe = "gdb-cross-" + target_arch
        gdb_binary = target_sys + "-gdb"

        bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)

        native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
        # sanity-check the cross gdb binary before booting anything
        r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
        self.assertEqual(r.status, 0)
        self.assertIn("GNU gdb", r.output)
        image = 'core-image-minimal'
        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)

        with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
            # unpack debug symbols AND the regular rootfs into one tree so
            # gdb can use it as a complete sysroot
            filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
            shutil.unpack_archive(filename, debugfs)
            filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
            shutil.unpack_archive(filename, debugfs)

            with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
                status, output = qemu.run_serial("kmod --help")
                self.assertIn("modprobe", output)

                with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
                    def run_gdb():
                        # retry a few times: gdbserver on the target may not
                        # be listening yet when this thread first connects
                        for _ in range(5):
                            time.sleep(2)
                            cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
                            self.logger.warning("starting gdb %s" % cmd)
                            r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
                            self.assertEqual(0, r.status)
                            line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
                            self.assertRegex(r.output, line_re)
                            break
                        else:
                            self.fail("Timed out connecting to gdb")
                    # run gdb in a worker thread while this thread blocks on
                    # the serial console running gdbserver
                    future = executor.submit(run_gdb)

                    status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
                    self.assertEqual(status, 1)
                    # The future either returns None, or raises an exception
                    future.result()
|
||||
136
sources/poky/meta/lib/oeqa/selftest/cases/gitarchivetests.py
Normal file
136
sources/poky/meta/lib/oeqa/selftest/cases/gitarchivetests.py
Normal file
@@ -0,0 +1,136 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
|
||||
lib_path = basepath + '/scripts/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
import oeqa.utils.gitarchive as ga
|
||||
from oeqa.utils.git import GitError
|
||||
import tempfile
|
||||
import shutil
|
||||
import scriptutils
|
||||
import logging
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
logger = scriptutils.logger_create('resulttool')
|
||||
|
||||
def create_fake_repository(commit, tag_list=None, add_remote=True):
    """ Create a testing git directory

    Initialize a simple git repository with one initial commit, and as many
    tags on this commit as listed in tag_list
    Returns both git directory path and gitarchive git object
    If commit is true, fake data will be committed, otherwise it will stay in staging area
    If commit is true and tag_list is non empty, all tags in tag_list will be
    created on the initial commit
    Fake remote will also be added to make git ls-remote work
    """
    # Default was a mutable list ([]): shared between calls and a crash
    # waiting to happen for callers that pass tag_list=None explicitly
    # (as some tests in this file do). Normalize None to an empty list.
    if tag_list is None:
        tag_list = []
    fake_data_file = "fake_data.txt"
    tempdir = tempfile.mkdtemp(prefix='fake_results.')
    repo = ga.init_git_repo(tempdir, False, False, logger)
    if add_remote:
        # a self-referencing remote is enough for "git ls-remote" to succeed
        repo.run_cmd(["remote", "add", "origin", "."])
    with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
        fake_data.write("Fake data")
    if commit:
        repo.run_cmd(["add", fake_data_file])
        repo.run_cmd(["commit", "-m", "\"Add fake data\""])
        for tag in tag_list:
            repo.run_cmd(["tag", tag])

    return tempdir, repo
|
||||
|
||||
def delete_fake_repository(path):
    """Remove a fake repository directory created by create_fake_repository()."""
    shutil.rmtree(path)
|
||||
|
||||
def tag_exists(git_obj, target_tag):
    """Return True if *target_tag* is one of the tags in *git_obj*."""
    existing_tags = git_obj.run_cmd(["tag"]).splitlines()
    return target_tag in existing_tags
|
||||
|
||||
class GitArchiveTests(OESelftestTestCase):
    """Tests for the oeqa.utils.gitarchive tagging and tag-discovery helpers,
    run against throwaway repositories built by create_fake_repository()."""
    # fixed fake revision metadata used to build the expected tag names
    TEST_BRANCH="main"
    TEST_COMMIT="0f7d5df"
    TEST_COMMIT_COUNT="42"

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.log = logging.getLogger('gitarchivetests')
        cls.log.setLevel(logging.DEBUG)

    def test_create_first_test_tag(self):
        """gitarchive() on a repo with no prior tags must create tag number 0."""
        path, git_obj = create_fake_repository(False)
        keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
        target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"

        ga.gitarchive(path, path, True, False,
                      "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
                      False, "{branch}/{commit_count}-g{commit}/{tag_number}",
                      'Test run #{tag_number} of {branch}:{commit}', '',
                      [], [], False, keywords, logger)
        self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
        delete_fake_repository(path)

    def test_create_second_test_tag(self):
        """gitarchive() must increment the tag number when tag /0 already exists."""
        first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
        second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
        keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}

        path, git_obj = create_fake_repository(True, [first_tag])
        ga.gitarchive(path, path, True, False,
                      "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
                      False, "{branch}/{commit_count}-g{commit}/{tag_number}",
                      'Test run #{tag_number} of {branch}:{commit}', '',
                      [], [], False, keywords, logger)
        self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
        delete_fake_repository(path)

    def test_get_revs_on_branch(self):
        """get_test_revs() must group tags by commit and filter by branch."""
        fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
        tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"

        path, git_obj = create_fake_repository(True, fake_tags_list)
        revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
        self.assertEqual(len(revs), 1)
        self.assertEqual(revs[0].commit, "0f7d5df")
        self.assertEqual(len(revs[0].tags), 2)
        self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
        delete_fake_repository(path)

    def test_get_tags_without_valid_remote(self):
        """get_tags() must fall back to the explicit url when no remote is set.

        NOTE(review): this test requires network access to git.yoctoproject.org.
        """
        url = 'git://git.yoctoproject.org/poky'
        path, git_obj = create_fake_repository(False, None, False)

        tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
        """Test for some well established tags (released tags)"""
        self.assertIn("yocto-4.0", tags)
        self.assertIn("yocto-4.1", tags)
        self.assertIn("yocto-4.2", tags)
        delete_fake_repository(path)

    def test_get_tags_with_only_local_tag(self):
        """With no remote and no url, get_tags() must return the local tags."""
        fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
        path, git_obj = create_fake_repository(True, fake_tags_list, False)

        """No remote is configured and no url is passed: get_tags must fall
        back to local tags
        """
        tags = ga.get_tags(git_obj, self.log)
        self.assertCountEqual(tags, fake_tags_list)
        delete_fake_repository(path)

    def test_get_tags_without_valid_remote_and_wrong_url(self):
        """An unreachable url with no configured remote must raise GitError."""
        url = 'git://git.foo.org/bar'
        path, git_obj = create_fake_repository(False, None, False)

        """Test for some well established tags (released tags)"""
        with self.assertRaises(GitError):
            tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
        delete_fake_repository(path)
|
||||
99
sources/poky/meta/lib/oeqa/selftest/cases/glibc.py
Normal file
99
sources/poky/meta/lib/oeqa/selftest/cases/glibc.py
Normal file
@@ -0,0 +1,99 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import time
|
||||
import contextlib
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.case import OEPTestResultTestCase
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, runqemu
|
||||
from oeqa.utils.nfs import unfs_server
|
||||
|
||||
def parse_values(content):
    """Yield (test_name, status) pairs for every DejaGnu result line in *content*.

    A result line starts with one of the known status keywords followed by
    ": "; anything else is skipped.
    """
    keywords = ("PASS", "FAIL", "XPASS", "XFAIL", "UNRESOLVED",
                "UNSUPPORTED", "UNTESTED", "ERROR", "WARNING")
    for entry in content:
        for keyword in keywords:
            marker = keyword + ": "
            if entry.startswith(marker):
                yield entry[len(marker):].strip(), keyword
                break
|
||||
|
||||
class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
    """Shared driver for running the glibc testsuite via bitbake, either on
    the build host or against an NFS-backed qemu target."""

    def run_check(self, ssh = None):
        """Run `bitbake glibc-testsuite -c check` and export the results.

        If *ssh* is given the tests execute on that host; otherwise they run
        locally and the ptest section is suffixed "-user".
        """
        # configure ssh target
        features = []
        if ssh is not None:
            features.append('TOOLCHAIN_TEST_TARGET = "ssh"')
            features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
            features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
            features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
        # force single threaded test execution
        features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
        self.write_config("\n".join(features))

        start_time = time.time()

        bitbake("glibc-testsuite -c check")

        end_time = time.time()

        builddir = get_bb_var("B", "glibc-testsuite")

        ptestsuite = "glibc-user" if ssh is None else "glibc"
        self.ptest_section(ptestsuite, duration = int(end_time - start_time))
        # errors='replace': the summary may contain non-UTF-8 bytes
        with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
            for test, result in parse_values(f):
                self.ptest_result(ptestsuite, test, result)

    def run_check_emulated(self):
        """Boot core-image-minimal in qemu, NFS-mount the build tree into it,
        and run run_check() over ssh so tests see the host's work directory."""
        with contextlib.ExitStack() as s:
            # use the base work dir, as the nfs mount, since the recipe directory may not exist
            tmpdir = get_bb_var("BASE_WORKDIR")
            nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))

            # build core-image-minimal with required packages
            default_installed_packages = [
                "glibc-charmaps",
                "libgcc",
                "libstdc++",
                "libatomic",
                "libgomp",
                # "python3",
                # "python3-pexpect",
                "nfs-utils",
            ]
            features = []
            features.append('IMAGE_FEATURES += "ssh-server-openssh"')
            features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
            self.write_config("\n".join(features))
            bitbake("core-image-minimal")

            # start runqemu
            qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))

            # validate that SSH is working
            status, _ = qemu.run("uname")
            self.assertEqual(status, 0)

            # setup nfs mount
            if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
                raise Exception("Failed to setup NFS mount directory on target")
            # mount the host export at the identical path on the target so
            # absolute build paths resolve the same on both sides
            mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
            status, output = qemu.run(mountcmd)
            if status != 0:
                raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))

            self.run_check(ssh = qemu.ip)
|
||||
|
||||
@OETestTag("toolchain-user")
|
||||
class GlibcSelfTest(GlibcSelfTestBase):
|
||||
def test_glibc(self):
|
||||
self.run_check()
|
||||
|
||||
@OETestTag("toolchain-system")
|
||||
@OETestTag("runqemu")
|
||||
class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
|
||||
def test_glibc(self):
|
||||
self.run_check_emulated()
|
||||
|
||||
75
sources/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py
Normal file
75
sources/poky/meta/lib/oeqa/selftest/cases/gotoolchain.py
Normal file
@@ -0,0 +1,75 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
|
||||
|
||||
|
||||
class oeGoToolchainSelfTest(OESelftestTestCase):
    """
    Test cases for OE's Go toolchain
    """

    @staticmethod
    def get_sdk_environment(tmpdir_SDKQA):
        """Return the path of the first environment-setup-* script in the installed SDK."""
        pattern = os.path.join(tmpdir_SDKQA, "environment-setup-*")
        # FIXME: this is a very naive implementation
        return glob.glob(pattern)[0]

    @staticmethod
    def get_sdk_toolchain():
        """Return the deploy path of the meta-go-toolchain installer script."""
        bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAIN_OUTPUTNAME'],
                              "meta-go-toolchain")
        sdk_deploy = bb_vars['SDK_DEPLOY']
        toolchain_name = bb_vars['TOOLCHAIN_OUTPUTNAME']
        return os.path.join(sdk_deploy, toolchain_name + ".sh")

    @classmethod
    def setUpClass(cls):
        # Build and install the Go SDK once for all tests in this class.
        super(oeGoToolchainSelfTest, cls).setUpClass()
        cls.tmpdir_SDKQA = tempfile.mkdtemp(prefix='SDKQA')
        cls.go_path = os.path.join(cls.tmpdir_SDKQA, "go")
        # Build the SDK and locate it in DEPLOYDIR
        bitbake("meta-go-toolchain")
        cls.sdk_path = oeGoToolchainSelfTest.get_sdk_toolchain()
        # Install the SDK into the tmpdir
        runCmd("sh %s -y -d \"%s\"" % (cls.sdk_path, cls.tmpdir_SDKQA))
        cls.env_SDK = oeGoToolchainSelfTest.get_sdk_environment(cls.tmpdir_SDKQA)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.tmpdir_SDKQA, ignore_errors=True)
        super(oeGoToolchainSelfTest, cls).tearDownClass()

    def run_sdk_go_command(self, gocmd, proj, name):
        """Run `go <gocmd>` for project *proj*/*name* inside the SDK environment.

        Returns the command's exit status.
        """
        cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
        cmd = cmd + ". %s; " % self.env_SDK
        cmd = cmd + "export GOPATH=%s; " % self.go_path
        cmd = cmd + "export GOFLAGS=-modcacherw; "
        cmd = cmd + "export CGO_ENABLED=1; "
        cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
        cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
        return runCmd(cmd).status

    def test_go_dep_build(self):
        """Cross-build a real-world Go project (direnv) with the SDK toolchain.

        NOTE(review): needs network access (wget from github + Go module proxy).
        """
        proj = "github.com/direnv"
        name = "direnv"
        ver = "v2.27.0"
        archive = ".tar.gz"
        url = "https://%s/%s/archive/%s%s" % (proj, name, ver, archive)

        runCmd("cd %s; wget %s" % (self.tmpdir_SDKQA, url))
        runCmd("cd %s; tar -xf %s" % (self.tmpdir_SDKQA, ver+archive))
        runCmd("mkdir -p %s/src/%s" % (self.go_path, proj))
        runCmd("mv %s/direnv-2.27.0 %s/src/%s/%s"
               % (self.tmpdir_SDKQA, self.go_path, proj, name))
        retv = self.run_sdk_go_command('build', proj, name)
        self.assertEqual(retv, 0,
                         msg="Running go build failed for %s" % name)
|
||||
60
sources/poky/meta/lib/oeqa/selftest/cases/image_typedep.py
Normal file
60
sources/poky/meta/lib/oeqa/selftest/cases/image_typedep.py
Normal file
@@ -0,0 +1,60 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class ImageTypeDepTests(OESelftestTestCase):
    """Tests that IMAGE_TYPEDEP conversion-type dependencies are propagated
    into the task dependency graph."""

    # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
    # the conversion type bar gets added as a dep as well
    def test_conversion_typedep_added(self):

        self.write_recipeinc('emptytest', """
# Try to empty out the default dependency list
PACKAGE_INSTALL = ""
DISTRO_EXTRA_RDEPENDS=""

LICENSE = "MIT"
IMAGE_FSTYPES = "testfstype"

IMAGE_TYPES_MASKED += "testfstype"
IMAGE_TYPEDEP:testfstype = "tar.bz2"

inherit image

""")
        # First get the dependency that should exist for bz2, it will look
        # like CONVERSION_DEPENDS_bz2="somedep"
        result = bitbake('-e emptytest')

        dep = None
        for line in result.output.split('\n'):
            if line.startswith('CONVERSION_DEPENDS_bz2'):
                dep = line.split('=')[1].strip('"')
                break

        self.assertIsNotNone(dep, "CONVERSION_DEPENDS_bz2 dependency not found in bitbake -e output")

        # Now get the dependency task list and check for the expected task
        # dependency
        bitbake('-g emptytest')

        taskdependsfile = os.path.join(self.builddir, 'task-depends.dot')
        dep = dep + ".do_populate_sysroot"
        depfound = False
        expectedline = '"emptytest.do_rootfs" -> "{}"'.format(dep)

        # scan the generated dot graph for an edge from do_rootfs to the
        # conversion tool's do_populate_sysroot
        with open(taskdependsfile, "r") as f:
            for line in f:
                if line.strip() == expectedline:
                    depfound = True
                    break

        if not depfound:
            raise AssertionError("\"{}\" not found".format(expectedline))
|
||||
336
sources/poky/meta/lib/oeqa/selftest/cases/imagefeatures.py
Normal file
336
sources/poky/meta/lib/oeqa/selftest/cases/imagefeatures.py
Normal file
@@ -0,0 +1,336 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
|
||||
from oeqa.utils.sshcontrol import SSHControl
|
||||
import glob
|
||||
import os
|
||||
import json
|
||||
|
||||
class ImageFeatures(OESelftestTestCase):
|
||||
|
||||
test_user = 'tester'
|
||||
root_user = 'root'
|
||||
|
||||
@OETestTag("runqemu")
|
||||
def test_non_root_user_can_connect_via_ssh_without_password(self):
|
||||
"""
|
||||
Summary: Check if non root user can connect via ssh without password
|
||||
Expected: 1. Connection to the image via ssh using root user without providing a password should be allowed.
|
||||
2. Connection to the image via ssh using tester user without providing a password should be allowed.
|
||||
Product: oe-core
|
||||
Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
|
||||
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
"""
|
||||
|
||||
features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh empty-root-password allow-empty-password allow-root-login"\n'
|
||||
features += 'INHERIT += "extrausers"\n'
|
||||
features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
|
||||
self.write_config(features)
|
||||
|
||||
# Build a core-image-minimal
|
||||
bitbake('core-image-minimal')
|
||||
|
||||
with runqemu("core-image-minimal") as qemu:
|
||||
# Attempt to ssh with each user into qemu with empty password
|
||||
for user in [self.root_user, self.test_user]:
|
||||
ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
|
||||
status, output = ssh.run("true")
|
||||
self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
|
||||
|
||||
@OETestTag("runqemu")
|
||||
def test_all_users_can_connect_via_ssh_without_password(self):
|
||||
"""
|
||||
Summary: Check if all users can connect via ssh without password
|
||||
Expected: 1. Connection to the image via ssh using root user without providing a password should NOT be allowed.
|
||||
2. Connection to the image via ssh using tester user without providing a password should be allowed.
|
||||
Product: oe-core
|
||||
Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
|
||||
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
"""
|
||||
|
||||
features = 'EXTRA_IMAGE_FEATURES = "ssh-server-openssh allow-empty-password allow-root-login"\n'
|
||||
features += 'INHERIT += "extrausers"\n'
|
||||
features += 'EXTRA_USERS_PARAMS = "useradd -p \'\' {}; usermod -s /bin/sh {};"'.format(self.test_user, self.test_user)
|
||||
self.write_config(features)
|
||||
|
||||
# Build a core-image-minimal
|
||||
bitbake('core-image-minimal')
|
||||
|
||||
with runqemu("core-image-minimal") as qemu:
|
||||
# Attempt to ssh with each user into qemu with empty password
|
||||
for user in [self.root_user, self.test_user]:
|
||||
ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user=user)
|
||||
status, output = ssh.run("true")
|
||||
if user == 'root':
|
||||
self.assertNotEqual(status, 0, 'ssh to user root was allowed when it should not have been')
|
||||
else:
|
||||
self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
|
||||
|
||||
|
||||
def test_wayland_support_in_image(self):
|
||||
"""
|
||||
Summary: Check Wayland support in image
|
||||
Expected: 1. Wayland image can be build
|
||||
2. Wayland feature can be installed
|
||||
Product: oe-core
|
||||
Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
|
||||
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
"""
|
||||
|
||||
distro_features = get_bb_var('DISTRO_FEATURES')
|
||||
if not ('opengl' in distro_features and 'wayland' in distro_features):
|
||||
self.skipTest('neither opengl nor wayland present on DISTRO_FEATURES so core-image-weston cannot be built')
|
||||
|
||||
# Build a core-image-weston
|
||||
bitbake('core-image-weston')
|
||||
|
||||
def test_bmap(self):
|
||||
"""
|
||||
Summary: Check bmap support
|
||||
Expected: 1. core-image-minimal can be build with bmap support
|
||||
2. core-image-minimal is sparse
|
||||
Product: oe-core
|
||||
Author: Ed Bartosh <ed.bartosh@linux.intel.com>
|
||||
"""
|
||||
|
||||
features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
|
||||
self.write_config(features)
|
||||
|
||||
image = 'core-image-minimal'
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
|
||||
|
||||
image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
|
||||
bmap_path = "%s.bmap" % image_path
|
||||
gzip_path = "%s.gz" % bmap_path
|
||||
|
||||
# check if result image, bmap and bmap.gz files are in deploy directory
|
||||
self.assertTrue(os.path.exists(image_path))
|
||||
self.assertTrue(os.path.exists(bmap_path))
|
||||
self.assertTrue(os.path.exists(gzip_path))
|
||||
|
||||
# check if result image is sparse
|
||||
image_stat = os.stat(image_path)
|
||||
self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
|
||||
|
||||
# check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
|
||||
self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
|
||||
|
||||
def test_hypervisor_fmts(self):
|
||||
"""
|
||||
Summary: Check various hypervisor formats
|
||||
Expected: 1. core-image-minimal can be built with vmdk, vdi and
|
||||
qcow2 support.
|
||||
2. qemu-img says each image has the expected format
|
||||
Product: oe-core
|
||||
Author: Tom Rini <trini@konsulko.com>
|
||||
"""
|
||||
|
||||
img_types = [ 'vmdk', 'vdi', 'qcow2' ]
|
||||
features = ""
|
||||
for itype in img_types:
|
||||
features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
|
||||
self.write_config(features)
|
||||
|
||||
image = 'core-image-minimal'
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
|
||||
|
||||
for itype in img_types:
|
||||
image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
|
||||
(bb_vars['IMAGE_LINK_NAME'], itype))
|
||||
|
||||
# check if result image file is in deploy directory
|
||||
self.assertTrue(os.path.exists(image_path))
|
||||
|
||||
# check if result image is vmdk
|
||||
sysroot = get_bb_var('STAGING_DIR_NATIVE', 'core-image-minimal')
|
||||
result = runCmd('qemu-img info --output json %s' % image_path,
|
||||
native_sysroot=sysroot)
|
||||
try:
|
||||
data = json.loads(result.output)
|
||||
self.assertEqual(data.get('format'), itype,
|
||||
msg="Unexpected format in '%s'" % (result.output))
|
||||
except json.decoder.JSONDecodeError:
|
||||
self.fail("Could not parse '%ss'" % result.output)
|
||||
|
||||
def test_long_chain_conversion(self):
|
||||
"""
|
||||
Summary: Check for chaining many CONVERSION_CMDs together
|
||||
Expected: 1. core-image-minimal can be built with
|
||||
ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
|
||||
sha256sum
|
||||
2. The above image has a valid sha256sum
|
||||
Product: oe-core
|
||||
Author: Tom Rini <trini@konsulko.com>
|
||||
"""
|
||||
|
||||
conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
|
||||
features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
|
||||
self.write_config(features)
|
||||
|
||||
image = 'core-image-minimal'
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
|
||||
image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
|
||||
(bb_vars['IMAGE_LINK_NAME'], conv))
|
||||
|
||||
# check if resulting image is in the deploy directory
|
||||
self.assertTrue(os.path.exists(image_path))
|
||||
self.assertTrue(os.path.exists(image_path + ".sha256sum"))
|
||||
|
||||
# check if the resulting sha256sum agrees
|
||||
self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
|
||||
(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
|
||||
|
||||
def test_image_fstypes(self):
|
||||
"""
|
||||
Summary: Check if image of supported image fstypes can be built
|
||||
Expected: core-image-minimal can be built for various image types
|
||||
Product: oe-core
|
||||
Author: Ed Bartosh <ed.bartosh@linux.intel.com>
|
||||
"""
|
||||
image = 'core-image-minimal'
|
||||
|
||||
all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
|
||||
skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
|
||||
img_types = all_image_types - skip_image_types
|
||||
|
||||
config = """
|
||||
IMAGE_FSTYPES += "%s"
|
||||
WKS_FILE = "wictestdisk.wks"
|
||||
MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
|
||||
UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
|
||||
MULTIUBI_BUILD += "mtd_2_128"
|
||||
MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
|
||||
UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
|
||||
MULTIUBI_BUILD += "mtd_4_256"
|
||||
MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
|
||||
UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
|
||||
""" % ' '.join(img_types)
|
||||
self.write_config(config)
|
||||
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
|
||||
|
||||
for itype in img_types:
|
||||
if itype == 'multiubi':
|
||||
# For multiubi build we need to manage MULTIUBI_BUILD entry to append
|
||||
# specific name to IMAGE_LINK_NAME
|
||||
for vname in bb_vars['MULTIUBI_BUILD'].split():
|
||||
image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
|
||||
# check if result image is in deploy directory
|
||||
self.assertTrue(os.path.exists(image_path),
|
||||
"%s image %s doesn't exist" % (itype, image_path))
|
||||
else:
|
||||
image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
|
||||
# check if result image is in deploy directory
|
||||
self.assertTrue(os.path.exists(image_path),
|
||||
"%s image %s doesn't exist" % (itype, image_path))
|
||||
|
||||
    def test_useradd_static(self):
        # Build core-image-base with the useradd-staticids extension so all
        # user/group IDs must come from the static passwd/group tables;
        # USERADD_ERROR_DYNAMIC = "skip" makes recipes without a static
        # entry be skipped rather than fail the build (per the
        # useradd-staticids class — confirm against current class docs).
        config = """
USERADDEXTENSION = "useradd-staticids"
USERADD_ERROR_DYNAMIC = "skip"
USERADD_UID_TABLES += "files/static-passwd"
USERADD_GID_TABLES += "files/static-group"
"""
        self.write_config(config)
        bitbake("core-image-base")
|
||||
|
||||
    def test_no_busybox_base_utils(self):
        # Verify the dependency graph of an image configured entirely
        # without busybox: systemd as init, packagegroup-core-base-utils
        # providing virtual/base-utils, and busybox itself skipped.
        # Only --graphviz is run (dependency resolution), not a full build.
        config = """
# Enable wayland
DISTRO_FEATURES:append = " pam opengl wayland"

# Switch to systemd
DISTRO_FEATURES:append = " systemd usrmerge"
VIRTUAL-RUNTIME_init_manager = "systemd"
VIRTUAL-RUNTIME_initscripts = ""
VIRTUAL-RUNTIME_syslog = ""
VIRTUAL-RUNTIME_login_manager = "shadow-base"
DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"

# Replace busybox
PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
VIRTUAL-RUNTIME_base-utils-syslog = ""

# Skip busybox
SKIP_RECIPE[busybox] = "Don't build this"
"""
        self.write_config(config)

        bitbake("--graphviz core-image-weston")
|
||||
|
||||
def test_image_gen_debugfs(self):
|
||||
"""
|
||||
Summary: Check debugfs generation
|
||||
Expected: 1. core-image-minimal can be build with IMAGE_GEN_DEBUGFS variable set
|
||||
2. debug filesystem is created when variable set
|
||||
3. debug symbols available
|
||||
Product: oe-core
|
||||
Author: Humberto Ibarra <humberto.ibarra.lopez@intel.com>
|
||||
Yeoh Ee Peng <ee.peng.yeoh@intel.com>
|
||||
"""
|
||||
|
||||
image = 'core-image-minimal'
|
||||
image_fstypes_debugfs = 'tar.bz2'
|
||||
features = 'IMAGE_GEN_DEBUGFS = "1"\n'
|
||||
features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
|
||||
self.write_config(features)
|
||||
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
|
||||
|
||||
dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
|
||||
self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
|
||||
result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
|
||||
self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
|
||||
result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
|
||||
self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
|
||||
dbg_symbols_targets = result.output.splitlines()
|
||||
self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
|
||||
for t in dbg_symbols_targets:
|
||||
result = runCmd('objdump --syms %s | grep debug' % t)
|
||||
self.assertTrue("debug" in result.output, msg='Failed to find debug symbol: %s' % result.output)
|
||||
|
||||
def test_empty_image(self):
|
||||
"""Test creation of image with no packages"""
|
||||
image = 'test-empty-image'
|
||||
bitbake(image)
|
||||
bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
|
||||
manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
|
||||
self.assertTrue(os.path.exists(manifest))
|
||||
|
||||
with open(manifest, "r") as f:
|
||||
self.assertEqual(len(f.read().strip()),0)
|
||||
|
||||
def test_mandb(self):
|
||||
"""
|
||||
Test that an image containing manpages has working man and apropos commands.
|
||||
"""
|
||||
config = """
|
||||
DISTRO_FEATURES:append = " api-documentation"
|
||||
CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
|
||||
"""
|
||||
self.write_config(config)
|
||||
bitbake("core-image-minimal")
|
||||
|
||||
with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
|
||||
# This manpage is provided by man-pages
|
||||
status, output = qemu.run_serial("apropos 8859")
|
||||
self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
|
||||
self.assertIn("iso_8859_15", output)
|
||||
|
||||
# This manpage is provided by kmod
|
||||
status, output = qemu.run_serial("man --pager=cat modprobe")
|
||||
self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
|
||||
self.assertIn("force-modversion", output)
|
||||
152
sources/poky/meta/lib/oeqa/selftest/cases/incompatible_lic.py
Normal file
152
sources/poky/meta/lib/oeqa/selftest/cases/incompatible_lic.py
Normal file
@@ -0,0 +1,152 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class IncompatibleLicenseTestObsolete(OESelftestTestCase):
    """Obsolete (SPDXLICENSEMAP alias) and wildcard entries in
    INCOMPATIBLE_LICENSE must be rejected with an explicit diagnostic."""

    def lic_test(self, pn, pn_lic, lic, error_msg=None):
        # Default expectation: the recipe is skipped for its license.
        expected = error_msg or ('ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic))

        self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))

        result = bitbake('%s --dry-run' % (pn), ignore_status=True)
        if expected not in result.output:
            raise AssertionError(result.output)

    def test_incompatible_alias_spdx_license(self):
        # Alias (GPLv3) in INCOMPATIBLE_LICENSE -> obsolete-license error
        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")

    def test_incompatible_alias_spdx_license_wildcard(self):
        # Wildcarded alias -> invalid wildcard entry error
        self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")

    def test_incompatible_alias_spdx_license_alias(self):
        # Recipe itself uses an alias; alias in INCOMPATIBLE_LICENSE still obsolete
        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")

    def test_incompatible_spdx_license_alias_wildcard(self):
        # Leading-wildcard SPDX id is rejected as an invalid wildcard
        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry")

    def test_incompatible_alias_spdx_license_alias_wildcard(self):
        # Leading-wildcard alias is likewise rejected
        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")

    def test_incompatible_spdx_licenses_wildcard(self):
        # Leading wildcard over a full SPDX id is rejected
        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry")

    def test_incompatible_all_licenses_wildcard(self):
        # A bare '*' matching everything is rejected
        self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry")
|
||||
|
||||
class IncompatibleLicenseTests(OESelftestTestCase):
    """Recipes whose LICENSE collides with INCOMPATIBLE_LICENSE must be
    skipped, so nothing PROVIDES them any more."""

    def lic_test(self, pn, pn_lic, lic):
        expected = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)

        self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))

        result = bitbake('%s --dry-run' % (pn), ignore_status=True)
        if expected not in result.output:
            raise AssertionError(result.output)

    def test_incompatible_spdx_license(self):
        # Exact SPDX id match
        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')

    def test_incompatible_spdx_license_wildcard(self):
        # Trailing-wildcard SPDX pattern matching the recipe license
        self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*')

    def test_incompatible_spdx_license_alias(self):
        # Recipe declares an SPDXLICENSEMAP alias; SPDX id still matches
        self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')

    def test_incompatible_spdx_licenses(self):
        # Multiple incompatible licenses listed at once
        self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')

    def test_incompatible_nonspdx_license(self):
        # Non-SPDX (custom) license names are matched too
        self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
|
||||
|
||||
class IncompatibleLicensePerImageTests(OESelftestTestCase):
    """Per-image INCOMPATIBLE_LICENSE:pn-<image> handling, enforced at
    do_rootfs time when the offending package would be installed."""

    def default_config(self):
        return """
IMAGE_INSTALL:append = " bash"
INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
"""

    def _build_expecting_rootfs_failure(self, error_msg):
        # The build runs but do_rootfs must refuse to install bash.
        result = bitbake('core-image-minimal', ignore_status=True)
        if error_msg not in result.output:
            raise AssertionError(result.output)

    def test_bash_default(self):
        self.write_config(self.default_config())
        self._build_expecting_rootfs_failure("ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later")

    def test_bash_and_license(self):
        # AND-ing an extra license does not make bash acceptable.
        self.disable_class("create-spdx")
        self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"')
        self._build_expecting_rootfs_failure("ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later")

    def test_bash_or_license(self):
        # OR-ing an acceptable license lets the image build.
        self.disable_class("create-spdx")
        self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"')

        bitbake('core-image-minimal')

    def test_bash_license_exceptions(self):
        # An explicit bash:GPL-3.0-or-later exception unblocks the build.
        self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"')

        bitbake('core-image-minimal')
|
||||
|
||||
class NoGPL3InImagesTests(OESelftestTestCase):
    """Reference images must still build (and pass testimage) with all
    GPL-3.0*/LGPL-3.0* packages excluded."""

    def test_core_image_minimal(self):
        self.write_config("""
INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
""")
        bitbake('core-image-minimal')

    def test_core_image_full_cmdline_weston(self):
        self.write_config("""
IMAGE_CLASSES += "testimage"
INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"

require conf/distro/include/no-gplv3.inc
""")
        # Build both images first, then run their runtime test suites.
        for target in ('core-image-full-cmdline core-image-weston',
                       '-c testimage core-image-full-cmdline core-image-weston'):
            bitbake(target)
|
||||
|
||||
21
sources/poky/meta/lib/oeqa/selftest/cases/intercept.py
Normal file
21
sources/poky/meta/lib/oeqa/selftest/cases/intercept.py
Normal file
@@ -0,0 +1,21 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class GitCheck(OESelftestTestCase):
    def test_git_intercept(self):
        """
        Git binaries with CVE-2022-24765 fixed refuse to operate on a
        repository owned by a different user. That breaks do_install, which
        runs inside pseudo: the repo belongs to the build user while git runs
        as (fake)root. An intercept disables pseudo for git; verify that both
        the as-user and as-root test tasks succeed.
        """
        for task in ("test_git_as_user", "test_git_as_root"):
            bitbake("git-submodule-test -c %s" % task)
|
||||
@@ -0,0 +1,74 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, get_bb_var
|
||||
from oeqa.utils.git import GitRepo
|
||||
|
||||
class KernelDev(OESelftestTestCase):
    """Kernel-development workflow test: create a throwaway layer holding a
    linux-yocto bbappend and verify that a patch shipped via SRC_URI gets
    applied to the kernel source tree."""

    @classmethod
    def setUpClass(cls):
        super(KernelDev, cls).setUpClass()
        # Create the recipe directory structure inside the created layer
        cls.layername = 'meta-kerneltest'
        runCmd('bitbake-layers create-layer %s' % cls.layername)
        runCmd('mkdir -p %s/recipes-kernel/linux/linux-yocto' % cls.layername)
        # Absolute path of the bbappend's files directory inside BUILDDIR.
        cls.recipes_linuxyocto_dir = os.path.join \
            (cls.builddir, cls.layername, 'recipes-kernel', 'linux', 'linux-yocto')
        cls.recipeskernel_dir = os.path.dirname(cls.recipes_linuxyocto_dir)
        runCmd('bitbake-layers add-layer %s' % cls.layername)

    @classmethod
    def tearDownClass(cls):
        # Best-effort teardown: ignore_status so a failed removal does not
        # mask the test result; then delete the layer directory itself.
        runCmd('bitbake-layers remove-layer %s' % cls.layername, ignore_status=True)
        runCmd('rm -rf %s' % cls.layername)
        super(KernelDev, cls).tearDownClass()

    def setUp(self):
        super(KernelDev, self).setUp()
        # Test assumes the qemux86-64 reference machine.
        self.set_machine_config('MACHINE = "qemux86-64"\n')

    def test_apply_patches(self):
        """
        Summary:     Able to apply a single patch to the Linux kernel source
        Expected:    The README file should exist and the patch changes should be
                     displayed at the end of the file.
        Product:     Kernel Development
        Author:      Yeoh Ee Peng <ee.peng.yeoh@intel.com>
        AutomatedBy: Mazliana Mohamad <mazliana.mohamad@intel.com>
        """
        runCmd('bitbake virtual/kernel -c patch')
        kernel_source = get_bb_var('STAGING_KERNEL_DIR')
        readme = os.path.join(kernel_source, 'README')

        # This test step adds modified file 'README' to git and creates a
        # patch file '0001-KERNEL_DEV_TEST_CASE.patch' at the same location as file
        patch_content = 'This is a test to apply a patch to the kernel'
        with open(readme, 'a+') as f:
            f.write(patch_content)
        repo = GitRepo('%s' % kernel_source, is_topdir=True)
        repo.run_cmd('add %s' % readme)
        repo.run_cmd(['commit', '-m', 'KERNEL_DEV_TEST_CASE'])
        repo.run_cmd(['format-patch', '-1'])
        patch_name = '0001-KERNEL_DEV_TEST_CASE.patch'
        patchpath = os.path.join(kernel_source, patch_name)
        # Move the patch into the layer, then delete README so the later
        # re-patch proves the change came from the bbappend's SRC_URI patch.
        runCmd('mv %s %s' % (patchpath, self.recipes_linuxyocto_dir))
        runCmd('rm %s ' % readme)
        self.assertFalse(os.path.exists(readme))

        # NOTE: '%' in the bbappend filename is BitBake's version wildcard,
        # so this append applies to any linux-yocto recipe version.
        recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
        with open(recipe_append, 'w+') as fh:
            fh.write('SRC_URI += "file://%s"\n' % patch_name)
            fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
            fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')

        # Clean and re-run do_patch; the applied patch must recreate README
        # with our content as its last line.
        runCmd('bitbake virtual/kernel -c clean')
        runCmd('bitbake virtual/kernel -c patch')
        self.assertTrue(os.path.exists(readme))
        result = runCmd('tail -n 1 %s' % readme)
        self.assertEqual(result.output, patch_content)
|
||||
99
sources/poky/meta/lib/oeqa/selftest/cases/layerappend.py
Normal file
99
sources/poky/meta/lib/oeqa/selftest/cases/layerappend.py
Normal file
@@ -0,0 +1,99 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var
|
||||
import oeqa.utils.ftools as ftools
|
||||
|
||||
class LayerAppendTests(OESelftestTestCase):
    """Exercise bbappend layering: three scratch layers supply a recipe, an
    append shipping an extra file, and a second append shipping the same
    file; the staged content must track which layer's file is available."""

    # Minimal layer.conf template; 'INT' is substituted per layer index.
    layerconf = """
# We have a conf and classes directory, append to BBPATH
BBPATH .= ":${LAYERDIR}"

# We have a recipes directory, add to BBFILES
BBFILES += "${LAYERDIR}/recipes*/*.bb ${LAYERDIR}/recipes*/*.bbappend"

BBFILE_COLLECTIONS += "meta-layerINT"
BBFILE_PATTERN_meta-layerINT := "^${LAYERDIR}/"
BBFILE_PRIORITY_meta-layerINT = "6"
"""
    # Base recipe placed in layer 0; does nothing but a custom build task.
    recipe = """
LICENSE="CLOSED"
INHIBIT_DEFAULT_DEPS = "1"

python do_build() {
    bb.plain('Building ...')
}
addtask build
"""
    # Layer 1 append: ships appendtest.txt and stages it into the sysroot.
    append = """
FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

SRC_URI:append = " file://appendtest.txt"

sysroot_stage_all:append() {
	install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
}

"""
    # Layer 2 append: only adds its own copy of appendtest.txt to SRC_URI.
    append2 = """
FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"

SRC_URI:append = " file://appendtest.txt"
"""
    # Set by test_layer_appends once the layers are registered; used by
    # tearDownLocal to undo the bblayers.conf edit.
    layerappend = ''

    def tearDownLocal(self):
        if self.layerappend:
            ftools.remove_from_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
        super(LayerAppendTests, self).tearDownLocal()

    def test_layer_appends(self):
        corebase = get_bb_var("COREBASE")

        # Create the three scratch layers under COREBASE.
        for l in ["0", "1", "2"]:
            layer = os.path.join(corebase, "meta-layertest" + l)
            self.assertFalse(os.path.exists(layer))
            os.mkdir(layer)
            os.mkdir(layer + "/conf")
            with open(layer + "/conf/layer.conf", "w") as f:
                f.write(self.layerconf.replace("INT", l))
            os.mkdir(layer + "/recipes-test")
            if l == "0":
                # Layer 0: the base recipe only.
                with open(layer + "/recipes-test/layerappendtest.bb", "w") as f:
                    f.write(self.recipe)
            elif l == "1":
                # Layer 1: append plus its own appendtest.txt.
                with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
                    f.write(self.append)
                os.mkdir(layer + "/recipes-test/layerappendtest")
                with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
                    f.write("Layer 1 test")
            elif l == "2":
                # Layer 2: second append plus a different appendtest.txt.
                with open(layer + "/recipes-test/layerappendtest.bbappend", "w") as f:
                    f.write(self.append2)
                os.mkdir(layer + "/recipes-test/layerappendtest")
                with open(layer + "/recipes-test/layerappendtest/appendtest.txt", "w") as f:
                    f.write("Layer 2 test")
            self.track_for_cleanup(layer)

        self.layerappend = "BBLAYERS += \"{0}/meta-layertest0 {0}/meta-layertest1 {0}/meta-layertest2\"".format(corebase)
        ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend)
        stagingdir = get_bb_var("SYSROOT_DESTDIR", "layerappendtest")
        # With all layers present, layer 2's copy of the file wins.
        bitbake("layerappendtest")
        data = ftools.read_file(stagingdir + "/appendtest.txt")
        self.assertEqual(data, "Layer 2 test")
        # Removing layer 2's file falls back to layer 1's copy.
        os.remove(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt")
        bitbake("layerappendtest")
        data = ftools.read_file(stagingdir + "/appendtest.txt")
        self.assertEqual(data, "Layer 1 test")
        # Restoring layer 2's file makes it win again.
        with open(corebase + "/meta-layertest2/recipes-test/layerappendtest/appendtest.txt", "w") as f:
            f.write("Layer 2 test")
        bitbake("layerappendtest")
        data = ftools.read_file(stagingdir + "/appendtest.txt")
        self.assertEqual(data, "Layer 2 test")
|
||||
104
sources/poky/meta/lib/oeqa/selftest/cases/liboe.py
Normal file
104
sources/poky/meta/lib/oeqa/selftest/cases/liboe.py
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd
|
||||
import oe.path
|
||||
import os
|
||||
|
||||
class LibOE(OESelftestTestCase):
    """Tests for the meta/lib/oe path helpers (copytree / copyhardlinktree),
    run against scratch trees created under TMPDIR."""

    @classmethod
    def setUpClass(cls):
        super(LibOE, cls).setUpClass()
        # All test trees live under TMPDIR and are removed by each test.
        cls.tmp_dir = get_bb_var('TMPDIR')

    def test_copy_tree_special(self):
        """
        Summary:   oe.path.copytree() should copy files with special character
        Expected:  'test file with sp£c!al @nd spaces' should exist in
                   copy destination
        Product:   OE-Core
        Author:    Joshua Lock <joshua.g.lock@intel.com>
        """
        testloc = oe.path.join(self.tmp_dir, 'liboetests')
        src = oe.path.join(testloc, 'src')
        dst = oe.path.join(testloc, 'dst')
        bb.utils.mkdirhier(testloc)
        bb.utils.mkdirhier(src)
        testfilename = 'test file with sp£c!al @nd spaces'

        # create the test file and copy it
        open(oe.path.join(src, testfilename), 'w+b').close()
        oe.path.copytree(src, dst)

        # ensure path exists in dest
        fileindst = os.path.isfile(oe.path.join(dst, testfilename))
        self.assertTrue(fileindst, "File with spaces doesn't exist in dst")

        oe.path.remove(testloc)

    def test_copy_tree_xattr(self):
        """
        Summary:   oe.path.copytree() should preserve xattr on copied files
        Expected:  testxattr file in destination should have user.oetest
                   extended attribute
        Product:   OE-Core
        Author:    Joshua Lock <joshua.g.lock@intel.com>
        """
        testloc = oe.path.join(self.tmp_dir, 'liboetests')
        src = oe.path.join(testloc, 'src')
        dst = oe.path.join(testloc, 'dst')
        bb.utils.mkdirhier(testloc)
        bb.utils.mkdirhier(src)
        testfilename = 'testxattr'

        # ensure we have setfattr available
        bitbake("attr-native")

        bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'attr-native')
        destdir = bb_vars['SYSROOT_DESTDIR']
        bindir = bb_vars['bindir']
        bindir = destdir + bindir

        # create a file with xattr and copy it
        open(oe.path.join(src, testfilename), 'w+b').close()
        runCmd('%s/setfattr -n user.oetest -v "testing liboe" %s' % (bindir, oe.path.join(src, testfilename)))
        oe.path.copytree(src, dst)

        # ensure file in dest has user.oetest xattr
        result = runCmd('%s/getfattr -n user.oetest %s' % (bindir, oe.path.join(dst, testfilename)))
        # BUGFIX: message previously read "not sert in dst"
        self.assertIn('user.oetest="testing liboe"', result.output, 'Extended attribute not set in dst')

        oe.path.remove(testloc)

    def test_copy_hardlink_tree_count(self):
        """
        Summary:   oe.path.copyhardlinktree() shouldn't miss out files
        Expected:  src and dst should have the same number of files
        Product:   OE-Core
        Author:    Joshua Lock <joshua.g.lock@intel.com>
        """
        testloc = oe.path.join(self.tmp_dir, 'liboetests')
        src = oe.path.join(testloc, 'src')
        dst = oe.path.join(testloc, 'dst')
        bb.utils.mkdirhier(testloc)
        bb.utils.mkdirhier(src)
        testfiles = ['foo', 'bar', '.baz', 'quux']

        def touchfile(tf):
            open(oe.path.join(src, tf), 'w+b').close()

        for f in testfiles:
            touchfile(f)

        oe.path.copyhardlinktree(src, dst)

        # hidden files are included in the count as well
        dstcnt = len(os.listdir(dst))
        srccnt = len(os.listdir(src))
        self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))

        oe.path.remove(testloc)
|
||||
59
sources/poky/meta/lib/oeqa/selftest/cases/lic_checksum.py
Normal file
59
sources/poky/meta/lib/oeqa/selftest/cases/lic_checksum.py
Normal file
@@ -0,0 +1,59 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import urllib
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class LicenseTests(OESelftestTestCase):
    """Selftests for LIC_FILES_CHKSUM handling during do_populate_lic."""

    def test_checksum_with_space(self):
        """A license file whose name contains a space must be fetched/checksummed correctly."""
        bitbake_cmd = '-c populate_lic emptytest'

        # The suffix deliberately contains a space to exercise URL quoting below.
        lic_file, lic_path = tempfile.mkstemp(" -afterspace")
        os.close(lic_file)
        # Fix: the temp file was previously leaked — cleanup registration was
        # commented out. The cleanup runs at test end, after bitbake has used it.
        self.track_for_cleanup(lic_path)

        self.write_config("INHERIT:remove = \"report-error\"")

        self.write_recipeinc('emptytest', """
INHIBIT_DEFAULT_DEPS = "1"
LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
        result = bitbake(bitbake_cmd)
        self.delete_recipeinc('emptytest')

    # Verify that changing a license file that has an absolute path causes
    # the license qa to fail due to a mismatched md5sum.
    def test_nonmatching_checksum(self):
        bitbake_cmd = '-c populate_lic emptytest'
        # md5 of the new contents "data" written below.
        error_msg = 'emptytest: The new md5 checksum is 8d777f385d3dfec8815d20f7496026dc'

        lic_file, lic_path = tempfile.mkstemp()
        os.close(lic_file)
        self.track_for_cleanup(lic_path)

        self.write_config("INHERIT:remove = \"report-error\"")

        self.write_recipeinc('emptytest', """
INHIBIT_DEFAULT_DEPS = "1"
LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
""" % (lic_path, lic_path))
        result = bitbake(bitbake_cmd)

        # Modify the tracked license file; the recorded md5 no longer matches.
        with open(lic_path, "w") as f:
            f.write("data")

        result = bitbake(bitbake_cmd, ignore_status=True)
        self.delete_recipeinc('emptytest')
        if error_msg not in result.output:
            raise AssertionError(result.output)
|
||||
54
sources/poky/meta/lib/oeqa/selftest/cases/locales.py
Normal file
54
sources/poky/meta/lib/oeqa/selftest/cases/locales.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.utils.commands import bitbake, runqemu
|
||||
|
||||
class LocalesTest(OESelftestTestCase):
    """Check glibc locale generation with and without binary locale archives."""

    @OETestTag("runqemu")
    def run_locales_test(self, binary_enabled):
        # Shared driver for both test flavours; only the
        # ENABLE_BINARY_LOCALE_GENERATION value differs.
        conf_lines = [
            'EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"',
            'IMAGE_INSTALL:append = " glibc-utils localedef"',
            'GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"',
            'IMAGE_LINGUAS:append = " en-us fr-fr"',
            'ENABLE_BINARY_LOCALE_GENERATION = "%s"' % ("1" if binary_enabled else "0"),
        ]
        self.write_config("\n".join(conf_lines))

        # Build a core-image-minimal
        bitbake('core-image-minimal')

        with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
            # NOTE(review): run_serial() appears to return status 1 on success
            # (assertEqual below expects 1) — confirm against the qemu helper.
            status, output = qemu.run_serial("locale -a")
            # output must includes fr_FR or fr_FR.UTF-8
            self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
            self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)

            status, output = qemu.run_serial("localedef --list-archive -v")
            # output must includes fr_FR.utf8
            self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
            self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)

    def test_locales_on(self):
        """
        Summary: Test the locales are generated
        Expected: 1. Check the locale exist in the locale-archive
                  2. Check the locale exist for the glibc
                  3. Check the locale can be generated
        Product: oe-core
        Author: Louis Rannou <lrannou@baylibre.com>
        AutomatedBy: Louis Rannou <lrannou@baylibre.com>
        """
        self.run_locales_test(True)

    def test_locales_off(self):
        self.run_locales_test(False)
|
||||
166
sources/poky/meta/lib/oeqa/selftest/cases/manifest.py
Normal file
166
sources/poky/meta/lib/oeqa/selftest/cases/manifest.py
Normal file
@@ -0,0 +1,166 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import get_bb_var, bitbake
|
||||
|
||||
class ManifestEntry:
    '''A manifest item of a collection able to list missing packages'''

    def __init__(self, entry):
        # Path of the manifest file; 'missing' collects entries that were
        # not found on disk (filled in by the caller).
        self.file = entry
        self.missing = []
|
||||
|
||||
class VerifyManifest(OESelftestTestCase):
    '''Tests for the manifest files and contents of an image'''

    @classmethod
    def check_manifest_entries(self, manifest, path):
        """Return the manifest entries that do not exist as files under path."""
        manifest_errors = []
        try:
            with open(manifest, "r") as mfile:
                for line in mfile:
                    fields = line.split()
                    # Fix: skip blank lines instead of raising IndexError on [0].
                    if not fields:
                        continue
                    manifest_entry = os.path.join(path, fields[0])
                    self.logger.debug("{}: looking for {}"\
                            .format(self.classname, manifest_entry))
                    if not os.path.isfile(manifest_entry):
                        manifest_errors.append(manifest_entry)
                        self.logger.debug("{}: {} not found"\
                                .format(self.classname, manifest_entry))
        except OSError as e:
            self.logger.debug("{}: checking of {} failed"\
                    .format(self.classname, manifest))
            raise e

        return manifest_errors

    #this will possibly move from here
    @classmethod
    def get_dir_from_bb_var(self, bb_var, target = None):
        """Return the directory bb_var points to for target; raise OSError if invalid."""
        # Fix: the original line used '==' so the default target was never
        # assigned and get_bb_var was queried with target=None.
        target = self.buildtarget if target is None else target
        directory = get_bb_var(bb_var, target)
        if not directory or not os.path.isdir(directory):
            self.logger.debug("{}: {} points to {} when target = {}"\
                    .format(self.classname, bb_var, directory, target))
            raise OSError
        return directory

    @classmethod
    def setUpClass(self):
        super(VerifyManifest, self).setUpClass()
        self.buildtarget = 'core-image-minimal'
        self.classname = 'VerifyManifest'

        self.logger.info("{}: doing bitbake {} as a prerequisite of the test"\
                .format(self.classname, self.buildtarget))
        if bitbake(self.buildtarget).status:
            self.logger.debug("{} Failed to setup {}"\
                    .format(self.classname, self.buildtarget))
            self.skipTest("{}: Cannot setup testing scenario"\
                    .format(self.classname))

    def test_SDK_manifest_entries(self):
        '''Verifying the SDK manifest entries exist, this may take a build'''

        # the setup should bitbake core-image-minimal and here it is required
        # to do an additional setup for the sdk
        sdktask = '-c populate_sdk'
        bbargs = sdktask + ' ' + self.buildtarget
        self.logger.debug("{}: doing bitbake {} as a prerequisite of the test"\
                .format(self.classname, bbargs))
        if bitbake(bbargs).status:
            self.logger.debug("{} Failed to bitbake {}"\
                    .format(self.classname, bbargs))
            self.skipTest("{}: Cannot setup testing scenario"\
                    .format(self.classname))

        # Fix: these were previously created as chained assignments
        # (pkgdata_dir = reverse_dir = {}) which aliased several names to the
        # SAME dict object; give each name its own dict.
        pkgdata_dir = {}
        reverse_dir = {}
        mfilename = {}
        mpath = {}
        m_entry = {}
        # get manifest location based on target to query about
        d_target = dict(target = self.buildtarget,
                        host = 'nativesdk-packagegroup-sdk-host')
        try:
            mdir = self.get_dir_from_bb_var('SDK_DEPLOY', self.buildtarget)
            for k in d_target.keys():
                toolchain_outputname = get_bb_var('TOOLCHAIN_OUTPUTNAME', self.buildtarget)
                mfilename[k] = "{}.{}.manifest".format(toolchain_outputname, k)
                mpath[k] = os.path.join(mdir, mfilename[k])
                if not os.path.isfile(mpath[k]):
                    self.logger.debug("{}: {} does not exist".format(
                        self.classname, mpath[k]))
                    raise IOError
                m_entry[k] = ManifestEntry(mpath[k])

                pkgdata_dir[k] = self.get_dir_from_bb_var('PKGDATA_DIR',
                        d_target[k])
                reverse_dir[k] = os.path.join(pkgdata_dir[k],
                        'runtime-reverse')
                if not os.path.exists(reverse_dir[k]):
                    self.logger.debug("{}: {} does not exist".format(
                        self.classname, reverse_dir[k]))
                    raise IOError
        except OSError:
            raise self.skipTest("{}: Error in obtaining manifest dirs"\
                .format(self.classname))
        except IOError:
            msg = "{}: Error cannot find manifests in the specified dir:\n{}"\
                .format(self.classname, mdir)
            self.fail(msg)

        for k in d_target.keys():
            self.logger.debug("{}: Check manifest {}".format(
                self.classname, m_entry[k].file))

            m_entry[k].missing = self.check_manifest_entries(\
                    m_entry[k].file, reverse_dir[k])
            if m_entry[k].missing:
                msg = '{}: {} Error has the following missing entries'\
                        .format(self.classname, m_entry[k].file)
                logmsg = msg + ':\n' + '\n'.join(m_entry[k].missing)
                self.logger.debug(logmsg)
                self.logger.info(msg)
                self.fail(logmsg)

    def test_image_manifest_entries(self):
        '''Verifying the image manifest entries exist'''

        # get manifest location based on target to query about
        try:
            mdir = self.get_dir_from_bb_var('DEPLOY_DIR_IMAGE',
                self.buildtarget)
            mfilename = get_bb_var("IMAGE_LINK_NAME", self.buildtarget)\
                    + ".manifest"
            mpath = os.path.join(mdir, mfilename)
            if not os.path.isfile(mpath):
                raise IOError
            m_entry = ManifestEntry(mpath)

            # (dead 'pkgdata_dir = {}' assignment removed; the value below
            # immediately replaced it)
            pkgdata_dir = self.get_dir_from_bb_var('PKGDATA_DIR',
                self.buildtarget)
            revdir = os.path.join(pkgdata_dir, 'runtime-reverse')
            if not os.path.exists(revdir):
                raise IOError
        except OSError:
            raise self.skipTest("{}: Error in obtaining manifest dirs"\
                .format(self.classname))
        except IOError:
            msg = "{}: Error cannot find manifests in dir:\n{}"\
                .format(self.classname, mdir)
            self.fail(msg)

        self.logger.debug("{}: Check manifest {}"\
                .format(self.classname, m_entry.file))
        m_entry.missing = self.check_manifest_entries(\
                m_entry.file, revdir)
        if m_entry.missing:
            msg = '{}: {} Error has the following missing entries'\
                    .format(self.classname, m_entry.file)
            logmsg = msg + ':\n' + '\n'.join(m_entry.missing)
            self.logger.debug(logmsg)
            self.logger.info(msg)
            self.fail(logmsg)
|
||||
60
sources/poky/meta/lib/oeqa/selftest/cases/meta_ide.py
Normal file
60
sources/poky/meta/lib/oeqa/selftest/cases/meta_ide.py
Normal file
@@ -0,0 +1,60 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
|
||||
from oeqa.core.decorator import OETestTag
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
@OETestTag("machine")
class MetaIDE(OESelftestTestCase):
    """Checks for the meta-ide-support workflow: cross environment script and SDK-style builds."""

    @classmethod
    def setUpClass(cls):
        super(MetaIDE, cls).setUpClass()
        # Prepare the cross environment script and both sysroots up front.
        bitbake('meta-ide-support')
        bitbake('build-sysroots -c build_native_sysroot')
        bitbake('build-sysroots -c build_target_sysroot')
        bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
        cls.environment_script = 'environment-setup-{}{}-{}'.format(
            bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS'])
        cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
        cls.environment_script_path = '{}/{}'.format(cls.deploydir, cls.environment_script)
        cls.corebasedir = bb_vars['COREBASE']
        cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
        super(MetaIDE, cls).tearDownClass()

    def test_meta_ide_had_installed_meta_ide_support(self):
        self.assertExists(self.environment_script_path)

    def test_meta_ide_can_compile_c_program(self):
        # Source the environment script, then build with the cross compiler.
        runCmd('cp %s/test.c %s' % (self.tc.files_dir, self.tmpdir_metaideQA))
        runCmd("cd %s; . %s; $CC test.c -lm" % (self.tmpdir_metaideQA, self.environment_script_path))
        compiled_file = '%s/a.out' % self.tmpdir_metaideQA
        self.assertExists(compiled_file)

    def test_meta_ide_can_build_cpio_project(self):
        dl_dir = self.td.get('DL_DIR', None)
        self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
                        "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
                        self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
        self.project.download_archive()
        # configure / make / make install must all succeed (return 0)
        self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0,
                        msg="Running configure failed")
        self.assertEqual(self.project.run_make(), 0,
                        msg="Running make failed")
        self.assertEqual(self.project.run_install(), 0,
                        msg="Running make install failed")

    def test_meta_ide_can_run_sdk_tests(self):
        bitbake('-c populate_sysroot gtk+3')
        bitbake('build-sysroots -c build_target_sysroot')
        bitbake('-c testsdk meta-ide-support')
|
||||
44
sources/poky/meta/lib/oeqa/selftest/cases/minidebuginfo.py
Normal file
44
sources/poky/meta/lib/oeqa/selftest/cases/minidebuginfo.py
Normal file
@@ -0,0 +1,44 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
|
||||
|
||||
|
||||
class Minidebuginfo(OESelftestTestCase):
    def test_minidebuginfo(self):
        """Build an image with the minidebuginfo feature and verify the results."""
        target_sys = get_bb_var("TARGET_SYS")
        binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))

        image = 'core-image-minimal'
        bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)

        self.write_config("""
DISTRO_FEATURES:append = " minidebuginfo"
IMAGE_FSTYPES = "tar.bz2"
""")
        bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))

        native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)

        # confirm that executables and shared libraries contain an ELF section
        # ".gnu_debugdata" which stores minidebuginfo.
        with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
            filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
            shutil.unpack_archive(filename, unpackedfs)

            # One executable and one shared library, checked the same way.
            for elf_relpath in ("bin/busybox", "lib/libc.so.6"):
                r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, elf_relpath)],
                           native_sysroot = native_sysroot, target_sys = target_sys)
                self.assertIn(".gnu_debugdata", r.output)
|
||||
|
||||
87
sources/poky/meta/lib/oeqa/selftest/cases/multiconfig.py
Normal file
87
sources/poky/meta/lib/oeqa/selftest/cases/multiconfig.py
Normal file
@@ -0,0 +1,87 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import textwrap
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class MultiConfig(OESelftestTestCase):

    def test_multiconfig(self):
        """
        Test that a simple multiconfig build works. This uses the mcextend class and the
        multiconfig-image-packager test recipe to build a core-image-full-cmdline image which
        contains a tiny core-image-minimal and a musl core-image-minimal, installed as packages.
        """
        self.write_config("""
IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
BBMULTICONFIG = "tiny musl"
""")

        self.write_config("""
MACHINE = "qemux86-64"
DISTRO = "poky"
TCLIBC = "musl"
TMPDIR = "${TOPDIR}/tmp-mc-musl"
""", 'musl')

        self.write_config("""
MACHINE = "qemux86"
DISTRO = "poky-tiny"
TMPDIR = "${TOPDIR}/tmp-mc-tiny"
""", 'tiny')

        # Build a core-image-minimal
        bitbake('core-image-full-cmdline')

    def _assert_showvar(self, expected):
        # Check that 1) the task executed and 2) that it output the correct
        # value. Note "bitbake -e" is not used because it always reparses the
        # recipe and we want to ensure that the automatic reparsing and parse
        # caching is detected.
        result = bitbake('mc:test:multiconfig-test-parse -c showvar')
        self.assertIn(expected, result.output.splitlines())

    def test_multiconfig_reparse(self):
        """
        Test that changes to a multiconfig conf file are correctly detected and
        cause a reparse/rebuild of a recipe.
        """
        self.write_config('MCTESTVAR = "test"\nBBMULTICONFIG = "test"\n')

        self.write_config('MCTESTVAR:append = "1"\n', 'test')
        self._assert_showvar('MCTESTVAR=test1')

        self.write_config('MCTESTVAR:append = "2"\n', 'test')
        self._assert_showvar('MCTESTVAR=test2')

    def test_multiconfig_inlayer(self):
        """
        Test that a multiconfig from meta-selftest works.
        """
        self.write_config("""
BBMULTICONFIG = "muslmc"
""")

        # Build a core-image-minimal, only dry run needed to check config is present
        bitbake('mc:muslmc:bash -n')
|
||||
13
sources/poky/meta/lib/oeqa/selftest/cases/newlib.py
Normal file
13
sources/poky/meta/lib/oeqa/selftest/cases/newlib.py
Normal file
@@ -0,0 +1,13 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class NewlibTest(OESelftestTestCase):
    """Smoke-test that the newlib C library variant builds."""

    def test_newlib(self):
        # Switch the toolchain C library to newlib, then build it plus libgloss.
        self.write_config('TCLIBC = "newlib"')
        bitbake("newlib libgloss")
|
||||
157
sources/poky/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
Normal file
157
sources/poky/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
Normal file
@@ -0,0 +1,157 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
import tempfile
|
||||
import operator
|
||||
from oeqa.utils.commands import get_bb_var
|
||||
|
||||
class TestBlobParsing(OESelftestTestCase):
    """Tests for oe.buildhistory_analysis helpers that parse git blobs."""

    def setUp(self):
        self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
            dir=get_bb_var('TOPDIR'))

        try:
            from git import Repo
            self.repo = Repo.init(self.repo_path)
        except ImportError as e:
            self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))

        # Single tracked file whose "VAR = value" lines mirror self.var_map.
        self.test_file = "test"
        self.var_map = {}

    def tearDown(self):
        import shutil
        shutil.rmtree(self.repo_path)

    @property
    def heads_default(self):
        """
        Support repos defaulting to master or to main branch
        """
        try:
            return self.repo.heads.main
        except AttributeError:
            return self.repo.heads.master

    def commit_vars(self, to_add=None, to_remove=None, msg="A commit message"):
        """Apply additions/removals to var_map, rewrite the file and commit it."""
        # Fix: mutable default arguments replaced with None sentinels
        # (backward compatible — callers passing dicts/lists are unaffected).
        to_add = {} if to_add is None else to_add
        to_remove = [] if to_remove is None else to_remove
        if len(to_add) == 0 and len(to_remove) == 0:
            return

        for k in to_remove:
            # Fix: previously popped the undefined name 'x', which raised
            # NameError whenever to_remove was non-empty.
            self.var_map.pop(k, None)
        for k in to_add:
            self.var_map[k] = to_add[k]

        with open(os.path.join(self.repo_path, self.test_file), 'w') as repo_file:
            for k in self.var_map:
                repo_file.write("%s = %s\n" % (k, self.var_map[k]))

        self.repo.git.add("--all")
        self.repo.git.commit(message=msg)

    def test_blob_to_dict(self):
        """
        Test conversion of git blobs to dictionary
        """
        from oe.buildhistory_analysis import blob_to_dict
        valuesmap = { "foo" : "1", "bar" : "2" }
        self.commit_vars(to_add = valuesmap)

        blob = self.repo.head.commit.tree.blobs[0]
        self.assertEqual(valuesmap, blob_to_dict(blob),
            "commit was not translated correctly to dictionary")

    def test_compare_dict_blobs(self):
        """
        Test comparisson of dictionaries extracted from git blobs
        """
        from oe.buildhistory_analysis import compare_dict_blobs

        changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}

        self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
        blob1 = self.heads_default.commit.tree.blobs[0]

        self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
        blob2 = self.heads_default.commit.tree.blobs[0]

        change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
            blob1, blob2, False, False)

        var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records}
        self.assertEqual(changesmap, var_changes, "Changes not reported correctly")

    def test_compare_dict_blobs_default(self):
        """
        Test default values for comparisson of git blob dictionaries
        """
        from oe.buildhistory_analysis import compare_dict_blobs
        defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}

        self.commit_vars(to_add = { "foo" : "1" })
        blob1 = self.heads_default.commit.tree.blobs[0]

        self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
        blob2 = self.heads_default.commit.tree.blobs[0]

        change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
            blob1, blob2, False, False)

        var_changes = {}
        for x in change_records:
            oldvalue = "default" if ("default" in x.oldvalue) else x.oldvalue
            var_changes[x.fieldname] = (oldvalue, x.newvalue)

        self.assertEqual(defaultmap, var_changes, "Defaults not set properly")
|
||||
|
||||
class TestFileListCompare(OESelftestTestCase):
    """Checks for oe.buildhistory_analysis.compare_file_lists."""

    def test_compare_file_lists(self):
        # A directory tree that changes location, e.g.
        # /lib/modules/5.4.40-yocto-standard -> /lib/modules/5.4.43-yocto-standard,
        # must be reported as a move rather than a remove plus an add.
        from oe.buildhistory_analysis import compare_file_lists, FileChange

        with open(self.tc.files_dir + "/buildhistory_filelist1.txt", "r") as f:
            filelist1 = f.readlines()
        with open(self.tc.files_dir + "/buildhistory_filelist2.txt", "r") as f:
            filelist2 = f.readlines()

        expectedResult = [
            '/lib/libcap.so.2 changed symlink target from libcap.so.2.33 to libcap.so.2.34',
            '/lib/libcap.so.2.33 moved to /lib/libcap.so.2.34',
            '/lib/modules/5.4.40-yocto-standard moved to /lib/modules/5.4.43-yocto-standard',
            '/lib/modules/5.4.43-yocto-standard/modules.builtin.alias.bin was added',
            '/usr/bin/gawk-5.0.1 moved to /usr/bin/gawk-5.1.0',
            '/usr/lib/libbtrfsutil.so changed symlink target from libbtrfsutil.so.1.1.1 to libbtrfsutil.so.1.2.0',
            '/usr/lib/libbtrfsutil.so.1 changed symlink target from libbtrfsutil.so.1.1.1 to libbtrfsutil.so.1.2.0',
            '/usr/lib/libbtrfsutil.so.1.1.1 moved to /usr/lib/libbtrfsutil.so.1.2.0',
            '/usr/lib/libkmod.so changed symlink target from libkmod.so.2.3.4 to libkmod.so.2.3.5',
            '/usr/lib/libkmod.so.2 changed symlink target from libkmod.so.2.3.4 to libkmod.so.2.3.5',
            '/usr/lib/libkmod.so.2.3.4 moved to /usr/lib/libkmod.so.2.3.5',
            '/usr/lib/libpixman-1.so.0 changed symlink target from libpixman-1.so.0.38.4 to libpixman-1.so.0.40.0',
            '/usr/lib/libpixman-1.so.0.38.4 moved to /usr/lib/libpixman-1.so.0.40.0',
            '/usr/lib/opkg/alternatives/rtcwake was added',
            '/usr/lib/python3.8/site-packages/PyGObject-3.34.0.egg-info moved to /usr/lib/python3.8/site-packages/PyGObject-3.36.1.egg-info',
            '/usr/lib/python3.8/site-packages/btrfsutil-1.1.1-py3.8.egg-info moved to /usr/lib/python3.8/site-packages/btrfsutil-1.2.0-py3.8.egg-info',
            '/usr/lib/python3.8/site-packages/pycairo-1.19.0.egg-info moved to /usr/lib/python3.8/site-packages/pycairo-1.19.1.egg-info',
            '/usr/sbin/rtcwake changed type from file to symlink',
            '/usr/sbin/rtcwake changed permissions from rwxr-xr-x to rwxrwxrwx',
            '/usr/sbin/rtcwake changed symlink target from None to /usr/sbin/rtcwake.util-linux',
            '/usr/sbin/rtcwake.util-linux was added'
        ]

        result = compare_file_lists(filelist1, filelist2)
        rendered = [str(change) for change in sorted(result, key=operator.attrgetter("path"))]

        self.maxDiff = None
        self.assertCountEqual(rendered, expectedResult)
|
||||
|
||||
28
sources/poky/meta/lib/oeqa/selftest/cases/oelib/elf.py
Normal file
28
sources/poky/meta/lib/oeqa/selftest/cases/oelib/elf.py
Normal file
@@ -0,0 +1,28 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from unittest.case import TestCase
|
||||
import oe.qa
|
||||
|
||||
class TestElf(TestCase):
    def test_machine_name(self):
        """
        Test elf_machine_to_string()
        """
        # Known e_machine codes must map to their canonical names.
        known = {
            0x02: "SPARC",
            0x03: "x86",
            0x08: "MIPS",
            0x14: "PowerPC",
            0x28: "ARM",
            0x2A: "SuperH",
            0x32: "IA-64",
            0x3E: "x86-64",
            0xB7: "AArch64",
            0xF7: "BPF",
        }
        for code, expected in known.items():
            self.assertEqual(oe.qa.elf_machine_to_string(code), expected)

        # Unknown or non-integer inputs fall back to descriptive strings.
        self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unset")
        self.assertEqual(oe.qa.elf_machine_to_string(0xDEADBEEF), "Unknown (3735928559)")
        self.assertEqual(oe.qa.elf_machine_to_string("foobar"), "Unknown ('foobar')")
|
||||
105
sources/poky/meta/lib/oeqa/selftest/cases/oelib/license.py
Normal file
105
sources/poky/meta/lib/oeqa/selftest/cases/oelib/license.py
Normal file
@@ -0,0 +1,105 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from unittest.case import TestCase
|
||||
import oe.license
|
||||
|
||||
class SeenVisitor(oe.license.LicenseVisitor):
    """License visitor that records every license string it encounters."""

    def __init__(self):
        self.seen = []
        oe.license.LicenseVisitor.__init__(self)

    def visit_Str(self, node):
        # Invoked by the AST walker for each string (license name) node.
        self.seen.append(node.s)
|
||||
|
||||
class TestSingleLicense(TestCase):
|
||||
licenses = [
|
||||
"GPL-2.0-only",
|
||||
"LGPL-2.0-only",
|
||||
"Artistic-1.0",
|
||||
"MIT",
|
||||
"GPL-3.0-or-later",
|
||||
"FOO_BAR",
|
||||
]
|
||||
invalid_licenses = ["GPL/BSD"]
|
||||
|
||||
@staticmethod
|
||||
def parse(licensestr):
|
||||
visitor = SeenVisitor()
|
||||
visitor.visit_string(licensestr)
|
||||
return visitor.seen
|
||||
|
||||
def test_single_licenses(self):
|
||||
for license in self.licenses:
|
||||
licenses = self.parse(license)
|
||||
self.assertListEqual(licenses, [license])
|
||||
|
||||
def test_invalid_licenses(self):
|
||||
for license in self.invalid_licenses:
|
||||
with self.assertRaises(oe.license.InvalidLicense) as cm:
|
||||
self.parse(license)
|
||||
self.assertEqual(cm.exception.license, license)
|
||||
|
||||
class TestSimpleCombinations(TestCase):
|
||||
tests = {
|
||||
"FOO&BAR": ["FOO", "BAR"],
|
||||
"BAZ & MOO": ["BAZ", "MOO"],
|
||||
"ALPHA|BETA": ["ALPHA"],
|
||||
"BAZ&MOO|FOO": ["FOO"],
|
||||
"FOO&BAR|BAZ": ["FOO", "BAR"],
|
||||
}
|
||||
preferred = ["ALPHA", "FOO", "BAR"]
|
||||
|
||||
def test_tests(self):
|
||||
def choose(a, b):
|
||||
if all(lic in self.preferred for lic in b):
|
||||
return b
|
||||
else:
|
||||
return a
|
||||
|
||||
for license, expected in self.tests.items():
|
||||
licenses = oe.license.flattened_licenses(license, choose)
|
||||
self.assertListEqual(licenses, expected)
|
||||
|
||||
class TestComplexCombinations(TestSimpleCombinations):
    """Same flattening checks as the parent, with nested/parenthesised expressions."""
    tests = {
        "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
        "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
        "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
        "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"],
    }
    preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"]
|
||||
|
||||
class TestIsIncluded(TestCase):
    # Keys are (license expression, include patterns, exclude patterns);
    # None means "no restriction".  Values are [expected boolean, expected
    # resulting license list] as returned by oe.license.is_included().
    tests = {
        ("FOO | BAR", None, None):
            [True, ["FOO"]],
        ("FOO | BAR", None, "FOO"):
            [True, ["BAR"]],
        ("FOO | BAR", "BAR", None):
            [True, ["BAR"]],
        # Include/exclude entries may use fnmatch-style wildcards.
        ("FOO | BAR & FOOBAR", "*BAR", None):
            [True, ["BAR", "FOOBAR"]],
        ("FOO | BAR & FOOBAR", None, "FOO*"):
            [False, ["FOOBAR"]],
        ("(FOO | BAR) & FOOBAR | BARFOO", None, "FOO"):
            [True, ["BAR", "FOOBAR"]],
        ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
            [True, ["BAZ", "MOO", "BARFOO"]],
        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None):
            [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]],
        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"):
            [True, ["Proprietary"]],
        # When every alternative is excluded the result is False and the
        # returned list holds the excluded licenses that caused it.
        ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"):
            [False, ["GPL-3.0-or-later"]]
    }

    def test_tests(self):
        """Run is_included() over every table entry and check both results."""
        for args, expected in self.tests.items():
            is_included, licenses = oe.license.is_included(
                args[0], (args[1] or '').split(), (args[2] or '').split())
            self.assertEqual(is_included, expected[0])
            self.assertListEqual(licenses, expected[1])
91
sources/poky/meta/lib/oeqa/selftest/cases/oelib/path.py
Normal file
91
sources/poky/meta/lib/oeqa/selftest/cases/oelib/path.py
Normal file
@@ -0,0 +1,91 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from unittest.case import TestCase
|
||||
import oe, oe.path
|
||||
import tempfile
|
||||
import os
|
||||
import errno
|
||||
import shutil
|
||||
|
||||
class TestRealPath(TestCase):
    """Tests for oe.path.realpath(), which resolves symlinks relative to a
    pseudo-root directory without letting the resolution escape that root."""

    # Directory tree created under the fake root for every test case.
    DIRS = [ "a", "b", "etc", "sbin", "usr", "usr/bin", "usr/binX", "usr/sbin", "usr/include", "usr/include/gdbm" ]
    FILES = [ "etc/passwd", "b/file" ]
    # ( link path, link target, expected resolved path ).  An expected value
    # of None marks links that cannot be resolved (loops, bad relative
    # targets) and are skipped by test_norm.
    LINKS = [
        ( "bin",             "/usr/bin",             "/usr/bin" ),
        ( "binX",            "usr/binX",             "/usr/binX" ),
        ( "c",               "broken",               "/broken" ),
        ( "etc/passwd-1",    "passwd",               "/etc/passwd" ),
        ( "etc/passwd-2",    "passwd-1",             "/etc/passwd" ),
        ( "etc/passwd-3",    "/etc/passwd-1",        "/etc/passwd" ),
        ( "etc/shadow-1",    "/etc/shadow",          "/etc/shadow" ),
        ( "etc/shadow-2",    "/etc/shadow-1",        "/etc/shadow" ),
        ( "prog-A",          "bin/prog-A",           "/usr/bin/prog-A" ),
        ( "prog-B",          "/bin/prog-B",          "/usr/bin/prog-B" ),
        ( "usr/bin/prog-C",  "../../sbin/prog-C",    "/sbin/prog-C" ),
        ( "usr/bin/prog-D",  "/sbin/prog-D",         "/sbin/prog-D" ),
        ( "usr/binX/prog-E", "../sbin/prog-E",       None ),
        ( "usr/bin/prog-F",  "../../../sbin/prog-F", "/sbin/prog-F" ),
        ( "loop",            "a/loop",               None ),
        ( "a/loop",          "../loop",              None ),
        ( "b/test",          "file/foo",             "/b/file/foo" ),
    ]

    # ( path, expected with use_physdir=True, expected with use_physdir=False )
    LINKS_PHYS = [
        ( "./",              "/",                    "" ),
        ( "binX/prog-E",     "/usr/sbin/prog-E",     "/sbin/prog-E" ),
    ]

    # ( path, errno expected when resolved with assume_dir=False )
    EXCEPTIONS = [
        ( "loop",            errno.ELOOP ),
        ( "b/test",          errno.ENOENT ),
    ]

    def setUp(self):
        """Build the fixture tree; the root itself is a symlink so that
        symlink handling of the root is exercised too."""
        self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path")
        self.root = os.path.join(self.tmpdir, "R")

        os.mkdir(os.path.join(self.tmpdir, "_real"))
        os.symlink("_real", self.root)

        for d in self.DIRS:
            os.mkdir(os.path.join(self.root, d))
        for f in self.FILES:
            # NOTE(review): the file handle is not closed explicitly; the
            # empty file is all that matters here.
            open(os.path.join(self.root, f), "w")
        for l in self.LINKS:
            os.symlink(l[1], os.path.join(self.root, l[0]))

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    def __realpath(self, file, use_physdir, assume_dir = True):
        # Thin wrapper binding the fixture root into oe.path.realpath().
        return oe.path.realpath(os.path.join(self.root, file), self.root,
                                use_physdir, assume_dir = assume_dir)

    def test_norm(self):
        """Resolvable links must give the same answer with and without
        use_physdir, and match the expected path relative to the root."""
        for l in self.LINKS:
            if l[2] == None:
                continue

            target_p = self.__realpath(l[0], True)
            target_l = self.__realpath(l[0], False)

            # NOTE(review): no LINKS entry currently uses False as its
            # expected value, so this guard is always taken.
            if l[2] != False:
                self.assertEqual(target_p, target_l)
                self.assertEqual(l[2], target_p[len(self.root):])

    def test_phys(self):
        """Paths whose physical and logical resolutions differ."""
        for l in self.LINKS_PHYS:
            target_p = self.__realpath(l[0], True)
            target_l = self.__realpath(l[0], False)

            self.assertEqual(l[1], target_p[len(self.root):])
            self.assertEqual(l[2], target_l[len(self.root):])

    def test_loop(self):
        """Symlink loops and dangling components raise OSError with the
        matching errno when assume_dir is disabled."""
        for e in self.EXCEPTIONS:
            self.assertRaisesRegex(OSError, r'\[Errno %u\]' % e[1],
                                   self.__realpath, e[0], False, False)
56
sources/poky/meta/lib/oeqa/selftest/cases/oelib/types.py
Normal file
56
sources/poky/meta/lib/oeqa/selftest/cases/oelib/types.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from unittest.case import TestCase
|
||||
from oe.maketype import create
|
||||
|
||||
class TestBooleanType(TestCase):
    """Exercise the 'boolean' conversion offered by oe.maketype.create()."""

    def test_invalid(self):
        # Unrecognised strings raise ValueError; non-strings raise TypeError.
        self.assertRaises(ValueError, create, '', 'boolean')
        self.assertRaises(ValueError, create, 'foo', 'boolean')
        self.assertRaises(TypeError, create, object(), 'boolean')

    def test_true(self):
        # All accepted "true" spellings, matched case-insensitively.
        for literal in ('y', 'yes', '1', 't', 'true', 'TRUE', 'truE'):
            self.assertTrue(create(literal, 'boolean'))

    def test_false(self):
        # All accepted "false" spellings, matched case-insensitively.
        for literal in ('n', 'no', '0', 'f', 'false', 'FALSE', 'faLse'):
            self.assertFalse(create(literal, 'boolean'))

    def test_bool_equality(self):
        # The created value compares equal to the plain bool constants.
        self.assertEqual(create('n', 'boolean'), False)
        self.assertNotEqual(create('n', 'boolean'), True)
        self.assertEqual(create('y', 'boolean'), True)
        self.assertNotEqual(create('y', 'boolean'), False)
class TestList(TestCase):
    """Exercise the 'list' conversion offered by oe.maketype.create()."""

    # NOTE(review): this deliberately shadows unittest's assertListEqual with
    # a different signature; it is only ever called from within this class.
    def assertListEqual(self, value, valid, sep=None):
        obj = create(value, 'list', separator=sep)
        self.assertEqual(obj, valid)
        if sep is not None:
            self.assertEqual(obj.separator, sep)
        # Round-trip: joining with the list's separator reproduces the string.
        self.assertEqual(str(obj), obj.separator.join(obj))

    def test_list_nosep(self):
        # Without a separator, any run of whitespace splits the elements.
        testlist = ['alpha', 'beta', 'theta']
        self.assertListEqual('alpha beta theta', testlist)
        self.assertListEqual('alpha beta\ttheta', testlist)
        self.assertListEqual('alpha', ['alpha'])

    def test_list_usersep(self):
        # With an explicit separator, splitting happens only on it.
        self.assertListEqual('foo:bar', ['foo', 'bar'], ':')
        self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':')
104
sources/poky/meta/lib/oeqa/selftest/cases/oelib/utils.py
Normal file
104
sources/poky/meta/lib/oeqa/selftest/cases/oelib/utils.py
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import sys
|
||||
from unittest.case import TestCase
|
||||
from contextlib import contextmanager
|
||||
from io import StringIO
|
||||
from oe.utils import packages_filter_out_system, trim_version, multiprocess_launch
|
||||
|
||||
class TestPackagesFilterOutSystem(TestCase):
    def test_filter(self):
        """
        Test that oe.utils.packages_filter_out_system works.
        """
        try:
            import bb
        except ImportError:
            self.skipTest("Cannot import bb")

        d = bb.data_smart.DataSmart()
        d.setVar("PN", "foo")

        # (PACKAGES value, packages expected to survive the filter).
        # ${PN}, ${PN}-doc, ${PN}-dev and ${PN}-locale-* are "system"
        # packages and must be filtered out.
        cases = (
            ("foo foo-doc foo-dev", []),
            ("foo foo-doc foo-data foo-dev", ["foo-data"]),
            ("foo foo-locale-en-gb", []),
            ("foo foo-data foo-locale-en-gb", ["foo-data"]),
        )
        for packages, expected in cases:
            d.setVar("PACKAGES", packages)
            self.assertEqual(packages_filter_out_system(d), expected)
class TestTrimVersion(TestCase):
    """Checks for oe.utils.trim_version() argument validation and output."""

    def test_version_exception(self):
        # The version argument must be a string.
        for bad_version in (None, (1, 2, 3)):
            with self.assertRaises(TypeError):
                trim_version(bad_version, 2)

    def test_num_exception(self):
        # The number of components to keep must be a positive integer.
        for bad_num in (0, -1):
            with self.assertRaises(ValueError):
                trim_version("1.2.3", bad_num)

    def test_valid(self):
        # Keeping more components than exist returns the version unchanged.
        self.assertEqual(trim_version("1.2.3", 1), "1")
        self.assertEqual(trim_version("1.2.3", 2), "1.2")
        self.assertEqual(trim_version("1.2.3", 3), "1.2.3")
        self.assertEqual(trim_version("1.2.3", 4), "1.2.3")
class TestMultiprocessLaunch(TestCase):

    def test_multiprocesslaunch(self):
        """Check oe.utils.multiprocess_launch() both on the success path and
        when a worker raises, in which case the traceback must be printed and
        bb.fatal (stubbed here) must be triggered."""
        import bb

        # Worker function: fails only for the item "2".
        def testfunction(item, d):
            if item == "2":
                raise KeyError("Invalid number %s" % item)
            return "Found %s" % item

        # Stubs so multiprocess_launch's error reporting does not require a
        # fully configured bitbake logger.
        def dummyerror(msg):
            print("ERROR: %s" % msg)
        def dummyfatal(msg):
            print("ERROR: %s" % msg)
            raise bb.BBHandledException()

        # Capture stdout/stderr so the printed traceback can be asserted on.
        @contextmanager
        def captured_output():
            new_out, new_err = StringIO(), StringIO()
            old_out, old_err = sys.stdout, sys.stderr
            try:
                sys.stdout, sys.stderr = new_out, new_err
                yield sys.stdout, sys.stderr
            finally:
                sys.stdout, sys.stderr = old_out, old_err

        d = bb.data_smart.DataSmart()
        # NOTE(review): bb.error/bb.fatal are monkeypatched globally and not
        # restored afterwards.
        bb.error = dummyerror
        bb.fatal = dummyfatal

        # Assert the function returns the right results
        result = multiprocess_launch(testfunction, ["3", "4", "5", "6"], d, extraargs=(d,))
        self.assertIn("Found 3", result)
        self.assertIn("Found 4", result)
        self.assertIn("Found 5", result)
        self.assertIn("Found 6", result)
        self.assertEqual(len(result), 4)

        # Assert the function prints exceptions
        with captured_output() as (out, err):
            self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
        self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
194
sources/poky/meta/lib/oeqa/selftest/cases/oescripts.py
Normal file
194
sources/poky/meta/lib/oeqa/selftest/cases/oescripts.py
Normal file
@@ -0,0 +1,194 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import importlib
|
||||
import unittest
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.selftest.cases.buildhistory import BuildhistoryBase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
|
||||
from oeqa.utils import CommandError
|
||||
|
||||
class BuildhistoryDiffTests(BuildhistoryBase):

    def test_buildhistory_diff(self):
        """Build the same recipe at PR r1 then r0 and check that
        buildhistory-diff reports the version downgrade in the RRECOMMENDS
        and RDEPENDS of the -dev/-staticdev packages."""
        target = 'xcursor-transparent-theme'
        self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
        # Going from r1 to r0 is a downgrade, hence expect_error.
        self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True)
        result = runCmd("oe-pkgdata-util read-value PKGV %s" % target)
        pkgv = result.output.rstrip()
        result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
        expected_endlines = [
            "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
            "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
        ]
        # Each matched expectation is removed; the for/else fires when an
        # output line matches no remaining expectation.
        for line in result.output.splitlines():
            for el in expected_endlines:
                if line.endswith(el):
                    expected_endlines.remove(el)
                    break
            else:
                self.fail('Unexpected line:\n%s\nExpected line endings:\n  %s' % (line, '\n  '.join(expected_endlines)))
        if expected_endlines:
            self.fail('Missing expected line endings:\n  %s' % '\n  '.join(expected_endlines))
@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
class OEPybootchartguyTests(OESelftestTestCase):
    """Run pybootchartgui against the buildstats of a fresh rootfs build."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Force a rootfs task run so at least one buildstats directory exists.
        bitbake("core-image-minimal -c rootfs -f")
        cls.tmpdir = get_bb_var('TMPDIR')
        # Pick the most recent buildstats directory.
        cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')

    def _generate_chart(self, fmt):
        """Render the buildstats chart in format *fmt* ('png'/'svg'/'pdf')
        and assert the output file was produced.

        Consolidates the three previously copy-pasted per-format tests."""
        runCmd('%s/pybootchartgui/pybootchartgui.py %s -o %s/charts -f %s' % (self.scripts_dir, self.buildstats, self.tmpdir, fmt))
        self.assertTrue(os.path.exists(self.tmpdir + "/charts." + fmt))

    def test_pybootchartguy_help(self):
        runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)

    def test_pybootchartguy_to_generate_build_png_output(self):
        self._generate_chart('png')

    def test_pybootchartguy_to_generate_build_svg_output(self):
        self._generate_chart('svg')

    def test_pybootchartguy_to_generate_build_pdf_output(self):
        self._generate_chart('pdf')
class OEGitproxyTests(OESelftestTestCase):
    """Tests for the oe-git-proxy connect helper script."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')

    def test_oegitproxy_help(self):
        """oe-git-proxy is a git connect helper, not a standalone tool:
        invoking it with --help must exit with status 2."""
        try:
            runCmd('%s/oe-git-proxy --help' % self.scripts_dir, assert_error=False)
            # Replaces the former assertTrue(False): fail explicitly with a
            # message when the command unexpectedly succeeds.
            self.fail("oe-git-proxy --help unexpectedly succeeded")
        except CommandError as e:
            self.assertEqual(2, e.retcode)

    def run_oegitproxy(self, custom_shell=None):
        """Drive oe-git-proxy (optionally under *custom_shell*) through the
        proxy/no-proxy decision matrix.  SOCAT is replaced with echo so the
        chosen command line is printed instead of executed.

        NOTE(review): the os.environ changes below leak into later tests;
        restoring them via addCleanup would alter observable behaviour, so
        only flagging it here."""
        os.environ['SOCAT'] = shutil.which("echo")
        os.environ['ALL_PROXY'] = "https://proxy.example.com:3128"
        os.environ['NO_PROXY'] = "*.example.com,.no-proxy.org,192.168.42.0/24,127.*.*.*"

        if custom_shell is None:
            prefix = ''
        else:
            prefix = custom_shell + ' '

        # outside, use the proxy
        res = runCmd('%s%s/oe-git-proxy host.outside-example.com 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('PROXY:', res.output)
        # match with wildcard suffix
        res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('TCP:', res.output)
        # match just suffix
        res = runCmd('%s%s/oe-git-proxy host.no-proxy.org 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('TCP:', res.output)
        # match IP subnet
        res = runCmd('%s%s/oe-git-proxy 192.168.42.42 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('TCP:', res.output)
        # match IP wildcard
        res = runCmd('%s%s/oe-git-proxy 127.1.2.3 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('TCP:', res.output)

        # test that * globbering is off
        os.environ['NO_PROXY'] = "*"
        res = runCmd('%s%s/oe-git-proxy host.example.com 9418' %
                     (prefix, self.scripts_dir))
        self.assertIn('TCP:', res.output)

    def test_oegitproxy_proxy(self):
        self.run_oegitproxy()

    def test_oegitproxy_proxy_dash(self):
        """Re-run the proxy matrix under dash, the minimal POSIX shell."""
        dash = shutil.which("dash")
        if dash is None:
            self.skipTest("No \"dash\" found on test system.")
        self.run_oegitproxy(custom_shell=dash)
class OeRunNativeTest(OESelftestTestCase):
    """Smoke-test the oe-run-native wrapper script."""

    def test_oe_run_native(self):
        # The helper must first be staged into its recipe sysroot.
        bitbake("qemu-helper-native -c addto_recipe_sysroot")
        output = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help").output
        self.assertIn("Helper function to find and exec qemu-bridge-helper", output)
class OEListPackageconfigTests(OESelftestTestCase):
    """Tests for scripts/contrib/list-packageconfig-flags.py."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')

    #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
    def check_endlines(self, results, expected_endlines):
        """Assert every expected listing appears in the script output.
        Matching ignores column order: the first whitespace-separated field
        must match and the sorted remaining fields must be contained in the
        sorted output line."""
        for line in results.output.splitlines():
            for el in expected_endlines:
                if line and line.split()[0] == el.split()[0] and \
                   ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
                    # Matched expectations are removed; removal is safe here
                    # because the inner loop breaks immediately afterwards.
                    expected_endlines.remove(el)
                    break

        if expected_endlines:
            self.fail('Missing expected listings:\n  %s' % '\n  '.join(expected_endlines))


    #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
    def test_packageconfig_flags_help(self):
        # The script must at least print its usage without error.
        runCmd('%s/contrib/list-packageconfig-flags.py -h' % self.scripts_dir)

    def test_packageconfig_flags_default(self):
        # Default mode: one line per recipe with its PACKAGECONFIG flags.
        results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
        expected_endlines = []
        expected_endlines.append("RECIPE NAME                  PACKAGECONFIG FLAGS")
        expected_endlines.append("pinentry gtk2 ncurses qt secret")
        expected_endlines.append("tar acl selinux")

        self.check_endlines(results, expected_endlines)


    def test_packageconfig_flags_option_flags(self):
        # -f: one line per flag with the recipes that use it.
        results = runCmd('%s/contrib/list-packageconfig-flags.py -f' % self.scripts_dir)
        expected_endlines = []
        expected_endlines.append("PACKAGECONFIG FLAG     RECIPE NAMES")
        expected_endlines.append("qt nativesdk-pinentry pinentry pinentry-native")
        expected_endlines.append("secret nativesdk-pinentry pinentry pinentry-native")

        self.check_endlines(results, expected_endlines)

    def test_packageconfig_flags_option_all(self):
        # -a: full PACKAGECONFIG[...] definitions for every recipe.
        results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
        expected_endlines = []
        expected_endlines.append("pinentry-1.2.1")
        expected_endlines.append("PACKAGECONFIG ncurses")
        expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
        expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
        expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
        expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")

        self.check_endlines(results, expected_endlines)

    def test_packageconfig_flags_options_preferred_only(self):
        # -p: restrict the listing to preferred recipe versions only.
        results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
        expected_endlines = []
        expected_endlines.append("RECIPE NAME                  PACKAGECONFIG FLAGS")
        expected_endlines.append("pinentry gtk2 ncurses qt secret")

        self.check_endlines(results, expected_endlines)
541
sources/poky/meta/lib/oeqa/selftest/cases/overlayfs.py
Normal file
541
sources/poky/meta/lib/oeqa/selftest/cases/overlayfs.py
Normal file
@@ -0,0 +1,541 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, runqemu, get_bb_vars
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.decorator.data import skipIfNotMachine
|
||||
|
||||
def getline_qemu(out, line):
    """Return the first line of *out* that contains *line*, or None."""
    matches = (candidate for candidate in out.split('\n') if line in candidate)
    return next(matches, None)
def getline(res, line):
    """Convenience wrapper: search a bitbake command result's output."""
    output = res.output
    return getline_qemu(output, line)
class OverlayFSTests(OESelftestTestCase):
    """Overlayfs class usage tests"""

    def add_overlay_conf_to_machine(self):
        # Machine-level configuration the overlayfs class requires.
        machine_inc = """
OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
"""
        self.set_machine_config(machine_inc)

    def test_distro_features_missing(self):
        """
        Summary:   Check that required DISTRO_FEATURES are set
        Expected:  Fail when either systemd or overlayfs are not in DISTRO_FEATURES
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user"
"""
        overlayfs_recipe_append = """
inherit overlayfs
"""
        self.write_config(config)
        self.add_overlay_conf_to_machine()
        self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)

        res = bitbake('core-image-minimal', ignore_status=True)
        # NOTE(review): 'line' is unused below; the assertions check the raw
        # output instead.
        line = getline(res, "overlayfs-user was skipped: missing required distro features")
        self.assertTrue("overlayfs" in res.output, msg=res.output)
        self.assertTrue("systemd" in res.output, msg=res.output)
        self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output)

    def test_not_all_units_installed(self):
        """
        Summary:   Test QA check that we have required mount units in the image
        Expected:  Fail because mount unit for overlay partition is not installed
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user"
DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""

        self.write_config(config)
        self.add_overlay_conf_to_machine()

        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories")
        self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
        line = getline(res, "Not all mount paths and units are installed in the image")
        self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)

    def test_not_all_units_installed_but_qa_skipped(self):
        """
        Summary:   Test skipping the QA check
        Expected:  Image is created successfully
        Author:    Claudius Heine <ch@denx.de>
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user"
DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
"""

        self.write_config(config)
        self.add_overlay_conf_to_machine()

        # Must succeed: the QA failure from the previous test is skipped.
        bitbake('core-image-minimal')

    def test_mount_unit_not_set(self):
        """
        Summary:   Test whether mount unit was set properly
        Expected:  Fail because mount unit was not set
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user"
DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""

        # Deliberately no machine config: OVERLAYFS_MOUNT_POINT is missing.
        self.write_config(config)

        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
        self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)

    def test_wrong_mount_unit_set(self):
        """
        Summary:   Test whether mount unit was set properly
        Expected:  Fail because not the correct flag used for mount unit
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user"
DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
"""

        # Machine config declares a mount point under a different flag than
        # the recipe expects (mnt-overlay).
        wrong_machine_config = """
OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
"""

        self.write_config(config)
        self.set_machine_config(wrong_machine_config)

        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration")
        self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)

    def _test_correct_image(self, recipe, data):
        """
        Summary:   Check that we can create an image when all parameters are
                   set correctly
        Expected:  Image is created successfully
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>

        Shared driver for the fstab/mount-unit variants below; *recipe* gets
        *data* appended so it provides the /mnt/overlay tmpfs mount.
        """

        config = """
IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
DISTRO_FEATURES:append = " overlayfs"

# Use systemd as init manager
INIT_MANAGER = "systemd"

# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
"""

        # Adds a second writable path plus a sample service that depends on
        # the generated overlays unit, so ordering can be asserted at runtime.
        overlayfs_recipe_append = """
OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount"

SYSTEMD_SERVICE:${PN} += " \
my-application.service \
"

do_install:append() {
install -d ${D}${systemd_system_unitdir}
cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service
[Unit]
Description=Sample application start-up unit
After=overlayfs-user-overlays.service
Requires=overlayfs-user-overlays.service

[Service]
Type=oneshot
ExecStart=/bin/true
RemainAfterExit=true

[Install]
WantedBy=multi-user.target
EOT
}
"""

        self.write_config(config)
        self.add_overlay_conf_to_machine()
        self.write_recipeinc(recipe, data)
        self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)

        bitbake('core-image-minimal')

        with runqemu('core-image-minimal') as qemu:
            # Check that application service started
            status, output = qemu.run_serial("systemctl status my-application")
            self.assertTrue("active (exited)" in output, msg=output)

            # Check that overlay mounts are dependencies of our application unit
            status, output = qemu.run_serial("systemctl list-dependencies my-application")
            self.assertTrue("overlayfs-user-overlays.service" in output, msg=output)

            status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays")
            self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output)
            self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output)

            # Check that we have /mnt/overlay fs mounted as tmpfs and
            # /usr/share/my-application as an overlay (see overlayfs-user recipe)
            status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay")

            line = getline_qemu(output, "on /mnt/overlay")
            self.assertTrue(line and line.startswith("tmpfs"), msg=output)

            line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application")
            self.assertTrue(line and line.startswith("overlay"), msg=output)

            line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount")
            self.assertTrue(line and line.startswith("overlay"), msg=output)

    @OETestTag("runqemu")
    def test_correct_image_fstab(self):
        """
        Summary:   Check that we can create an image when all parameters are
                   set correctly via fstab
        Expected:  Image is created successfully
        Author:    Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
        """

        # Provide the /mnt/overlay tmpfs through an fstab entry in base-files.
        base_files_append = """
do_install:append() {
cat <<EOT >> ${D}${sysconfdir}/fstab
tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0
EOT
}
"""

        self._test_correct_image('base-files', base_files_append)

    @OETestTag("runqemu")
    def test_correct_image_unit(self):
        """
        Summary:   Check that we can create an image when all parameters are
                   set correctly via mount unit
        Expected:  Image is created successfully
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        # Provide the /mnt/overlay tmpfs through a systemd mount unit instead.
        systemd_machine_unit_append = """
SYSTEMD_SERVICE:${PN} += " \
mnt-overlay.mount \
"

do_install:append() {
install -d ${D}${systemd_system_unitdir}
cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount
[Unit]
Description=Tmpfs directory
DefaultDependencies=no

[Mount]
What=tmpfs
Where=/mnt/overlay
Type=tmpfs
Options=mode=1777,strictatime,nosuid,nodev

[Install]
WantedBy=multi-user.target
EOT
}

"""

        self._test_correct_image('systemd-machine-units', systemd_machine_unit_append)
@OETestTag("runqemu")
|
||||
class OverlayFSEtcRunTimeTests(OESelftestTestCase):
|
||||
"""overlayfs-etc class tests"""
|
||||
|
||||
    def test_all_required_variables_set(self):
        """
        Summary:   Check that required variables are set
        Expected:  Fail when any of required variables is missing
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        configBase = """
# Use systemd as init manager
INIT_MANAGER = "systemd"

# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"

# Image configuration for overlayfs-etc
EXTRA_IMAGE_FEATURES += "overlayfs-etc"
IMAGE_FEATURES:remove = "package-management"
"""
        configMountPoint = """
OVERLAYFS_ETC_MOUNT_POINT = "/data"
"""
        configDevice = """
OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
"""

        # Missing variables are added one at a time; each build attempt must
        # fail complaining about the next unset variable.
        self.write_config(configBase)
        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
        self.assertTrue(line, msg=res.output)

        self.append_config(configMountPoint)
        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
        self.assertTrue(line, msg=res.output)

        self.append_config(configDevice)
        res = bitbake('core-image-minimal', ignore_status=True)
        line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1")
        self.assertTrue(line, msg=res.output)
    def test_image_feature_conflict(self):
        """
        Summary:   Overlayfs-etc is not allowed to be used with package-management
        Expected:  Feature conflict
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        config = """
# Use systemd as init manager
INIT_MANAGER = "systemd"

# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
EXTRA_IMAGE_FEATURES += "overlayfs-etc"
EXTRA_IMAGE_FEATURES += "package-management"
"""

        self.write_config(config)

        res = bitbake('core-image-minimal', ignore_status=True)
        # NOTE(review): 'line' is unused; the assertions check res.output.
        line = getline(res, "contains conflicting IMAGE_FEATURES")
        self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
        self.assertTrue("package-management" in res.output, msg=res.output)
    # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
    @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
    def test_image_feature_is_missing(self):
        """
        Summary:   Overlayfs-etc class is not applied when image feature is not set
        Expected:  Image is created successfully but /etc is not an overlay
        Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
        """

        # All overlayfs-etc variables are set, but the image feature itself
        # ("overlayfs-etc") is deliberately absent from EXTRA_IMAGE_FEATURES.
        config = """
# Use systemd as init manager
INIT_MANAGER = "systemd"

# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"

IMAGE_FSTYPES += "wic"
WKS_FILE = "overlayfs_etc.wks.in"

EXTRA_IMAGE_FEATURES += "read-only-rootfs"
# Image configuration for overlayfs-etc
OVERLAYFS_ETC_MOUNT_POINT = "/data"
OVERLAYFS_ETC_DEVICE = "/dev/sda3"
OVERLAYFS_ROOTFS_TYPE = "ext4"
"""

        self.write_config(config)

        bitbake('core-image-minimal')

        with runqemu('core-image-minimal', image_fstype='wic') as qemu:
            status, output = qemu.run_serial("/bin/mount")

            # No overlay upperdir for /etc may be mounted.
            line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
            self.assertFalse(line, msg=output)
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_preinit(self):
    # Boot via the preinit wrapper (original init name not kept) on ext4
    self.run_sbin_init(origInit=False, rootfsType="ext4")
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_original(self):
    # Keep the original init name on an ext4 rootfs
    self.run_sbin_init(origInit=True, rootfsType="ext4")
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_sbin_init_read_only(self):
    # Keep the original init name on a read-only squashfs rootfs
    self.run_sbin_init(origInit=True, rootfsType="squashfs")
|
||||
|
||||
def run_sbin_init(self, origInit, rootfsType):
    """
    Summary:   Confirm we can replace original init and mount overlay on top of /etc
    Expected:  Image is created successfully and /etc is mounted as an overlay
    Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>

    origInit:   True to keep the original /sbin/init name, False to boot
                through the preinit wrapper.
    rootfsType: "ext4" or "squashfs"; mount dirs are only pre-created for ext4.
    """
    config = self.get_working_config()

    args = {
        'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
        # was int(origInit == True): comparing to True is an E712 anti-pattern
        'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(bool(origInit)),
        'OVERLAYFS_ROOTFS_TYPE': rootfsType,
        'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
    }

    self.write_config(config.format(**args))

    bitbake('core-image-minimal')
    testFile = "/etc/my-test-data"

    with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu:
        status, output = qemu.run_serial("/bin/mount")

        # the overlay data partition must be mounted
        # (dead "line = getline_qemu(output, '/dev/sda3')" removed: its result was never used)
        self.assertTrue("/data" in output, msg=output)

        line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
        self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)

        # check that lower layer is not available
        status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
        line = getline_qemu(output, "No such file or directory")
        self.assertTrue(line, msg=output)

        # write into /etc and sync so the change lands in the upper layer
        status, output = qemu.run_serial("touch " + testFile)
        status, output = qemu.run_serial("sync")
        status, output = qemu.run_serial("ls -1 " + testFile)
        line = getline_qemu(output, testFile)
        self.assertTrue(line and line.startswith(testFile), msg=output)

    # Check that file exists in /etc after reboot
    with runqemu('core-image-minimal', image_fstype='wic') as qemu:
        status, output = qemu.run_serial("ls -1 " + testFile)
        line = getline_qemu(output, testFile)
        self.assertTrue(line and line.startswith(testFile), msg=output)
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_lower_layer_access(self):
    """
    Summary:   Test that lower layer of /etc is available read-only when configured
    Expected:  Can't write to lower layer. The files on lower and upper different after
               modification
    Author:    Vyacheslav Yurkov <uvv.mail@gmail.com>
    """
    testFile = "lower-layer-test.txt"

    args = {
        'OVERLAYFS_INIT_OPTION': "",
        'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
        'OVERLAYFS_ROOTFS_TYPE': "ext4",
        'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
    }

    # Extra config: expose the read-only lower layer and install the test user
    configLower = """
OVERLAYFS_ETC_EXPOSE_LOWER = "1"
IMAGE_INSTALL:append = " overlayfs-user"
"""

    config = self.get_working_config()
    self.write_config(config.format(**args))
    self.append_config(configLower)

    bitbake('core-image-minimal')

    with runqemu('core-image-minimal', image_fstype='wic') as qemu:
        # modify the file in the (writable) upper layer
        status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)

        # upper and lower copies must now differ
        status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
        for expected in ("Modified in upper", "Original file"):
            line = getline_qemu(output, expected)
            self.assertTrue(line, msg=output)

        # the lower layer must refuse writes
        status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
        line = getline_qemu(output, "Read-only file system")
        self.assertTrue(line, msg=output)
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
def test_postinst_on_target_for_read_only_rootfs(self):
    """
    Summary:  The purpose of this test case is to verify that post-installation
              on target scripts are executed even if using read-only rootfs when
              read-only-rootfs-delayed-postinsts is set
    Expected: The test files are created on first boot
    """
    # 'vars' renamed (shadowed the builtin); unused 'import oe.path' dropped —
    # sysconfdir is an absolute path, so os.path.join just appends to it.
    bb_vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
    sysconfdir = bb_vars["sysconfdir"]
    self.assertIsNotNone(sysconfdir)
    targettestdir = os.path.join(sysconfdir, "postinst-test")

    config = self.get_working_config()

    args = {
        'OVERLAYFS_INIT_OPTION': "",
        'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
        'OVERLAYFS_ROOTFS_TYPE': "ext4",
        'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
    }

    # read-only-rootfs is already set in get_working_config()
    config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n'
    config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'

    self.write_config(config.format(**args))

    # return value was unused; dead 'res =' assignment removed
    bitbake('core-image-minimal')

    with runqemu('core-image-minimal', image_fstype='wic') as qemu:
        for filename in ("rootfs", "delayed-a", "delayed-b"):
            status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
            self.assertIn("found", output, "%s was not present on boot" % filename)
|
||||
|
||||
def get_working_config(self):
    """Return the baseline overlayfs-etc local.conf fragment.

    The string is a str.format template: callers substitute
    OVERLAYFS_INIT_OPTION, OVERLAYFS_ROOTFS_TYPE,
    OVERLAYFS_ETC_CREATE_MOUNT_DIRS and OVERLAYFS_ETC_USE_ORIG_INIT_NAME.
    Literal shell braces in the rootfs postprocess functions are escaped
    as '{{' / '}}'.
    """
    template = """
# Use systemd as init manager
INIT_MANAGER = "systemd"

# enable overlayfs in the kernel
KERNEL_EXTRA_FEATURES:append = " \
features/overlayfs/overlayfs.scc \
cfg/fs/squashfs.scc"

IMAGE_FSTYPES += "wic"
OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
WKS_FILE = "overlayfs_etc.wks.in"

EXTRA_IMAGE_FEATURES += "read-only-rootfs"
# Image configuration for overlayfs-etc
EXTRA_IMAGE_FEATURES += "overlayfs-etc"
IMAGE_FEATURES:remove = "package-management"
OVERLAYFS_ETC_MOUNT_POINT = "/data"
OVERLAYFS_ETC_FSTYPE = "ext4"
OVERLAYFS_ETC_DEVICE = "/dev/sda3"
OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"

ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"

ext4_rootfs() {{
}}

squashfs_rootfs() {{
    mkdir -p ${{IMAGE_ROOTFS}}/data
}}
"""
    return template
|
||||
210
sources/poky/meta/lib/oeqa/selftest/cases/package.py
Normal file
210
sources/poky/meta/lib/oeqa/selftest/cases/package.py
Normal file
@@ -0,0 +1,210 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
|
||||
import subprocess, os
|
||||
import oe.path
|
||||
import re
|
||||
|
||||
class VersionOrdering(OESelftestTestCase):
    """Check that dpkg, opkg and rpm all agree on package version ordering.

    Each backend is driven from a native sysroot built in setUpClass.
    """

    # version1, version2, sort order (-1: v1 < v2, 0: equal, 1: v1 > v2)
    tests = (
        ("1.0", "1.0", 0),
        ("1.0", "2.0", -1),
        ("2.0", "1.0", 1),
        ("2.0-rc", "2.0", 1),
        ("2.0~rc", "2.0", -1),
        ("1.2rc2", "1.2.0", -1)
        )

    @classmethod
    def setUpClass(cls):
        super().setUpClass()

        # Build the tools we need and populate a sysroot
        bitbake("dpkg-native opkg-native rpm-native python3-native")
        bitbake("build-sysroots -c build_native_sysroot")

        # Get the paths so we can point into the sysroot correctly
        # ('vars' renamed to avoid shadowing the builtin)
        bb_vars = get_bb_vars(["STAGING_DIR", "BUILD_ARCH", "bindir_native", "libdir_native"])
        cls.staging = oe.path.join(bb_vars["STAGING_DIR"], bb_vars["BUILD_ARCH"])
        cls.bindir = oe.path.join(cls.staging, bb_vars["bindir_native"])
        cls.libdir = oe.path.join(cls.staging, bb_vars["libdir_native"])

    def setUpLocal(self):
        # Just for convenience
        self.staging = type(self).staging
        self.bindir = type(self).bindir
        self.libdir = type(self).libdir

    def _assert_ordering(self, compare_cmd):
        # Shared driver for dpkg/opkg: run the comparison with the correct
        # operator (must succeed), then with two wrong operators (must fail).
        # Factors out the code previously duplicated in test_dpkg/test_opkg.
        for ver1, ver2, sort in self.tests:
            op = { -1: "<<", 0: "=", 1: ">>" }[sort]
            status = subprocess.call(compare_cmd + (ver1, op, ver2))
            self.assertEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))

            # Now do it again but with incorrect operations
            for op in ({ -1: ">>", 0: ">>", 1: "<<" }[sort],
                       { -1: "=", 0: "<<", 1: "=" }[sort]):
                status = subprocess.call(compare_cmd + (ver1, op, ver2))
                self.assertNotEqual(status, 0, "%s %s %s failed" % (ver1, op, ver2))

    def test_dpkg(self):
        self._assert_ordering((oe.path.join(self.bindir, "dpkg"), "--compare-versions"))

    def test_opkg(self):
        self._assert_ordering((oe.path.join(self.bindir, "opkg"), "compare-versions"))

    def test_rpm(self):
        # Need to tell the Python bindings where to find its configuration
        env = os.environ.copy()
        env["RPM_CONFIGDIR"] = oe.path.join(self.libdir, "rpm")

        for ver1, ver2, sort in self.tests:
            # The only way to test rpm is via the Python module, so we need to
            # execute python3-native. labelCompare returns -1/0/1 (like strcmp)
            # so add 100 and use that as the exit code.
            command = (oe.path.join(self.bindir, "python3-native", "python3"), "-c",
                       "import sys, rpm; v1=(None, \"%s\", None); v2=(None, \"%s\", None); sys.exit(rpm.labelCompare(v1, v2) + 100)" % (ver1, ver2))
            status = subprocess.call(command, env=env)
            self.assertIn(status, (99, 100, 101))
            self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
|
||||
|
||||
class PackageTests(OESelftestTestCase):
    """Packaging behaviour tests: package-name conflicts, hardlink and
    sparse-file preservation, separated debug symbols, and file ownership."""

    # Verify that a recipe cannot rename a package into an existing one
    def test_package_name_conflict(self):
        res = bitbake("packagenameconflict", ignore_status=True)
        self.assertNotEqual(res.status, 0)
        err = "package name already exists"
        # assertIn gives a better failure message than assertTrue(x in y)
        self.assertIn(err, res.output)

    # Verify that a recipe which sets up hardlink files has those preserved into split packages
    # Also test file sparseness is preserved
    def test_preserve_sparse_hardlinks(self):
        bitbake("selftest-hardlink -c package")

        dest = get_bb_var('PKGDEST', 'selftest-hardlink')
        bindir = get_bb_var('bindir', 'selftest-hardlink')
        libdir = get_bb_var('libdir', 'selftest-hardlink')
        libexecdir = get_bb_var('libexecdir', 'selftest-hardlink')

        def checkfiles():
            # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
            # so expect 8 in total.
            self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
            self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8)

            # Check dbg version
            # 2 items, a copy in both package/packages-split so 4
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4)

            # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name
            self.assertFalse(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"))
            self.assertFalse(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"))

            # Check the staticdev libraries
            # 101 items, a copy in both package/packages-split so 202
            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202)

            # Check static dbg
            # 101 items, a copy in both package/packages-split so 202
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202)
            self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202)

            # Test a sparse file remains sparse
            sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
            self.assertEqual(sparsestat.st_blocks, 0)
            self.assertEqual(sparsestat.st_size, 1048576)

        checkfiles()

        # Clean and reinstall so its now definitely from sstate, then retest.
        bitbake("selftest-hardlink -c clean")
        bitbake("selftest-hardlink -c package")

        checkfiles()

    # Verify gdb to read symbols from separated debug hardlink file correctly
    def test_gdb_hardlink_debug(self):
        features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n'
        features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n'
        features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n'
        self.write_config(features)
        bitbake("core-image-minimal")

        def gdbtest(qemu, binary):
            """
            Check that gdb ``binary`` to read symbols from separated debug file
            """
            self.logger.info("gdbtest %s" % binary)
            status, output = qemu.run_serial('/usr/bin/gdb.sh %s' % binary, timeout=60)
            for l in output.split('\n'):
                # Check debugging symbols exists
                if '(no debugging symbols found)' in l:
                    self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
                    return False

                # Check debugging symbols works correctly. Don't look for a
                # source file as optimisation can put the breakpoint inside
                # stdio.h.
                elif "Breakpoint 1 at" in l:
                    return True

            self.logger.error("GDB result:\n%d: %s", status, output)
            return False

        with runqemu('core-image-minimal') as qemu:
            for binary in ['/usr/bin/hello1',
                           '/usr/bin/hello2',
                           '/usr/libexec/hello3',
                           '/usr/libexec/hello4']:
                if not gdbtest(qemu, binary):
                    self.fail('GDB %s failed' % binary)

    def test_preserve_ownership(self):
        features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
        self.write_config(features)
        bitbake("core-image-minimal")

        def check_ownership(qemu, expected_gid, expected_uid, path):
            # NOTE(review): run_serial appears to return 1 on success here —
            # the assert below treats status == 1 as the pass case; confirm.
            self.logger.info("Check ownership of %s", path)
            status, output = qemu.run_serial('stat -c "%U %G" ' + path)
            self.assertEqual(status, 1, "stat failed: " + output)
            try:
                uid, gid = output.split()
                self.assertEqual(uid, expected_uid)
                self.assertEqual(gid, expected_gid)
            except ValueError:
                self.fail("Cannot parse output: " + output)

        sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
        with runqemu('core-image-minimal') as qemu:
            for path in [ sysconfdir + "/selftest-chown/file",
                          sysconfdir + "/selftest-chown/dir",
                          sysconfdir + "/selftest-chown/symlink",
                          sysconfdir + "/selftest-chown/fifotest/fifo"]:
                check_ownership(qemu, "test", "test", path)
|
||||
227
sources/poky/meta/lib/oeqa/selftest/cases/pkgdata.py
Normal file
227
sources/poky/meta/lib/oeqa/selftest/cases/pkgdata.py
Normal file
@@ -0,0 +1,227 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import fnmatch
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
|
||||
|
||||
class OePkgdataUtilTests(OESelftestTestCase):
    """Exercise the oe-pkgdata-util command line tool against the pkgdata
    generated for busybox, zlib and m4."""

    @classmethod
    def setUpClass(cls):
        super(OePkgdataUtilTests, cls).setUpClass()
        # Ensure we have the right data in pkgdata
        cls.logger.info('Running bitbake to generate pkgdata')
        bitbake('target-sdk-provides-dummy -c clean')
        bitbake('busybox zlib m4')

    def test_lookup_pkg(self):
        """Map recipe-space package names to runtime names and back."""
        # Forward tests
        result = runCmd('oe-pkgdata-util lookup-pkg "zlib busybox"')
        self.assertEqual(result.output, 'libz1\nbusybox')
        result = runCmd('oe-pkgdata-util lookup-pkg zlib-dev')
        self.assertEqual(result.output, 'libz-dev')
        result = runCmd('oe-pkgdata-util lookup-pkg nonexistentpkg', ignore_status=True)
        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')
        # Reverse tests
        result = runCmd('oe-pkgdata-util lookup-pkg -r "libz1 busybox"')
        self.assertEqual(result.output, 'zlib\nbusybox')
        result = runCmd('oe-pkgdata-util lookup-pkg -r libz-dev')
        self.assertEqual(result.output, 'zlib-dev')
        result = runCmd('oe-pkgdata-util lookup-pkg -r nonexistentpkg', ignore_status=True)
        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')

    def test_read_value(self):
        """Read single pkgdata variables for a package."""
        result = runCmd('oe-pkgdata-util read-value PN libz1')
        self.assertEqual(result.output, 'zlib')
        result = runCmd('oe-pkgdata-util read-value PKG libz1')
        self.assertEqual(result.output, 'libz1')
        result = runCmd('oe-pkgdata-util read-value PKGSIZE m4')
        pkgsize = int(result.output.strip())
        self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)

    def test_find_path(self):
        """Map an installed file path back to the providing package."""
        result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
        self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
        result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
        self.assertEqual(result.output, 'm4: /usr/bin/m4')
        result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
        self.assertEqual(result.output, 'ERROR: Unable to find any package producing path /not/exist')

    def test_lookup_recipe(self):
        """Map package names back to the recipe that produced them."""
        result = runCmd('oe-pkgdata-util lookup-recipe "libz-staticdev busybox"')
        self.assertEqual(result.output, 'zlib\nbusybox')
        result = runCmd('oe-pkgdata-util lookup-recipe libz-dbg')
        self.assertEqual(result.output, 'zlib')
        result = runCmd('oe-pkgdata-util lookup-recipe nonexistentpkg', ignore_status=True)
        self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output)
        self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg')

    def test_list_pkgs(self):
        """List packages globally, per recipe, runtime-mapped and unpackaged."""
        # No arguments
        result = runCmd('oe-pkgdata-util list-pkgs')
        pkglist = result.output.split()
        self.assertIn('zlib', pkglist, "Listed packages: %s" % result.output)
        self.assertIn('zlib-dev', pkglist, "Listed packages: %s" % result.output)
        # No pkgspec, runtime
        result = runCmd('oe-pkgdata-util list-pkgs -r')
        pkglist = result.output.split()
        self.assertIn('libz-dev', pkglist, "Listed packages: %s" % result.output)
        # With recipe specified
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib')
        pkglist = sorted(result.output.split())
        try:
            pkglist.remove('zlib-ptest') # in case ptest is disabled
        except ValueError:
            pass
        self.assertEqual(pkglist, ['zlib', 'zlib-dbg', 'zlib-dev', 'zlib-doc', 'zlib-src', 'zlib-staticdev'], "Packages listed after remove: %s" % result.output)
        # With recipe specified, runtime
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r')
        pkglist = sorted(result.output.split())
        try:
            pkglist.remove('libz-ptest') # in case ptest is disabled
        except ValueError:
            pass
        self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc', 'libz-src', 'libz-staticdev', 'libz1'], "Packages listed after remove: %s" % result.output)
        # With recipe specified and unpackaged
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u')
        pkglist = sorted(result.output.split())
        self.assertIn('zlib-locale', pkglist, "Listed packages: %s" % result.output)
        # With recipe specified and unpackaged, runtime
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -u -r')
        pkglist = sorted(result.output.split())
        self.assertIn('libz-locale', pkglist, "Listed packages: %s" % result.output)
        # With recipe specified and pkgspec
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib "*-d*"')
        pkglist = sorted(result.output.split())
        self.assertEqual(pkglist, ['zlib-dbg', 'zlib-dev', 'zlib-doc'], "Packages listed: %s" % result.output)
        # With recipe specified and pkgspec, runtime
        result = runCmd('oe-pkgdata-util list-pkgs -p zlib -r "*-d*"')
        pkglist = sorted(result.output.split())
        self.assertEqual(pkglist, ['libz-dbg', 'libz-dev', 'libz-doc'], "Packages listed: %s" % result.output)

    def test_list_pkg_files(self):
        """Check per-package file lists for every lookup mode."""
        def splitoutput(output):
            # Parse "pkgname:\n\t/path" blocks into {pkg: [paths]}
            files = {}
            curpkg = None
            for line in output.splitlines():
                if line.startswith('\t'):
                    self.assertTrue(curpkg, 'Unexpected non-package line:\n%s' % line)
                    files[curpkg].append(line.strip())
                else:
                    self.assertTrue(line.rstrip().endswith(':'), 'Invalid package line in output:\n%s' % line)
                    curpkg = line.split(':')[0]
                    files[curpkg] = []
            return files
        bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
        libdir = bb_vars['libdir']
        includedir = bb_vars['includedir']
        mandir = bb_vars['mandir']
        # Test recipe-space package name
        result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc')
        files = splitoutput(result.output)
        self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
        # Test runtime package name
        result = runCmd('oe-pkgdata-util list-pkg-files -r libz1 libz-dev')
        files = splitoutput(result.output)
        self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertGreater(len(files['libz1']), 1)
        libspec = os.path.join(libdir, 'libz.so.1.*')
        found = False
        for fileitem in files['libz1']:
            if fnmatch.fnmatchcase(fileitem, libspec):
                found = True
                break
        self.assertTrue(found, 'Could not find zlib library file %s in libz1 package file list: %s' % (libspec, files['libz1']))
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
        # Test recipe
        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib')
        files = splitoutput(result.output)
        self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertNotIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" % result.output)
        # (ignore ptest, might not be there depending on config)
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
        self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
        # Test recipe, runtime
        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r')
        files = splitoutput(result.output)
        self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertNotIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
        self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])
        # Test recipe, unpackaged
        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -u')
        files = splitoutput(result.output)
        self.assertIn('zlib-dbg', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-doc', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-staticdev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('zlib-locale', list(files.keys()), "listed pkgs. files: %s" % result.output) # this is the key one
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['zlib-dev'])
        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['zlib-doc'])
        self.assertIn(os.path.join(libdir, 'libz.a'), files['zlib-staticdev'])
        # Test recipe, runtime, unpackaged
        result = runCmd('oe-pkgdata-util list-pkg-files -p zlib -r -u')
        files = splitoutput(result.output)
        self.assertIn('libz-dbg', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-doc', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-staticdev', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" % result.output)
        self.assertIn('libz-locale', list(files.keys()), "listed pkgs. files: %s" % result.output) # this is the key one
        self.assertIn(os.path.join(includedir, 'zlib.h'), files['libz-dev'])
        self.assertIn(os.path.join(mandir, 'man3/zlib.3'), files['libz-doc'])
        self.assertIn(os.path.join(libdir, 'libz.a'), files['libz-staticdev'])

    def test_glob(self):
        """Glob-expand a package list, with and without exclusions."""
        tempdir = tempfile.mkdtemp(prefix='pkgdataqa')
        self.track_for_cleanup(tempdir)
        pkglistfile = os.path.join(tempdir, 'pkglist')
        with open(pkglistfile, 'w') as f:
            f.write('libz1\n')
            f.write('busybox\n')
        result = runCmd('oe-pkgdata-util glob %s "*-dev"' % pkglistfile)
        desiredresult = ['libz-dev', 'busybox-dev']
        self.assertEqual(sorted(result.output.split()), sorted(desiredresult))
        # The following should not error (because when we use this during rootfs construction, sometimes the complementary package won't exist)
        result = runCmd('oe-pkgdata-util glob %s "*-nonexistent"' % pkglistfile)
        self.assertEqual(result.output, '')
        # Test exclude option
        result = runCmd('oe-pkgdata-util glob %s "*-dev *-dbg" -x "^libz"' % pkglistfile)
        resultlist = result.output.split()
        self.assertNotIn('libz-dev', resultlist)
        self.assertNotIn('libz-dbg', resultlist)

    def test_specify_pkgdatadir(self):
        """An explicit -p PKGDATA_DIR must behave like the default lookup."""
        result = runCmd('oe-pkgdata-util -p %s lookup-pkg zlib' % get_bb_var('PKGDATA_DIR'))
        self.assertEqual(result.output, 'libz1')

    def test_no_param(self):
        """Running with no arguments must exit 2 and print usage."""
        result = runCmd('oe-pkgdata-util', ignore_status=True)
        self.assertEqual(result.status, 2, "Status different than 2. output: %s" % result.output)
        currpos = result.output.find('usage: oe-pkgdata-util')
        # assertNotEqual instead of assertTrue(x != -1)
        self.assertNotEqual(currpos, -1, msg = "Test is Failed. Help is not Displayed in %s" % result.output)
|
||||
142
sources/poky/meta/lib/oeqa/selftest/cases/prservice.py
Normal file
142
sources/poky/meta/lib/oeqa/selftest/cases/prservice.py
Normal file
@@ -0,0 +1,142 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import datetime
|
||||
|
||||
import oeqa.utils.ftools as ftools
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
|
||||
from oeqa.utils.network import get_free_port
|
||||
|
||||
import bb.utils
|
||||
|
||||
class BitbakePrTests(OESelftestTestCase):
|
||||
|
||||
@classmethod
def setUpClass(cls):
    """Cache pkgdata and PR-service database paths for all tests."""
    super(BitbakePrTests, cls).setUpClass()
    cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')

    cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
    persistent_dir = get_bb_var('PERSISTENT_DIR')
    cls.current_db_path = os.path.join(persistent_dir, 'prserv.sqlite3')
|
||||
|
||||
def cleanup(self):
    """Stop any resident bitbake server and remove PR database artefacts."""
    # Ensure any memory resident bitbake is stopped
    bitbake("-m")
    # Remove any existing export file or prserv database
    for stale in (self.exported_db_path, self.current_db_path + "*"):
        bb.utils.remove(stale)
|
||||
|
||||
def get_pr_version(self, package_name):
|
||||
package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
|
||||
package_data = ftools.read_file(package_data_file)
|
||||
find_pr = re.search(r"PKGR: r[0-9]+\.([0-9]+)", package_data)
|
||||
self.assertTrue(find_pr, "No PKG revision found via regex 'PKGR: r[0-9]+\.([0-9]+)' in %s" % package_data_file)
|
||||
return int(find_pr.group(1))
|
||||
|
||||
def get_task_stamp(self, package_name, recipe_task):
|
||||
stampdata = get_bb_var('STAMP', target=package_name).split('/')
|
||||
prefix = stampdata[-1]
|
||||
package_stamps_path = "/".join(stampdata[:-1])
|
||||
stamps = []
|
||||
for stamp in os.listdir(package_stamps_path):
|
||||
find_stamp = re.match(r"%s\.%s\.([a-z0-9]{32})" % (re.escape(prefix), recipe_task), stamp)
|
||||
if find_stamp:
|
||||
stamps.append(find_stamp.group(1))
|
||||
self.assertFalse(len(stamps) == 0, msg="Cound not find stamp for task %s for recipe %s" % (recipe_task, package_name))
|
||||
self.assertFalse(len(stamps) > 1, msg="Found multiple %s stamps for the %s recipe in the %s directory." % (recipe_task, package_name, package_stamps_path))
|
||||
return str(stamps[0])
|
||||
|
||||
def increment_package_pr(self, package_name):
|
||||
inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
|
||||
self.write_recipeinc(package_name, inc_data)
|
||||
res = bitbake(package_name, ignore_status=True)
|
||||
self.delete_recipeinc(package_name)
|
||||
self.assertEqual(res.status, 0, msg=res.output)
|
||||
|
||||
def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
|
||||
self.cleanup()
|
||||
config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
|
||||
self.write_config(config_package_data)
|
||||
config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
|
||||
self.append_config(config_server_data)
|
||||
|
||||
def run_test_pr_service(self, package_name, package_type='rpm', track_task='do_package', pr_socket='localhost:0'):
|
||||
self.config_pr_tests(package_name, package_type, pr_socket)
|
||||
|
||||
self.increment_package_pr(package_name)
|
||||
pr_1 = self.get_pr_version(package_name)
|
||||
stamp_1 = self.get_task_stamp(package_name, track_task)
|
||||
|
||||
self.increment_package_pr(package_name)
|
||||
pr_2 = self.get_pr_version(package_name)
|
||||
stamp_2 = self.get_task_stamp(package_name, track_task)
|
||||
|
||||
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
|
||||
self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
|
||||
|
||||
self.cleanup()
|
||||
|
||||
def run_test_pr_export_import(self, package_name, replace_current_db=True):
|
||||
self.config_pr_tests(package_name)
|
||||
|
||||
self.increment_package_pr(package_name)
|
||||
pr_1 = self.get_pr_version(package_name)
|
||||
|
||||
export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
|
||||
self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
|
||||
self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
|
||||
|
||||
if replace_current_db:
|
||||
self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
|
||||
os.remove(self.current_db_path)
|
||||
|
||||
import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
|
||||
#os.remove(self.exported_db_path)
|
||||
self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
|
||||
|
||||
self.increment_package_pr(package_name)
|
||||
pr_2 = self.get_pr_version(package_name)
|
||||
|
||||
self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
|
||||
|
||||
self.cleanup()
|
||||
|
||||
def test_import_export_replace_db(self):
|
||||
self.run_test_pr_export_import('m4')
|
||||
|
||||
def test_import_export_override_db(self):
|
||||
self.run_test_pr_export_import('m4', replace_current_db=False)
|
||||
|
||||
def test_pr_service_rpm_arch_dep(self):
|
||||
self.run_test_pr_service('m4', 'rpm', 'do_package')
|
||||
|
||||
def test_pr_service_deb_arch_dep(self):
|
||||
self.run_test_pr_service('m4', 'deb', 'do_package')
|
||||
|
||||
def test_pr_service_ipk_arch_dep(self):
|
||||
self.run_test_pr_service('m4', 'ipk', 'do_package')
|
||||
|
||||
def test_pr_service_rpm_arch_indep(self):
|
||||
self.run_test_pr_service('xcursor-transparent-theme', 'rpm', 'do_package')
|
||||
|
||||
def test_pr_service_deb_arch_indep(self):
|
||||
self.run_test_pr_service('xcursor-transparent-theme', 'deb', 'do_package')
|
||||
|
||||
def test_pr_service_ipk_arch_indep(self):
|
||||
self.run_test_pr_service('xcursor-transparent-theme', 'ipk', 'do_package')
|
||||
|
||||
def test_stopping_prservice_message(self):
|
||||
port = get_free_port()
|
||||
|
||||
runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --start' % port)
|
||||
ret = runCmd('bitbake-prserv --host localhost --port %s --loglevel=DEBUG --stop' % port)
|
||||
|
||||
self.assertEqual(ret.status, 0)
|
||||
|
||||
29
sources/poky/meta/lib/oeqa/selftest/cases/pseudo.py
Normal file
29
sources/poky/meta/lib/oeqa/selftest/cases/pseudo.py
Normal file
@@ -0,0 +1,29 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
from oeqa.utils.commands import bitbake, get_test_layer
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class Pseudo(OESelftestTestCase):
    """Selftest for pseudo's interaction with Python bytecode caching."""

    def test_pseudo_pyc_creation(self):
        """.pyc files must be created outside pseudo but not inside it."""
        self.write_config("")

        layer_path = get_test_layer()
        pycache_path = os.path.join(layer_path, 'lib/__pycache__')
        if os.path.exists(pycache_path):
            shutil.rmtree(pycache_path)

        bitbake('pseudo-pyc-test -c install')

        checks = (
            ('pseudo_pyc_test1', True, 'test1 pyc file missing, should be created outside of pseudo context.'),
            ('pseudo_pyc_test2', False, 'test2 pyc file present, should not be created in pseudo context.'),
        )
        for stem, expect_present, message in checks:
            present = len(glob.glob(os.path.join(pycache_path, stem + '.*.pyc')))
            if expect_present:
                self.assertTrue(present, message)
            else:
                self.assertFalse(present, message)
|
||||
1399
sources/poky/meta/lib/oeqa/selftest/cases/recipetool.py
Normal file
1399
sources/poky/meta/lib/oeqa/selftest/cases/recipetool.py
Normal file
File diff suppressed because it is too large
Load Diff
138
sources/poky/meta/lib/oeqa/selftest/cases/recipeutils.py
Normal file
138
sources/poky/meta/lib/oeqa/selftest/cases/recipeutils.py
Normal file
@@ -0,0 +1,138 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import bb.tinfoil
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import get_test_layer
|
||||
|
||||
|
||||
def setUpModule():
    """Start a shared tinfoil instance and locate the selftest layer once
    for all tests in this module."""
    global tinfoil, metaselftestpath
    metaselftestpath = get_test_layer()
    tinfoil = bb.tinfoil.Tinfoil(tracking=True)
    tinfoil.prepare(config_only=False, quiet=2)
|
||||
|
||||
|
||||
def tearDownModule():
    """Shut down the module-wide tinfoil instance."""
    tinfoil.shutdown()
|
||||
|
||||
|
||||
class RecipeUtilsTests(OESelftestTestCase):
    """ Tests for the recipeutils module functions """

    @staticmethod
    def _joined_patch(patches):
        # Flatten the returned patch file objects into one string.
        return ''.join(line for patchfile in patches for line in patchfile)

    def test_patch_recipe_varflag(self):
        """patch_recipe must rewrite a plain variable and a varflag in the .inc."""
        import oe.recipeutils
        rd = tinfoil.parse_recipe('python3-async-test')
        vals = {'SRC_URI[md5sum]': 'aaaaaa', 'LICENSE': 'something'}
        patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-devtools/python/python-async-test.inc
+++ b/recipes-devtools/python/python-async-test.inc
@@ -1,14 +1,14 @@
 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
 HOMEPAGE = "http://github.com/gitpython-developers/async"
 SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
+LICENSE = "something"
 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
 
 inherit pypi
 
 PYPI_PACKAGE = "async"
 
-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
+SRC_URI[md5sum] = "aaaaaa"
 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
 
 RDEPENDS:${PN} += "python3-threading"
"""
        self.maxDiff = None
        self.assertEqual(self._joined_patch(patches).strip(), expected_patch.strip())

    def test_patch_recipe_singleappend(self):
        """Dropping one SRC_URI entry must remove only that += line."""
        import oe.recipeutils
        rd = tinfoil.parse_recipe('recipeutils-test')
        uris = rd.getVar('SRC_URI', False).split()
        del uris[1]
        vals = {'SRC_URI': ' '.join(uris)}
        patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
@@ -8,6 +8,4 @@
 
 BBCLASSEXTEND = "native nativesdk"
 
-SRC_URI += "file://somefile"
-
 SRC_URI:append = " file://anotherfile"
"""
        self.assertEqual(self._joined_patch(patches).strip(), expected_patch.strip())

    def test_patch_recipe_appends(self):
        """Reducing SRC_URI to its first entry must drop += and :append lines."""
        import oe.recipeutils
        rd = tinfoil.parse_recipe('recipeutils-test')
        uris = rd.getVar('SRC_URI', False).split()
        vals = {'SRC_URI': uris[0]}
        patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
@@ -8,6 +8,3 @@
 
 BBCLASSEXTEND = "native nativesdk"
 
-SRC_URI += "file://somefile"
-
-SRC_URI:append = " file://anotherfile"
"""
        self.assertEqual(self._joined_patch(patches).strip(), expected_patch.strip())

    def test_validate_pn(self):
        """validate_pn must flag reserved names, file names and bad characters."""
        import oe.recipeutils
        expected_results = {
            'test': '',
            'glib-2.0': '',
            'gtk+': '',
            'forcevariable': 'reserved',
            'pn-something': 'reserved',
            'test.bb': 'file',
            'test_one': 'character',
            'test!': 'character',
        }

        for candidate, expected in expected_results.items():
            outcome = oe.recipeutils.validate_pn(candidate)
            if expected:
                self.assertIn(expected, outcome)
            else:
                self.assertEqual(outcome, '')

    def test_split_var_value(self):
        """split_var_value must keep inline-Python expressions as one token."""
        import oe.recipeutils
        res = oe.recipeutils.split_var_value('test.1 test.2 ${@call_function("hi there world", false)} test.4')
        self.assertEqual(res, ['test.1', 'test.2', '${@call_function("hi there world", false)}', 'test.4'])
|
||||
335
sources/poky/meta/lib/oeqa/selftest/cases/reproducible.py
Normal file
335
sources/poky/meta/lib/oeqa/selftest/cases/reproducible.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
# Copyright 2019-2020 by Garmin Ltd. or its subsidiaries
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
|
||||
import bb.utils
|
||||
import functools
|
||||
import multiprocessing
|
||||
import textwrap
|
||||
import tempfile
|
||||
import shutil
|
||||
import stat
|
||||
import os
|
||||
import datetime
|
||||
|
||||
# Package-name prefixes excluded from the reproducibility comparison.
exclude_packages = [
    'rust-rustdoc',
    'rust-dbg'
]

def is_excluded(package):
    """Return the exclusion prefix matching *package*'s basename, or None."""
    name = os.path.basename(package)
    return next((prefix for prefix in exclude_packages if name.startswith(prefix)), None)

# Comparison status values for CompareResult.
MISSING = 'MISSING'
DIFFERENT = 'DIFFERENT'
SAME = 'SAME'
|
||||
|
||||
@functools.total_ordering
class CompareResult(object):
    """Outcome of comparing one file between the two builds; orders by
    (status, test path) so report buckets sort deterministically."""

    def __init__(self):
        self.reference = None
        self.test = None
        self.status = 'UNKNOWN'

    def _key(self):
        # Single source of truth for equality and ordering.
        return (self.status, self.test)

    def __eq__(self, other):
        return self._key() == other._key()

    def __lt__(self, other):
        return self._key() < other._key()
|
||||
|
||||
class PackageCompareResults(object):
    """Accumulates CompareResult objects into same/different/missing buckets
    and tracks which exclusion prefixes were actually hit."""

    def __init__(self, exclusions):
        self.total = []
        self.missing = []
        self.different = []
        self.different_excluded = []
        self.same = []
        self.active_exclusions = set()
        # Extend the module-level exclusion list with any extra
        # space-separated prefixes supplied by configuration.
        exclude_packages.extend((exclusions or "").split())

    def add_result(self, r):
        """File a single result into the appropriate bucket(s)."""
        self.total.append(r)
        if r.status == MISSING:
            self.missing.append(r)
        elif r.status == DIFFERENT:
            exclusion = is_excluded(r.reference)
            if exclusion:
                self.different_excluded.append(r)
                self.active_exclusions.add(exclusion)
            else:
                self.different.append(r)
        else:
            self.same.append(r)

    def sort(self):
        """Sort every bucket in place."""
        for bucket in (self.total, self.missing, self.different,
                       self.different_excluded, self.same):
            bucket.sort()

    def __str__(self):
        return 'same=%i different=%i different_excluded=%i missing=%i total=%i\nunused_exclusions=%s' % (len(self.same), len(self.different), len(self.different_excluded), len(self.missing), len(self.total), self.unused_exclusions())

    def unused_exclusions(self):
        """Return the configured exclusion prefixes that never matched."""
        return sorted(set(exclude_packages) - self.active_exclusions)
|
||||
|
||||
def compare_file(reference, test, diffutils_sysroot):
    """Compare *reference* against *test* with cmp(1) from the given native
    sysroot; returns a CompareResult with status MISSING/DIFFERENT/SAME."""
    result = CompareResult()
    result.reference = reference
    result.test = test

    if not os.path.exists(reference):
        result.status = MISSING
    else:
        cmp_res = runCmd(['cmp', '--quiet', reference, test], native_sysroot=diffutils_sysroot, ignore_status=True, sync=False)
        result.status = DIFFERENT if cmp_res.status else SAME
    return result
|
||||
|
||||
def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, **kwargs):
    """Run diffoscope on two directories, writing an HTML report to html_dir;
    extra keyword arguments are forwarded to runCmd."""
    cmd = ['diffoscope', '--no-default-limits',
           '--max-report-size', str(max_report_size),
           '--exclude-directory-metadata', 'yes',
           '--html-dir', html_dir, a_dir, b_dir]
    return runCmd(cmd, **kwargs)
|
||||
|
||||
class DiffoscopeTests(OESelftestTestCase):
    """Sanity checks that the diffoscope tool itself works as expected."""

    diffoscope_test_files = os.path.join(os.path.dirname(os.path.abspath(__file__)), "diffoscope")

    def test_diffoscope(self):
        bitbake("diffoscope-native -c addto_recipe_sysroot")
        diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native")

        # Identical inputs: diffoscope must exit successfully (general
        # check that the tool runs at all).
        with tempfile.TemporaryDirectory() as tmpdir:
            run_diffoscope('A', 'A', tmpdir,
                           native_sysroot=diffoscope_sysroot, cwd=self.diffoscope_test_files)

        # Differing inputs: diffoscope must fail and emit an index.html.
        with tempfile.TemporaryDirectory() as tmpdir:
            r = run_diffoscope('A', 'B', tmpdir,
                               native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=self.diffoscope_test_files)

            self.assertNotEqual(r.status, 0, msg="diffoscope was successful when an error was expected")
            self.assertTrue(os.path.exists(os.path.join(tmpdir, 'index.html')), "HTML index not found!")
|
||||
|
||||
class ReproducibleTests(OESelftestTestCase):
    # Test the reproducibility of whatever is built between sstate_targets and targets

    package_classes = ['deb', 'ipk', 'rpm']

    # Maximum report size, in bytes
    max_report_size = 250 * 1024 * 1024

    # targets are the things we want to test the reproducibility of
    # Have to add the virtual targets manually for now as builds may or may not include them as they're exclude from world
    targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt']

    # sstate targets are things to pull from sstate to potentially cut build/debugging time
    sstate_targets = []

    # When set (via OEQA_DEBUGGING_SAVED_OUTPUT), non-reproducible packages
    # are copied there for post-mortem analysis.
    save_results = False
    if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
        save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']

    # This variable controls if one of the test builds is allowed to pull from
    # an sstate cache/mirror. The other build is always done clean as a point of
    # comparison.
    # If you know that your sstate archives are reproducible, enabling this
    # will test that and also make the test run faster. If your sstate is not
    # reproducible, disable this in your derived test class
    build_from_sstate = True

    def setUpLocal(self):
        """Capture the bitbake variables the test needs and apply any
        OEQA_REPRODUCIBLE_* overrides from the configuration."""
        super().setUpLocal()
        needed_vars = [
            'TOPDIR',
            'TARGET_PREFIX',
            'BB_NUMBER_THREADS',
            'BB_HASHSERVE',
            'OEQA_REPRODUCIBLE_TEST_PACKAGE',
            'OEQA_REPRODUCIBLE_TEST_TARGET',
            'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
            'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
        ]
        bb_vars = get_bb_vars(needed_vars)
        # Expose each captured variable as a lowercase attribute.
        for v in needed_vars:
            setattr(self, v.lower(), bb_vars[v])

        if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
            self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()

        if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']:
            self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split()

        if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
            self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()

        self.extraresults = {}
        self.extraresults.setdefault('reproducible', {}).setdefault('files', {})

    def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
        """Walk test_dir and cmp every file against its counterpart in
        reference_dir in parallel; return sorted PackageCompareResults."""
        result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)

        old_cwd = os.getcwd()
        try:
            # (removed a dead 'file_result = {}' local that was never used)
            os.chdir(test_dir)
            with multiprocessing.Pool(processes=int(self.bb_number_threads or 0)) as p:
                for root, dirs, files in os.walk('.'):
                    async_result = []
                    for f in files:
                        reference_path = os.path.join(reference_dir, root, f)
                        test_path = os.path.join(test_dir, root, f)
                        async_result.append(p.apply_async(compare_file, (reference_path, test_path, diffutils_sysroot)))

                    for a in async_result:
                        result.add_result(a.get())

        finally:
            os.chdir(old_cwd)

        result.sort()
        return result

    def write_package_list(self, package_class, name, packages):
        """Record the deploy-relative paths of *packages* in the extraresults
        structure under reproducible/files/<package_class>/<name>."""
        self.extraresults['reproducible']['files'].setdefault(package_class, {})[name] = [
            p.reference.split("/./")[1] for p in packages]

    def copy_file(self, source, dest):
        """Copy source to dest, creating parent directories as needed."""
        bb.utils.mkdirhier(os.path.dirname(dest))
        shutil.copyfile(source, dest)

    def do_test_build(self, name, use_sstate):
        """Run one build under TOPDIR/<name>/tmp and return the captured
        DEPLOY_DIR_* variables for every configured package class."""
        capture_vars = ['DEPLOY_DIR_' + c.upper() for c in self.package_classes]

        tmpdir = os.path.join(self.topdir, name, 'tmp')
        if os.path.exists(tmpdir):
            bb.utils.remove(tmpdir, recurse=True)

        config = textwrap.dedent('''\
            PACKAGE_CLASSES = "{package_classes}"
            TMPDIR = "{tmpdir}"
            LICENSE_FLAGS_ACCEPTED = "commercial"
            DISTRO_FEATURES:append = ' pam'
            USERADDEXTENSION = "useradd-staticids"
            USERADD_ERROR_DYNAMIC = "skip"
            USERADD_UID_TABLES += "files/static-passwd"
            USERADD_GID_TABLES += "files/static-group"
            ''').format(package_classes=' '.join('package_%s' % c for c in self.package_classes),
                        tmpdir=tmpdir)

        if not use_sstate:
            if self.sstate_targets:
                self.logger.info("Building prebuild for %s (sstate allowed)..." % (name))
                self.write_config(config)
                bitbake(' '.join(self.sstate_targets))

            # This config fragment will disable using shared and the sstate
            # mirror, forcing a complete build from scratch
            config += textwrap.dedent('''\
                SSTATE_DIR = "${TMPDIR}/sstate"
                SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
                ''')

        self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
        self.write_config(config)
        d = get_bb_vars(capture_vars)
        # targets used to be called images
        bitbake(' '.join(getattr(self, 'images', self.targets)))
        return d

    def test_reproducible_builds(self):
        """Build twice (one build optionally from sstate, one always clean)
        and fail if any package differs between the two builds."""
        def strip_topdir(s):
            if s.startswith(self.topdir):
                return s[len(self.topdir):]
            return s

        # Build native utilities
        self.write_config('')
        bitbake("diffoscope-native diffutils-native jquery-native -c addto_recipe_sysroot")
        diffutils_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffutils-native")
        diffoscope_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "diffoscope-native")
        jquery_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "jquery-native")

        if self.save_results:
            os.makedirs(self.save_results, exist_ok=True)
            datestr = datetime.datetime.now().strftime('%Y%m%d')
            save_dir = tempfile.mkdtemp(prefix='oe-reproducible-%s-' % datestr, dir=self.save_results)
            os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
            self.logger.info('Non-reproducible packages will be copied to %s', save_dir)

        vars_A = self.do_test_build('reproducibleA', self.build_from_sstate)

        vars_B = self.do_test_build('reproducibleB', False)

        # NOTE: The temp directories from the reproducible build are purposely
        # kept after the build so it can be diffed for debugging.

        fails = []

        for c in self.package_classes:
            with self.subTest(package_class=c):
                package_class = 'package_' + c

                deploy_A = vars_A['DEPLOY_DIR_' + c.upper()]
                deploy_B = vars_B['DEPLOY_DIR_' + c.upper()]

                self.logger.info('Checking %s packages for differences...' % c)
                result = self.compare_packages(deploy_A, deploy_B, diffutils_sysroot)

                self.logger.info('Reproducibility summary for %s: %s' % (c, result))

                self.write_package_list(package_class, 'missing', result.missing)
                self.write_package_list(package_class, 'different', result.different)
                self.write_package_list(package_class, 'different_excluded', result.different_excluded)
                self.write_package_list(package_class, 'same', result.same)

                if self.save_results:
                    for d in result.different:
                        self.copy_file(d.reference, '/'.join([save_dir, 'packages', strip_topdir(d.reference)]))
                        self.copy_file(d.test, '/'.join([save_dir, 'packages', strip_topdir(d.test)]))

                    for d in result.different_excluded:
                        self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
                        self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))

                if result.different:
                    fails.append("The following %s packages are different and not in exclusion list:\n%s" %
                                 (c, '\n'.join(r.test for r in (result.different))))

                if result.missing and len(self.sstate_targets) == 0:
                    fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
                                 (c, '\n'.join(r.test for r in (result.missing))))

        # Clean up empty directories
        if self.save_results:
            if not os.listdir(save_dir):
                os.rmdir(save_dir)
            else:
                self.logger.info('Running diffoscope')
                package_dir = os.path.join(save_dir, 'packages')
                package_html_dir = os.path.join(package_dir, 'diff-html')

                # Copy jquery to improve the diffoscope output usability
                self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))

                run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size,
                               native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)

        if fails:
            self.fail('\n'.join(fails))
|
||||
|
||||
375
sources/poky/meta/lib/oeqa/selftest/cases/resulttooltests.py
Normal file
375
sources/poky/meta/lib/oeqa/selftest/cases/resulttooltests.py
Normal file
@@ -0,0 +1,375 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
|
||||
lib_path = basepath + '/scripts/lib'
|
||||
sys.path = sys.path + [lib_path]
|
||||
from resulttool.report import ResultsTextReport
|
||||
from resulttool import regression as regression
|
||||
from resulttool import resultutils as resultutils
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class ResultToolTests(OESelftestTestCase):
|
||||
base_results_data = {'base_result1': {'configuration': {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86"},
|
||||
'result': {}},
|
||||
'base_result2': {'configuration': {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86-64"},
|
||||
'result': {}}}
|
||||
target_results_data = {'target_result1': {'configuration': {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86"},
|
||||
'result': {}},
|
||||
'target_result2': {'configuration': {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86"},
|
||||
'result': {}},
|
||||
'target_result3': {'configuration': {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86-64"},
|
||||
'result': {}}}
|
||||
|
||||
def test_report_can_aggregate_test_result(self):
|
||||
result_data = {'result': {'test1': {'status': 'PASSED'},
|
||||
'test2': {'status': 'PASSED'},
|
||||
'test3': {'status': 'FAILED'},
|
||||
'test4': {'status': 'ERROR'},
|
||||
'test5': {'status': 'SKIPPED'}}}
|
||||
report = ResultsTextReport()
|
||||
result_report = report.get_aggregated_test_result(None, result_data, 'DummyMachine')
|
||||
self.assertTrue(result_report['passed'] == 2, msg="Passed count not correct:%s" % result_report['passed'])
|
||||
self.assertTrue(result_report['failed'] == 2, msg="Failed count not correct:%s" % result_report['failed'])
|
||||
self.assertTrue(result_report['skipped'] == 1, msg="Skipped count not correct:%s" % result_report['skipped'])
|
||||
|
||||
def test_regression_can_get_regression_base_target_pair(self):
|
||||
|
||||
results = {}
|
||||
resultutils.append_resultsdata(results, ResultToolTests.base_results_data)
|
||||
resultutils.append_resultsdata(results, ResultToolTests.target_results_data)
|
||||
self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
|
||||
self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
|
||||
|
||||
def test_regression_can_get_regression_result(self):
|
||||
base_result_data = {'result': {'test1': {'status': 'PASSED'},
|
||||
'test2': {'status': 'PASSED'},
|
||||
'test3': {'status': 'FAILED'},
|
||||
'test4': {'status': 'ERROR'},
|
||||
'test5': {'status': 'SKIPPED'}}}
|
||||
target_result_data = {'result': {'test1': {'status': 'PASSED'},
|
||||
'test2': {'status': 'FAILED'},
|
||||
'test3': {'status': 'PASSED'},
|
||||
'test4': {'status': 'ERROR'},
|
||||
'test5': {'status': 'SKIPPED'}}}
|
||||
result, text = regression.compare_result(self.logger, "BaseTestRunName", "TargetTestRunName", base_result_data, target_result_data)
|
||||
self.assertTrue(result['test2']['base'] == 'PASSED',
|
||||
msg="regression not correct:%s" % result['test2']['base'])
|
||||
self.assertTrue(result['test2']['target'] == 'FAILED',
|
||||
msg="regression not correct:%s" % result['test2']['target'])
|
||||
self.assertTrue(result['test3']['base'] == 'FAILED',
|
||||
msg="regression not correct:%s" % result['test3']['base'])
|
||||
self.assertTrue(result['test3']['target'] == 'PASSED',
|
||||
msg="regression not correct:%s" % result['test3']['target'])
|
||||
|
||||
def test_merge_can_merged_results(self):
|
||||
results = {}
|
||||
resultutils.append_resultsdata(results, ResultToolTests.base_results_data, configmap=resultutils.flatten_map)
|
||||
resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
|
||||
self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
|
||||
|
||||
def test_results_without_metadata_can_be_compared(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600
|
||||
}, "result": {}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600
|
||||
}, "result": {}}
|
||||
self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect metadata filtering, tests without metadata should be compared")
|
||||
|
||||
def test_target_result_with_missing_metadata_can_not_be_compared(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"OESELFTEST_METADATA": {
|
||||
"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["toolchain-user", "toolchain-system"],
|
||||
"exclude_tags": None
|
||||
}}, "result": {}}
|
||||
target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600
|
||||
}, "result": {}}
|
||||
self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect metadata filtering, tests should not be compared")
|
||||
|
||||
def test_results_with_matching_metadata_can_be_compared(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["toolchain-user", "toolchain-system"],
|
||||
"exclude_tags": None}
|
||||
}, "result": {}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["toolchain-user", "toolchain-system"],
|
||||
"exclude_tags": None}
|
||||
}, "result": {}}
|
||||
self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect metadata filtering, tests with matching metadata should be compared")
|
||||
|
||||
def test_results_with_mismatching_metadata_can_not_be_compared(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["toolchain-user", "toolchain-system"],
|
||||
"exclude_tags": None}
|
||||
}, "result": {}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "oeselftest",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["machine"],
|
||||
"exclude_tags": None}
|
||||
}, "result": {}}
|
||||
self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
|
||||
|
||||
def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
|
||||
base_configuration = {"configuration": {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["toolchain-user", "toolchain-system"],
|
||||
"exclude_tags": None}}, "result": {}}
|
||||
target_configuration = {"configuration": {"TEST_TYPE": "runtime",
|
||||
"TESTSERIES": "series1",
|
||||
"IMAGE_BASENAME": "image",
|
||||
"IMAGE_PKGTYPE": "ipk",
|
||||
"DISTRO": "mydistro",
|
||||
"MACHINE": "qemux86",
|
||||
"STARTTIME": 1672527600,
|
||||
"OESELFTEST_METADATA": {"run_all_tests": True,
|
||||
"run_tests": None,
|
||||
"skips": None,
|
||||
"machine": None,
|
||||
"select_tags": ["machine"],
|
||||
"exclude_tags": None}}, "result": {}}
|
||||
self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
|
||||
|
||||
def test_machine_matches(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"}, "result": {}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {}}
|
||||
self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect machine filtering, identical machine tests should be compared")
|
||||
|
||||
def test_machine_mismatches(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86_64"
|
||||
}, "result": {}}
|
||||
self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect machine filtering, mismatching machine tests should not be compared")
|
||||
|
||||
def test_can_not_compare_non_ltp_tests(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {
|
||||
"ltpresult_foo": {
|
||||
"status": "PASSED"
|
||||
}}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86_64"
|
||||
}, "result": {
|
||||
"bar": {
|
||||
"status": "PASSED"
|
||||
}}}
|
||||
self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
|
||||
|
||||
def test_can_compare_ltp_tests(self):
|
||||
base_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {
|
||||
"ltpresult_foo": {
|
||||
"status": "PASSED"
|
||||
}}}
|
||||
target_configuration = {"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {
|
||||
"ltpresult_foo": {
|
||||
"status": "PASSED"
|
||||
}}}
|
||||
self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
|
||||
msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
|
||||
|
||||
def test_can_match_non_static_ptest_names(self):
|
||||
base_configuration = {"a": {
|
||||
"conf_X": {
|
||||
"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {
|
||||
"ptestresult.lttng-tools.foo_-_bar_-_moo": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.babeltrace.bar_-_moo_-_foo": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.babeltrace2.moo_-_foo_-_bar": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.curl.test_0000__foo_out_of_bar": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
|
||||
"status": "PASSED"
|
||||
}
|
||||
}}}}
|
||||
target_configuration = {"a": {
|
||||
"conf_Y": {
|
||||
"configuration": {
|
||||
"TEST_TYPE": "runtime",
|
||||
"MACHINE": "qemux86"
|
||||
}, "result": {
|
||||
"ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.babeltrace.bar_-_zzz_-_xxx": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.curl.test_0000__xxx_out_of_yyy": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
|
||||
"status": "PASSED"
|
||||
},
|
||||
"ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
|
||||
"status": "PASSED"
|
||||
}
|
||||
}}}}
|
||||
regression.fixup_ptest_names(base_configuration, self.logger)
|
||||
regression.fixup_ptest_names(target_configuration, self.logger)
|
||||
result, resultstring = regression.compare_result(
|
||||
self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
|
||||
self.assertDictEqual(
|
||||
result, {}, msg=f"ptests should be compared: {resultstring}")
|
||||
@@ -0,0 +1,97 @@
|
||||
# SPDX-FileCopyrightText: Huawei Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
import os
|
||||
import oe
|
||||
import unittest
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_vars
|
||||
|
||||
class ShadowUtilsTidyFiles(OESelftestTestCase):
|
||||
"""
|
||||
Check if shadow image rootfs files are tidy.
|
||||
|
||||
The tests are focused on testing the functionality provided by the
|
||||
'tidy_shadowutils_files' rootfs postprocess command (via
|
||||
SORT_PASSWD_POSTPROCESS_COMMAND).
|
||||
"""
|
||||
|
||||
def sysconf_build(self):
|
||||
"""
|
||||
Verify if shadow tidy files tests are to be run and if yes, build a
|
||||
test image and return its sysconf rootfs path.
|
||||
"""
|
||||
|
||||
test_image = "core-image-minimal"
|
||||
|
||||
config = 'IMAGE_CLASSES += "extrausers"\n'
|
||||
config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
|
||||
config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
|
||||
self.write_config(config)
|
||||
|
||||
vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
|
||||
test_image)
|
||||
passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
|
||||
self.assertIsNotNone(passwd_postprocess_cmd)
|
||||
if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
|
||||
raise unittest.SkipTest("Testcase skipped as 'tidy_shadowutils_files' "
|
||||
"rootfs post process command is not the set SORT_PASSWD_POSTPROCESS_COMMAND.")
|
||||
|
||||
rootfs = vars["IMAGE_ROOTFS"]
|
||||
self.assertIsNotNone(rootfs)
|
||||
sysconfdir = vars["sysconfdir"]
|
||||
bitbake(test_image)
|
||||
self.assertIsNotNone(sysconfdir)
|
||||
|
||||
return oe.path.join(rootfs, sysconfdir)
|
||||
|
||||
def test_shadowutils_backup_files(self):
|
||||
"""
|
||||
Test that the rootfs doesn't include any known shadow backup files.
|
||||
"""
|
||||
|
||||
backup_files = (
|
||||
'group-',
|
||||
'gshadow-',
|
||||
'passwd-',
|
||||
'shadow-',
|
||||
'subgid-',
|
||||
'subuid-',
|
||||
)
|
||||
|
||||
rootfs_sysconfdir = self.sysconf_build()
|
||||
found = []
|
||||
for backup_file in backup_files:
|
||||
backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
|
||||
if os.path.exists(backup_filepath):
|
||||
found.append(backup_file)
|
||||
if (found):
|
||||
raise Exception('The following shadow backup files were found in '
|
||||
'the rootfs: %s' % found)
|
||||
|
||||
def test_shadowutils_sorted_files(self):
|
||||
"""
|
||||
Test that the 'passwd' and the 'group' shadow utils files are ordered
|
||||
by ID.
|
||||
"""
|
||||
|
||||
files = (
|
||||
'passwd',
|
||||
'group',
|
||||
)
|
||||
|
||||
rootfs_sysconfdir = self.sysconf_build()
|
||||
unsorted = []
|
||||
for file in files:
|
||||
filepath = oe.path.join(rootfs_sysconfdir, file)
|
||||
with open(filepath, 'rb') as f:
|
||||
ids = []
|
||||
lines = f.readlines()
|
||||
for line in lines:
|
||||
entries = line.split(b':')
|
||||
ids.append(int(entries[2]))
|
||||
if (ids != sorted(ids)):
|
||||
unsorted.append(file)
|
||||
if (unsorted):
|
||||
raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
|
||||
14
sources/poky/meta/lib/oeqa/selftest/cases/rpmtests.py
Normal file
14
sources/poky/meta/lib/oeqa/selftest/cases/rpmtests.py
Normal file
@@ -0,0 +1,14 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class BitbakeTests(OESelftestTestCase):
|
||||
|
||||
def test_rpm_filenames(self):
|
||||
test_recipe = "testrpm"
|
||||
bitbake(test_recipe)
|
||||
124
sources/poky/meta/lib/oeqa/selftest/cases/runcmd.py
Normal file
124
sources/poky/meta/lib/oeqa/selftest/cases/runcmd.py
Normal file
@@ -0,0 +1,124 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd
|
||||
from oeqa.utils import CommandError
|
||||
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
import signal
|
||||
|
||||
class MemLogger(object):
|
||||
def __init__(self):
|
||||
self.info_msgs = []
|
||||
self.error_msgs = []
|
||||
|
||||
def info(self, msg):
|
||||
self.info_msgs.append(msg)
|
||||
|
||||
def error(self, msg):
|
||||
self.error_msgs.append(msg)
|
||||
|
||||
class RunCmdTests(OESelftestTestCase):
|
||||
""" Basic tests for runCmd() utility function """
|
||||
|
||||
# The delta is intentionally smaller than the timeout, to detect cases where
|
||||
# we incorrectly apply the timeout more than once.
|
||||
TIMEOUT = 10
|
||||
DELTA = 8
|
||||
|
||||
def test_result_okay(self):
|
||||
result = runCmd("true")
|
||||
self.assertEqual(result.status, 0)
|
||||
|
||||
def test_result_false(self):
|
||||
result = runCmd("false", ignore_status=True)
|
||||
self.assertEqual(result.status, 1)
|
||||
|
||||
def test_shell(self):
|
||||
# A shell is used for all string commands.
|
||||
result = runCmd("false; true", ignore_status=True)
|
||||
self.assertEqual(result.status, 0)
|
||||
|
||||
def test_no_shell(self):
|
||||
self.assertRaises(FileNotFoundError,
|
||||
runCmd, "false; true", shell=False)
|
||||
|
||||
def test_list_not_found(self):
|
||||
self.assertRaises(FileNotFoundError,
|
||||
runCmd, ["false; true"])
|
||||
|
||||
def test_list_okay(self):
|
||||
result = runCmd(["true"])
|
||||
self.assertEqual(result.status, 0)
|
||||
|
||||
def test_result_assertion(self):
|
||||
self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
|
||||
runCmd, "echo foobar >&2; false", shell=True)
|
||||
|
||||
def test_result_exception(self):
|
||||
self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
|
||||
runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
|
||||
|
||||
def test_output(self):
|
||||
result = runCmd("echo stdout; echo stderr >&2", shell=True, sync=False)
|
||||
self.assertEqual("stdout\nstderr", result.output)
|
||||
self.assertEqual("", result.error)
|
||||
|
||||
def test_output_split(self):
|
||||
result = runCmd("echo stdout; echo stderr >&2", shell=True, stderr=subprocess.PIPE, sync=False)
|
||||
self.assertEqual("stdout", result.output)
|
||||
self.assertEqual("stderr", result.error)
|
||||
|
||||
def test_timeout(self):
|
||||
numthreads = threading.active_count()
|
||||
start = time.time()
|
||||
# Killing a hanging process only works when not using a shell?!
|
||||
result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, sync=False)
|
||||
self.assertEqual(result.status, -signal.SIGTERM)
|
||||
end = time.time()
|
||||
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
|
||||
self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
|
||||
|
||||
def test_timeout_split(self):
|
||||
numthreads = threading.active_count()
|
||||
start = time.time()
|
||||
# Killing a hanging process only works when not using a shell?!
|
||||
result = runCmd(['sleep', '60'], timeout=self.TIMEOUT, ignore_status=True, stderr=subprocess.PIPE, sync=False)
|
||||
self.assertEqual(result.status, -signal.SIGTERM)
|
||||
end = time.time()
|
||||
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
|
||||
self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
|
||||
|
||||
def test_stdin(self):
|
||||
numthreads = threading.active_count()
|
||||
result = runCmd("cat", data=b"hello world", timeout=self.TIMEOUT, sync=False)
|
||||
self.assertEqual("hello world", result.output)
|
||||
self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
|
||||
self.assertEqual(numthreads, 1)
|
||||
|
||||
def test_stdin_timeout(self):
|
||||
numthreads = threading.active_count()
|
||||
start = time.time()
|
||||
result = runCmd(['sleep', '60'], data=b"hello world", timeout=self.TIMEOUT, ignore_status=True, sync=False)
|
||||
self.assertEqual(result.status, -signal.SIGTERM)
|
||||
end = time.time()
|
||||
self.assertLess(end - start, self.TIMEOUT + self.DELTA)
|
||||
self.assertEqual(numthreads, threading.active_count(), msg="Thread counts were not equal before (%s) and after (%s), active threads: %s" % (numthreads, threading.active_count(), threading.enumerate()))
|
||||
|
||||
def test_log(self):
|
||||
log = MemLogger()
|
||||
result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, sync=False)
|
||||
self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout", "stderr"], log.info_msgs)
|
||||
self.assertEqual([], log.error_msgs)
|
||||
|
||||
def test_log_split(self):
|
||||
log = MemLogger()
|
||||
result = runCmd("echo stdout; echo stderr >&2", shell=True, output_log=log, stderr=subprocess.PIPE, sync=False)
|
||||
self.assertEqual(["Running: echo stdout; echo stderr >&2", "stdout"], log.info_msgs)
|
||||
self.assertEqual(["stderr"], log.error_msgs)
|
||||
216
sources/poky/meta/lib/oeqa/selftest/cases/runqemu.py
Normal file
216
sources/poky/meta/lib/oeqa/selftest/cases/runqemu.py
Normal file
@@ -0,0 +1,216 @@
|
||||
#
|
||||
# Copyright (c) 2017 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import oe.types
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, runqemu, get_bb_var
|
||||
|
||||
|
||||
@OETestTag("runqemu")
|
||||
class RunqemuTests(OESelftestTestCase):
|
||||
"""Runqemu test class"""
|
||||
|
||||
image_is_ready = False
|
||||
deploy_dir_image = ''
|
||||
|
||||
def setUpLocal(self):
|
||||
super(RunqemuTests, self).setUpLocal()
|
||||
self.recipe = 'core-image-minimal'
|
||||
self.machine = self.td['MACHINE']
|
||||
self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
|
||||
|
||||
self.fstypes = "ext4"
|
||||
if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
|
||||
self.fstypes += " iso hddimg"
|
||||
if self.machine == "qemux86-64":
|
||||
self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
|
||||
|
||||
self.cmd_common = "runqemu nographic"
|
||||
kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
|
||||
if kvm:
|
||||
self.cmd_common += " kvm"
|
||||
|
||||
self.write_config(
|
||||
"""
|
||||
IMAGE_FSTYPES = "%s"
|
||||
# 10 means 1 second
|
||||
SYSLINUX_TIMEOUT = "10"
|
||||
""" % self.fstypes)
|
||||
|
||||
if not RunqemuTests.image_is_ready:
|
||||
RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
bitbake(self.recipe)
|
||||
RunqemuTests.image_is_ready = True
|
||||
|
||||
def test_boot_machine(self):
|
||||
"""Test runqemu machine"""
|
||||
cmd = "%s %s" % (self.cmd_common, self.machine)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
def test_boot_machine_ext4(self):
|
||||
"""Test runqemu machine ext4"""
|
||||
cmd = "%s %s ext4" % (self.cmd_common, self.machine)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
regexp = r'\nROOTFS: .*\.ext4]\n'
|
||||
self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
|
||||
|
||||
@skipIfNotArch(['i586', 'i686', 'x86_64'])
|
||||
def test_boot_machine_iso(self):
|
||||
"""Test runqemu machine iso"""
|
||||
cmd = "%s %s iso" % (self.cmd_common, self.machine)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
text_in = 'media=cdrom'
|
||||
self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
|
||||
|
||||
def test_boot_recipe_image(self):
|
||||
"""Test runqemu recipe-image"""
|
||||
cmd = "%s %s" % (self.cmd_common, self.recipe)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
# https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
|
||||
def test_boot_recipe_image_vmdk(self):
|
||||
"""Test runqemu recipe-image vmdk"""
|
||||
cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
text_in = 'format=vmdk'
|
||||
self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
|
||||
def test_boot_recipe_image_vdi(self):
|
||||
"""Test runqemu recipe-image vdi"""
|
||||
cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
text_in = 'format=vdi'
|
||||
self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
|
||||
|
||||
def test_boot_deploy(self):
|
||||
"""Test runqemu deploy_dir_image"""
|
||||
cmd = "%s %s" % (self.cmd_common, self.deploy_dir_image)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
@skipIfNotArch(['i586', 'i686', 'x86_64'])
|
||||
def test_boot_deploy_hddimg(self):
|
||||
"""Test runqemu deploy_dir_image hddimg"""
|
||||
cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(re.search('file=.*.hddimg', f.read()), "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
def test_boot_machine_slirp(self):
|
||||
"""Test runqemu machine slirp"""
|
||||
cmd = "%s slirp %s" % (self.cmd_common, self.machine)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
|
||||
def test_boot_machine_slirp_qcow2(self):
|
||||
"""Test runqemu machine slirp qcow2"""
|
||||
cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertIn('format=qcow2', f.read(), "Failed: %s" % cmd)
|
||||
|
||||
def test_boot_qemu_boot(self):
|
||||
"""Test runqemu /path/to/image.qemuboot.conf"""
|
||||
qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
|
||||
qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
|
||||
if not os.path.exists(qemuboot_conf):
|
||||
self.skipTest("%s not found" % qemuboot_conf)
|
||||
cmd = "%s %s" % (self.cmd_common, qemuboot_conf)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
def test_boot_rootfs(self):
|
||||
"""Test runqemu /path/to/rootfs.ext4"""
|
||||
rootfs = "%s.ext4" % (self.image_link_name)
|
||||
rootfs = os.path.join(self.deploy_dir_image, rootfs)
|
||||
if not os.path.exists(rootfs):
|
||||
self.skipTest("%s not found" % rootfs)
|
||||
cmd = "%s %s" % (self.cmd_common, rootfs)
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
with open(qemu.qemurunnerlog) as f:
|
||||
self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
|
||||
|
||||
|
||||
# This test was designed as a separate class to test that shutdown
|
||||
# command will shutdown qemu as expected on each qemu architecture
|
||||
# based on the MACHINE configuration inside the config file
|
||||
# (eg. local.conf).
|
||||
#
|
||||
# This was different compared to RunqemuTests, where RunqemuTests was
|
||||
# dedicated for MACHINE=qemux86-64 where it test that qemux86-64 will
|
||||
# bootup various filesystem types, including live image(iso and hddimg)
|
||||
# where live image was not supported on all qemu architecture.
|
||||
@OETestTag("machine")
|
||||
@OETestTag("runqemu")
|
||||
class QemuTest(OESelftestTestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(QemuTest, cls).setUpClass()
|
||||
cls.recipe = 'core-image-minimal'
|
||||
cls.machine = get_bb_var('MACHINE')
|
||||
cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
|
||||
cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
|
||||
cls.cmd_common = "runqemu nographic"
|
||||
cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
|
||||
cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
|
||||
bitbake(cls.recipe)
|
||||
|
||||
def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
|
||||
# Allow the runner's LoggingThread instance to exit without errors
|
||||
# (such as the exception "Console connection closed unexpectedly")
|
||||
# as qemu will disappear when we shut it down
|
||||
qemu.runner.allowexit()
|
||||
qemu.run_serial("shutdown -h now")
|
||||
time_track = 0
|
||||
try:
|
||||
while True:
|
||||
is_alive = qemu.check()
|
||||
if not is_alive:
|
||||
return True
|
||||
if time_track > timeout:
|
||||
return False
|
||||
time.sleep(1)
|
||||
time_track += 1
|
||||
except SystemExit:
|
||||
return True
|
||||
|
||||
def test_qemu_can_shutdown(self):
|
||||
self.assertExists(self.qemuboot_conf)
|
||||
cmd = "%s %s" % (self.cmd_common, self.qemuboot_conf)
|
||||
shutdown_timeout = 120
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
|
||||
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
|
||||
|
||||
def test_qemu_can_boot_nfs_and_shutdown(self):
|
||||
rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
|
||||
rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
|
||||
self.assertExists(rootfs_tar)
|
||||
cmd = "%s %s" % (self.cmd_common, rootfs_tar)
|
||||
shutdown_timeout = 120
|
||||
with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
|
||||
qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
|
||||
self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
|
||||
486
sources/poky/meta/lib/oeqa/selftest/cases/runtime_test.py
Normal file
486
sources/poky/meta/lib/oeqa/selftest/cases/runtime_test.py
Normal file
@@ -0,0 +1,486 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
|
||||
from oeqa.core.decorator import OETestTag
|
||||
import os
|
||||
import tempfile
|
||||
import oe.lsb
|
||||
from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
|
||||
|
||||
class TestExport(OESelftestTestCase):
|
||||
|
||||
@OETestTag("runqemu")
|
||||
def test_testexport_basic(self):
|
||||
"""
|
||||
Summary: Check basic testexport functionality with only ping test enabled.
|
||||
Expected: 1. testexport directory must be created.
|
||||
2. runexported.py must run without any error/exception.
|
||||
3. ping test must succeed.
|
||||
Product: oe-core
|
||||
Author: Mariano Lopez <mariano.lopez@intel.com>
|
||||
"""
|
||||
|
||||
features = 'IMAGE_CLASSES += "testexport"\n'
|
||||
# These aren't the actual IP addresses but testexport class needs something defined
|
||||
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
|
||||
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
|
||||
features += 'TEST_SUITES = "ping"\n'
|
||||
self.write_config(features)
|
||||
|
||||
# Build tesexport for core-image-minimal
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testexport core-image-minimal')
|
||||
|
||||
testexport_dir = get_bb_var('TEST_EXPORT_DIR', 'core-image-minimal')
|
||||
|
||||
# Verify if TEST_EXPORT_DIR was created
|
||||
isdir = os.path.isdir(testexport_dir)
|
||||
self.assertEqual(True, isdir, 'Failed to create testexport dir: %s' % testexport_dir)
|
||||
|
||||
with runqemu('core-image-minimal') as qemu:
|
||||
# Attempt to run runexported.py to perform ping test
|
||||
test_path = os.path.join(testexport_dir, "oe-test")
|
||||
data_file = os.path.join(testexport_dir, 'data', 'testdata.json')
|
||||
manifest = os.path.join(testexport_dir, 'data', 'manifest')
|
||||
cmd = ("%s runtime --test-data-file %s --packages-manifest %s "
|
||||
"--target-ip %s --server-ip %s --quiet"
|
||||
% (test_path, data_file, manifest, qemu.ip, qemu.server_ip))
|
||||
result = runCmd(cmd)
|
||||
# Verify ping test was succesful
|
||||
self.assertEqual(0, result.status, 'oe-test runtime returned a non 0 status')
|
||||
|
||||
def test_testexport_sdk(self):
|
||||
"""
|
||||
Summary: Check sdk functionality for testexport.
|
||||
Expected: 1. testexport directory must be created.
|
||||
2. SDK tarball must exists.
|
||||
3. Uncompressing of tarball must succeed.
|
||||
4. Check if the SDK directory is added to PATH.
|
||||
5. Run tar from the SDK directory.
|
||||
Product: oe-core
|
||||
Author: Mariano Lopez <mariano.lopez@intel.com>
|
||||
"""
|
||||
|
||||
features = 'IMAGE_CLASSES += "testexport"\n'
|
||||
# These aren't the actual IP addresses but testexport class needs something defined
|
||||
features += 'TEST_SERVER_IP = "192.168.7.1"\n'
|
||||
features += 'TEST_TARGET_IP = "192.168.7.1"\n'
|
||||
features += 'TEST_SUITES = "ping"\n'
|
||||
features += 'TEST_EXPORT_SDK_ENABLED = "1"\n'
|
||||
features += 'TEST_EXPORT_SDK_PACKAGES = "nativesdk-tar"\n'
|
||||
self.write_config(features)
|
||||
|
||||
# Build tesexport for core-image-minimal
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testexport core-image-minimal')
|
||||
|
||||
needed_vars = ['TEST_EXPORT_DIR', 'TEST_EXPORT_SDK_DIR', 'TEST_EXPORT_SDK_NAME']
|
||||
bb_vars = get_bb_vars(needed_vars, 'core-image-minimal')
|
||||
testexport_dir = bb_vars['TEST_EXPORT_DIR']
|
||||
sdk_dir = bb_vars['TEST_EXPORT_SDK_DIR']
|
||||
sdk_name = bb_vars['TEST_EXPORT_SDK_NAME']
|
||||
|
||||
# Check for SDK
|
||||
tarball_name = "%s.sh" % sdk_name
|
||||
tarball_path = os.path.join(testexport_dir, sdk_dir, tarball_name)
|
||||
msg = "Couldn't find SDK tarball: %s" % tarball_path
|
||||
self.assertEqual(os.path.isfile(tarball_path), True, msg)
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdirname:
|
||||
# Extract SDK and run tar from SDK
|
||||
result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
|
||||
self.assertEqual(0, result.status, "Couldn't extract SDK")
|
||||
|
||||
env_script = result.output.split()[-1]
|
||||
result = runCmd(". %s; which tar" % env_script, shell=True)
|
||||
self.assertEqual(0, result.status, "Couldn't setup SDK environment")
|
||||
is_sdk_tar = True if tmpdirname in result.output else False
|
||||
self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
|
||||
|
||||
tar_sdk = result.output
|
||||
result = runCmd("%s --version" % tar_sdk)
|
||||
self.assertEqual(0, result.status, "Couldn't run tar from SDK")
|
||||
|
||||
|
||||
@OETestTag("runqemu")
|
||||
class TestImage(OESelftestTestCase):
|
||||
|
||||
def test_testimage_install(self):
|
||||
"""
|
||||
Summary: Check install packages functionality for testimage/testexport.
|
||||
Expected: 1. Import tests from a directory other than meta.
|
||||
2. Check install/uninstall of socat.
|
||||
Product: oe-core
|
||||
Author: Mariano Lopez <mariano.lopez@intel.com>
|
||||
"""
|
||||
if get_bb_var('DISTRO') == 'poky-tiny':
|
||||
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
|
||||
|
||||
features = 'IMAGE_CLASSES += "testimage"\n'
|
||||
features += 'IMAGE_INSTALL:append = " libssl"\n'
|
||||
features += 'TEST_SUITES = "ping ssh selftest"\n'
|
||||
self.write_config(features)
|
||||
|
||||
bitbake('core-image-full-cmdline socat')
|
||||
bitbake('-c testimage core-image-full-cmdline')
|
||||
|
||||
def test_testimage_slirp(self):
|
||||
"""
|
||||
Summary: Check basic testimage functionality with qemu and slirp networking.
|
||||
"""
|
||||
|
||||
features = '''
|
||||
IMAGE_CLASSES:append = " testimage"
|
||||
IMAGE_FEATURES:append = " ssh-server-dropbear"
|
||||
IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
|
||||
TEST_RUNQEMUPARAMS += " slirp"
|
||||
'''
|
||||
self.write_config(features)
|
||||
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testimage core-image-minimal')
|
||||
|
||||
def test_testimage_dnf(self):
|
||||
"""
|
||||
Summary: Check package feeds functionality for dnf
|
||||
Expected: 1. Check that remote package feeds can be accessed
|
||||
Product: oe-core
|
||||
Author: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
"""
|
||||
if get_bb_var('DISTRO') == 'poky-tiny':
|
||||
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
|
||||
|
||||
features = 'IMAGE_CLASSES += "testimage"\n'
|
||||
features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
|
||||
# We don't yet know what the server ip and port will be - they will be patched
|
||||
# in at the start of the on-image test
|
||||
features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
|
||||
features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
|
||||
features += 'PACKAGE_CLASSES = "package_rpm"\n'
|
||||
|
||||
bitbake('gnupg-native -c addto_recipe_sysroot')
|
||||
|
||||
# Enable package feed signing
|
||||
self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
|
||||
self.track_for_cleanup(self.gpg_home)
|
||||
signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
|
||||
runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
|
||||
features += 'INHERIT += "sign_package_feed"\n'
|
||||
features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
|
||||
features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
|
||||
features += 'GPG_PATH = "%s"\n' % self.gpg_home
|
||||
features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
|
||||
self.write_config(features)
|
||||
|
||||
bitbake('core-image-full-cmdline socat')
|
||||
bitbake('-c testimage core-image-full-cmdline')
|
||||
|
||||
def test_testimage_apt(self):
|
||||
"""
|
||||
Summary: Check package feeds functionality for apt
|
||||
Expected: 1. Check that remote package feeds can be accessed
|
||||
Product: oe-core
|
||||
Author: Ferry Toth <fntoth@gmail.com>
|
||||
"""
|
||||
if get_bb_var('DISTRO') == 'poky-tiny':
|
||||
self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
|
||||
|
||||
features = 'IMAGE_CLASSES += "testimage"\n'
|
||||
features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
|
||||
# We don't yet know what the server ip and port will be - they will be patched
|
||||
# in at the start of the on-image test
|
||||
features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
|
||||
features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
|
||||
features += 'PACKAGE_CLASSES = "package_deb"\n'
|
||||
# We need gnupg on the target to install keys
|
||||
features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n'
|
||||
|
||||
bitbake('gnupg-native -c addto_recipe_sysroot')
|
||||
|
||||
# Enable package feed signing
|
||||
self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
|
||||
self.track_for_cleanup(self.gpg_home)
|
||||
signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
|
||||
runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
|
||||
features += 'INHERIT += "sign_package_feed"\n'
|
||||
features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
|
||||
features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
|
||||
features += 'GPG_PATH = "%s"\n' % self.gpg_home
|
||||
features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
|
||||
self.write_config(features)
|
||||
|
||||
# Build core-image-sato and testimage
|
||||
bitbake('core-image-full-cmdline socat')
|
||||
bitbake('-c testimage core-image-full-cmdline')
|
||||
|
||||
# https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
|
||||
@skipIfNotMachine("qemux86-64", "test needs qemux86-64")
|
||||
def test_testimage_virgl_gtk_sdl(self):
|
||||
"""
|
||||
Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
|
||||
Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
|
||||
2. Check that kmscube demo runs without crashing.
|
||||
Product: oe-core
|
||||
Author: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
"""
|
||||
if "DISPLAY" not in os.environ:
|
||||
self.skipTest("virgl gtk test must be run inside a X session")
|
||||
distro = oe.lsb.distro_identifier()
|
||||
if distro and distro == 'debian-8':
|
||||
self.skipTest('virgl isn\'t working with Debian 8')
|
||||
if distro and distro == 'debian-9':
|
||||
self.skipTest('virgl isn\'t working with Debian 9')
|
||||
if distro and distro == 'centos-7':
|
||||
self.skipTest('virgl isn\'t working with Centos 7')
|
||||
if distro and distro == 'opensuseleap-15.0':
|
||||
self.skipTest('virgl isn\'t working with Opensuse 15.0')
|
||||
|
||||
qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
|
||||
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
|
||||
features = 'IMAGE_CLASSES += "testimage"\n'
|
||||
if 'gtk+' not in qemu_packageconfig:
|
||||
features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
|
||||
if 'sdl' not in qemu_packageconfig:
|
||||
features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n'
|
||||
if 'opengl' not in qemu_distrofeatures:
|
||||
features += 'DISTRO_FEATURES:append = " opengl"\n'
|
||||
features += 'TEST_SUITES = "ping ssh virgl"\n'
|
||||
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
|
||||
features += 'IMAGE_INSTALL:append = " kmscube"\n'
|
||||
features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
|
||||
self.write_config(features_gtk)
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testimage core-image-minimal')
|
||||
features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
|
||||
self.write_config(features_sdl)
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testimage core-image-minimal')
|
||||
|
||||
@skipIfNotMachine("qemux86-64", "test needs qemux86-64")
|
||||
def test_testimage_virgl_headless(self):
|
||||
"""
|
||||
Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
|
||||
Expected: 1. Check that virgl kernel driver is loaded and 3d acceleration is enabled
|
||||
2. Check that kmscube demo runs without crashing.
|
||||
Product: oe-core
|
||||
Author: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
"""
|
||||
import subprocess, os
|
||||
|
||||
distro = oe.lsb.distro_identifier()
|
||||
if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or
|
||||
distro.startswith('almalinux') or distro.startswith('rocky')):
|
||||
self.skipTest('virgl headless cannot be tested with %s' %(distro))
|
||||
|
||||
qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
|
||||
features = 'IMAGE_CLASSES += "testimage"\n'
|
||||
if 'opengl' not in qemu_distrofeatures:
|
||||
features += 'DISTRO_FEATURES:append = " opengl"\n'
|
||||
features += 'TEST_SUITES = "ping ssh virgl"\n'
|
||||
features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
|
||||
features += 'IMAGE_INSTALL:append = " kmscube"\n'
|
||||
features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
|
||||
self.write_config(features)
|
||||
bitbake('core-image-minimal')
|
||||
bitbake('-c testimage core-image-minimal')
|
||||
|
||||
@OETestTag("runqemu")
|
||||
class Postinst(OESelftestTestCase):
|
||||
|
||||
def init_manager_loop(self, init_manager):
|
||||
import oe.path
|
||||
|
||||
vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
|
||||
rootfs = vars["IMAGE_ROOTFS"]
|
||||
self.assertIsNotNone(rootfs)
|
||||
sysconfdir = vars["sysconfdir"]
|
||||
self.assertIsNotNone(sysconfdir)
|
||||
# Need to use oe.path here as sysconfdir starts with /
|
||||
hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
|
||||
targettestdir = os.path.join(sysconfdir, "postinst-test")
|
||||
|
||||
for classes in ("package_rpm", "package_deb", "package_ipk"):
|
||||
with self.subTest(init_manager=init_manager, package_class=classes):
|
||||
features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
|
||||
features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
|
||||
features += 'PACKAGE_CLASSES = "%s"\n' % classes
|
||||
if init_manager == "systemd":
|
||||
features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n'
|
||||
features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
|
||||
features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
|
||||
features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
|
||||
self.write_config(features)
|
||||
|
||||
bitbake('core-image-minimal')
|
||||
|
||||
self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs")),
|
||||
"rootfs state file was not created")
|
||||
|
||||
with runqemu('core-image-minimal') as qemu:
|
||||
# Make the test echo a string and search for that as
|
||||
# run_serial()'s status code is useless.'
|
||||
for filename in ("rootfs", "delayed-a", "delayed-b"):
|
||||
status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
|
||||
self.assertIn("found", output, "%s was not present on boot" % filename)
|
||||
|
||||
|
||||
|
||||
@skipIfNotQemu()
|
||||
def test_postinst_rootfs_and_boot_sysvinit(self):
|
||||
"""
|
||||
Summary: The purpose of this test case is to verify Post-installation
|
||||
scripts are called when rootfs is created and also test
|
||||
that script can be delayed to run at first boot.
|
||||
Dependencies: NA
|
||||
Steps: 1. Add proper configuration to local.conf file
|
||||
2. Build a "core-image-minimal" image
|
||||
3. Verify that file created by postinst_rootfs recipe is
|
||||
present on rootfs dir.
|
||||
4. Boot the image created on qemu and verify that the file
|
||||
created by postinst_boot recipe is present on image.
|
||||
Expected: The files are successfully created during rootfs and boot
|
||||
time for 3 different package managers: rpm,ipk,deb and
|
||||
for initialization managers: sysvinit.
|
||||
|
||||
"""
|
||||
self.init_manager_loop("sysvinit")
|
||||
|
||||
|
||||
@skipIfNotQemu()
|
||||
def test_postinst_rootfs_and_boot_systemd(self):
|
||||
"""
|
||||
Summary: The purpose of this test case is to verify Post-installation
|
||||
scripts are called when rootfs is created and also test
|
||||
that script can be delayed to run at first boot.
|
||||
Dependencies: NA
|
||||
Steps: 1. Add proper configuration to local.conf file
|
||||
2. Build a "core-image-minimal" image
|
||||
3. Verify that file created by postinst_rootfs recipe is
|
||||
present on rootfs dir.
|
||||
4. Boot the image created on qemu and verify that the file
|
||||
created by postinst_boot recipe is present on image.
|
||||
Expected: The files are successfully created during rootfs and boot
|
||||
time for 3 different package managers: rpm,ipk,deb and
|
||||
for initialization managers: systemd.
|
||||
|
||||
"""
|
||||
|
||||
self.init_manager_loop("systemd")
|
||||
|
||||
|
||||
def test_failing_postinst(self):
|
||||
"""
|
||||
Summary: The purpose of this test case is to verify that post-installation
|
||||
scripts that contain errors are properly reported.
|
||||
Expected: The scriptlet failure is properly reported.
|
||||
The file that is created after the error in the scriptlet is not present.
|
||||
Product: oe-core
|
||||
Author: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
"""
|
||||
|
||||
import oe.path
|
||||
|
||||
vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
|
||||
rootfs = vars["IMAGE_ROOTFS"]
|
||||
self.assertIsNotNone(rootfs)
|
||||
sysconfdir = vars["sysconfdir"]
|
||||
self.assertIsNotNone(sysconfdir)
|
||||
# Need to use oe.path here as sysconfdir starts with /
|
||||
hosttestdir = oe.path.join(rootfs, sysconfdir, "postinst-test")
|
||||
|
||||
for classes in ("package_rpm", "package_deb", "package_ipk"):
|
||||
with self.subTest(package_class=classes):
|
||||
features = 'CORE_IMAGE_EXTRA_INSTALL = "postinst-rootfs-failing"\n'
|
||||
features += 'PACKAGE_CLASSES = "%s"\n' % classes
|
||||
self.write_config(features)
|
||||
bb_result = bitbake('core-image-minimal', ignore_status=True)
|
||||
self.assertGreaterEqual(bb_result.output.find("Postinstall scriptlets of ['postinst-rootfs-failing'] have failed."), 0,
|
||||
"Warning about a failed scriptlet not found in bitbake output: %s" %(bb_result.output))
|
||||
|
||||
self.assertTrue(os.path.isfile(os.path.join(hosttestdir, "rootfs-before-failure")),
|
||||
"rootfs-before-failure file was not created")
|
||||
self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
|
||||
"rootfs-after-failure file was created")
|
||||
|
||||
@OETestTag("runqemu")
|
||||
class SystemTap(OESelftestTestCase):
|
||||
"""
|
||||
Summary: The purpose of this test case is to verify native crosstap
|
||||
works while talking to a target.
|
||||
Expected: The script should successfully connect to the qemu machine
|
||||
and run some systemtap examples on a qemu machine.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(SystemTap, cls).setUpClass()
|
||||
cls.image = "core-image-minimal"
|
||||
|
||||
def default_config(self):
|
||||
return """
|
||||
# These aren't the actual IP addresses but testexport class needs something defined
|
||||
TEST_SERVER_IP = "192.168.7.1"
|
||||
TEST_TARGET_IP = "192.168.7.2"
|
||||
|
||||
EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
|
||||
IMAGE_FEATURES:append = " ssh-server-dropbear"
|
||||
|
||||
# enables kernel debug symbols
|
||||
KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
|
||||
KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc"
|
||||
|
||||
# add systemtap run-time into target image if it is not there yet
|
||||
IMAGE_INSTALL:append = " systemtap-runtime"
|
||||
"""
|
||||
|
||||
def test_crosstap_helloworld(self):
|
||||
self.write_config(self.default_config())
|
||||
bitbake('systemtap-native')
|
||||
systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
|
||||
bitbake(self.image)
|
||||
|
||||
with runqemu(self.image) as qemu:
|
||||
cmd = "crosstap -r root@192.168.7.2 -s %s/general/helloworld.stp " % systemtap_examples
|
||||
result = runCmd(cmd)
|
||||
self.assertEqual(0, result.status, 'crosstap helloworld returned a non 0 status:%s' % result.output)
|
||||
|
||||
def test_crosstap_pstree(self):
|
||||
self.write_config(self.default_config())
|
||||
|
||||
bitbake('systemtap-native')
|
||||
systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
|
||||
bitbake(self.image)
|
||||
|
||||
with runqemu(self.image) as qemu:
|
||||
cmd = "crosstap -r root@192.168.7.2 -s %s/process/pstree.stp" % systemtap_examples
|
||||
result = runCmd(cmd)
|
||||
self.assertEqual(0, result.status, 'crosstap pstree returned a non 0 status:%s' % result.output)
|
||||
|
||||
def test_crosstap_syscalls_by_proc(self):
|
||||
self.write_config(self.default_config())
|
||||
|
||||
bitbake('systemtap-native')
|
||||
systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
|
||||
bitbake(self.image)
|
||||
|
||||
with runqemu(self.image) as qemu:
|
||||
cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_proc.stp" % systemtap_examples
|
||||
result = runCmd(cmd)
|
||||
self.assertEqual(0, result.status, 'crosstap syscalls_by_proc returned a non 0 status:%s' % result.output)
|
||||
|
||||
def test_crosstap_syscalls_by_pid(self):
|
||||
self.write_config(self.default_config())
|
||||
|
||||
bitbake('systemtap-native')
|
||||
systemtap_examples = os.path.join(get_bb_var("WORKDIR","systemtap-native"), "usr/share/systemtap/examples")
|
||||
bitbake(self.image)
|
||||
|
||||
with runqemu(self.image) as qemu:
|
||||
cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples
|
||||
result = runCmd(cmd)
|
||||
self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
|
||||
228
sources/poky/meta/lib/oeqa/selftest/cases/rust.py
Normal file
228
sources/poky/meta/lib/oeqa/selftest/cases/rust.py
Normal file
@@ -0,0 +1,228 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
from oeqa.core.decorator import OETestTag
|
||||
from oeqa.core.decorator.data import skipIfArch
|
||||
from oeqa.core.case import OEPTestResultTestCase
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command
|
||||
from oeqa.utils.sshcontrol import SSHControl
|
||||
|
||||
def parse_results(filename):
    """Parse a rust test-suite log into a {test_name: result} mapping.

    Only lines shaped like ``test [suite] path ... outcome`` are considered.
    Outcomes are normalised to PASS/FAIL/SKIPPED; duplicate entries are
    reported on stdout rather than overwriting earlier results.
    """
    outcome_map = {"ok": "PASS", "failed": "FAIL"}
    tests = {}
    with open(filename, "r") as log:
        for line in log:
            # Skip anything that is not a per-test result line.
            if "..." not in line or "test [" not in line:
                continue
            name = line.split("test ")[1].split(" ... ")[0]
            if "] " in name:
                # Drop the leading "[suite] " tag.
                name = name.split("] ", 1)[1]
            raw = line.split(" ... ")[1].strip()
            if raw in outcome_map:
                outcome = outcome_map[raw]
            elif "ignored" in raw:
                outcome = "SKIPPED"
            else:
                outcome = raw
            previous = tests.get(name)
            if previous is None:
                tests[name] = outcome
            elif previous != outcome:
                print("Duplicate and mismatching result %s for %s" % (outcome, name))
            else:
                print("Duplicate result %s for %s" % (outcome, name))
    return tests
|
||||
|
||||
# Total time taken for testing is of about 2hr 20min, with PARALLEL_MAKE set to 40 number of jobs.
@OETestTag("toolchain-system")
@OETestTag("toolchain-user")
@OETestTag("runqemu")
class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
    """Run the upstream rust test suite against a qemu target.

    Builds rust's test infrastructure on the host, boots core-image-minimal
    under qemu, pushes remote-test-server to the target and drives the suite
    via bootstrap.py, feeding per-test outcomes into the ptest result store.
    """

    @skipIfArch(['mips', 'mips64'])
    def test_rust(self, *args, **kwargs):
        # Toggle kept for quickly disabling this very long-running selftest.
        #self.skipTest("The Rust Oe-selftest is disabled.")

        # build remote-test-server before image build
        recipe = "rust"
        start_time = time.time()
        bitbake("{} -c test_compile".format(recipe))
        builddir = get_bb_var("RUSTSRC", "rust")
        # build core-image-minimal with required packages
        default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
        features = []
        features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
        features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
        self.write_config("\n".join(features))
        bitbake("core-image-minimal")

        # Exclude the test folders that error out while building
        # TODO: Fix the errors and include them for testing
        # no-fail-fast: Run all tests regardless of failure.
        # bless: First runs rustfmt to format the codebase,
        # then runs tidy checks.
        exclude_list = [
            'compiler/rustc',
            'compiler/rustc_interface/src/tests.rs',
            'library/panic_abort',
            'library/panic_unwind',
            'library/test/src/stats/tests.rs',
            'src/bootstrap/builder/tests.rs',
            'src/doc/rustc',
            'src/doc/rustdoc',
            'src/doc/unstable-book',
            'src/librustdoc',
            'src/rustdoc-json-types',
            'src/tools/compiletest/src/common.rs',
            'src/tools/lint-docs',
            'src/tools/rust-analyzer',
            'src/tools/rustdoc-themes',
            'src/tools/tidy',
            'tests/assembly/asm/aarch64-outline-atomics.rs',
            'tests/codegen/abi-main-signature-32bit-c-int.rs',
            'tests/codegen/abi-repr-ext.rs',
            'tests/codegen/abi-x86-interrupt.rs',
            'tests/codegen/branch-protection.rs',
            'tests/codegen/catch-unwind.rs',
            'tests/codegen/cf-protection.rs',
            'tests/codegen/enum-bounds-check-derived-idx.rs',
            'tests/codegen/force-unwind-tables.rs',
            'tests/codegen/intrinsic-no-unnamed-attr.rs',
            'tests/codegen/issues/issue-103840.rs',
            'tests/codegen/issues/issue-47278.rs',
            'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
            'tests/codegen/lifetime_start_end.rs',
            'tests/codegen/local-generics-in-exe-internalized.rs',
            'tests/codegen/match-unoptimized.rs',
            'tests/codegen/noalias-rwlockreadguard.rs',
            'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
            'tests/codegen/noreturn-uninhabited.rs',
            'tests/codegen/repr-transparent-aggregates-3.rs',
            'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
            'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
            'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
            'tests/codegen/sse42-implies-crc32.rs',
            'tests/codegen/thread-local.rs',
            'tests/codegen/uninit-consts.rs',
            'tests/pretty/raw-str-nonexpr.rs',
            'tests/run-make',
            'tests/run-make-fulldeps',
            'tests/rustdoc',
            'tests/rustdoc-json',
            'tests/rustdoc-js-std',
            'tests/rustdoc-ui/cfg-test.rs',
            'tests/rustdoc-ui/check-cfg-test.rs',
            'tests/rustdoc-ui/display-output.rs',
            'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
            'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
            'tests/rustdoc-ui/doc-test-doctest-feature.rs',
            'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
            'tests/rustdoc-ui/doctest-output.rs',
            'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
            'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
            'tests/rustdoc-ui/issue-80992.rs',
            'tests/rustdoc-ui/issue-91134.rs',
            'tests/rustdoc-ui/nocapture-fail.rs',
            'tests/rustdoc-ui/nocapture.rs',
            'tests/rustdoc-ui/no-run-flag.rs',
            'tests/rustdoc-ui/run-directory.rs',
            'tests/rustdoc-ui/test-no_std.rs',
            'tests/rustdoc-ui/test-type.rs',
            'tests/rustdoc/unit-return.rs',
            'tests/ui/abi/stack-probes-lto.rs',
            'tests/ui/abi/stack-probes.rs',
            'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs',
            'tests/ui/asm/x86_64/sym.rs',
            'tests/ui/associated-type-bounds/fn-apit.rs',
            'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
            'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
            'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
            'tests/ui/drop/dynamic-drop.rs',
            'tests/ui/empty_global_asm.rs',
            'tests/ui/functions-closures/fn-help-with-err.rs',
            'tests/ui/linkage-attr/issue-10755.rs',
            'tests/ui/macros/restricted-shadowing-legacy.rs',
            'tests/ui/process/nofile-limit.rs',
            'tests/ui/process/process-panic-after-fork.rs',
            'tests/ui/process/process-sigpipe.rs',
            'tests/ui/simd/target-feature-mixup.rs',
            'tests/ui/structs-enums/multiple-reprs.rs',
            'src/tools/jsondoclint',
            'src/tools/replace-version-placeholder',
            'tests/codegen/abi-efiapi.rs',
            'tests/codegen/abi-sysv64.rs',
            'tests/codegen/align-byval.rs',
            'tests/codegen/align-fn.rs',
            'tests/codegen/asm-powerpc-clobbers.rs',
            'tests/codegen/async-fn-debug-awaitee-field.rs',
            'tests/codegen/binary-search-index-no-bound-check.rs',
            'tests/codegen/call-metadata.rs',
            'tests/codegen/debug-column.rs',
            'tests/codegen/debug-limited.rs',
            'tests/codegen/debuginfo-generic-closure-env-names.rs',
            'tests/codegen/drop.rs',
            'tests/codegen/dst-vtable-align-nonzero.rs',
            'tests/codegen/enable-lto-unit-splitting.rs',
            'tests/codegen/enum/enum-u128.rs',
            'tests/codegen/fn-impl-trait-self.rs',
            'tests/codegen/inherit_overflow.rs',
            'tests/codegen/inline-function-args-debug-info.rs',
            'tests/codegen/intrinsics/mask.rs',
            'tests/codegen/intrinsics/transmute-niched.rs',
            'tests/codegen/issues/issue-73258.rs',
            'tests/codegen/issues/issue-75546.rs',
            'tests/codegen/issues/issue-77812.rs',
            'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
            'tests/codegen/llvm-ident.rs',
            'tests/codegen/mainsubprogram.rs',
            'tests/codegen/move-operands.rs',
            'tests/codegen/repr/transparent-mips64.rs',
            'tests/mir-opt/',
            'tests/rustdoc-json',
            'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
            'tests/rustdoc-ui/no-run-flag.rs',
            'tests/ui-fulldeps/',
            'tests/ui/numbers-arithmetic/u128.rs'
        ]

        exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
        # Add exclude_fail_tests with other test arguments
        testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"

        # wrap the execution with a qemu instance.
        # Tests are run with 512 tasks in parallel to execute all tests very quickly
        with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
            # Copy remote-test-server to image through scp
            host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
            ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
            ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/")
            # Execute remote-test-server on image through background ssh
            command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
            sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Get the values of variables.
            tcpath = get_bb_var("TARGET_SYS", "rust")
            targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
            rustlibpath = get_bb_var("WORKDIR", "rust")
            tmpdir = get_bb_var("TMPDIR", "rust")

            # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
            cmd = "export TARGET_VENDOR=\"-poky\";"
            cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir)
            cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
            # Trigger testing.
            cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
            cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
            retval = runCmd(cmd)
            end_time = time.time()

            # Persist the raw suite output, then fold per-test outcomes into
            # the ptest result store.
            resultlog = rustlibpath + "/results-log.txt"
            with open(resultlog, "w") as f:
                f.write(retval.output)

            ptestsuite = "rust"
            self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
            test_results = parse_results(resultlog)
            for test in test_results:
                self.ptest_result(ptestsuite, test, test_results[test])
|
||||
54
sources/poky/meta/lib/oeqa/selftest/cases/selftest.py
Normal file
54
sources/poky/meta/lib/oeqa/selftest/cases/selftest.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import importlib
|
||||
import oeqa.selftest
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class ExternalLayer(OESelftestTestCase):
    """Check that oe-selftest can import test cases from external layers."""

    def test_list_imported(self):
        """
        Summary: Checks functionality to import tests from other layers.
        Expected: 1. File "external-layer.py" must be in
                     oeqa.selftest.__path__
                  2. test_unconditional_pass method must exist
                     in ImportedTests class
        Product: oe-core
        Author: Mariano Lopez <mariano.lopez@intel.com>
        """

        test_file = "external-layer.py"
        test_module = "oeqa.selftest.cases.external-layer"
        method_name = "test_unconditional_pass"

        # Check if "external-layer.py" is in oeqa path
        found_file = search_test_file(test_file)
        self.assertTrue(found_file, msg="Can't find %s in the oeqa path" % test_file)

        # Import oeqa.selftest.external-layer module and search for
        # test_unconditional_pass method of ImportedTests class
        found_method = search_method(test_module, method_name)
        # Bug fix: assert on the search result. The original asserted on
        # method_name, a non-empty string, which is always truthy and so
        # could never fail.
        self.assertTrue(found_method, msg="Can't find %s method" % method_name)
|
||||
|
||||
def search_test_file(file_name):
    """Return True if *file_name* exists anywhere under oeqa.selftest.__path__.

    Walks every directory on the oeqa.selftest package path; used to confirm
    that externally-layered test files are visible to the selftest loader.
    """
    # Bug fix: this module does not import 'os' at file level, so the
    # original os.walk call raised NameError at runtime. Import locally to
    # keep the helper self-contained.
    import os
    for layer_path in oeqa.selftest.__path__:
        for _, _, files in os.walk(layer_path):
            # Membership test replaces the manual equality loop.
            if file_name in files:
                return True
    return False
|
||||
|
||||
def search_method(module, method):
    """Return True if *module* defines an OESelftestTestCase subclass that
    provides an attribute named *method*."""
    modlib = importlib.import_module(module)
    case_meta = type(OESelftestTestCase)
    for candidate in vars(modlib).values():
        # Only inspect test-case classes; other module attributes are skipped.
        if not (isinstance(candidate, case_meta) and issubclass(candidate, OESelftestTestCase)):
            continue
        if method in dir(candidate):
            return True
    return False
|
||||
|
||||
228
sources/poky/meta/lib/oeqa/selftest/cases/signing.py
Normal file
228
sources/poky/meta/lib/oeqa/selftest/cases/signing.py
Normal file
@@ -0,0 +1,228 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, create_temp_layer
|
||||
import os
|
||||
import oe
|
||||
import glob
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from oeqa.utils.ftools import write_file
|
||||
|
||||
|
||||
class Signing(OESelftestTestCase):
|
||||
|
||||
gpg_dir = ""
|
||||
pub_key_path = ""
|
||||
secret_key_path = ""
|
||||
|
||||
    def setup_gpg(self):
        """Create a throwaway GPG homedir and import the selftest keypair.

        Builds gnupg-native into its recipe sysroot, imports key.pub and
        key.secret from the test layer, and returns the native sysroot's
        bindir so callers can put the gnupg tools on PATH.
        """
        bitbake('gnupg-native -c addto_recipe_sysroot')

        # Isolated GPG home, removed automatically at test cleanup.
        self.gpg_dir = tempfile.mkdtemp(prefix="oeqa-signing-")
        self.track_for_cleanup(self.gpg_dir)

        self.pub_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.pub")
        self.secret_key_path = os.path.join(self.testlayer_path, 'files', 'signing', "key.secret")

        nsysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native")

        # gpg's --agent-program uses '|' to separate the agent binary from
        # its options; the backslash stops the shell from treating it as a
        # pipe. Passes --auto-expand-secmem to the spawned gpg-agent.
        runCmd('gpg --agent-program=`which gpg-agent`\|--auto-expand-secmem --batch --homedir %s --import %s %s' % (self.gpg_dir, self.pub_key_path, self.secret_key_path), native_sysroot=nsysroot)
        return nsysroot + get_bb_var("bindir_native")
|
||||
|
||||
|
||||
@contextmanager
|
||||
def create_new_builddir(self, builddir, newbuilddir):
|
||||
bb.utils.mkdirhier(newbuilddir)
|
||||
oe.path.copytree(builddir + "/conf", newbuilddir + "/conf")
|
||||
oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
|
||||
|
||||
origenv = os.environ.copy()
|
||||
|
||||
for e in os.environ:
|
||||
if builddir + "/" in os.environ[e]:
|
||||
os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
|
||||
if os.environ[e].endswith(builddir):
|
||||
os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
|
||||
|
||||
os.chdir(newbuilddir)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for e in origenv:
|
||||
os.environ[e] = origenv[e]
|
||||
os.chdir(builddir)
|
||||
|
||||
def test_signing_packages(self):
|
||||
"""
|
||||
Summary: Test that packages can be signed in the package feed
|
||||
Expected: Package should be signed with the correct key
|
||||
Expected: Images can be created from signed packages
|
||||
Product: oe-core
|
||||
Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
Author: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
"""
|
||||
import oe.packagedata
|
||||
|
||||
self.setup_gpg()
|
||||
|
||||
package_classes = get_bb_var('PACKAGE_CLASSES')
|
||||
if 'package_rpm' not in package_classes:
|
||||
self.skipTest('This test requires RPM Packaging.')
|
||||
|
||||
test_recipe = 'ed'
|
||||
|
||||
feature = 'INHERIT += "sign_rpm"\n'
|
||||
feature += 'RPM_GPG_PASSPHRASE = "test123"\n'
|
||||
feature += 'RPM_GPG_NAME = "testuser"\n'
|
||||
feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
|
||||
|
||||
self.write_config(feature)
|
||||
|
||||
bitbake('-c clean %s' % test_recipe)
|
||||
bitbake('-f -c package_write_rpm %s' % test_recipe)
|
||||
|
||||
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
|
||||
|
||||
needed_vars = ['PKGDATA_DIR', 'DEPLOY_DIR_RPM', 'PACKAGE_ARCH', 'STAGING_BINDIR_NATIVE']
|
||||
bb_vars = get_bb_vars(needed_vars, test_recipe)
|
||||
pkgdatadir = bb_vars['PKGDATA_DIR']
|
||||
pkgdata = oe.packagedata.read_pkgdatafile(pkgdatadir + "/runtime/ed")
|
||||
if 'PKGE' in pkgdata:
|
||||
pf = pkgdata['PN'] + "-" + pkgdata['PKGE'] + pkgdata['PKGV'] + '-' + pkgdata['PKGR']
|
||||
else:
|
||||
pf = pkgdata['PN'] + "-" + pkgdata['PKGV'] + '-' + pkgdata['PKGR']
|
||||
deploy_dir_rpm = bb_vars['DEPLOY_DIR_RPM']
|
||||
package_arch = bb_vars['PACKAGE_ARCH'].replace('-', '_')
|
||||
staging_bindir_native = bb_vars['STAGING_BINDIR_NATIVE']
|
||||
|
||||
pkg_deploy = os.path.join(deploy_dir_rpm, package_arch, '.'.join((pf, package_arch, 'rpm')))
|
||||
|
||||
# Use a temporary rpmdb
|
||||
rpmdb = tempfile.mkdtemp(prefix='oeqa-rpmdb')
|
||||
|
||||
runCmd('%s/rpmkeys --define "_dbpath %s" --import %s' %
|
||||
(staging_bindir_native, rpmdb, self.pub_key_path))
|
||||
|
||||
ret = runCmd('%s/rpmkeys --define "_dbpath %s" --checksig %s' %
|
||||
(staging_bindir_native, rpmdb, pkg_deploy))
|
||||
# tmp/deploy/rpm/i586/ed-1.9-r0.i586.rpm: rsa sha1 md5 OK
|
||||
self.assertIn('digests signatures OK', ret.output, 'Package signed incorrectly.')
|
||||
shutil.rmtree(rpmdb)
|
||||
|
||||
#Check that an image can be built from signed packages
|
||||
self.add_command_to_tearDown('bitbake -c clean core-image-minimal')
|
||||
bitbake('-c clean core-image-minimal')
|
||||
bitbake('core-image-minimal')
|
||||
|
||||
|
||||
def test_signing_sstate_archive(self):
|
||||
"""
|
||||
Summary: Test that sstate archives can be signed
|
||||
Expected: Package should be signed with the correct key
|
||||
Product: oe-core
|
||||
Author: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
|
||||
"""
|
||||
|
||||
test_recipe = 'ed'
|
||||
|
||||
# Since we need gpg but we can't use gpg-native for sstate signatures, we
|
||||
# build gpg-native in our original builddir then run the tests in a second one.
|
||||
builddir = os.environ.get('BUILDDIR') + "-testsign"
|
||||
sstatedir = os.path.join(builddir, 'test-sstate')
|
||||
|
||||
nsysroot = self.setup_gpg()
|
||||
|
||||
feature = 'SSTATE_SIG_KEY ?= "testuser"\n'
|
||||
feature += 'SSTATE_SIG_PASSPHRASE ?= "test123"\n'
|
||||
feature += 'SSTATE_VERIFY_SIG ?= "1"\n'
|
||||
feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
|
||||
feature += 'SSTATE_DIR = "%s"\n' % sstatedir
|
||||
# Any mirror might have partial sstate without .sig files, triggering failures
|
||||
feature += 'SSTATE_MIRRORS:forcevariable = ""\n'
|
||||
|
||||
self.write_config(feature)
|
||||
|
||||
with self.create_new_builddir(os.environ['BUILDDIR'], builddir):
|
||||
|
||||
os.environ["PATH"] = nsysroot + ":" + os.environ["PATH"]
|
||||
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe)
|
||||
self.add_command_to_tearDown('rm -rf %s' % sstatedir)
|
||||
self.add_command_to_tearDown('rm -rf %s' % builddir)
|
||||
|
||||
bitbake('-c clean %s' % test_recipe)
|
||||
bitbake('-c populate_lic %s' % test_recipe)
|
||||
|
||||
recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
|
||||
recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
|
||||
|
||||
self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
|
||||
self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
|
||||
|
||||
ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
|
||||
# gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
|
||||
# gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
|
||||
self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
|
||||
|
||||
|
||||
class LockedSignatures(OESelftestTestCase):
    """Test that SIGGEN locked signatures prevent re-execution of locked tasks."""

    def test_locked_signatures(self):
        """
        Summary:     Test locked signature mechanism
        Expected:    Locked signatures will prevent task to run
        Product:     oe-core
        Author:      Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
        """

        import uuid

        test_recipe = 'ed'
        locked_sigs_file = 'locked-sigs.inc'

        bitbake(test_recipe)
        # Generate locked sigs include file
        bitbake('-S lockedsigs %s' % test_recipe)

        feature = 'require %s\n' % locked_sigs_file
        # "warn" makes a signature mismatch emit a warning instead of an error,
        # which is what the regex below matches.
        feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
        self.write_config(feature)

        # Build a locked recipe
        bitbake(test_recipe)

        templayerdir = tempfile.mkdtemp(prefix='signingqa')
        create_temp_layer(templayerdir, 'selftestsigning')
        runCmd('bitbake-layers add-layer %s' % templayerdir)

        # Make a change that should cause the locked task signature to change
        # Use uuid so hash equivalance server isn't triggered
        recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
        recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
        feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4()

        os.mkdir(os.path.join(templayerdir, 'recipes-test'))
        os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
        write_file(recipe_append_path, feature)

        self.add_command_to_tearDown('bitbake-layers remove-layer %s' % templayerdir)
        self.add_command_to_tearDown('rm -f %s' % os.path.join(self.builddir, locked_sigs_file))
        self.add_command_to_tearDown('rm -rf %s' % templayerdir)

        # Build the recipe again
        ret = bitbake(test_recipe)

        # Verify you get the warning and that the real task *isn't* run (i.e. the locked signature has worked)
        patt = r'The %s:do_package sig is computed to be \S+, but the sig is locked to \S+ in SIGGEN_LOCKEDSIGS\S+' % test_recipe
        found_warn = re.search(patt, ret.output)

        self.assertIsNotNone(found_warn, "Didn't find the expected warning message. Output: %s" % ret.output)
|
||||
54
sources/poky/meta/lib/oeqa/selftest/cases/spdx.py
Normal file
54
sources/poky/meta/lib/oeqa/selftest/cases/spdx.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import json
|
||||
import os
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, runCmd
|
||||
|
||||
class SPDXCheck(OESelftestTestCase):
    """Verify that the create-spdx class produces valid SPDX JSON reports."""

    @classmethod
    def setUpClass(cls):
        super(SPDXCheck, cls).setUpClass()
        # The pyspdxtools validator is provided by python3-spdx-tools-native.
        bitbake("python3-spdx-tools-native")
        bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")

    def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
        """Build *target_name* with create-spdx enabled and validate the report.

        high_level_dir: subdirectory under deploy/spdx/<machine>/ (e.g. "packages")
        spdx_file:      expected report file name
        target_name:    recipe to run do_create_spdx for
        """
        config = """
INHERIT += "create-spdx"
"""
        self.write_config(config)

        deploy_dir = get_bb_var("DEPLOY_DIR")
        machine_var = get_bb_var("MACHINE")
        # qemux86-64 creates the directory qemux86_64
        machine_dir = machine_var.replace("-", "_")

        full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file)

        # Remove any stale report so the assertExists below proves that this
        # build produced a fresh one.
        try:
            os.remove(full_file_path)
        except FileNotFoundError:
            pass

        bitbake("%s -c create_spdx" % target_name)

        def check_spdx_json(filename):
            # Basic structural sanity of the JSON report.
            with open(filename) as f:
                report = json.load(f)
                self.assertNotEqual(report, None)
                self.assertNotEqual(report["SPDXID"], None)

            python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3')
            validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools')
            # runCmd raises on a non-zero exit status, so a validation failure
            # fails the test; the return value was previously captured into an
            # unused local and has been dropped.
            runCmd("{} {} -i {}".format(python, validator, filename))

        self.assertExists(full_file_path)
        check_spdx_json(full_file_path)

    def test_spdx_base_files(self):
        self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
|
||||
1008
sources/poky/meta/lib/oeqa/selftest/cases/sstatetests.py
Normal file
1008
sources/poky/meta/lib/oeqa/selftest/cases/sstatetests.py
Normal file
File diff suppressed because it is too large
Load Diff
86
sources/poky/meta/lib/oeqa/selftest/cases/sysroot.py
Normal file
86
sources/poky/meta/lib/oeqa/selftest/cases/sysroot.py
Normal file
@@ -0,0 +1,86 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import uuid
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class SysrootTests(OESelftestTestCase):
    """Tests for recipe sysroot population, cleanup and QA checks."""

    def test_sysroot_cleanup(self):
        """
        Build sysroot test which depends on virtual/sysroot-test for one machine,
        switch machine, switch provider of virtual/sysroot-test and check that the
        sysroot is correctly cleaned up. The files in the two providers overlap
        so can cause errors if the sysroot code doesn't function correctly.
        Yes, sysroot-test should be machine specific really to avoid this, however
        the sysroot cleanup should also work [YOCTO #13702].
        """

        # Unique strings defeat sstate/hash-equivalence reuse between runs.
        uuid1 = uuid.uuid4()
        uuid2 = uuid.uuid4()

        self.write_config("""
PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1"
MACHINE = "qemux86"
TESTSTRING:pn-sysroot-test-arch1 = "%s"
TESTSTRING:pn-sysroot-test-arch2 = "%s"
""" % (uuid1, uuid2))
        bitbake("sysroot-test")
        self.write_config("""
PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2"
MACHINE = "qemux86copy"
TESTSTRING:pn-sysroot-test-arch1 = "%s"
TESTSTRING:pn-sysroot-test-arch2 = "%s"
""" % (uuid1, uuid2))
        bitbake("sysroot-test")

    def test_sysroot_max_shebang(self):
        """
        Summary:   Check max shebang triggers. To confirm [YOCTO #11053] is closed.
        Expected:  Fail when a shebang bigger than the max shebang-size is reached.
        Author:    Paulo Neves <ptsneves@gmail.com>
        """
        expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
        res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
        # assertIn/assertNotEqual give useful diagnostics on failure, unlike
        # the previous assertTrue(... in ...) / assertTrue(... != 0) forms.
        self.assertIn(expected, res.output, msg=res.output)
        self.assertNotEqual(res.status, 0)

    def test_sysroot_la(self):
        """
        Summary:   Check that workdir paths are not contained in .la files.
        Expected:  Fail when a workdir path is found in the file content.
        Author:    Paulo Neves <ptsneves@gmail.com>
        """
        expected = "la-test.la failed sanity test (workdir) in path"

        res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
        self.assertIn(expected, res.output, msg=res.output)
        self.assertIn('[la]', res.output, msg=res.output)
        self.assertNotEqual(res.status, 0)

        res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
        self.assertIn(expected, res.output, msg=res.output)
        self.assertIn('[la]', res.output, msg=res.output)
        self.assertNotEqual(res.status, 0)

    def test_sysroot_pkgconfig(self):
        """
        Summary:   Check that tmpdir paths are not contained in .pc files.
        Expected:  Fail when a tmpdir path is found in the file content.
        Author:    Paulo Neves <ptsneves@gmail.com>
        """
        expected = "test.pc failed sanity test (tmpdir) in path"

        res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
        self.assertIn('[pkgconfig]', res.output, msg=res.output)
        self.assertIn(expected, res.output, msg=res.output)
        self.assertNotEqual(res.status, 0)

        res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
        self.assertIn(expected, res.output, msg=res.output)
        self.assertIn('[pkgconfig]', res.output, msg=res.output)
        self.assertNotEqual(res.status, 0)
|
||||
252
sources/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
Normal file
252
sources/poky/meta/lib/oeqa/selftest/cases/tinfoil.py
Normal file
@@ -0,0 +1,252 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import logging
|
||||
import bb.tinfoil
|
||||
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
|
||||
class TinfoilTests(OESelftestTestCase):
    """ Basic tests for the tinfoil API """

    def test_getvar(self):
        # Config-only parse (prepare(True)) is enough to read global variables.
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(True)
            machine = tinfoil.config_data.getVar('MACHINE')
            if not machine:
                self.fail('Unable to get MACHINE value - returned %s' % machine)

    def test_expand(self):
        # Inline-Python expressions (${@...}) must be evaluated by expand().
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(True)
            expr = '${@os.getpid()}'
            pid = tinfoil.config_data.expand(expr)
            if not pid:
                self.fail('Unable to expand "%s" - returned %s' % (expr, pid))

    def test_getvar_bb_origenv(self):
        # BB_ORIGENV is itself a datastore holding the original environment.
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(True)
            origenv = tinfoil.config_data.getVar('BB_ORIGENV', False)
            if not origenv:
                self.fail('Unable to get BB_ORIGENV value - returned %s' % origenv)
            self.assertEqual(origenv.getVar('HOME', False), os.environ['HOME'])

    def test_parse_recipe(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            testrecipe = 'mdadm'
            best = tinfoil.find_best_provider(testrecipe)
            if not best:
                self.fail('Unable to find recipe providing %s' % testrecipe)
            # best[3] is the recipe file name returned by find_best_provider.
            rd = tinfoil.parse_recipe_file(best[3])
            self.assertEqual(testrecipe, rd.getVar('PN'))

    def test_parse_virtual_recipe(self):
        # Virtual providers (virtual:nativesdk:...) must also parse correctly.
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            testrecipe = 'nativesdk-gcc'
            best = tinfoil.find_best_provider(testrecipe)
            if not best:
                self.fail('Unable to find recipe providing %s' % testrecipe)
            rd = tinfoil.parse_recipe_file(best[3])
            self.assertEqual(testrecipe, rd.getVar('PN'))
            self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))

    def test_parse_recipe_copy_expand(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            testrecipe = 'mdadm'
            best = tinfoil.find_best_provider(testrecipe)
            if not best:
                self.fail('Unable to find recipe providing %s' % testrecipe)
            rd = tinfoil.parse_recipe_file(best[3])
            # Check we can get variable values
            self.assertEqual(testrecipe, rd.getVar('PN'))
            # Check that expanding a value that includes a variable reference works
            self.assertEqual(testrecipe, rd.getVar('BPN'))
            # Now check that changing the referenced variable's value in a copy gives that
            # value when expanding
            localdata = bb.data.createCopy(rd)
            localdata.setVar('PN', 'hello')
            self.assertEqual('hello', localdata.getVar('BPN'))

    # The config_data API to parse_recipe_file is used by:
    # layerindex-web layerindex/update_layer.py
    def test_parse_recipe_custom_data(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            localdata = bb.data.createCopy(tinfoil.config_data)
            localdata.setVar("TESTVAR", "testval")
            testrecipe = 'mdadm'
            best = tinfoil.find_best_provider(testrecipe)
            if not best:
                self.fail('Unable to find recipe providing %s' % testrecipe)
            rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
            self.assertEqual("testval", rd.getVar('TESTVAR'))

    def test_parse_virtual_recipe_custom_data(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            localdata = bb.data.createCopy(tinfoil.config_data)
            localdata.setVar("TESTVAR", "testval")
            testrecipe = 'nativesdk-gcc'
            best = tinfoil.find_best_provider(testrecipe)
            if not best:
                self.fail('Unable to find recipe providing %s' % testrecipe)
            rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
            self.assertEqual("testval", rd.getVar('TESTVAR'))

    def test_list_recipes(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            # Check pkg_pn
            checkpns = ['tar', 'automake', 'coreutils', 'm4-native', 'nativesdk-gcc']
            pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
            for pn in checkpns:
                self.assertIn(pn, pkg_pn)
            # Check pkg_fn
            checkfns = {'nativesdk-gcc': '^virtual:nativesdk:.*', 'coreutils': '.*/coreutils_.*.bb'}
            for fn, pn in tinfoil.cooker.recipecaches[''].pkg_fn.items():
                if pn in checkpns:
                    if pn in checkfns:
                        self.assertTrue(re.match(checkfns[pn], fn), 'Entry for %s: %s did not match %s' % (pn, fn, checkfns[pn]))
                    checkpns.remove(pn)
            if checkpns:
                self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns))

    def test_wait_event(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)

            tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])

            # Need to drain events otherwise events that were masked may still be in the queue
            while tinfoil.wait_event():
                pass

            pattern = 'conf'
            res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
            self.assertTrue(res)

            eventreceived = False
            commandcomplete = False
            start = time.time()
            # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
            while (not (eventreceived == True and commandcomplete == True)
                    and (time.time() - start < 60)):
                # if we received both events (on let's say a good day), we are done
                event = tinfoil.wait_event(1)
                if event:
                    if isinstance(event, bb.command.CommandCompleted):
                        commandcomplete = True
                    elif isinstance(event, bb.event.FilesMatchingFound):
                        self.assertEqual(pattern, event._pattern)
                        self.assertIn('A', event._matches)
                        self.assertIn('B', event._matches)
                        eventreceived = True
                    elif isinstance(event, logging.LogRecord):
                        # Log records are expected noise; ignore them.
                        continue
                    else:
                        self.fail('Unexpected event: %s' % event)

            self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
            self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')

    def test_setvariable_clean(self):
        # First check that setVariable affects the datastore
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)
            tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
            self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')

        # Now check that the setVariable's effects are no longer present
        # (this may legitimately break in future if we stop reinitialising
        # the datastore, in which case we'll have to reconsider use of
        # setVariable entirely)
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)
            self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')

        # Now check that setVar on the main datastore works (uses setVariable internally)
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)
            tinfoil.config_data.setVar('TESTVAR', 'specialvalue')
            value = tinfoil.run_command('getVariable', 'TESTVAR')
            self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')

    def test_datastore_operations(self):
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)
            # Test setVarFlag() / getVarFlag()
            tinfoil.config_data.setVarFlag('TESTVAR', 'flagname', 'flagval')
            value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
            self.assertEqual(value, 'flagval', 'Value set using config_data.setVarFlag() is not reflected in config_data.getVarFlag()')
            # Test delVarFlag()
            tinfoil.config_data.setVarFlag('TESTVAR', 'otherflag', 'othervalue')
            tinfoil.config_data.delVarFlag('TESTVAR', 'flagname')
            value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
            self.assertEqual(value, None, 'Varflag deleted using config_data.delVarFlag() is not reflected in config_data.getVarFlag()')
            value = tinfoil.config_data.getVarFlag('TESTVAR', 'otherflag')
            self.assertEqual(value, 'othervalue', 'Varflag deleted using config_data.delVarFlag() caused unrelated flag to be removed')
            # Test delVar()
            tinfoil.config_data.setVar('TESTVAR', 'varvalue')
            value = tinfoil.config_data.getVar('TESTVAR')
            self.assertEqual(value, 'varvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
            tinfoil.config_data.delVar('TESTVAR')
            value = tinfoil.config_data.getVar('TESTVAR')
            self.assertEqual(value, None, 'Variable deleted using config_data.delVar() appears to still have a value')
            # Test renameVar()
            tinfoil.config_data.setVar('TESTVAROLD', 'origvalue')
            tinfoil.config_data.renameVar('TESTVAROLD', 'TESTVARNEW')
            value = tinfoil.config_data.getVar('TESTVAROLD')
            self.assertEqual(value, None, 'Variable renamed using config_data.renameVar() still seems to exist')
            value = tinfoil.config_data.getVar('TESTVARNEW')
            self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
            # Test overrides
            tinfoil.config_data.setVar('TESTVAR', 'original')
            tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
            tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
            tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
            value = tinfoil.config_data.getVar('TESTVAR')
            self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')

    def test_variable_history(self):
        # Basic test to ensure that variable history works when tracking=True
        with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            # Note that _tracking for any datastore we get will be
            # false here, that's currently expected - so we can't check
            # for that
            history = tinfoil.config_data.varhistory.variable('DL_DIR')
            for entry in history:
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['op'] in ['set', 'set?']:
                        break
            else:
                self.fail('Did not find history entry setting DL_DIR in bitbake.conf. History: %s' % history)
            # Check it works for recipes as well
            testrecipe = 'zlib'
            rd = tinfoil.parse_recipe(testrecipe)
            history = rd.varhistory.variable('LICENSE')
            bbfound = -1
            recipefound = -1
            for i, entry in enumerate(history):
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['detail'] == 'INVALID' and entry['op'] in ['set', 'set?']:
                        bbfound = i
                elif entry['file'].endswith('.bb'):
                    if entry['op'] == 'set':
                        recipefound = i
            if bbfound == -1:
                self.fail('Did not find history entry setting LICENSE in bitbake.conf parsing %s recipe. History: %s' % (testrecipe, history))
            if recipefound == -1:
                self.fail('Did not find history entry setting LICENSE in %s recipe. History: %s' % (testrecipe, history))
            if bbfound > recipefound:
                self.fail('History entry setting LICENSE in %s recipe and in bitbake.conf in wrong order. History: %s' % (testrecipe, history))
|
||||
57
sources/poky/meta/lib/oeqa/selftest/cases/usergrouptests.py
Normal file
57
sources/poky/meta/lib/oeqa/selftest/cases/usergrouptests.py
Normal file
@@ -0,0 +1,57 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
|
||||
|
||||
class UserGroupTests(OESelftestTestCase):
    """Tests for user/group creation ordering and useradd-staticids handling."""

    def test_group_from_dep_package(self):
        self.logger.info("Building creategroup2")
        bitbake(' creategroup2 creategroup1')
        bitbake(' creategroup2 creategroup1 -c clean')
        self.logger.info("Packaging creategroup2")
        self.assertTrue(bitbake(' creategroup2 -c package'))

    def test_add_task_between_p_sysroot_and_package(self):
        # Test for YOCTO #14961
        self.assertTrue(bitbake('useraddbadtask -C fetch'))

    def test_postinst_order(self):
        self.logger.info("Building dcreategroup")
        self.assertTrue(bitbake(' dcreategroup'))

    def test_static_useradd_from_dynamic(self):
        metaselftestpath = get_test_layer()
        self.logger.info("Building core-image-minimal to generate passwd/group file")
        bitbake(' core-image-minimal')
        self.logger.info("Setting up useradd-staticids")
        repropassdir = os.path.join(metaselftestpath, "conf/include")
        os.makedirs(repropassdir)
        etcdir = os.path.join(os.path.join(os.path.join(get_bb_var("TMPDIR"), "work"), \
            os.path.join(get_bb_var("MACHINE").replace("-","_")+"-poky-linux", "core-image-minimal/1.0/rootfs/etc")))
        # BUGFIX: the copies were written as "reproducable-*" while the
        # USERADD_*_TABLES entries below reference "reproducible-*"; the
        # names must match or the static ID tables are never found.
        shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducible-passwd"))
        shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducible-group"))
        # Copy the original local.conf
        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))

        # Write all settings in one call: consecutive write_config() calls
        # replace the selftest config, so only the last line would survive.
        self.write_config("USERADDEXTENSION = \"useradd-staticids\"\n"
                          "USERADD_ERROR_DYNAMIC ??= \"error\"\n"
                          "USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"\n"
                          "USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
        self.logger.info("Rebuild with staticids")
        bitbake(' core-image-minimal')
        shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
        self.logger.info("Rebuild without staticids")
        bitbake(' core-image-minimal')
        self.write_config("USERADDEXTENSION = \"useradd-staticids\"\n"
                          "USERADD_ERROR_DYNAMIC ??= \"error\"\n"
                          "USERADD_UID_TABLES += \"files/static-passwd\"\n"
                          "USERADD_GID_TABLES += \"files/static-group\"")
        self.logger.info("Rebuild with other staticids")
        self.assertTrue(bitbake(' core-image-minimal'))
|
||||
1692
sources/poky/meta/lib/oeqa/selftest/cases/wic.py
Normal file
1692
sources/poky/meta/lib/oeqa/selftest/cases/wic.py
Normal file
File diff suppressed because it is too large
Load Diff
16
sources/poky/meta/lib/oeqa/selftest/cases/wrapper.py
Normal file
16
sources/poky/meta/lib/oeqa/selftest/cases/wrapper.py
Normal file
@@ -0,0 +1,16 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
from oeqa.selftest.case import OESelftestTestCase
|
||||
from oeqa.utils.commands import bitbake
|
||||
|
||||
class WrapperTests(OESelftestTestCase):
    def test_shebang_wrapper(self):
        """
        Summary:   Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
        Expected:  Exit status to be 0.
        Author:    Paulo Neves <ptsneves@gmail.com>
        """
        # ignore_status=False makes bitbake() raise on a non-zero exit, which
        # is the whole assertion; the previously captured result was unused.
        bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
||||
@@ -0,0 +1,39 @@
|
||||
#
|
||||
# Copyright OpenEmbedded Contributors
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
#
|
||||
|
||||
import os
import sys
import subprocess
import shutil

from oeqa.selftest.case import OESelftestTestCase

# yocto_testresults_query lives in scripts/lib, which is not on sys.path by
# default — the path must be extended BEFORE importing it, otherwise the
# import fails with ModuleNotFoundError.
basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
lib_path = basepath + '/scripts/lib'
sys.path = sys.path + [lib_path]

from yocto_testresults_query import get_sha1, create_workdir
|
||||
|
||||
|
||||
class TestResultsQueryTests(OESelftestTestCase):
    """Tests for the yocto_testresults_query helper module."""

    def test_get_sha1(self):
        """get_sha1() must resolve release tags to their commit hashes."""
        test_data_get_sha1 = [
            {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
            {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
        ]
        for data in test_data_get_sha1:
            test_name = data["input"]
            with self.subTest(f"Test SHA1 from {test_name}"):
                self.assertEqual(
                    get_sha1(basepath, data["input"]), data["expected"])

    def test_create_workdir(self):
        """create_workdir() must return a clone of the yocto-testresults repo."""
        workdir = create_workdir()
        try:
            url = subprocess.check_output(
                ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
        except (subprocess.CalledProcessError, OSError):
            # BUGFIX: was a bare `except:`, which also swallows SystemExit and
            # KeyboardInterrupt; only failures of the git invocation itself
            # (non-zero exit, or git missing) should be treated as test failure.
            shutil.rmtree(workdir, ignore_errors=True)
            self.fail(f"Can not execute git commands in {workdir}")
        shutil.rmtree(workdir)
        self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")
|
||||
Reference in New Issue
Block a user