Complete Yocto mirror with license table for TQMa6UL (Y2038 compliance)

- 264 license table entries with exact download URLs (224/264 resolved)
- Complete sources/ directory with all BitBake recipes
- Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl)
- Full traceability for the software release approval request (Softwarefreigabeantrag)
- GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4
- License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
Author: Siggi (OpenClaw Agent)
Date:   2026-03-01 20:58:18 +00:00
Commit: 16accb6b24
15086 changed files with 1292356 additions and 0 deletions


@@ -0,0 +1,477 @@
# Recipe creation tool - append plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import glob
import fnmatch
import re
import subprocess
import logging
import stat
import shutil
import scriptutils
import errno
from collections import defaultdict
import difflib
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
# FIXME guessing when we don't have pkgdata?
# FIXME mode to create patch rather than directly substitute
class InvalidTargetFileError(Exception):
pass
def find_target_file(targetpath, d, pkglist=None):
"""Find the recipe installing the specified target path, optionally limited to a select list of packages"""
import json
pkgdata_dir = d.getVar('PKGDATA_DIR')
# The mix between /etc and ${sysconfdir} here may look odd, but it is just
# being consistent with usage elsewhere
invalidtargets = {'${sysconfdir}/version': '${sysconfdir}/version is written out at image creation time',
'/etc/timestamp': '/etc/timestamp is written out at image creation time',
'/dev/*': '/dev is handled by udev (or equivalent) and the kernel (devtmpfs)',
'/etc/passwd': '/etc/passwd should be managed through the useradd and extrausers classes',
'/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
'/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
'/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
'${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
for pthspec, message in invalidtargets.items():
if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
raise InvalidTargetFileError(d.expand(message))
targetpath_re = re.compile(r'\s+(\$D)?%s(\s|$)' % targetpath)
recipes = defaultdict(list)
for root, dirs, files in os.walk(os.path.join(pkgdata_dir, 'runtime')):
if pkglist:
filelist = pkglist
else:
filelist = files
for fn in filelist:
pkgdatafile = os.path.join(root, fn)
if pkglist and not os.path.exists(pkgdatafile):
continue
with open(pkgdatafile, 'r') as f:
pn = ''
# This does assume that PN comes before other values, but that's a fairly safe assumption
for line in f:
if line.startswith('PN:'):
pn = line.split(': ', 1)[1].strip()
elif line.startswith('FILES_INFO'):
val = line.split(': ', 1)[1].strip()
dictval = json.loads(val)
for fullpth in dictval.keys():
if fnmatch.fnmatchcase(fullpth, targetpath):
recipes[targetpath].append(pn)
elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
if 'update-alternatives --install %s ' % targetpath in scriptval:
recipes[targetpath].append('?%s' % pn)
elif targetpath_re.search(scriptval):
recipes[targetpath].append('!%s' % pn)
return recipes
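# Illustrative note: the mapping returned above marks recipe names with a
# prefix - '?pn' when the file is installed via update-alternatives and
# '!pn' when a pre/postinstall script writes it - e.g. (hypothetical):
#   {'/etc/motd': ['base-files', '?busybox']}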
def _parse_recipe(pn, tinfoil):
try:
rd = tinfoil.parse_recipe(pn)
except bb.providers.NoProvider as e:
logger.error(str(e))
return None
return rd
def determine_file_source(targetpath, rd):
"""Assuming we know a file came from a specific recipe, figure out exactly where it came from"""
import oe.recipeutils
# See if it's in do_install for the recipe
workdir = rd.getVar('WORKDIR')
src_uri = rd.getVar('SRC_URI')
srcfile = ''
modpatches = []
elements = check_do_install(rd, targetpath)
if elements:
logger.debug('do_install line:\n%s' % ' '.join(elements))
srcpath = get_source_path(elements)
logger.debug('source path: %s' % srcpath)
if not srcpath.startswith('/'):
# Handle non-absolute path
srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
if srcpath.startswith(workdir):
# OK, now we have the source file name, look for it in SRC_URI
workdirfile = os.path.relpath(srcpath, workdir)
# FIXME this is where we ought to have some code in the fetcher, because this is naive
for item in src_uri.split():
localpath = bb.fetch2.localpath(item, rd)
# Source path specified in do_install might be a glob
if fnmatch.fnmatch(os.path.basename(localpath), workdirfile):
srcfile = 'file://%s' % localpath
elif '/' in workdirfile:
if item == 'file://%s' % workdirfile:
srcfile = 'file://%s' % localpath
# Check patches
srcpatches = []
patchedfiles = oe.recipeutils.get_recipe_patched_files(rd)
for patch, filelist in patchedfiles.items():
for fileitem in filelist:
if fileitem[0] == srcpath:
srcpatches.append((patch, fileitem[1]))
if srcpatches:
addpatch = None
for patch in srcpatches:
if patch[1] == 'A':
addpatch = patch[0]
else:
modpatches.append(patch[0])
if addpatch:
srcfile = 'patch://%s' % addpatch
return (srcfile, elements, modpatches)
def get_source_path(cmdelements):
"""Find the source path specified within a command"""
command = cmdelements[0]
if command in ['install', 'cp']:
helptext = subprocess.check_output('LC_ALL=C %s --help' % command, shell=True).decode('utf-8')
argopts = ''
argopt_line_re = re.compile('^-([a-zA-Z0-9]), --[a-z-]+=')
for line in helptext.splitlines():
line = line.lstrip()
res = argopt_line_re.search(line)
if res:
argopts += res.group(1)
if not argopts:
# Fallback
if command == 'install':
argopts = 'gmoSt'
elif command == 'cp':
argopts = 't'
else:
raise Exception('No fallback arguments for command %s' % command)
skipnext = False
for elem in cmdelements[1:-1]:
if elem.startswith('-'):
if len(elem) > 1 and elem[1] in argopts:
skipnext = True
continue
if skipnext:
skipnext = False
continue
return elem
else:
raise Exception('get_source_path: no handling for command "%s"' % command)
def get_func_deps(func, d):
"""Find the function dependencies of a shell function"""
deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
deps |= set((d.getVarFlag(func, "vardeps") or "").split())
funcdeps = []
for dep in deps:
if d.getVarFlag(dep, 'func'):
funcdeps.append(dep)
return funcdeps
def check_do_install(rd, targetpath):
"""Look at do_install for a command that installs/copies the specified target path"""
instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
do_install = rd.getVar('do_install')
# Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose)
deps = get_func_deps('do_install', rd)
for dep in deps:
do_install = do_install.replace(dep, rd.getVar(dep))
# Look backwards through do_install as we want to catch where a later line (perhaps
# from a bbappend) is writing over the top
for line in reversed(do_install.splitlines()):
line = line.strip()
if (line.startswith('install ') and ' -m' in line) or line.startswith('cp '):
elements = line.split()
destpath = os.path.abspath(elements[-1])
if destpath == instpath:
return elements
elif destpath.rstrip('/') == os.path.dirname(instpath):
# FIXME this doesn't take recursive copy into account; unsure if it's practical to do so
srcpath = get_source_path(elements)
if fnmatch.fnmatchcase(os.path.basename(instpath), os.path.basename(srcpath)):
return elements
return None
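# As a sketch of what check_do_install() matches, a do_install line such as
# (hypothetical recipe content):
#   install -m 0644 ${WORKDIR}/motd ${D}${sysconfdir}/motd
# would be returned as its whitespace-split elements.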
def appendfile(args):
import oe.recipeutils
stdout = ''
try:
(stdout, _) = bb.process.run('LANG=C file -b %s' % args.newfile, shell=True)
if 'cannot open' in stdout:
raise bb.process.ExecutionError(stdout)
except bb.process.ExecutionError as err:
logger.debug('file command returned error: %s' % err)
stdout = ''
if stdout:
logger.debug('file command output: %s' % stdout.rstrip())
if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')
if args.recipe:
recipes = {args.targetpath: [args.recipe],}
else:
try:
recipes = find_target_file(args.targetpath, tinfoil.config_data)
except InvalidTargetFileError as e:
logger.error('%s cannot be handled by this tool: %s' % (args.targetpath, e))
return 1
if not recipes:
logger.error('Unable to find any package producing path %s - this may be because the recipe packaging it has not been built yet' % args.targetpath)
return 1
alternative_pns = []
postinst_pns = []
selectpn = None
for targetpath, pnlist in recipes.items():
for pn in pnlist:
if pn.startswith('?'):
alternative_pns.append(pn[1:])
elif pn.startswith('!'):
postinst_pns.append(pn[1:])
elif selectpn:
# hit here with multilibs
continue
else:
selectpn = pn
if not selectpn and len(alternative_pns) == 1:
selectpn = alternative_pns[0]
logger.error('File %s is an alternative possibly provided by recipe %s but seemingly no other, selecting it by default - you should double check other recipes' % (args.targetpath, selectpn))
if selectpn:
logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
if postinst_pns:
logger.warning('%s will be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns)))
rd = _parse_recipe(selectpn, tinfoil)
if not rd:
# Error message already shown
return 1
sourcefile, instelements, modpatches = determine_file_source(args.targetpath, rd)
sourcepath = None
if sourcefile:
sourcetype, sourcepath = sourcefile.split('://', 1)
logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
if sourcetype == 'patch':
logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
sourcepath = None
else:
logger.debug('Unable to determine source file, proceeding anyway')
if modpatches:
logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))
if instelements and sourcepath:
install = None
else:
# Auto-determine permissions
# Check destination
binpaths = '${bindir}:${sbindir}:${base_bindir}:${base_sbindir}:${libexecdir}:${sysconfdir}/init.d'
perms = '0644'
if os.path.abspath(os.path.dirname(args.targetpath)) in rd.expand(binpaths).split(':'):
# File is going into a directory normally reserved for executables, so it should be executable
perms = '0755'
else:
# Check source
st = os.stat(args.newfile)
if st.st_mode & stat.S_IXUSR:
perms = '0755'
install = {args.newfile: (args.targetpath, perms)}
if sourcepath:
sourcepath = os.path.basename(sourcepath)
oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
tinfoil.modified_files()
return 0
else:
if alternative_pns:
logger.error('File %s is an alternative possibly provided by the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(alternative_pns)))
elif postinst_pns:
logger.error('File %s may be written out in a pre/postinstall script of the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(postinst_pns)))
return 3
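# A typical invocation of this command (hypothetical layer and file names):
#   recipetool appendfile meta-mylayer /etc/motd ./motd
# which writes a bbappend into meta-mylayer that installs ./motd over /etc/motd.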
def appendsrc(args, files, rd, extralines=None):
import oe.recipeutils
srcdir = rd.getVar('S')
workdir = rd.getVar('WORKDIR')
import bb.fetch
simplified = {}
src_uri = rd.getVar('SRC_URI').split()
for uri in src_uri:
if uri.endswith(';'):
uri = uri[:-1]
simple_uri = bb.fetch.URI(uri)
simple_uri.params = {}
simplified[str(simple_uri)] = uri
copyfiles = {}
extralines = extralines or []
params = []
for newfile, srcfile in files.items():
src_destdir = os.path.dirname(srcfile)
if not args.use_workdir:
if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
srcdir = os.path.join(workdir, 'git')
if not bb.data.inherits_class('kernel-yocto', rd):
logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
src_destdir = os.path.normpath(src_destdir)
if src_destdir and src_destdir != '.':
params.append({'subdir': src_destdir})
else:
params.append({})
copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
dry_run_output = None
dry_run_outdir = None
if args.dry_run:
import tempfile
dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
dry_run_outdir = dry_run_output.name
appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
if not appendfile:
return
if args.dry_run:
output = ''
appendfilename = os.path.basename(appendfile)
newappendfile = appendfile
if appendfile and os.path.exists(appendfile):
with open(appendfile, 'r') as f:
oldlines = f.readlines()
else:
appendfile = '/dev/null'
oldlines = []
with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
newlines = f.readlines()
diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
difflines = list(diff)
if difflines:
output += ''.join(difflines)
if output:
logger.info('Diff of changed files:\n%s' % output)
else:
logger.info('No changed files')
tinfoil.modified_files()
def appendsrcfiles(parser, args):
recipedata = _parse_recipe(args.recipe, tinfoil)
if not recipedata:
parser.error('RECIPE must be a valid recipe name')
files = dict((f, os.path.join(args.destdir, os.path.basename(f)))
for f in args.files)
return appendsrc(args, files, recipedata)
def appendsrcfile(parser, args):
recipedata = _parse_recipe(args.recipe, tinfoil)
if not recipedata:
parser.error('RECIPE must be a valid recipe name')
if not args.destfile:
args.destfile = os.path.basename(args.file)
elif args.destfile.endswith('/'):
args.destfile = os.path.join(args.destfile, os.path.basename(args.file))
return appendsrc(args, {args.file: args.destfile}, recipedata)
def layer(layerpath):
if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')):
raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
return layerpath
def existing_path(filepath):
if not os.path.exists(filepath):
raise argparse.ArgumentTypeError('{0!r} must be an existing path'.format(filepath))
return filepath
def existing_file(filepath):
filepath = existing_path(filepath)
if os.path.isdir(filepath):
raise argparse.ArgumentTypeError('{0!r} must be a file, not a directory'.format(filepath))
return filepath
def destination_path(destpath):
if os.path.isabs(destpath):
raise argparse.ArgumentTypeError('{0!r} must be a relative path, not absolute'.format(destpath))
return destpath
def target_path(targetpath):
if not os.path.isabs(targetpath):
raise argparse.ArgumentTypeError('{0!r} must be an absolute path, not relative'.format(targetpath))
return targetpath
def register_commands(subparsers):
common = argparse.ArgumentParser(add_help=False)
common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
common.add_argument('destlayer', metavar='DESTLAYER', help='Base directory of the destination layer to write the bbappend to', type=layer)
parser_appendfile = subparsers.add_parser('appendfile',
parents=[common],
help='Create/update a bbappend to replace a target file',
description='Creates a bbappend (or updates an existing one) to replace the specified file that appears in the target system, determining the recipe that packages the file and the required path and name for the bbappend automatically. Note that the ability to determine the recipe packaging a particular file depends upon the recipe\'s do_packagedata task having already run prior to running this command (which it will have when the recipe has been built successfully, which in turn will have happened if one or more of the recipe\'s packages is included in an image that has been built successfully).')
parser_appendfile.add_argument('targetpath', help='Path to the file to be replaced (as it would appear within the target image, e.g. /etc/motd)', type=target_path)
parser_appendfile.add_argument('newfile', help='Custom file to replace the target file with', type=existing_file)
parser_appendfile.add_argument('-r', '--recipe', help='Override recipe to apply to (default is to find which recipe already packages the file)')
parser_appendfile.set_defaults(func=appendfile, parserecipes=True)
common_src = argparse.ArgumentParser(add_help=False, parents=[common])
common_src.add_argument('-W', '--workdir', help='Unpack file into WORKDIR rather than S', dest='use_workdir', action='store_true')
common_src.add_argument('recipe', metavar='RECIPE', help='Override recipe to apply to')
parser = subparsers.add_parser('appendsrcfiles',
parents=[common_src],
help='Create/update a bbappend to add or replace source files',
description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
parser = subparsers.add_parser('appendsrcfile',
parents=[common_src],
help='Create/update a bbappend to add or replace a source file',
description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
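# Example invocations of the source-append commands (hypothetical layer,
# recipe and file names):
#   recipetool appendsrcfile meta-mylayer busybox ./0001-local.patch
#   recipetool appendsrcfiles -D files meta-mylayer busybox ./a.cfg ./b.cfg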

File diff suppressed because it is too large


@@ -0,0 +1,875 @@
# Recipe creation tool - create command build system handlers
#
# Copyright (C) 2014-2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import os
import re
import logging
from recipetool.create import RecipeHandler, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
plugins = None
def plugin_init(pluginlist):
# Take a reference to the list so we can use it later
global plugins
plugins = pluginlist
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
class CmakeRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
classes.append('cmake')
values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues)
classes.extend(values.pop('inherit', '').split())
for var, value in values.items():
lines_before.append('%s = "%s"' % (var, value))
lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
lines_after.append('EXTRA_OECMAKE = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
@staticmethod
def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None):
# Find all plugins that want to register handlers
logger.debug('Loading cmake handlers')
handlers = []
for plugin in plugins:
if hasattr(plugin, 'register_cmake_handlers'):
plugin.register_cmake_handlers(handlers)
values = {}
inherits = []
if cmakelistsfile:
srcfiles = [cmakelistsfile]
else:
srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt'])
# Note that some of these are non-standard, but probably better to
# be able to map them anyway if we see them
cmake_pkgmap = {'alsa': 'alsa-lib',
'aspell': 'aspell',
'atk': 'atk',
'bison': 'bison-native',
'boost': 'boost',
'bzip2': 'bzip2',
'cairo': 'cairo',
'cups': 'cups',
'curl': 'curl',
'curses': 'ncurses',
'cvs': 'cvs',
'drm': 'libdrm',
'dbus': 'dbus',
'dbusglib': 'dbus-glib',
'egl': 'virtual/egl',
'expat': 'expat',
'flex': 'flex-native',
'fontconfig': 'fontconfig',
'freetype': 'freetype',
'gettext': '',
'git': '',
'gio': 'glib-2.0',
'giounix': 'glib-2.0',
'glew': 'glew',
'glib': 'glib-2.0',
'glib2': 'glib-2.0',
'glu': 'libglu',
'glut': 'freeglut',
'gobject': 'glib-2.0',
'gperf': 'gperf-native',
'gnutls': 'gnutls',
'gtk2': 'gtk+',
'gtk3': 'gtk+3',
'gtk': 'gtk+3',
'harfbuzz': 'harfbuzz',
'icu': 'icu',
'intl': 'virtual/libintl',
'jpeg': 'jpeg',
'libarchive': 'libarchive',
'libiconv': 'virtual/libiconv',
'liblzma': 'xz',
'libxml2': 'libxml2',
'libxslt': 'libxslt',
'opengl': 'virtual/libgl',
'openmp': '',
'openssl': 'openssl',
'pango': 'pango',
'perl': '',
'perllibs': '',
'pkgconfig': '',
'png': 'libpng',
'pthread': '',
'pythoninterp': '',
'pythonlibs': '',
'ruby': 'ruby-native',
'sdl': 'libsdl',
'sdl2': 'libsdl2',
'subversion': 'subversion-native',
'swig': 'swig-native',
'tcl': 'tcl-native',
'threads': '',
'tiff': 'tiff',
'wget': 'wget',
'x11': 'libx11',
'xcb': 'libxcb',
'xext': 'libxext',
'xfixes': 'libxfixes',
'zlib': 'zlib',
}
pcdeps = []
libdeps = []
deps = []
unmappedpkgs = []
proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
def find_cmake_package(pkg):
RecipeHandler.load_devel_filemap(tinfoil.config_data)
for fn, pn in RecipeHandler.recipecmakefilemap.items():
splitname = fn.split('/')
if len(splitname) > 1:
if splitname[0].lower().startswith(pkg.lower()):
if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg:
return pn
return None
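# e.g. find_cmake_package('Foo') would match pkgdata entries named
# foo/foo-config.cmake, foo/FooConfig.cmake or foo/FindFoo.cmake
# (hypothetical package name).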
def interpret_value(value):
return value.strip('"')
def parse_cmake_file(fn, paths=None):
searchpaths = (paths or []) + [os.path.dirname(fn)]
logger.debug('Parsing file %s' % fn)
with open(fn, 'r', errors='surrogateescape') as f:
for line in f:
line = line.strip()
for handler in handlers:
if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
continue
res = include_re.match(line)
if res:
includefn = bb.utils.which(':'.join(searchpaths), res.group(1))
if includefn:
parse_cmake_file(includefn, searchpaths)
else:
logger.debug('Unable to recurse into include file %s' % res.group(1))
continue
res = subdir_re.match(line)
if res:
subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt')
if os.path.exists(subdirfn):
parse_cmake_file(subdirfn, searchpaths)
else:
logger.debug('Unable to recurse into subdirectory file %s' % subdirfn)
continue
res = proj_re.match(line)
if res:
extravalues['PN'] = interpret_value(res.group(1).split()[0])
continue
res = pkgcm_re.match(line)
if res:
res = dep_re.findall(res.group(2))
if res:
pcdeps.extend([interpret_value(x[0]) for x in res])
inherits.append('pkgconfig')
continue
res = pkgsm_re.match(line)
if res:
res = dep_re.findall(res.group(2))
if res:
# Note: appending a tuple here!
item = tuple((interpret_value(x[0]) for x in res))
if len(item) == 1:
item = item[0]
pcdeps.append(item)
inherits.append('pkgconfig')
continue
res = findpackage_re.match(line)
if res:
origpkg = res.group(1)
pkg = interpret_value(origpkg)
found = False
for handler in handlers:
if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values):
logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__))
found = True
break
if found:
continue
elif pkg == 'Gettext':
inherits.append('gettext')
elif pkg == 'Perl':
inherits.append('perlnative')
elif pkg == 'PkgConfig':
inherits.append('pkgconfig')
elif pkg == 'PythonInterp':
inherits.append('python3native')
elif pkg == 'PythonLibs':
inherits.append('python3-dir')
else:
# Try to map via looking at installed CMake packages in pkgdata
dep = find_cmake_package(pkg)
if dep:
logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep))
deps.append(dep)
else:
dep = cmake_pkgmap.get(pkg.lower(), None)
if dep:
logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep))
deps.append(dep)
elif dep is None:
unmappedpkgs.append(origpkg)
continue
res = checklib_re.match(line)
if res:
lib = interpret_value(res.group(1))
if not lib.startswith('$'):
libdeps.append(lib)
res = findlibrary_re.match(line)
if res:
libs = res.group(2).split()
for lib in libs:
if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')):
break
lib = interpret_value(lib)
if not lib.startswith('$'):
libdeps.append(lib)
if line.lower().startswith('useswig'):
deps.append('swig-native')
continue
parse_cmake_file(srcfiles[0])
if unmappedpkgs:
outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs))))
RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
for handler in handlers:
handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
if inherits:
values['inherit'] = ' '.join(list(set(inherits)))
return values
class CmakeExtensionHandler(object):
'''Base class for CMake extension handlers'''
def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values):
'''
Handle a line parsed out of an CMake file.
Return True if you've completely handled the passed in line, otherwise return False.
'''
return False
def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
'''
Handle a find_package package parsed out of a CMake file.
Return True if you've completely handled the passed in package, otherwise return False.
'''
return False
def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
'''
Apply any desired post-processing on the output
'''
return
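# A minimal sketch of a concrete handler (hypothetical plugin code, not part
# of recipetool itself): a plugin subclasses CmakeExtensionHandler and
# registers an instance via its register_cmake_handlers() hook.
class ExampleQtCmakeHandler(CmakeExtensionHandler):
    def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
        if pkg.startswith('Qt5'):
            deps.append('qtbase')  # hypothetical recipe mapping
            return True
        return False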
class SconsRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
classes.append('scons')
lines_after.append('# Specify any options you want to pass to scons using EXTRA_OESCONS:')
lines_after.append('EXTRA_OESCONS = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
class QmakeRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
if RecipeHandler.checkfiles(srctree, ['*.pro']):
classes.append('qmake2')
handled.append('buildsystem')
return True
return False
class AutotoolsRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
autoconf = False
if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
autoconf = True
values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues)
classes.extend(values.pop('inherit', '').split())
for var, value in values.items():
lines_before.append('%s = "%s"' % (var, value))
else:
conffile = RecipeHandler.checkfiles(srctree, ['configure'])
if conffile:
# Check if this is just a pre-generated autoconf configure script
with open(conffile[0], 'r', errors='surrogateescape') as f:
for i in range(1, 10):
if 'Generated by GNU Autoconf' in f.readline():
autoconf = True
break
if autoconf and not ('PV' in extravalues and 'PN' in extravalues):
# Last resort
conffile = RecipeHandler.checkfiles(srctree, ['configure'])
if conffile:
with open(conffile[0], 'r', errors='surrogateescape') as f:
for line in f:
line = line.strip()
if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='):
pv = line.split('=')[1].strip('"\'')
if pv and not 'PV' in extravalues and validate_pv(pv):
extravalues['PV'] = pv
elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='):
pn = line.split('=')[1].strip('"\'')
if pn and not 'PN' in extravalues:
extravalues['PN'] = pn
if autoconf:
lines_before.append('')
lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
lines_before.append('# inherit line')
classes.append('autotools')
lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
lines_after.append('EXTRA_OECONF = ""')
lines_after.append('')
handled.append('buildsystem')
return True
return False
@staticmethod
def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None):
import shlex
# Find all plugins that want to register handlers
logger.debug('Loading autotools handlers')
handlers = []
for plugin in plugins:
if hasattr(plugin, 'register_autotools_handlers'):
plugin.register_autotools_handlers(handlers)
values = {}
inherits = []
# Hardcoded map, we also use a dynamic one based on what's in the sysroot
progmap = {'flex': 'flex-native',
'bison': 'bison-native',
'm4': 'm4-native',
'tar': 'tar-native',
'ar': 'binutils-native',
'ranlib': 'binutils-native',
'ld': 'binutils-native',
'strip': 'binutils-native',
'libtool': '',
'autoconf': '',
'autoheader': '',
'automake': '',
'uname': '',
'rm': '',
'cp': '',
'mv': '',
'find': '',
'awk': '',
'sed': '',
}
progclassmap = {'gconftool-2': 'gconf',
'pkg-config': 'pkgconfig',
'python': 'python3native',
'python3': 'python3native',
'perl': 'perlnative',
'makeinfo': 'texinfo',
}
pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
version_re = re.compile(r'([0-9.]+)')
defines = {}
def subst_defines(value):
newvalue = value
for define, defval in defines.items():
newvalue = newvalue.replace(define, defval)
if newvalue != value:
return subst_defines(newvalue)
return value
def process_value(value):
value = value.replace('[', '').replace(']', '')
if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('):
cmd = subst_defines(value[value.index('(')+1:-1])
try:
if '|' in cmd:
cmd = 'set -o pipefail; ' + cmd
stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True)
ret = stdout.rstrip()
except bb.process.ExecutionError as e:
ret = ''
elif value.startswith('m4_'):
return None
else:
    ret = subst_defines(value)
if ret:
ret = ret.strip('"\'')
return ret
# Since a configure.ac file is essentially a program, this is only ever going to be
# a hack unfortunately; but it ought to be enough of an approximation
if acfile:
srcfiles = [acfile]
else:
srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in'])
pcdeps = []
libdeps = []
deps = []
unmapped = []
RecipeHandler.load_binmap(tinfoil.config_data)
def process_macro(keyword, value):
for handler in handlers:
if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
return
logger.debug('Found keyword %s with value "%s"' % (keyword, value))
if keyword == 'PKG_CHECK_MODULES':
res = pkg_re.search(value)
if res:
res = dep_re.findall(res.group(1))
if res:
pcdeps.extend([x[0] for x in res])
inherits.append('pkgconfig')
elif keyword == 'PKG_CHECK_EXISTS':
res = pkgce_re.search(value)
if res:
res = dep_re.findall(res.group(1))
if res:
pcdeps.extend([x[0] for x in res])
inherits.append('pkgconfig')
elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'):
inherits.append('gettext')
elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'):
deps.append('intltool-native')
elif keyword == 'AM_PATH_GLIB_2_0':
deps.append('glib-2.0')
elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'):
res = progs_re.search(value)
if res:
for prog in shlex.split(res.group(1)):
prog = prog.split()[0]
for handler in handlers:
if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values):
return
progclass = progclassmap.get(prog, None)
if progclass:
inherits.append(progclass)
else:
progdep = RecipeHandler.recipebinmap.get(prog, None)
if not progdep:
progdep = progmap.get(prog, None)
if progdep:
deps.append(progdep)
elif progdep is None:
if not prog.startswith('$'):
unmapped.append(prog)
elif keyword == 'AC_CHECK_LIB':
res = lib_re.search(value)
if res:
lib = res.group(1)
if not lib.startswith('$'):
libdeps.append(lib)
elif keyword == 'AX_CHECK_LIBRARY':
res = libx_re.search(value)
if res:
lib = res.group(2)
if not lib.startswith('$'):
header = res.group(1)
libdeps.append((lib, header))
elif keyword == 'AC_PATH_X':
deps.append('libx11')
elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'):
deps.append('boost')
elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'):
deps.append('flex-native')
elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'):
deps.append('bison-native')
elif keyword == 'AX_CHECK_ZLIB':
deps.append('zlib')
elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
deps.append('openssl')
elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
deps.append('curl')
elif keyword == 'AX_LIB_BEECRYPT':
deps.append('beecrypt')
elif keyword == 'AX_LIB_EXPAT':
deps.append('expat')
elif keyword == 'AX_LIB_GCRYPT':
deps.append('libgcrypt')
elif keyword == 'AX_LIB_NETTLE':
deps.append('nettle')
elif keyword == 'AX_LIB_READLINE':
deps.append('readline')
elif keyword == 'AX_LIB_SQLITE3':
deps.append('sqlite3')
elif keyword == 'AX_LIB_TAGLIB':
deps.append('taglib')
elif keyword in ['AX_PKG_SWIG', 'AC_PROG_SWIG']:
deps.append('swig-native')
elif keyword == 'AX_PROG_XSLTPROC':
deps.append('libxslt-native')
elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']:
inherits.append('python3native')
elif keyword == 'AX_WITH_CURSES':
deps.append('ncurses')
elif keyword == 'AX_PATH_BDB':
deps.append('db')
elif keyword == 'AX_PATH_LIB_PCRE':
deps.append('libpcre')
elif keyword == 'AC_INIT':
if extravalues is not None:
res = ac_init_re.match(value)
if res:
extravalues['PN'] = process_value(res.group(1))
pv = process_value(res.group(2))
if validate_pv(pv):
extravalues['PV'] = pv
elif keyword == 'AM_INIT_AUTOMAKE':
if extravalues is not None:
if 'PN' not in extravalues:
res = am_init_re.match(value)
if res:
if res.group(1) != 'AC_PACKAGE_NAME':
extravalues['PN'] = process_value(res.group(1))
pv = process_value(res.group(2))
if validate_pv(pv):
extravalues['PV'] = pv
elif keyword == 'define(':
res = define_re.match(value)
if res:
key = res.group(2).strip('[]')
value = process_value(res.group(3))
if value is not None:
defines[key] = value
keywords = ['PKG_CHECK_MODULES',
'PKG_CHECK_EXISTS',
'AM_GNU_GETTEXT',
'AM_GLIB_GNU_GETTEXT',
'GETTEXT_PACKAGE',
'AC_PROG_INTLTOOL',
'IT_PROG_INTLTOOL',
'AM_PATH_GLIB_2_0',
'AC_CHECK_PROG',
'AC_PATH_PROG',
'AX_WITH_PROG',
'AC_CHECK_LIB',
'AX_CHECK_LIBRARY',
'AC_PATH_X',
'AX_BOOST',
'BOOST_REQUIRE',
'AC_PROG_LEX',
'AM_PROG_LEX',
'AX_PROG_FLEX',
'AC_PROG_YACC',
'AX_PROG_BISON',
'AX_CHECK_ZLIB',
'AX_CHECK_OPENSSL',
'AX_LIB_CRYPTO',
'AX_LIB_CURL',
'LIBCURL_CHECK_CONFIG',
'AX_LIB_BEECRYPT',
'AX_LIB_EXPAT',
'AX_LIB_GCRYPT',
'AX_LIB_NETTLE',
'AX_LIB_READLINE',
'AX_LIB_SQLITE3',
'AX_LIB_TAGLIB',
'AX_PKG_SWIG',
'AC_PROG_SWIG',
'AX_PROG_XSLTPROC',
'AC_PYTHON_DEVEL',
'AX_PYTHON_DEVEL',
'AM_PATH_PYTHON',
'AX_WITH_CURSES',
'AX_PATH_BDB',
'AX_PATH_LIB_PCRE',
'AC_INIT',
'AM_INIT_AUTOMAKE',
'define(',
]
for handler in handlers:
handler.extend_keywords(keywords)
for srcfile in srcfiles:
nesting = 0
in_keyword = ''
partial = ''
with open(srcfile, 'r', errors='surrogateescape') as f:
for line in f:
if in_keyword:
partial += ' ' + line.strip()
if partial.endswith('\\'):
partial = partial[:-1]
nesting = nesting + line.count('(') - line.count(')')
if nesting == 0:
process_macro(in_keyword, partial)
partial = ''
in_keyword = ''
else:
for keyword in keywords:
if keyword in line:
nesting = line.count('(') - line.count(')')
if nesting > 0:
partial = line.strip()
if partial.endswith('\\'):
partial = partial[:-1]
in_keyword = keyword
else:
process_macro(keyword, line.strip())
break
if in_keyword:
process_macro(in_keyword, partial)
if extravalues:
for k,v in list(extravalues.items()):
if v:
if v.startswith('$') or v.startswith('@') or v.startswith('%'):
del extravalues[k]
else:
extravalues[k] = v.strip('"\'').rstrip('()')
if unmapped:
outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped))))
RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data)
for handler in handlers:
handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values)
if inherits:
values['inherit'] = ' '.join(list(set(inherits)))
return values
class AutotoolsExtensionHandler(object):
'''Base class for Autotools extension handlers'''
def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
'''
Handle a macro parsed out of an autotools file. Note that if you want this to be called
for any macro other than the ones AutotoolsRecipeHandler already looks for, you'll need
to add it to the keywords list in extend_keywords().
Return True if you've completely handled the passed in macro, otherwise return False.
'''
return False
def extend_keywords(self, keywords):
'''Adds keywords to be recognised by the parser (so that you get a call to process_macro)'''
return
def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values):
'''
Handle an AC_PATH_PROG, AC_CHECK_PROG etc. line
Return True if you've completely handled the passed in macro, otherwise return False.
'''
return False
def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values):
'''
Apply any desired post-processing on the output
'''
return
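# A minimal sketch of a concrete handler (hypothetical plugin code):
# extend_keywords() makes the parser call process_macro() for the extra macro.
class ExampleSdlAutotoolsHandler(AutotoolsExtensionHandler):
    def extend_keywords(self, keywords):
        keywords.append('AM_PATH_SDL')
    def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values):
        if keyword == 'AM_PATH_SDL':
            deps.append('libsdl')  # hypothetical dependency mapping
            return True
        return False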
class MakefileRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
makefile = RecipeHandler.checkfiles(srctree, ['Makefile', 'makefile', 'GNUmakefile'])
if makefile:
lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
lines_after.append('# that the appropriate arguments are passed in.')
lines_after.append('')
scanfile = os.path.join(srctree, 'configure.scan')
skipscan = False
try:
stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
except bb.process.ExecutionError as e:
skipscan = True
if scanfile and os.path.exists(scanfile):
values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
classes.extend(values.pop('inherit', '').split())
for var, value in values.items():
if var == 'DEPENDS':
lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
lines_before.append('%s = "%s"' % (var, value))
lines_before.append('')
for f in ['configure.scan', 'autoscan.log']:
fp = os.path.join(srctree, f)
if os.path.exists(fp):
os.remove(fp)
self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
func = []
func.append('# You will almost certainly need to add additional arguments here')
func.append('oe_runmake')
self.genfunction(lines_after, 'do_compile', func)
installtarget = True
try:
stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True)
except bb.process.ExecutionError as e:
if e.exitcode != 1:
installtarget = False
func = []
if installtarget:
func.append('# This is a guess; additional arguments may be required')
makeargs = ''
with open(makefile[0], 'r', errors='surrogateescape') as f:
for i in range(1, 100):
if 'DESTDIR' in f.readline():
makeargs += " 'DESTDIR=${D}'"
break
func.append('oe_runmake install%s' % makeargs)
else:
func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
func.append('# target named "install", so you will need to define this yourself')
self.genfunction(lines_after, 'do_install', func)
handled.append('buildsystem')
else:
lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
lines_after.append('')
self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
class VersionFileRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'PV' not in extravalues:
# Look for a VERSION or version file containing a single line consisting
# only of a version number
filelist = RecipeHandler.checkfiles(srctree, ['VERSION', 'version'])
version = None
for fileitem in filelist:
linecount = 0
with open(fileitem, 'r', errors='surrogateescape') as f:
for line in f:
line = line.rstrip().strip('"\'')
linecount += 1
if line:
if linecount > 1:
version = None
break
else:
if validate_pv(line):
version = line
if version:
extravalues['PV'] = version
break
class SpecFileRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
if 'PV' in extravalues and 'PN' in extravalues:
return
filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True)
valuemap = {'Name': 'PN',
'Version': 'PV',
'Summary': 'SUMMARY',
'Url': 'HOMEPAGE',
'License': 'LICENSE'}
foundvalues = {}
for fileitem in filelist:
linecount = 0
with open(fileitem, 'r', errors='surrogateescape') as f:
for line in f:
for value, varname in valuemap.items():
if line.startswith(value + ':') and not varname in foundvalues:
foundvalues[varname] = line.split(':', 1)[1].strip()
break
if len(foundvalues) == len(valuemap):
break
# Drop values containing unexpanded RPM macros
for k in list(foundvalues.keys()):
if '%' in foundvalues[k]:
del foundvalues[k]
if 'PV' in foundvalues:
if not validate_pv(foundvalues['PV']):
del foundvalues['PV']
license = foundvalues.pop('LICENSE', None)
if license:
liccomment = '# NOTE: spec file indicates the license may be "%s"' % license
for i, line in enumerate(lines_before):
if line.startswith('LICENSE ='):
lines_before.insert(i, liccomment)
break
else:
lines_before.append(liccomment)
extravalues.update(foundvalues)
def register_recipe_handlers(handlers):
# Set priorities with some gaps so that other plugins can insert
# their own handlers (so avoid changing these numbers)
handlers.append((CmakeRecipeHandler(), 50))
handlers.append((AutotoolsRecipeHandler(), 40))
handlers.append((SconsRecipeHandler(), 30))
handlers.append((QmakeRecipeHandler(), 20))
handlers.append((MakefileRecipeHandler(), 10))
handlers.append((VersionFileRecipeHandler(), -1))
handlers.append((SpecFileRecipeHandler(), -1))
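# e.g. a third-party plugin could slot a handler into one of the gaps
# (hypothetical): handlers.append((MesonRecipeHandler(), 45))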

File diff suppressed because it is too large


@@ -0,0 +1,777 @@
# Recipe creation tool - go support plugin
#
# The code is based on golang internals. See the affected
# methods for further reference and information.
#
# Copyright (C) 2023 Weidmueller GmbH & Co KG
# Author: Lukas Funke <lukas.funke@weidmueller.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
from collections import namedtuple
from enum import Enum
from html.parser import HTMLParser
from recipetool.create import RecipeHandler, handle_license_vars
from recipetool.create import guess_license, tidy_licenses, fixup_license
from recipetool.create import determine_from_url
from urllib.error import URLError, HTTPError
import bb.utils
import json
import logging
import os
import re
import subprocess
import sys
import shutil
import tempfile
import urllib.parse
import urllib.request
GoImport = namedtuple('GoImport', 'root vcs url suffix')
logger = logging.getLogger('recipetool')
CodeRepo = namedtuple(
'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
tinfoil = None
# Regular expression to parse pseudo semantic version
# see https://go.dev/ref/mod#pseudo-versions
re_pseudo_semver = re.compile(
r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
# Regular expression to parse semantic version
re_semver = re.compile(
r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
class GoRecipeHandler(RecipeHandler):
"""Class to handle the go recipe creation"""
@staticmethod
def __ensure_go():
"""Check if the 'go' command is available in the recipes"""
recipe = "go-native"
if not tinfoil.recipes_parsed:
tinfoil.parse_recipes()
try:
rd = tinfoil.parse_recipe(recipe)
except bb.providers.NoProvider:
bb.error(
"Nothing provides '%s' which is required for the build" % (recipe))
bb.note(
"You will likely need to add a layer that provides '%s'" % (recipe))
return None
bindir = rd.getVar('STAGING_BINDIR_NATIVE')
gopath = os.path.join(bindir, 'go')
if not os.path.exists(gopath):
tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
if not os.path.exists(gopath):
logger.error(
'%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
return None
return bindir
def __resolve_repository_static(self, modulepath):
"""Resolve the repository in a static manner
The method is based on the go implementation of
`repoRootFromVCSPaths` in
https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
"""
url = urllib.parse.urlparse("https://" + modulepath)
req = urllib.request.Request(url.geturl())
try:
resp = urllib.request.urlopen(req)
# Some modulepath are just redirects to github (or some other vcs
# hoster). Therefore, we check if this modulepath redirects to
# somewhere else
if resp.geturl() != url.geturl():
bb.debug(1, "%s is redirected to %s" %
(url.geturl(), resp.geturl()))
url = urllib.parse.urlparse(resp.geturl())
modulepath = url.netloc + url.path
except URLError as url_err:
# This is probably because the module path
# contains the subdir and major path. Thus,
# we ignore this error for now
logger.debug(
1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
host, _, _ = modulepath.partition('/')
class vcs(Enum):
pathprefix = "pathprefix"
regexp = "regexp"
type = "type"
repo = "repo"
check = "check"
schemelessRepo = "schemelessRepo"
# GitHub
vcsGitHub = {}
vcsGitHub[vcs.pathprefix] = "github.com"
vcsGitHub[vcs.regexp] = re.compile(
r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
vcsGitHub[vcs.type] = "git"
vcsGitHub[vcs.repo] = "https://\\g<root>"
# Bitbucket
vcsBitbucket = {}
vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
vcsBitbucket[vcs.regexp] = re.compile(
r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
vcsBitbucket[vcs.type] = "git"
vcsBitbucket[vcs.repo] = "https://\\g<root>"
# IBM DevOps Services (JazzHub)
vcsIBMDevOps = {}
vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
vcsIBMDevOps[vcs.regexp] = re.compile(
r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
vcsIBMDevOps[vcs.type] = "git"
vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
# Git at Apache
vcsApacheGit = {}
vcsApacheGit[vcs.pathprefix] = "git.apache.org"
vcsApacheGit[vcs.regexp] = re.compile(
r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
vcsApacheGit[vcs.type] = "git"
vcsApacheGit[vcs.repo] = "https://\\g<root>"
# Git at OpenStack
vcsOpenStackGit = {}
vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
vcsOpenStackGit[vcs.regexp] = re.compile(
r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
vcsOpenStackGit[vcs.type] = "git"
vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
# chiselapp.com for fossil
vcsChiselapp = {}
vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
vcsChiselapp[vcs.regexp] = re.compile(
r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
vcsChiselapp[vcs.type] = "fossil"
vcsChiselapp[vcs.repo] = "https://\\g<root>"
# General syntax for any server.
# Must be last.
vcsGeneralServer = {}
vcsGeneralServer[vcs.regexp] = re.compile(
"(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
vcsGeneralServer[vcs.schemelessRepo] = True
vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
vcsGeneralServer]
if modulepath.startswith("example.net") or modulepath == "rsc.io":
logger.warning("Suspicious module path %s" % modulepath)
return None
if modulepath.startswith("http:") or modulepath.startswith("https:"):
logger.warning("Import path should not start with %s %s" %
("http", "https"))
return None
rootpath = None
vcstype = None
repourl = None
suffix = None
for srv in vcsPaths:
m = srv[vcs.regexp].match(modulepath)
if vcs.pathprefix in srv:
if host == srv[vcs.pathprefix]:
rootpath = m.group('root')
vcstype = srv[vcs.type]
repourl = m.expand(srv[vcs.repo])
suffix = m.group('suffix')
break
elif m and srv[vcs.schemelessRepo]:
rootpath = m.group('root')
vcstype = m.group('vcs')
repourl = m.group('repo')
suffix = m.group('suffix')
break
return GoImport(rootpath, vcstype, repourl, suffix)
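# e.g. for the module path "github.com/foo/bar/baz" (hypothetical), the
# GitHub pattern above yields
#   GoImport(root='github.com/foo/bar', vcs='git',
#            url='https://github.com/foo/bar', suffix='baz')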
def __resolve_repository_dynamic(self, modulepath):
"""Resolve the repository root in a dynamic manner.
The method is based on the go implementation of
`repoRootForImportDynamic` in
https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
"""
url = urllib.parse.urlparse("https://" + modulepath)
class GoImportHTMLParser(HTMLParser):
def __init__(self):
super().__init__()
self.__srv = {}
def handle_starttag(self, tag, attrs):
if tag == 'meta' and list(
filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
content = list(
filter(lambda a: (a[0] == 'content'), attrs))
if content:
srv = content[0][1].split()
self.__srv[srv[0]] = srv
def go_import(self, modulepath):
if modulepath in self.__srv:
srv = self.__srv[modulepath]
return GoImport(srv[0], srv[1], srv[2], None)
return None
url = url.geturl() + "?go-get=1"
req = urllib.request.Request(url)
try:
body = urllib.request.urlopen(req).read()
except HTTPError as http_err:
logger.warning(
"Unclean status when fetching page from [%s]: %s", url, str(http_err))
body = http_err.fp.read()
except URLError as url_err:
logger.warning(
"Failed to fetch page from [%s]: %s", url, str(url_err))
return None
parser = GoImportHTMLParser()
parser.feed(body.decode('utf-8'))
parser.close()
return parser.go_import(modulepath)
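# The dynamic lookup follows the go-get discovery protocol: the server
# answers the ?go-get=1 request with a meta tag such as (hypothetical host)
#   <meta name="go-import" content="example.org/mod git https://example.org/mod.git">
# from which GoImportHTMLParser extracts the (root, vcs, repo-url) triple.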
def __resolve_from_golang_proxy(self, modulepath, version):
"""
Resolves repository data from golang proxy
"""
url = urllib.parse.urlparse("https://proxy.golang.org/"
+ modulepath
+ "/@v/"
+ version
+ ".info")
# Transform url to lower case, golang proxy doesn't like mixed case
req = urllib.request.Request(url.geturl().lower())
try:
resp = urllib.request.urlopen(req)
except URLError as url_err:
logger.warning(
"Failed to fetch page from [%s]: %s", url, str(url_err))
return None
golang_proxy_res = resp.read().decode('utf-8')
modinfo = json.loads(golang_proxy_res)
if modinfo and 'Origin' in modinfo:
origin = modinfo['Origin']
_root_url = urllib.parse.urlparse(origin['URL'])
# We normalize the repo URL since we don't want the scheme in it
_subdir = origin['Subdir'] if 'Subdir' in origin else None
_root, _, _ = self.__split_path_version(modulepath)
if _subdir:
_root = _root[:-len(_subdir)].strip('/')
_commit = origin['Hash']
_vcs = origin['VCS']
return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
return None
def __resolve_repository(self, modulepath):
"""
Resolves src uri from go module-path
"""
repodata = self.__resolve_repository_static(modulepath)
if not repodata or not repodata.url:
repodata = self.__resolve_repository_dynamic(modulepath)
if not repodata or not repodata.url:
logger.error(
"Could not resolve repository for module path '%s'" % modulepath)
# There is no way to recover from this
sys.exit(14)
if repodata:
logger.debug(1, "Resolved download path for import '%s' => %s" % (
modulepath, repodata.url))
return repodata
def __split_path_version(self, path):
i = len(path)
dot = False
for j in range(i, 0, -1):
if path[j - 1] < '0' or path[j - 1] > '9':
break
if path[j - 1] == '.':
dot = True
break
i = j - 1
if i <= 1 or i == len(
path) or path[i - 1] != 'v' or path[i - 2] != '/':
return path, "", True
prefix, pathMajor = path[:i - 2], path[i - 2:]
if dot or len(
pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
return path, "", False
return prefix, pathMajor, True
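# e.g. __split_path_version("github.com/foo/bar/v2") returns
# ("github.com/foo/bar", "/v2", True); a path without a major-version
# suffix comes back unchanged with an empty pathMajor.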
def __get_path_major(self, pathMajor):
if not pathMajor:
return ""
if pathMajor[0] != '/' and pathMajor[0] != '.':
logger.error(
"pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
pathMajor = pathMajor[:-len("-unstable")]
return pathMajor[1:]
def __build_coderepo(self, repo, path):
codedir = ""
pathprefix, pathMajor, _ = self.__split_path_version(path)
if repo.root == path:
pathprefix = path
elif path.startswith(repo.root):
codedir = pathprefix[len(repo.root):].strip('/')
pseudoMajor = self.__get_path_major(pathMajor)
logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
return CodeRepo(path, repo.root, codedir,
pathMajor, pathprefix, pseudoMajor)
def __resolve_version(self, repo, path, version):
hash = None
coderoot = self.__build_coderepo(repo, path)
def vcs_fetch_all():
tmpdir = tempfile.mkdtemp()
clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
bb.process.run(clone_cmd)
log_cmd = "git log --all --pretty='%H %d' --decorate=short"
output, _ = bb.process.run(
log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
bb.utils.prunedir(tmpdir)
return output.strip().split('\n')
def vcs_fetch_remote(tag):
# add * to grab ^{}
refs = {}
ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
repo.url, tag)
output, _ = bb.process.run(ls_remote_cmd)
output = output.strip().split('\n')
for line in output:
f = line.split(maxsplit=1)
if len(f) != 2:
continue
for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
if f[1].startswith(prefix):
refs[f[1][len(prefix):]] = f[0]
            # iterate over a snapshot, since the dict is extended in the loop;
            # the peeled "tag^{}" entry points at the commit an annotated tag
            # references, so map it back to the plain tag name
            for key, hash in list(refs.items()):
                if key.endswith(r"^{}"):
                    refs[key.strip(r"^{}")] = hash
return refs[tag]
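        # Illustrative 'git ls-remote' output handled above (hashes are
        # hypothetical; annotated tags appear twice, the peeled "^{}" entry
        # pointing at the actual commit):
        #   1111111111111111111111111111111111111111 refs/tags/v1.2.3
        #   2222222222222222222222222222222222222222 refs/tags/v1.2.3^{}
        # -> refs["v1.2.3"] ends up as the peeled commit 2222...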
m_pseudo_semver = re_pseudo_semver.match(version)
if m_pseudo_semver:
remote_refs = vcs_fetch_all()
short_commit = m_pseudo_semver.group('commithash')
for l in remote_refs:
r = l.split(maxsplit=1)
sha1 = r[0] if len(r) else None
if not sha1:
                    logger.error(
                        "Oops: could not resolve abbreviated commit for %s" % short_commit)
elif sha1.startswith(short_commit):
hash = sha1
break
else:
m_semver = re_semver.match(version)
if m_semver:
                def get_sha1_remote(pattern):
rsha1 = None
for line in remote_refs:
# Split lines of the following format:
# 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
lineparts = line.split(maxsplit=1)
sha1 = lineparts[0] if len(lineparts) else None
refstring = lineparts[1] if len(
lineparts) == 2 else None
if refstring:
                            # Normalize the tag string and split it, in case of multiple
                            # refs, e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
refs = refstring.strip('(), ').split(',')
for ref in refs:
                                if pattern.match(ref.strip()):
rsha1 = sha1
return rsha1
semver = "v" + m_semver.group('major') + "."\
+ m_semver.group('minor') + "."\
+ m_semver.group('patch') \
+ (("-" + m_semver.group('prerelease'))
if m_semver.group('prerelease') else "")
tag = os.path.join(
coderoot.codeDir, semver) if coderoot.codeDir else semver
# probe tag using 'ls-remote', which is faster than fetching
# complete history
hash = vcs_fetch_remote(tag)
if not hash:
# backup: fetch complete history
remote_refs = vcs_fetch_all()
hash = get_sha1_remote(
re.compile(fr"(tag:|HEAD ->) ({tag})"))
logger.debug(
"Resolving commit for tag '%s' -> '%s'", tag, hash)
return hash
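    # Illustrative output of __generate_srcuri_inline_fcn below, assuming a
    # plain git module whose repo URL matches the module path (names
    # hypothetical):
    #   ${@go_src_uri('github.com/example/mod','v1.2.3')}
    # The optional path/subdir/vcs/replaces/pathmajor arguments are only
    # appended when they differ from the defaults.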
def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
"""Generate SRC_URI functions for go imports"""
logger.info("Resolving repository for module %s", path)
# First try to resolve repo and commit from golang proxy
# Most info is already there and we don't have to go through the
# repository or even perform the version resolve magic
golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
if golang_proxy_info:
repo = golang_proxy_info[0]
commit = golang_proxy_info[1]
else:
# Fallback
# Resolve repository by 'hand'
repo = self.__resolve_repository(path)
commit = self.__resolve_version(repo, path, version)
url = urllib.parse.urlparse(repo.url)
repo_url = url.netloc + url.path
coderoot = self.__build_coderepo(repo, path)
inline_fcn = "${@go_src_uri("
inline_fcn += f"'{repo_url}','{version}'"
if repo_url != path:
inline_fcn += f",path='{path}'"
if coderoot.codeDir:
inline_fcn += f",subdir='{coderoot.codeDir}'"
if repo.vcs != 'git':
inline_fcn += f",vcs='{repo.vcs}'"
if replaces:
inline_fcn += f",replaces='{replaces}'"
if coderoot.pathMajor:
inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
inline_fcn += ")}"
return inline_fcn, commit
def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
import re
src_uris = []
src_revs = []
def generate_src_rev(path, version, commithash):
src_rev = f"# {path}@{version} => {commithash}\n"
            # Oops... maybe someone manipulated the source repository and the
            # version or commit could not be resolved. This is a sign that
            # a) the supply chain was manipulated (bad)
            # b) the version-resolving implementation no longer works
            #    (less bad)
            if not commithash:
                src_rev += "#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
                src_rev += "#!!! Could not resolve version !!!\n"
                src_rev += "#!!! Possible supply chain attack !!!\n"
                src_rev += "#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
return src_rev
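        # Illustrative result of generate_src_rev for a resolved module
        # (path and hash hypothetical):
        #   # golang.org/x/text@v0.14.0 => 0123456789abcdef0123456789abcdef01234567
        #   SRCREV_golang.org.x.text = "0123456789abcdef0123456789abcdef01234567"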
        # We first go over the replacement list, because we are essentially
        # interested only in the replaced path
if go_mod['Replace']:
for replacement in go_mod['Replace']:
oldpath = replacement['Old']['Path']
path = replacement['New']['Path']
version = ''
if 'Version' in replacement['New']:
version = replacement['New']['Version']
if os.path.exists(os.path.join(srctree, path)):
                    # the module refers to a local path, so remove it from the
                    # requirement list; it is a local module
go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
else:
                    # Replace the path and the version, so we don't iterate the replacement list again
for require in go_mod['Require']:
if require['Path'] == oldpath:
require.update({'Path': path, 'Version': version})
break
for require in go_mod['Require']:
path = require['Path']
version = require['Version']
inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
path, version)
src_uris.append(inline_fcn)
src_revs.append(generate_src_rev(path, version, commithash))
# strip version part from module URL /vXX
baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
pn, _ = determine_from_url(baseurl)
go_mods_basename = "%s-modules.inc" % pn
go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
with open(go_mods_filename, "w") as f:
# We introduce this indirection to make the tests a little easier
f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
for uri in src_uris:
f.write(" " + uri + " \\\n")
f.write("\"\n\n")
for rev in src_revs:
f.write(rev + "\n")
extravalues['extrafiles'][go_mods_basename] = go_mods_filename
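    # Illustrative shape of the generated ${PN}-modules.inc (module name and
    # hash hypothetical):
    #   SRC_URI += "${GO_DEPENDENCIES_SRC_URI}"
    #   GO_DEPENDENCIES_SRC_URI = "\
    #       ${@go_src_uri('github.com/example/mod','v1.2.3')} \
    #   "
    #   # github.com/example/mod@v1.2.3 => <sha>
    #   SRCREV_github.com.example.mod = "<sha>"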
def __go_run_cmd(self, cmd, cwd, d):
return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
shell=True, cwd=cwd)
def __go_native_version(self, d):
stdout, _ = self.__go_run_cmd("go version", None, d)
        m = re.match(r".*\sgo((\d+)\.(\d+)\.(\d+))\s([\w\/]*)", stdout)
major = int(m.group(2))
minor = int(m.group(3))
patch = int(m.group(4))
return major, minor, patch
def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
patchfilename = "go.mod.patch"
go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
d)
self.__go_run_cmd("go mod tidy -go=%d.%d" %
(go_native_version_major, go_native_version_minor), srctree, d)
stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
# Create patch in order to upgrade go version
self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
# Restore original state
self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
go_mod = json.loads(stdout)
tmpfile = os.path.join(localfilesdir, patchfilename)
shutil.move(os.path.join(srctree, patchfilename), tmpfile)
extravalues['extrafiles'][patchfilename] = tmpfile
return go_mod, patchfilename
def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
# Perform vendoring to retrieve the correct modules.txt
tmp_vendor_dir = tempfile.mkdtemp()
        # -v causes go to print modules.txt to stderr
_, stderr = self.__go_run_cmd(
"go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
modules_txt_basename = "modules.txt"
modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
with open(modules_txt_filename, "w") as f:
f.write(stderr)
extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
licenses = []
lic_files_chksum = []
licvalues = guess_license(tmp_vendor_dir, d)
shutil.rmtree(tmp_vendor_dir)
if licvalues:
for licvalue in licvalues:
license = licvalue[0]
lics = tidy_licenses(fixup_license(license))
lics = [lic for lic in lics if lic not in licenses]
if len(lics):
licenses.extend(lics)
lic_files_chksum.append(
'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
# strip version part from module URL /vXX
baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
pn, _ = determine_from_url(baseurl)
licenses_basename = "%s-licenses.inc" % pn
licenses_filename = os.path.join(localfilesdir, licenses_basename)
with open(licenses_filename, "w") as f:
f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
' & '.join(sorted(licenses, key=str.casefold)))
# We introduce this indirection to make the tests a little easier
f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
for lic in lic_files_chksum:
f.write(" " + lic + " \\\n")
f.write("\"\n")
extravalues['extrafiles'][licenses_basename] = licenses_filename
def process(self, srctree, classes, lines_before,
lines_after, handled, extravalues):
if 'buildsystem' in handled:
return False
files = RecipeHandler.checkfiles(srctree, ['go.mod'])
if not files:
return False
d = bb.data.createCopy(tinfoil.config_data)
go_bindir = self.__ensure_go()
if not go_bindir:
sys.exit(14)
d.prependVar('PATH', '%s:' % go_bindir)
handled.append('buildsystem')
classes.append("go-vendor")
stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
go_mod = json.loads(stdout)
go_import = go_mod['Module']['Path']
        go_version_match = re.match(r"([0-9]+)\.([0-9]+)", go_mod['Go'])
go_version_major = int(go_version_match.group(1))
go_version_minor = int(go_version_match.group(2))
src_uris = []
localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
extravalues.setdefault('extrafiles', {})
# Use an explicit name determined from the module name because it
# might differ from the actual URL for replaced modules
# strip version part from module URL /vXX
baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
pn, _ = determine_from_url(baseurl)
# go.mod files with version < 1.17 may not include all indirect
# dependencies. Thus, we have to upgrade the go version.
if go_version_major == 1 and go_version_minor < 17:
logger.warning(
"go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
extravalues, d)
src_uris.append(
"file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
# Check whether the module is vendored. If so, we have nothing to do.
# Otherwise we gather all dependencies and add them to the recipe
if not os.path.exists(os.path.join(srctree, "vendor")):
# Write additional $BPN-modules.inc file
self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
lines_before.append("require %s-licenses.inc" % (pn))
self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
lines_before.append("require %s-modules.inc" % (pn))
# Do generic license handling
handle_license_vars(srctree, lines_before, handled, extravalues, d)
self.__rewrite_lic_uri(lines_before)
lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
def __update_lines_before(self, updated, newlines, lines_before):
if updated:
del lines_before[:]
for line in newlines:
# Hack to avoid newlines that edit_metadata inserts
if line.endswith('\n'):
line = line[:-1]
lines_before.append(line)
return updated
def __rewrite_lic_uri(self, lines_before):
def varfunc(varname, origvalue, op, newlines):
if varname == 'LIC_FILES_CHKSUM':
new_licenses = []
licenses = origvalue.split('\\')
for license in licenses:
if not license:
logger.warning("No license file was detected for the main module!")
# the license list of the main recipe must be empty
# this can happen for example in case of CLOSED license
# Fall through to complete recipe generation
continue
license = license.strip()
uri, chksum = license.split(';', 1)
url = urllib.parse.urlparse(uri)
new_uri = os.path.join(
url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
new_licenses.append(new_uri)
return new_licenses, None, -1, True
return origvalue, None, 0, True
updated, newlines = bb.utils.edit_metadata(
lines_before, ['LIC_FILES_CHKSUM'], varfunc)
return self.__update_lines_before(updated, newlines, lines_before)
def __rewrite_src_uri(self, lines_before, additional_uris = []):
def varfunc(varname, origvalue, op, newlines):
if varname == 'SRC_URI':
src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
src_uri.extend(additional_uris)
return src_uri, None, -1, True
return origvalue, None, 0, True
updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
return self.__update_lines_before(updated, newlines, lines_before)
def register_recipe_handlers(handlers):
handlers.append((GoRecipeHandler(), 60))


@@ -0,0 +1,89 @@
# Recipe creation tool - kernel support plugin
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import logging
import os
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
class KernelRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
import bb.process
if 'buildsystem' in handled:
return False
for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']:
if not os.path.exists(os.path.join(srctree, tell)):
return False
handled.append('buildsystem')
del lines_after[:]
del classes[:]
template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
def handle_var(varname, origvalue, op, newlines):
if varname in ['SRCREV', 'SRCREV_machine']:
while newlines[-1].startswith('#'):
del newlines[-1]
try:
stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True)
except bb.process.ExecutionError as e:
stdout = None
if stdout:
return stdout.strip(), op, 0, True
elif varname == 'LINUX_VERSION':
makefile = os.path.join(srctree, 'Makefile')
if os.path.exists(makefile):
kversion = -1
kpatchlevel = -1
ksublevel = -1
kextraversion = ''
with open(makefile, 'r', errors='surrogateescape') as f:
for i, line in enumerate(f):
if i > 10:
break
if line.startswith('VERSION ='):
kversion = int(line.split('=')[1].strip())
elif line.startswith('PATCHLEVEL ='):
kpatchlevel = int(line.split('=')[1].strip())
elif line.startswith('SUBLEVEL ='):
ksublevel = int(line.split('=')[1].strip())
elif line.startswith('EXTRAVERSION ='):
kextraversion = line.split('=')[1].strip()
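                    # Illustrative kernel Makefile header this parser expects
                    # (values hypothetical):
                    #   VERSION = 6
                    #   PATCHLEVEL = 6
                    #   SUBLEVEL = 102
                    #   EXTRAVERSION = -rc1
                    # -> LINUX_VERSION = "6.6.102-rc1"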
version = ''
if kversion > -1 and kpatchlevel > -1:
version = '%d.%d' % (kversion, kpatchlevel)
if ksublevel > -1:
version += '.%d' % ksublevel
version += kextraversion
if version:
return version, op, 0, True
elif varname == 'SRC_URI':
while newlines[-1].startswith('#'):
del newlines[-1]
elif varname == 'COMPATIBLE_MACHINE':
while newlines[-1].startswith('#'):
del newlines[-1]
machine = tinfoil.config_data.getVar('MACHINE')
return machine, op, 0, True
return origvalue, op, 0, True
with open(template, 'r') as f:
varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE']
(_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var)
lines_before[:] = [line.rstrip('\n') for line in newlines]
return True
def register_recipe_handlers(handlers):
handlers.append((KernelRecipeHandler(), 100))


@@ -0,0 +1,142 @@
# Recipe creation tool - kernel module support plugin
#
# Copyright (C) 2016 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re
import logging
import os
from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
class KernelModuleRecipeHandler(RecipeHandler):
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
import bb.process
if 'buildsystem' in handled:
return False
        module_inc_re = re.compile(r'^#include\s+<linux/module\.h>$')
        is_module = False
        makefiles = []
files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples'])
if files:
for cfile in files:
# Look in same dir or parent for Makefile
for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]:
if makefile in makefiles:
break
else:
if os.path.exists(makefile):
makefiles.append(makefile)
break
else:
continue
with open(cfile, 'r', errors='surrogateescape') as f:
for line in f:
if module_inc_re.match(line.strip()):
is_module = True
break
if is_module:
break
if is_module:
classes.append('module')
handled.append('buildsystem')
# module.bbclass and the classes it inherits do most of the hard
# work, but we need to tweak it slightly depending on what the
# Makefile does (and there is a range of those)
# Check the makefile for the appropriate install target
install_lines = []
compile_lines = []
in_install = False
in_compile = False
install_target = None
with open(makefile, 'r', errors='surrogateescape') as f:
for line in f:
if line.startswith('install:'):
if not install_lines:
in_install = True
install_target = 'install'
elif line.startswith('modules_install:'):
install_lines = []
in_install = True
install_target = 'modules_install'
elif line.startswith('modules:'):
compile_lines = []
in_compile = True
elif line.startswith(('all:', 'default:')):
if not compile_lines:
in_compile = True
elif line:
if line[0] == '\t':
if in_install:
install_lines.append(line)
elif in_compile:
compile_lines.append(line)
elif ':' in line:
in_install = False
in_compile = False
def check_target(lines, install):
kdirpath = ''
manual_install = False
for line in lines:
                    splitline = line.split()
                    if splitline and splitline[0] in ['make', 'gmake', '$(MAKE)']:
if '-C' in splitline:
idx = splitline.index('-C') + 1
if idx < len(splitline):
kdirpath = splitline[idx]
break
elif install and splitline[0] == 'install':
if '.ko' in line:
manual_install = True
return kdirpath, manual_install
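            # Illustrative Makefile rule that check_target() recognizes
            # (hypothetical module Makefile):
            #   modules_install:
            #           $(MAKE) -C $(KERNEL_SRC) M=$(PWD) modules_install
            # -> kdirpath = '$(KERNEL_SRC)', manual_install = False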
kdirpath = None
manual_install = False
if install_lines:
kdirpath, manual_install = check_target(install_lines, install=True)
if compile_lines and not kdirpath:
kdirpath, _ = check_target(compile_lines, install=False)
if manual_install or not install_lines:
lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
elif install_target and install_target != 'modules_install':
lines_after.append('MODULES_INSTALL_TARGET = "install"')
warnmsg = None
kdirvar = None
if kdirpath:
res = re.match(r'\$\(([^$)]+)\)', kdirpath)
if res:
kdirvar = res.group(1)
if kdirvar != 'KERNEL_SRC':
lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar)
elif kdirpath.startswith('/lib/'):
warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile'
if not kdirvar and not warnmsg:
warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile'
if warnmsg:
warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.'
logger.warning(warnmsg)
lines_after.append('# %s' % warnmsg)
return True
return False
def register_recipe_handlers(handlers):
handlers.append((KernelModuleRecipeHandler(), 15))

View File

@@ -0,0 +1,310 @@
# Copyright (C) 2016 Intel Corporation
# Copyright (C) 2020 Savoir-Faire Linux
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""Recipe creation tool - npm module support plugin"""
import json
import logging
import os
import re
import sys
import tempfile
import bb
from bb.fetch2.npm import NpmEnvironment
from bb.fetch2.npm import npm_package
from bb.fetch2.npmsw import foreach_dependencies
from recipetool.create import RecipeHandler
from recipetool.create import get_license_md5sums
from recipetool.create import guess_license
from recipetool.create import split_pkg_licenses
logger = logging.getLogger('recipetool')
TINFOIL = None
def tinfoil_init(instance):
"""Initialize tinfoil"""
global TINFOIL
TINFOIL = instance
class NpmRecipeHandler(RecipeHandler):
"""Class to handle the npm recipe creation"""
@staticmethod
def _get_registry(lines):
"""Get the registry value from the 'npm://registry' url"""
registry = None
def _handle_registry(varname, origvalue, op, newlines):
nonlocal registry
if origvalue.startswith("npm://"):
registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0])
return origvalue, None, 0, True
bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry)
return registry
@staticmethod
def _ensure_npm():
"""Check if the 'npm' command is available in the recipes"""
if not TINFOIL.recipes_parsed:
TINFOIL.parse_recipes()
try:
d = TINFOIL.parse_recipe("nodejs-native")
except bb.providers.NoProvider:
bb.error("Nothing provides 'nodejs-native' which is required for the build")
bb.note("You will likely need to add a layer that provides nodejs")
sys.exit(14)
bindir = d.getVar("STAGING_BINDIR_NATIVE")
npmpath = os.path.join(bindir, "npm")
if not os.path.exists(npmpath):
TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot")
if not os.path.exists(npmpath):
bb.error("Failed to add 'npm' to sysroot")
sys.exit(14)
return bindir
@staticmethod
def _npm_global_configs(dev):
"""Get the npm global configuration"""
configs = []
if dev:
configs.append(("also", "development"))
else:
configs.append(("only", "production"))
configs.append(("save", "false"))
configs.append(("package-lock", "false"))
configs.append(("shrinkwrap", "false"))
return configs
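    # Illustrative return value of _npm_global_configs(dev=False):
    #   [("only", "production"), ("save", "false"),
    #    ("package-lock", "false"), ("shrinkwrap", "false")]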
def _run_npm_install(self, d, srctree, registry, dev):
"""Run the 'npm install' command without building the addons"""
configs = self._npm_global_configs(dev)
configs.append(("ignore-scripts", "true"))
if registry:
configs.append(("registry", registry))
bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
env = NpmEnvironment(d, configs=configs)
env.run("npm install", workdir=srctree)
def _generate_shrinkwrap(self, d, srctree, dev):
"""Check and generate the 'npm-shrinkwrap.json' file if needed"""
configs = self._npm_global_configs(dev)
env = NpmEnvironment(d, configs=configs)
env.run("npm shrinkwrap", workdir=srctree)
return os.path.join(srctree, "npm-shrinkwrap.json")
def _handle_licenses(self, srctree, shrinkwrap_file, dev):
"""Return the extra license files and the list of packages"""
licfiles = []
packages = {}
# Handle the parent package
packages["${PN}"] = ""
def _licfiles_append_fallback_readme_files(destdir):
"""Append README files as fallback to license files if a license files is missing"""
fallback = True
readmes = []
basedir = os.path.join(srctree, destdir)
for fn in os.listdir(basedir):
upper = fn.upper()
if upper.startswith("README"):
fullpath = os.path.join(basedir, fn)
readmes.append(fullpath)
if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper:
fallback = False
if fallback:
for readme in readmes:
licfiles.append(os.path.relpath(readme, srctree))
# Handle the dependencies
def _handle_dependency(name, params, destdir):
deptree = destdir.split('node_modules/')
suffix = "-".join([npm_package(dep) for dep in deptree])
packages["${PN}" + suffix] = destdir
_licfiles_append_fallback_readme_files(destdir)
with open(shrinkwrap_file, "r") as f:
shrinkwrap = json.load(f)
foreach_dependencies(shrinkwrap, _handle_dependency, dev)
return licfiles, packages
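    # Illustrative package naming produced above for a dependency installed
    # at node_modules/@scope/dep (name hypothetical), assuming npm_package()
    # maps "@scope/dep" to "scope-dep":
    #   packages["${PN}-scope-dep"] = "node_modules/@scope/dep"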
# Handle the peer dependencies
def _handle_peer_dependency(self, shrinkwrap_file):
"""Check if package has peer dependencies and show warning if it is the case"""
with open(shrinkwrap_file, "r") as f:
shrinkwrap = json.load(f)
packages = shrinkwrap.get("packages", {})
peer_deps = packages.get("", {}).get("peerDependencies", {})
for peer_dep in peer_deps:
peer_dep_yocto_name = npm_package(peer_dep)
            bb.warn(peer_dep + " is a peer dependency of the current package. " +
                    "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool"
                    % peer_dep_yocto_name)
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
"""Handle the npm recipe creation"""
if "buildsystem" in handled:
return False
files = RecipeHandler.checkfiles(srctree, ["package.json"])
if not files:
return False
with open(files[0], "r") as f:
data = json.load(f)
if "name" not in data or "version" not in data:
return False
extravalues["PN"] = npm_package(data["name"])
extravalues["PV"] = data["version"]
if "description" in data:
extravalues["SUMMARY"] = data["description"]
if "homepage" in data:
extravalues["HOMEPAGE"] = data["homepage"]
dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False)
registry = self._get_registry(lines_before)
bb.note("Checking if npm is available ...")
# The native npm is used here (and not the host one) to ensure that the
# npm version is high enough to ensure an efficient dependency tree
# resolution and avoid issue with the shrinkwrap file format.
# Moreover the native npm is mandatory for the build.
bindir = self._ensure_npm()
d = bb.data.createCopy(TINFOIL.config_data)
d.prependVar("PATH", bindir + ":")
d.setVar("S", srctree)
bb.note("Generating shrinkwrap file ...")
# To generate the shrinkwrap file the dependencies have to be installed
# first. During the generation process some files may be updated /
# deleted. By default devtool tracks the diffs in the srctree and raises
# errors when finishing the recipe if some diffs are found.
git_exclude_file = os.path.join(srctree, ".git", "info", "exclude")
if os.path.exists(git_exclude_file):
with open(git_exclude_file, "r+") as f:
lines = f.readlines()
                for line in ["/node_modules/", "/npm-shrinkwrap.json"]:
                    # readlines() keeps the trailing newline, so compare
                    # against the newline-terminated form
                    if line + "\n" not in lines:
                        f.write(line + "\n")
lock_file = os.path.join(srctree, "package-lock.json")
lock_copy = lock_file + ".copy"
if os.path.exists(lock_file):
bb.utils.copyfile(lock_file, lock_copy)
self._run_npm_install(d, srctree, registry, dev)
shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev)
with open(shrinkwrap_file, "r") as f:
shrinkwrap = json.load(f)
if os.path.exists(lock_copy):
bb.utils.movefile(lock_copy, lock_file)
# Add the shrinkwrap file as 'extrafiles'
shrinkwrap_copy = shrinkwrap_file + ".copy"
bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy)
extravalues.setdefault("extrafiles", {})
extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy
url_local = "npmsw://%s" % shrinkwrap_file
        url_recipe = "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json"
if dev:
url_local += ";dev=1"
url_recipe += ";dev=1"
# Add the npmsw url in the SRC_URI of the generated recipe
def _handle_srcuri(varname, origvalue, op, newlines):
"""Update the version value and add the 'npmsw://' url"""
value = origvalue.replace("version=" + data["version"], "version=${PV}")
value = value.replace("version=latest", "version=${PV}")
values = [line.strip() for line in value.strip('\n').splitlines()]
if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
values.append(url_recipe)
return values, None, 4, False
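        # Illustrative rewrite performed by _handle_srcuri (URL hypothetical):
        #   "npm://registry.npmjs.org;package=foo;version=1.0.0"
        # becomes
        #   "npm://registry.npmjs.org;package=foo;version=${PV}"
        # plus the "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json" entry when
        # the package has dependencies.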
(_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri)
lines_before[:] = [line.rstrip('\n') for line in newlines]
# In order to generate correct licence checksums in the recipe the
# dependencies have to be fetched again using the npmsw url
bb.note("Fetching npm dependencies ...")
bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True)
fetcher = bb.fetch2.Fetch([url_local], d)
fetcher.download()
fetcher.unpack(srctree)
bb.note("Handling licences ...")
(licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev)
def _guess_odd_license(licfiles):
md5sums = get_license_md5sums(d, linenumbers=True)
chksums = []
licenses = []
for licfile in licfiles:
f = os.path.join(srctree, licfile)
md5value = bb.utils.md5_file(f)
(license, beginline, endline, md5) = md5sums.get(md5value,
(None, "", "", ""))
if not license:
license = "Unknown"
logger.info("Please add the following line for '%s' to a "
"'lib/recipetool/licenses.csv' and replace `Unknown`, "
"`X`, `Y` and `MD5` with the license, begin line, "
"end line and partial MD5 checksum:\n" \
"%s,Unknown,X,Y,MD5" % (licfile, md5value))
chksums.append("file://%s%s%s;md5=%s" % (licfile,
";beginline=%s" % (beginline) if beginline else "",
";endline=%s" % (endline) if endline else "",
md5 if md5 else md5value))
licenses.append((license, licfile, md5value))
return (licenses, chksums)
(licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
classes.append("npm")
handled.append("buildsystem")
# Check if package has peer dependencies and inform the user
self._handle_peer_dependency(shrinkwrap_file)
return True
def register_recipe_handlers(handlers):
"""Register the npm handler"""
handlers.append((NpmRecipeHandler(), 60))


@@ -0,0 +1,44 @@
# Recipe creation tool - edit plugin
#
# This sub-command edits the recipe and appends for the specified target
#
# Example: recipetool edit busybox
#
# Copyright (C) 2018 Mentor Graphics Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import errno
import logging
import os
import re
import subprocess
import sys
import scriptutils
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
def edit(args):
import oe.recipeutils
recipe_path = tinfoil.get_recipe_file(args.target)
appends = tinfoil.get_file_appends(recipe_path)
return scriptutils.run_editor([recipe_path] + list(appends), logger)
def register_commands(subparsers):
parser = subparsers.add_parser('edit',
help='Edit the recipe and appends for the specified target. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.')
parser.add_argument('target', help='Target recipe/provide to edit')
parser.set_defaults(func=edit, parserecipes=True)


@@ -0,0 +1,37 @@
0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
18810669f13b87348459e611d31ab760,GPL-2.0-only
252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
3214f080875748938ba060314b4f727d,LGPL-2.0-only
385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
393a5ca445f6965873eca0259a17f833,GPL-2.0-only
3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
4325afd396febcb659c36b49533135d4,GPL-2.0-only
4fbd65380cdd255951079008b364516c,LGPL-2.1-only
54c7042be62e169199200bc6477f04d1,BSD-3-Clause
55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
751419260aa954499f7abaabaa882bbe,GPL-2.0-only
7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
d32239bcb673463ab874e80d47fae504,GPL-3.0-only
d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
eb723b61539feef013de476e68b5c50a,GPL-2.0-only
ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
fbc093901857fcd118f065f900982c24,LGPL-2.1-only


@@ -0,0 +1,79 @@
# Recipe creation tool - newappend plugin
#
# This sub-command creates a bbappend for the specified target and prints the
# path to the bbappend.
#
# Example: recipetool newappend meta-mylayer busybox
#
# Copyright (C) 2015 Christopher Larson <kergoth@gmail.com>
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import errno
import logging
import os
import re
import subprocess
import sys
import bb
import scriptutils
logger = logging.getLogger('recipetool')
tinfoil = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
def layer(layerpath):
if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')):
raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
return layerpath
def newappend(args):
import oe.recipeutils
recipe_path = tinfoil.get_recipe_file(args.target)
rd = tinfoil.config_data.createCopy()
rd.setVar('FILE', recipe_path)
append_path, path_ok = oe.recipeutils.get_bbappend_path(rd, args.destlayer, args.wildcard_version)
if not append_path:
logger.error('Unable to determine layer directory containing %s', recipe_path)
return 1
if not path_ok:
logger.warning('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path))
layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
if not os.path.abspath(args.destlayer) in layerdirs:
logger.warning('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')
if not os.path.exists(append_path):
bb.utils.mkdirhier(os.path.dirname(append_path))
try:
open(append_path, 'a').close()
except (OSError, IOError) as exc:
logger.critical(str(exc))
return 1
if args.edit:
return scriptutils.run_editor([append_path, recipe_path], logger)
else:
print(append_path)
def register_commands(subparsers):
parser = subparsers.add_parser('newappend',
help='Create a bbappend for the specified target in the specified layer')
parser.add_argument('-e', '--edit', help='Edit the new append. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.', action='store_true')
parser.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
parser.add_argument('destlayer', help='Base directory of the destination layer to write the bbappend to', type=layer)
parser.add_argument('target', help='Target recipe/provide to append')
parser.set_defaults(func=newappend, parserecipes=True)


@@ -0,0 +1,66 @@
# Recipe creation tool - set variable plugin
#
# Copyright (C) 2015 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
import sys
import os
import argparse
import glob
import fnmatch
import re
import logging
import scriptutils
logger = logging.getLogger('recipetool')
tinfoil = None
plugins = None
def tinfoil_init(instance):
global tinfoil
tinfoil = instance
def setvar(args):
import oe.recipeutils
if args.delete:
if args.value:
logger.error('-D/--delete and specifying a value are mutually exclusive')
return 1
value = None
else:
if args.value is None:
logger.error('You must specify a value if not using -D/--delete')
return 1
value = args.value
varvalues = {args.varname: value}
if args.recipe_only:
patches = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)]
else:
rd = tinfoil.parse_recipe_file(args.recipefile, False)
if not rd:
return 1
patches = oe.recipeutils.patch_recipe(rd, args.recipefile, varvalues, patch=args.patch)
if args.patch:
for patch in patches:
for line in patch:
sys.stdout.write(line)
tinfoil.modified_files()
return 0
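# Illustrative usage (recipe path hypothetical):
#   recipetool setvar meta-example/recipes-foo/foo/foo_1.0.bb SUMMARY "A demo"
#   recipetool setvar -D meta-example/recipes-foo/foo/foo_1.0.bb BBCLASSEXTEND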
def register_commands(subparsers):
parser_setvar = subparsers.add_parser('setvar',
help='Set a variable within a recipe',
description='Adds/updates the value a variable is set to in a recipe')
parser_setvar.add_argument('recipefile', help='Recipe file to update')
parser_setvar.add_argument('varname', help='Variable name to set')
parser_setvar.add_argument('value', nargs='?', help='New value to set the variable to')
parser_setvar.add_argument('--recipe-only', '-r', help='Do not set variable in any include file if present', action='store_true')
parser_setvar.add_argument('--patch', '-p', help='Create a patch to make the change instead of modifying the recipe', action='store_true')
parser_setvar.add_argument('--delete', '-D', help='Delete the specified value instead of setting it', action='store_true')
parser_setvar.set_defaults(func=setvar)