Complete Yocto mirror with license table for TQMa6UL (2038-compliance)
- 264 license table entries with exact download URLs (224/264 resolved) - Complete sources/ directory with all BitBake recipes - Build configuration: tqma6ul-multi-mba6ulx, spaetzle (musl) - Full traceability for Softwarefreigabeantrag - GCC 13.4.0, Linux 6.6.102, U-Boot 2023.04, musl 1.2.4 - License distribution: GPL-2.0 (24), MIT (23), GPL-2.0+ (18), BSD-3 (16)
This commit is contained in:
@@ -0,0 +1,30 @@
|
||||
From e8bd4f8ee56cbb12a61c1dcabf35a1835a863132 Mon Sep 17 00:00:00 2001
|
||||
From: Paulo Neves <ptsneves@gmail.com>
|
||||
Date: Tue, 7 Jun 2022 16:16:41 +0200
|
||||
Subject: [PATCH] Avoid shebang overflow on python-config.py
|
||||
|
||||
The whole native path may be too big, leading to shebang
|
||||
overflow. Let's just use the env shebang.
|
||||
|
||||
Denial reason: [1]
|
||||
|
||||
Upstream-Status: Denied [distribution]
|
||||
|
||||
[1] https://github.com/python/cpython/pull/93760#pullrequestreview-1005365737
|
||||
---
|
||||
Makefile.pre.in | 2 ++
|
||||
1 file changed, 2 insertions(+)
|
||||
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 2d235d2..1ac2263 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -2356,6 +2356,8 @@ python-config: $(srcdir)/Misc/python-config.in Misc/python-config.sh
|
||||
@ # Substitution happens here, as the completely-expanded BINDIR
|
||||
@ # is not available in configure
|
||||
sed -e "s,@EXENAME@,$(EXENAME)," < $(srcdir)/Misc/python-config.in >python-config.py
|
||||
+ @ # Otherwise we might get huge shebangs with native paths
|
||||
+ sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' python-config.py
|
||||
@ # Replace makefile compat. variable references with shell script compat. ones; $(VAR) -> ${VAR}
|
||||
LC_ALL=C sed -e 's,\$$(\([A-Za-z0-9_]*\)),\$$\{\1\},g' < Misc/python-config.sh >python-config
|
||||
@ # On Darwin, always use the python version of the script, the shell
|
||||
@@ -0,0 +1,46 @@
|
||||
From bbfb7fdf01f0502c7bf3d418f3a912ea76c93f24 Mon Sep 17 00:00:00 2001
|
||||
From: Alexander Kanavin <alex@linutronix.de>
|
||||
Date: Thu, 16 Sep 2021 16:35:37 +0200
|
||||
Subject: [PATCH] Lib/pty.py: handle stdin I/O errors same way as master I/O
|
||||
errors
|
||||
|
||||
reading stdin can throw the same I/O errors as reading from master fd does,
|
||||
e.g. when running under Yocto's test harness:
|
||||
======================================================================
|
||||
ERROR: test_spawn_doesnt_hang (test.test_pty.PtyTest)
|
||||
----------------------------------------------------------------------
|
||||
Traceback (most recent call last):
|
||||
File "/usr/lib/python3.10/test/test_pty.py", line 316, in test_spawn_doesnt_hang
|
||||
pty.spawn([sys.executable, '-c', 'print("hi there")'])
|
||||
File "/usr/lib/python3.10/pty.py", line 181, in spawn
|
||||
_copy(master_fd, master_read, stdin_read)
|
||||
File "/usr/lib/python3.10/pty.py", line 157, in _copy
|
||||
data = stdin_read(STDIN_FILENO)
|
||||
File "/usr/lib/python3.10/pty.py", line 132, in _read
|
||||
return os.read(fd, 1024)
|
||||
OSError: [Errno 5] Input/output error
|
||||
|
||||
So let's treat both channels the same.
|
||||
|
||||
Upstream-Status: Submitted [https://github.com/python/cpython/pull/28388]
|
||||
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
|
||||
---
|
||||
Lib/pty.py | 5 ++++-
|
||||
1 file changed, 4 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/pty.py b/Lib/pty.py
|
||||
index 1d97994..fa8821b 100644
|
||||
--- a/Lib/pty.py
|
||||
+++ b/Lib/pty.py
|
||||
@@ -178,7 +178,10 @@ def _copy(master_fd, master_read=_read, stdin_read=_read):
|
||||
i_buf = i_buf[n:]
|
||||
|
||||
if stdin_avail and STDIN_FILENO in rfds:
|
||||
- data = stdin_read(STDIN_FILENO)
|
||||
+ try:
|
||||
+ data = stdin_read(STDIN_FILENO)
|
||||
+ except OSError:
|
||||
+ data = b""
|
||||
if not data:
|
||||
stdin_avail = False
|
||||
else:
|
||||
@@ -0,0 +1,31 @@
|
||||
From c739bf214b9dd6060db216b79077806fccb582ae Mon Sep 17 00:00:00 2001
|
||||
From: Alexander Kanavin <alex@linutronix.de>
|
||||
Date: Fri, 17 Nov 2023 14:26:32 +0100
|
||||
Subject: [PATCH] Lib/sysconfig.py: use prefix value from build configuration
|
||||
file
|
||||
|
||||
This allows correctly substituting them for target installs using
|
||||
native python.
|
||||
|
||||
Upstream-Status: Inappropriate [oe-core cross builds]
|
||||
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
|
||||
---
|
||||
Lib/sysconfig.py | 5 +++++
|
||||
1 file changed, 5 insertions(+)
|
||||
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index 6258b68..d59ec6e 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -675,6 +675,11 @@ def _init_config_vars():
|
||||
_CONFIG_VARS['VPATH'] = sys._vpath
|
||||
if os.name == 'posix':
|
||||
_init_posix(_CONFIG_VARS)
|
||||
+ _CONFIG_VARS['installed_base'] = _CONFIG_VARS['prefix']
|
||||
+ _CONFIG_VARS['base'] = _CONFIG_VARS['prefix']
|
||||
+ _CONFIG_VARS['installed_platbase'] = _CONFIG_VARS['prefix']
|
||||
+ _CONFIG_VARS['platbase'] = _CONFIG_VARS['prefix']
|
||||
+ _CONFIG_VARS['platlibdir'] = _CONFIG_VARS['PLATLIBDIR']
|
||||
if _HAS_USER_BASE:
|
||||
# Setting 'userbase' is done below the call to the
|
||||
# init function to enable using 'get_config_var' in
|
||||
@@ -0,0 +1,25 @@
|
||||
From b9081b2e21983f2a828bc40a47ab278ef69f4dfe Mon Sep 17 00:00:00 2001
|
||||
From: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
Date: Wed, 30 Jan 2019 12:41:04 +0100
|
||||
Subject: [PATCH] Makefile.pre: use qemu wrapper when gathering profile data
|
||||
|
||||
Upstream-Status: Inappropriate [oe-core specific]
|
||||
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
---
|
||||
Makefile.pre.in | 3 +--
|
||||
1 file changed, 1 insertion(+), 2 deletions(-)
|
||||
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 083f4c7..dce36a5 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -660,8 +660,7 @@ profile-run-stamp:
|
||||
# enabled.
|
||||
$(MAKE) profile-gen-stamp
|
||||
# Next, run the profile task to generate the profile information.
|
||||
- @ # FIXME: can't run for a cross build
|
||||
- $(LLVM_PROF_FILE) $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) || true
|
||||
+ ./pgo-wrapper ./python -m test.regrtest --pgo test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_support || true
|
||||
$(LLVM_PROF_MERGER)
|
||||
# Remove profile generation binary since we are done with it.
|
||||
$(MAKE) clean-retain-profile
|
||||
@@ -0,0 +1,72 @@
|
||||
From b4014e3d1d9e38b25f2840e65e2acd757f3e5d41 Mon Sep 17 00:00:00 2001
|
||||
From: Yi Fan Yu <yifan.yu@windriver.com>
|
||||
Date: Thu, 1 Apr 2021 13:08:37 -0700
|
||||
Subject: [PATCH] Skip failing tests due to load variability on YP AB
|
||||
|
||||
Skip these tests until AB-INT is solved.
|
||||
|
||||
[YOCTO #14296]
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
Signed-off-by: Yi Fan Yu <yifan.yu@windriver.com>
|
||||
|
||||
Skip two additional tests due to suspected load variability failures.
|
||||
|
||||
[YOCTO #15131]
|
||||
[YOCTO #15177]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/_test_multiprocessing.py | 3 +++
|
||||
Lib/test/test_time.py | 2 ++
|
||||
2 files changed, 5 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
|
||||
index 3b4415b..1f94dec 100644
|
||||
--- a/Lib/test/_test_multiprocessing.py
|
||||
+++ b/Lib/test/_test_multiprocessing.py
|
||||
@@ -692,6 +692,7 @@ class _TestProcess(BaseTestCase):
|
||||
close_queue(q)
|
||||
|
||||
@support.requires_resource('walltime')
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_many_processes(self):
|
||||
if self.TYPE == 'threads':
|
||||
self.skipTest('test not appropriate for {}'.format(self.TYPE))
|
||||
@@ -2223,6 +2224,7 @@ class _TestBarrier(BaseTestCase):
|
||||
except threading.BrokenBarrierError:
|
||||
results.append(True)
|
||||
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_timeout(self):
|
||||
"""
|
||||
Test wait(timeout)
|
||||
@@ -5220,6 +5222,7 @@ class TestWait(unittest.TestCase):
|
||||
time.sleep(period)
|
||||
|
||||
@support.requires_resource('walltime')
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_wait_integer(self):
|
||||
from multiprocessing.connection import wait
|
||||
|
||||
diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py
|
||||
index 9463add..4e0f39d 100644
|
||||
--- a/Lib/test/test_time.py
|
||||
+++ b/Lib/test/test_time.py
|
||||
@@ -536,6 +536,7 @@ class TimeTestCase(unittest.TestCase):
|
||||
@unittest.skipIf(
|
||||
support.is_wasi, "process_time not available on WASI"
|
||||
)
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_process_time(self):
|
||||
# process_time() should not include time spend during a sleep
|
||||
start = time.process_time()
|
||||
@@ -549,6 +550,7 @@ class TimeTestCase(unittest.TestCase):
|
||||
self.assertTrue(info.monotonic)
|
||||
self.assertFalse(info.adjustable)
|
||||
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_thread_time(self):
|
||||
if not hasattr(time, 'thread_time'):
|
||||
if sys.platform.startswith(('linux', 'win')):
|
||||
@@ -0,0 +1,36 @@
|
||||
From 5224cc0ac21f4c2574c24e0fee38b145ca15175b Mon Sep 17 00:00:00 2001
|
||||
From: Wentao Zhang <wentao.zhang@windriver.com>
|
||||
Date: Mon, 20 Mar 2023 13:39:52 +0800
|
||||
Subject: [PATCH] Update test_sysconfig for posix_user purelib
|
||||
|
||||
Steps to trigger the failed test:
|
||||
Edit local.conf to add something as follows:
|
||||
BASELIB = "lib64"
|
||||
IMAGE_INSTALL:append = " python3-tests".
|
||||
bitbake core-image-sato
|
||||
runqemu qemux86-64 nographic slirp
|
||||
Reproducer:
|
||||
$python3 -m test test_sysconfig
|
||||
|
||||
Update test_sysconfig.test_user_similar() for the posix_user scheme:
|
||||
"purelib" doesn't use sys.platlibdir.
|
||||
|
||||
Upstream-Status: Inappropriate [oe-core specific]
|
||||
Signed-off-by: Wentao Zhang <wentao.zhang@windriver.com>
|
||||
---
|
||||
Lib/test/test_sysconfig.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
|
||||
index 3468d0c..9ff174c 100644
|
||||
--- a/Lib/test/test_sysconfig.py
|
||||
+++ b/Lib/test/test_sysconfig.py
|
||||
@@ -390,7 +390,7 @@ class TestSysConfig(unittest.TestCase):
|
||||
expected = os.path.normpath(global_path.replace(base, user, 1))
|
||||
# bpo-44860: platlib of posix_user doesn't use sys.platlibdir,
|
||||
# whereas posix_prefix does.
|
||||
- if name == 'platlib':
|
||||
+ if name == 'platlib' or name == 'purelib':
|
||||
# Replace "/lib64/python3.11/site-packages" suffix
|
||||
# with "/lib/python3.11/site-packages".
|
||||
py_version_short = sysconfig.get_python_version()
|
||||
@@ -0,0 +1,37 @@
|
||||
From 6e3868c8c330f997bc242a8d51d742baac449ecc Mon Sep 17 00:00:00 2001
|
||||
From: Petr Viktorin <encukou@gmail.com>
|
||||
Date: Wed, 23 Aug 2023 20:00:07 +0200
|
||||
Subject: [PATCH] gh-107811: tarfile: treat overflow in UID/GID as failure to
|
||||
set it (#108369)
|
||||
|
||||
Upstream-Status: Backport [https://github.com/python/cpython/pull/108369]
|
||||
Signed-off-by: Khem Raj <raj.khem@gmail.com>
|
||||
---
|
||||
Lib/tarfile.py | 3 ++-
|
||||
.../Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst | 3 +++
|
||||
2 files changed, 5 insertions(+), 1 deletion(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst
|
||||
|
||||
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
|
||||
index 0a0f31e..4dfb67d 100755
|
||||
--- a/Lib/tarfile.py
|
||||
+++ b/Lib/tarfile.py
|
||||
@@ -2685,7 +2685,8 @@ class TarFile(object):
|
||||
os.lchown(targetpath, u, g)
|
||||
else:
|
||||
os.chown(targetpath, u, g)
|
||||
- except OSError as e:
|
||||
+ except (OSError, OverflowError) as e:
|
||||
+ # OverflowError can be raised if an ID doesn't fit in `id_t`
|
||||
raise ExtractError("could not change owner") from e
|
||||
|
||||
def chmod(self, tarinfo, targetpath):
|
||||
diff --git a/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst b/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst
|
||||
new file mode 100644
|
||||
index 0000000..ffca413
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst
|
||||
@@ -0,0 +1,3 @@
|
||||
+:mod:`tarfile`: extraction of members with overly large UID or GID (e.g. on
|
||||
+an OS with 32-bit :c:type:`!id_t`) now fails in the same way as failing to
|
||||
+set the ID.
|
||||
@@ -0,0 +1,120 @@
|
||||
From 82576cdb9d6d9736ba122592974b0e7727216a3f Mon Sep 17 00:00:00 2001
|
||||
From: Changqing Li <changqing.li@windriver.com>
|
||||
Date: Mon, 22 Oct 2018 15:19:51 +0800
|
||||
Subject: [PATCH] python3: use cc_basename to replace CC for checking compiler
|
||||
|
||||
When working path contains "clang"/"gcc"/"icc", it might be part of $CC
|
||||
because of the "--sysroot" parameter. That could cause judgement error
|
||||
about clang/gcc/icc compilers. e.g.
|
||||
When "icc" is contained in the working path, the errors below are reported when
|
||||
compiling python3:
|
||||
x86_64-wrs-linux-gcc: error: strict: No such file or directory
|
||||
x86_64-wrs-linux-gcc: error: unrecognized command line option '-fp-model'
|
||||
|
||||
Here use cc_basename to replace CC for checking compiler to avoid such
|
||||
kind of issue.
|
||||
|
||||
Upstream-Status: Submitted [https://github.com/python/cpython/pull/96399]
|
||||
|
||||
Signed-off-by: Li Zhou <li.zhou@windriver.com>
|
||||
|
||||
patch originally from Li Zhou, I just rework it to new version
|
||||
|
||||
Signed-off-by: Changqing Li <changqing.li@windriver.com>
|
||||
---
|
||||
configure.ac | 19 ++++++++++---------
|
||||
1 file changed, 10 insertions(+), 9 deletions(-)
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 9270b5f..955daad 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -137,6 +137,7 @@ AC_CONFIG_HEADERS([pyconfig.h])
|
||||
AC_CANONICAL_HOST
|
||||
AC_SUBST([build])
|
||||
AC_SUBST([host])
|
||||
+LT_INIT
|
||||
|
||||
AS_VAR_IF([cross_compiling], [maybe],
|
||||
[AC_MSG_ERROR([Cross compiling required --host=HOST-TUPLE and --build=ARCH])]
|
||||
@@ -896,7 +897,7 @@ AC_SUBST([CXX])
|
||||
preset_cxx="$CXX"
|
||||
if test -z "$CXX"
|
||||
then
|
||||
- case "$CC" in
|
||||
+ case "$cc_basename" in
|
||||
gcc) AC_PATH_TOOL([CXX], [g++], [g++], [notfound]) ;;
|
||||
cc) AC_PATH_TOOL([CXX], [c++], [c++], [notfound]) ;;
|
||||
clang|*/clang) AC_PATH_TOOL([CXX], [clang++], [clang++], [notfound]) ;;
|
||||
@@ -1331,7 +1332,7 @@ rmdir CaseSensitiveTestDir
|
||||
|
||||
case $ac_sys_system in
|
||||
hp*|HP*)
|
||||
- case $CC in
|
||||
+ case $cc_basename in
|
||||
cc|*/cc) CC="$CC -Ae";;
|
||||
esac;;
|
||||
esac
|
||||
@@ -1857,7 +1858,7 @@ esac
|
||||
],
|
||||
[AC_MSG_RESULT([no])])
|
||||
if test "$Py_LTO" = 'true' ; then
|
||||
- case $CC in
|
||||
+ case $cc_basename in
|
||||
*clang*)
|
||||
LDFLAGS_NOLTO="-fno-lto"
|
||||
dnl Clang linker requires -flto in order to link objects with LTO information.
|
||||
@@ -1986,7 +1987,7 @@ then
|
||||
fi
|
||||
fi
|
||||
LLVM_PROF_ERR=no
|
||||
-case $CC in
|
||||
+case $cc_basename in
|
||||
*clang*)
|
||||
# Any changes made here should be reflected in the GCC+Darwin case below
|
||||
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
|
||||
@@ -2179,7 +2180,7 @@ AC_MSG_RESULT([$BOLT_APPLY_FLAGS])
|
||||
# compiler and platform. BASECFLAGS tweaks need to be made even if the
|
||||
# user set OPT.
|
||||
|
||||
-case $CC in
|
||||
+case $cc_basename in
|
||||
*clang*)
|
||||
cc_is_clang=1
|
||||
;;
|
||||
@@ -2451,7 +2452,7 @@ yes)
|
||||
|
||||
# ICC doesn't recognize the option, but only emits a warning
|
||||
## XXX does it emit an unused result warning and can it be disabled?
|
||||
- AS_CASE([$CC],
|
||||
+ AS_CASE([$cc_basename],
|
||||
[*icc*], [ac_cv_disable_unused_result_warning=no]
|
||||
[PY_CHECK_CC_WARNING([disable], [unused-result])])
|
||||
AS_VAR_IF([ac_cv_disable_unused_result_warning], [yes],
|
||||
@@ -2697,7 +2698,7 @@ yes)
|
||||
;;
|
||||
esac
|
||||
|
||||
-case "$CC" in
|
||||
+case "$cc_basename" in
|
||||
*mpicc*)
|
||||
CFLAGS_NODIST="$CFLAGS_NODIST"
|
||||
;;
|
||||
@@ -3532,7 +3533,7 @@ then
|
||||
then
|
||||
LINKFORSHARED="-Wl,--export-dynamic"
|
||||
fi;;
|
||||
- SunOS/5*) case $CC in
|
||||
+ SunOS/5*) case $cc_basename in
|
||||
*gcc*)
|
||||
if $CC -Xlinker --help 2>&1 | grep export-dynamic >/dev/null
|
||||
then
|
||||
@@ -6853,7 +6854,7 @@ if test "$ac_cv_gcc_asm_for_x87" = yes; then
|
||||
# Some versions of gcc miscompile inline asm:
|
||||
# http://gcc.gnu.org/bugzilla/show_bug.cgi?id=46491
|
||||
# http://gcc.gnu.org/ml/gcc/2010-11/msg00366.html
|
||||
- case $CC in
|
||||
+ case $cc_basename in
|
||||
*gcc*)
|
||||
AC_MSG_CHECKING([for gcc ipa-pure-const bug])
|
||||
saved_cflags="$CFLAGS"
|
||||
@@ -0,0 +1,29 @@
|
||||
From 5944f707fc04fb65caec3f0e1ce3a42169426c47 Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Fri, 15 Sep 2023 08:48:33 -0400
|
||||
Subject: [PATCH] skip no_stdout_fileno test due to load variability
|
||||
|
||||
Skip test_input_no_stdout_fileno so that it doesn't fail on systems
|
||||
under heavy load.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
[YOCTO #15210]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/test_builtin.py | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
|
||||
index c71c568..e41ab5e 100644
|
||||
--- a/Lib/test/test_builtin.py
|
||||
+++ b/Lib/test/test_builtin.py
|
||||
@@ -2375,6 +2375,7 @@ class PtyTests(unittest.TestCase):
|
||||
# Check stdin/stdout error handler is used when invoking PyOS_Readline()
|
||||
self.check_input_tty("prompté", b"quux\xe9", "ascii")
|
||||
|
||||
+ @unittest.skip("Test may fail under heavy load")
|
||||
def test_input_no_stdout_fileno(self):
|
||||
# Issue #24402: If stdin is the original terminal but stdout.fileno()
|
||||
# fails, do not use the original stdout file descriptor
|
||||
@@ -0,0 +1,27 @@
|
||||
From 3aeeddb1325679d5c0471ad86806e92e72187138 Mon Sep 17 00:00:00 2001
|
||||
From: Alexander Kanavin <alex@linutronix.de>
|
||||
Date: Sun, 12 Sep 2021 21:44:36 +0200
|
||||
Subject: [PATCH] sysconfig.py: use platlibdir also for purelib
|
||||
|
||||
This is needed in multilib configurations where hardcoding 'lib'
|
||||
is not correct.
|
||||
|
||||
Upstream-Status: Inappropriate [oe-core specific]
|
||||
Signed-off-by: Alexander Kanavin <alex@linutronix.de>
|
||||
---
|
||||
Lib/sysconfig.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index 517b13a..6258b68 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -28,7 +28,7 @@ _INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
|
||||
'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
|
||||
- 'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
+ 'purelib': '{base}/{platlibdir}/python{py_version_short}/site-packages',
|
||||
'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
|
||||
'include':
|
||||
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||
@@ -0,0 +1,27 @@
|
||||
From a83311a1030b816f422dbb4457fc38c1289c224d Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Thu, 13 Jun 2024 10:54:31 -0400
|
||||
Subject: [PATCH] test_active_children: skip problematic test
|
||||
|
||||
This test is failing in some runs on the Autobuilder. Since it's of a
|
||||
similar nature to other failing/hanging tests, disable it for now.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/_test_multiprocessing.py | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
|
||||
index 1f94dec..3632219 100644
|
||||
--- a/Lib/test/_test_multiprocessing.py
|
||||
+++ b/Lib/test/_test_multiprocessing.py
|
||||
@@ -585,6 +585,7 @@ class _TestProcess(BaseTestCase):
|
||||
self.assertTrue(type(cpus) is int)
|
||||
self.assertTrue(cpus >= 1)
|
||||
|
||||
+ @unittest.skip("skipping problematic test")
|
||||
def test_active_children(self):
|
||||
self.assertEqual(type(self.active_children()), list)
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
From fbbf04dbeae217b985073263499174960e5fd142 Mon Sep 17 00:00:00 2001
|
||||
From: Tim Orling <timothy.t.orling@intel.com>
|
||||
Date: Fri, 18 Jun 2021 11:56:50 -0700
|
||||
Subject: [PATCH] test_ctypes.test_find: skip without tools-sdk
|
||||
|
||||
These tests need full packagegroup-core-buildessential, the
|
||||
easiest way to dynamically check for that is looking for
|
||||
'tools-sdk' in IMAGE_FEATURES.
|
||||
|
||||
Upstream-Status: Inappropriate [oe-specific]
|
||||
|
||||
Signed-off-by: Tim Orling <timothy.t.orling@intel.com>
|
||||
---
|
||||
Lib/test/test_ctypes/test_find.py | 2 ++
|
||||
1 file changed, 2 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/test_ctypes/test_find.py b/Lib/test/test_ctypes/test_find.py
|
||||
index a41e949..eb5fe19 100644
|
||||
--- a/Lib/test/test_ctypes/test_find.py
|
||||
+++ b/Lib/test/test_ctypes/test_find.py
|
||||
@@ -113,10 +113,12 @@ class FindLibraryLinux(unittest.TestCase):
|
||||
# LD_LIBRARY_PATH)
|
||||
self.assertEqual(find_library(libname), 'lib%s.so' % libname)
|
||||
|
||||
+ @unittest.skip("Needs IMAGE_FEATURE += \"tools-sdk\"")
|
||||
def test_find_library_with_gcc(self):
|
||||
with unittest.mock.patch("ctypes.util._findSoname_ldconfig", lambda *args: None):
|
||||
self.assertNotEqual(find_library('c'), None)
|
||||
|
||||
+ @unittest.skip("Needs IMAGE_FEATURE += \"tools-sdk\"")
|
||||
def test_find_library_with_ld(self):
|
||||
with unittest.mock.patch("ctypes.util._findSoname_ldconfig", lambda *args: None), \
|
||||
unittest.mock.patch("ctypes.util._findLib_gcc", lambda *args: None):
|
||||
@@ -0,0 +1,27 @@
|
||||
From 9d658dd20f02edcf878b245d638c474c808ab8d1 Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Wed, 12 Jun 2024 10:29:03 -0400
|
||||
Subject: [PATCH] test_deadlock: skip problematic test
|
||||
|
||||
This test hangs frequently when run on the Autobuilder. Disable it in
|
||||
testing until the cause can be determined.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/test_concurrent_futures/test_deadlock.py | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/Lib/test/test_concurrent_futures/test_deadlock.py b/Lib/test/test_concurrent_futures/test_deadlock.py
|
||||
index e8cd8f6..021906b 100644
|
||||
--- a/Lib/test/test_concurrent_futures/test_deadlock.py
|
||||
+++ b/Lib/test/test_concurrent_futures/test_deadlock.py
|
||||
@@ -90,6 +90,7 @@ class ErrorAtUnpickle(object):
|
||||
return _raise_error_ignore_stderr, (UnpicklingError, )
|
||||
|
||||
|
||||
+@unittest.skip("skipping problematic test")
|
||||
class ExecutorDeadlockTest:
|
||||
TIMEOUT = support.LONG_TIMEOUT
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
From fcd5b7d30d3245ce92ea45dfbab3c7b7da690c20 Mon Sep 17 00:00:00 2001
|
||||
From: Mingli Yu <mingli.yu@windriver.com>
|
||||
Date: Mon, 5 Aug 2019 15:57:39 +0800
|
||||
Subject: [PATCH] test_locale.py: correct the test output format
|
||||
|
||||
Before this patch:
|
||||
# python3 -m test -v test_locale
|
||||
[snip]
|
||||
test_getsetlocale_issue1813 (test.test_locale.TestMiscellaneous) ... testing with ('tr_TR', 'ISO8859-9') ok
|
||||
[snip]
|
||||
|
||||
After this patch:
|
||||
# python3 -m test -v test_locale
|
||||
[snip]
|
||||
test_getsetlocale_issue1813 (test.test_locale.TestMiscellaneous) ... testing with ('tr_TR', 'ISO8859-9')... ok
|
||||
[snip]
|
||||
|
||||
Ending the test output with "... ok" is common in the Python
|
||||
unittest world, so make this test consistent
|
||||
with the other test cases; otherwise its result may be
|
||||
missed in reports that use the common filter
|
||||
"... ok".
|
||||
|
||||
Upstream-Status: Submitted [https://github.com/python/cpython/pull/15132]
|
||||
|
||||
Rebased for 3.9.4; still not accepted upstream. Signed-off-by: Alejandro Hernandez <alejandro@enedino.org>
|
||||
|
||||
Signed-off-by: Mingli Yu <mingli.yu@windriver.com>
|
||||
---
|
||||
Lib/test/test_locale.py | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
|
||||
index cde80a4..e8ffd71 100644
|
||||
--- a/Lib/test/test_locale.py
|
||||
+++ b/Lib/test/test_locale.py
|
||||
@@ -561,7 +561,7 @@ class TestMiscellaneous(unittest.TestCase):
|
||||
self.skipTest('test needs Turkish locale')
|
||||
loc = locale.getlocale(locale.LC_CTYPE)
|
||||
if verbose:
|
||||
- print('testing with %a' % (loc,), end=' ', flush=True)
|
||||
+ print('testing with %a...' % (loc,), end=' ', flush=True)
|
||||
try:
|
||||
locale.setlocale(locale.LC_CTYPE, loc)
|
||||
except locale.Error as exc:
|
||||
@@ -0,0 +1,38 @@
|
||||
From 34fd0bc8afc67a11eea5d73f9e0edf045c5ce541 Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Tue, 13 Aug 2024 11:07:05 -0400
|
||||
Subject: [PATCH] test_readline: skip limited history test
|
||||
|
||||
This test was added recently and is failing on the ptest image when
|
||||
using the default PACKAGECONFIG settings (i.e. with editline instead of
|
||||
readline).. Disable it until the proper fix is determined.
|
||||
|
||||
A bug has been opened upstream: https://github.com/python/cpython/issues/123018
|
||||
|
||||
Upstream-Status: Inappropriate [OE-specific]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/test_readline.py | 2 ++
|
||||
1 file changed, 2 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py
|
||||
index fab124a..291dd48 100644
|
||||
--- a/Lib/test/test_readline.py
|
||||
+++ b/Lib/test/test_readline.py
|
||||
@@ -141,6 +141,7 @@ class TestHistoryManipulation (unittest.TestCase):
|
||||
self.assertEqual(readline.get_history_item(1), "entrée 1")
|
||||
self.assertEqual(readline.get_history_item(2), "entrée 22")
|
||||
|
||||
+ @unittest.skip("Skipping problematic test")
|
||||
def test_write_read_limited_history(self):
|
||||
previous_length = readline.get_history_length()
|
||||
self.addCleanup(readline.set_history_length, previous_length)
|
||||
@@ -379,6 +380,7 @@ readline.write_history_file(history_file)
|
||||
self.assertIn(b"done", output)
|
||||
|
||||
|
||||
+ @unittest.skip("Skipping problematic test")
|
||||
def test_write_read_limited_history(self):
|
||||
previous_length = readline.get_history_length()
|
||||
self.addCleanup(readline.set_history_length, previous_length)
|
||||
@@ -0,0 +1,43 @@
|
||||
From d09a034acba8922158d38fd16be970b5a454428a Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Wed, 8 May 2024 11:58:09 -0400
|
||||
Subject: [PATCH] test_shutdown: skip problematic test
|
||||
|
||||
This test hangs frequently when run on the Autobuilder. Disable it in
|
||||
testing until the cause can be determined.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/test_concurrent_futures/test_shutdown.py | 3 +++
|
||||
1 file changed, 3 insertions(+)
|
||||
|
||||
diff --git a/Lib/test/test_concurrent_futures/test_shutdown.py b/Lib/test/test_concurrent_futures/test_shutdown.py
|
||||
index 7a4065a..6b878a4 100644
|
||||
--- a/Lib/test/test_concurrent_futures/test_shutdown.py
|
||||
+++ b/Lib/test/test_concurrent_futures/test_shutdown.py
|
||||
@@ -20,6 +20,7 @@ def sleep_and_print(t, msg):
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
+@unittest.skip("skipping problematic test")
|
||||
class ExecutorShutdownTest:
|
||||
def test_run_after_shutdown(self):
|
||||
self.executor.shutdown()
|
||||
@@ -156,6 +157,7 @@ class ExecutorShutdownTest:
|
||||
signal.signal(signal.SIGALRM, old_handler)
|
||||
|
||||
|
||||
+@unittest.skip("skipping problematic test")
|
||||
class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest, BaseTestCase):
|
||||
def test_threads_terminate(self):
|
||||
def acquire_lock(lock):
|
||||
@@ -252,6 +254,7 @@ class ThreadPoolShutdownTest(ThreadPoolMixin, ExecutorShutdownTest, BaseTestCase
|
||||
self.assertIn(out.strip(), [b"apple", b""])
|
||||
|
||||
|
||||
+@unittest.skip("skipping problematic test")
|
||||
class ProcessPoolShutdownTest(ExecutorShutdownTest):
|
||||
def test_processes_terminate(self):
|
||||
def acquire_lock(lock):
|
||||
@@ -0,0 +1,29 @@
|
||||
From 6715560de4d622c2d72ee7b587c916ac647c54bb Mon Sep 17 00:00:00 2001
|
||||
From: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
Date: Fri, 6 Oct 2023 10:59:44 -0400
|
||||
Subject: [PATCH] test_storlines: skip due to load variability
|
||||
|
||||
This is yet another test that intermittently fails on the Yocto AB when
|
||||
a worker is under heavy load, so skip it during testing.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Specific]
|
||||
|
||||
[YOCTO #14933]
|
||||
|
||||
Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com>
|
||||
---
|
||||
Lib/test/test_ftplib.py | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py
|
||||
index 4c4a449..b8c79a4 100644
|
||||
--- a/Lib/test/test_ftplib.py
|
||||
+++ b/Lib/test/test_ftplib.py
|
||||
@@ -629,6 +629,7 @@ class TestFTPClass(TestCase):
|
||||
self.client.storbinary('stor', f, rest=r)
|
||||
self.assertEqual(self.server.handler_instance.rest, str(r))
|
||||
|
||||
+ @unittest.skip('timing related test, dependent on load')
|
||||
def test_storlines(self):
|
||||
data = RETR_DATA.replace('\r\n', '\n').encode(self.client.encoding)
|
||||
f = io.BytesIO(data)
|
||||
@@ -0,0 +1,33 @@
|
||||
From 011b21dc9b090c0b97eaecbd80a9e0c1cd39b12d Mon Sep 17 00:00:00 2001
|
||||
From: Alexander Kanavin <alex@linutronix.de>
|
||||
Date: Fri, 17 Nov 2023 14:16:40 +0100
|
||||
Subject: [PATCH] configure.ac: do not add a curses include path from the host
|
||||
|
||||
This leads to host contamination, and particularly can cause
|
||||
curses modules to fail at runtime if the host curses is configured
|
||||
differently to native curses (observed on current OpenSuse Tumbleweed
|
||||
as dnf failures).
|
||||
|
||||
Upstream-Status: Inappropriate [oe-core specific]
|
||||
Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com>
|
||||
---
|
||||
configure.ac | 6 ------
|
||||
1 file changed, 6 deletions(-)
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 6e465a4..13c4835 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -6558,12 +6558,6 @@ AS_VAR_IF([have_panel], [no], [
|
||||
AC_MSG_RESULT([$have_panel (CFLAGS: $PANEL_CFLAGS, LIBS: $PANEL_LIBS)])
|
||||
])
|
||||
|
||||
-# first curses header check
|
||||
-ac_save_cppflags="$CPPFLAGS"
|
||||
-if test "$cross_compiling" = no; then
|
||||
- CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw"
|
||||
-fi
|
||||
-
|
||||
# On Solaris, term.h requires curses.h
|
||||
AC_CHECK_HEADERS([term.h], [], [], [
|
||||
#ifdef HAVE_CURSES_H
|
||||
@@ -0,0 +1,31 @@
|
||||
From 6ebd9de3505be0965cfc37e2e4d0d882d75f0ec2 Mon Sep 17 00:00:00 2001
|
||||
From: Mark Hatle <mark.hatle@windriver.com>
|
||||
Date: Wed, 21 Sep 2011 20:55:33 -0500
|
||||
Subject: [PATCH] Lib/cgi.py: Update the script as mentioned in the comment
|
||||
|
||||
Upstream-Status: Inappropriate [distribution]
|
||||
|
||||
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
|
||||
---
|
||||
Lib/cgi.py | 11 +----------
|
||||
1 file changed, 1 insertion(+), 10 deletions(-)
|
||||
|
||||
diff --git a/Lib/cgi.py b/Lib/cgi.py
|
||||
index 8787567..ebe8652 100755
|
||||
--- a/Lib/cgi.py
|
||||
+++ b/Lib/cgi.py
|
||||
@@ -1,13 +1,4 @@
|
||||
-#! /usr/local/bin/python
|
||||
-
|
||||
-# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
|
||||
-# intentionally NOT "/usr/bin/env python". On many systems
|
||||
-# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
|
||||
-# scripts, and /usr/local/bin is the default directory where Python is
|
||||
-# installed, so /usr/bin/env would be unable to find python. Granted,
|
||||
-# binary installations by Linux vendors often install Python in
|
||||
-# /usr/bin. So let those vendors patch cgi.py to match their choice
|
||||
-# of installation.
|
||||
+#! /usr/bin/env python
|
||||
|
||||
"""Support module for CGI (Common Gateway Interface) scripts.
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
logfile = open(sys.argv[1]).read()
|
||||
|
||||
necessary_bits = logfile.find("The necessary bits to build these optional modules were not found")
|
||||
to_find_bits = logfile.find("To find the necessary bits, look in setup.py in detect_modules() for the module's name.")
|
||||
if necessary_bits != -1:
|
||||
print("%s" %(logfile[necessary_bits:to_find_bits]))
|
||||
|
||||
failed_to_build = logfile.find("Failed to build these modules:")
|
||||
if failed_to_build != -1:
|
||||
failed_to_build_end = logfile.find("\n\n", failed_to_build)
|
||||
print("%s" %(logfile[failed_to_build:failed_to_build_end]))
|
||||
|
||||
if necessary_bits != -1 or failed_to_build != -1:
|
||||
sys.exit(1)
|
||||
|
||||
@@ -0,0 +1,444 @@
|
||||
# This script is used as a bitbake task to create a new python manifest
|
||||
# $ bitbake python -c create_manifest
|
||||
#
|
||||
# Our goal is to keep python-core as small as posible and add other python
|
||||
# packages only when the user needs them, hence why we split upstream python
|
||||
# into several packages.
|
||||
#
|
||||
# In a very simplistic way what this does is:
|
||||
# Launch python and see specifically what is required for it to run at a minimum
|
||||
#
|
||||
# Go through the python-manifest file and launch a separate task for every single
|
||||
# one of the files on each package, this task will check what was required for that
|
||||
# specific module to run, these modules will be called dependencies.
|
||||
# The output of such task will be a list of the modules or dependencies that were
|
||||
# found for that file.
|
||||
#
|
||||
# Such output will be parsed by this script, we will look for each dependency on the
|
||||
# manifest and if we find that another package already includes it, then we will add
|
||||
# that package as an RDEPENDS to the package we are currently checking; in case we dont
|
||||
# find the current dependency on any other package we will add it to the current package
|
||||
# as part of FILES.
|
||||
#
|
||||
#
|
||||
# This way we will create a new manifest from the data structure that was built during
|
||||
# this process, on this new manifest each package will contain specifically only
|
||||
# what it needs to run.
|
||||
#
|
||||
# There are some caveats which we try to deal with, such as repeated files on different
|
||||
# packages, packages that include folders, wildcards, and special packages.
|
||||
# Its also important to note that this method only works for python files, and shared
|
||||
# libraries. Static libraries, header files and binaries need to be dealt with manually.
|
||||
#
|
||||
# This script differs from its python2 version mostly on how shared libraries are handled
|
||||
# The manifest file for python3 has an extra field which contains the cached files for
|
||||
# each package.
|
||||
# Tha method to handle cached files does not work when a module includes a folder which
|
||||
# itself contains the pycache folder, gladly this is almost never the case.
|
||||
#
|
||||
# Author: Alejandro Enedino Hernandez Samaniego <alejandro at enedino dot org>
|
||||
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
import json
|
||||
import os
|
||||
import collections
|
||||
|
||||
if '-d' in sys.argv:
|
||||
debugFlag = '-d'
|
||||
else:
|
||||
debugFlag = ''
|
||||
|
||||
# Get python version from ${PYTHON_MAJMIN}
|
||||
pyversion = str(sys.argv[1])
|
||||
|
||||
# Hack to get native python search path (for folders), not fond of it but it works for now
|
||||
pivot = 'recipe-sysroot-native'
|
||||
for p in sys.path:
|
||||
if pivot in p:
|
||||
nativelibfolder = p[:p.find(pivot)+len(pivot)]
|
||||
|
||||
# Empty dict to hold the whole manifest
|
||||
new_manifest = collections.OrderedDict()
|
||||
|
||||
# Check for repeated files, folders and wildcards
|
||||
allfiles = []
|
||||
repeated = []
|
||||
wildcards = []
|
||||
|
||||
hasfolders = []
|
||||
allfolders = []
|
||||
|
||||
def isFolder(value):
|
||||
value = value.replace('${PYTHON_MAJMIN}',pyversion)
|
||||
if os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib')) or os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib64')) or os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib32')):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def isCached(item):
|
||||
if '__pycache__' in item:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def prepend_comments(comments, json_manifest):
|
||||
with open(json_manifest, 'r+') as manifest:
|
||||
json_contents = manifest.read()
|
||||
manifest.seek(0, 0)
|
||||
manifest.write(comments + json_contents)
|
||||
|
||||
def print_indent(msg, offset):
|
||||
for l in msg.splitlines():
|
||||
msg = ' ' * offset + l
|
||||
print(msg)
|
||||
|
||||
|
||||
# Read existing JSON manifest
|
||||
with open('python3-manifest.json') as manifest:
|
||||
# The JSON format doesn't allow comments so we hack the call to keep the comments using a marker
|
||||
manifest_str = manifest.read()
|
||||
json_start = manifest_str.find('# EOC') + 6 # EOC + \n
|
||||
manifest.seek(0)
|
||||
comments = manifest.read(json_start)
|
||||
manifest_str = manifest.read()
|
||||
old_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)
|
||||
|
||||
#
|
||||
# First pass to get core-package functionality, because we base everything on the fact that core is actually working
|
||||
# Not exactly the same so it should not be a function
|
||||
#
|
||||
|
||||
print_indent('Getting dependencies for package: core', 0)
|
||||
|
||||
|
||||
# This special call gets the core dependencies and
|
||||
# appends to the old manifest so it doesnt hurt what it
|
||||
# currently holds.
|
||||
# This way when other packages check for dependencies
|
||||
# on the new core package, they will still find them
|
||||
# even when checking the old_manifest
|
||||
|
||||
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', 'python-core-package', '%s' % debugFlag]).decode('utf8')
|
||||
for coredep in output.split():
|
||||
coredep = coredep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||
if isCached(coredep):
|
||||
if coredep not in old_manifest['core']['cached']:
|
||||
old_manifest['core']['cached'].append(coredep)
|
||||
else:
|
||||
if coredep not in old_manifest['core']['files']:
|
||||
old_manifest['core']['files'].append(coredep)
|
||||
|
||||
|
||||
# The second step is to loop through the existing files contained in the core package
|
||||
# according to the old manifest, identify if they are modules, or some other type
|
||||
# of file that we cant import (directories, binaries, configs) in which case we
|
||||
# can only assume they were added correctly (manually) so we ignore those and
|
||||
# pass them to the manifest directly.
|
||||
|
||||
for filedep in old_manifest['core']['files']:
|
||||
if isFolder(filedep):
|
||||
if isCached(filedep):
|
||||
if filedep not in old_manifest['core']['cached']:
|
||||
old_manifest['core']['cached'].append(filedep)
|
||||
else:
|
||||
if filedep not in old_manifest['core']['files']:
|
||||
old_manifest['core']['files'].append(filedep)
|
||||
continue
|
||||
if '${bindir}' in filedep:
|
||||
if filedep not in old_manifest['core']['files']:
|
||||
old_manifest['core']['files'].append(filedep)
|
||||
continue
|
||||
if filedep == '':
|
||||
continue
|
||||
if '${includedir}' in filedep:
|
||||
if filedep not in old_manifest['core']['files']:
|
||||
old_manifest['core']['files'].append(filedep)
|
||||
continue
|
||||
|
||||
# Get actual module name , shouldnt be affected by libdir/bindir, etc.
|
||||
pymodule = os.path.splitext(os.path.basename(os.path.normpath(filedep)))[0]
|
||||
|
||||
# We now know that were dealing with a python module, so we can import it
|
||||
# and check what its dependencies are.
|
||||
# We launch a separate task for each module for deterministic behavior.
|
||||
# Each module will only import what is necessary for it to work in specific.
|
||||
# The output of each task will contain each module's dependencies
|
||||
|
||||
print_indent('Getting dependencies for module: %s' % pymodule, 2)
|
||||
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule, '%s' % debugFlag]).decode('utf8')
|
||||
print_indent('The following dependencies were found for module %s:\n' % pymodule, 4)
|
||||
print_indent(output, 6)
|
||||
|
||||
|
||||
for pymodule_dep in output.split():
|
||||
pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||
|
||||
if isCached(pymodule_dep):
|
||||
if pymodule_dep not in old_manifest['core']['cached']:
|
||||
old_manifest['core']['cached'].append(pymodule_dep)
|
||||
else:
|
||||
if pymodule_dep not in old_manifest['core']['files']:
|
||||
old_manifest['core']['files'].append(pymodule_dep)
|
||||
|
||||
|
||||
# At this point we are done with the core package.
|
||||
# The old_manifest dictionary is updated only for the core package because
|
||||
# all others will use this a base.
|
||||
|
||||
|
||||
print('\n\nChecking for directories...\n')
|
||||
# To improve the script speed, we check which packages contain directories
|
||||
# since we will be looping through (only) those later.
|
||||
for pypkg in old_manifest:
|
||||
for filedep in old_manifest[pypkg]['files']:
|
||||
if isFolder(filedep):
|
||||
print_indent('%s is a directory' % filedep, 2)
|
||||
if pypkg not in hasfolders:
|
||||
hasfolders.append(pypkg)
|
||||
if filedep not in allfolders:
|
||||
allfolders.append(filedep)
|
||||
|
||||
|
||||
|
||||
# This is the main loop that will handle each package.
|
||||
# It works in a similar fashion than the step before, but
|
||||
# we will now be updating a new dictionary that will eventually
|
||||
# become the new manifest.
|
||||
#
|
||||
# The following loops though all packages in the manifest,
|
||||
# through all files on each of them, and checks whether or not
|
||||
# they are modules and can be imported.
|
||||
# If they can be imported, then it checks for dependencies for
|
||||
# each of them by launching a separate task.
|
||||
# The output of that task is then parsed and the manifest is updated
|
||||
# accordingly, wether it should add the module on FILES for the current package
|
||||
# or if that module already belongs to another package then the current one
|
||||
# will RDEPEND on it
|
||||
|
||||
for pypkg in old_manifest:
|
||||
# Use an empty dict as data structure to hold data for each package and fill it up
|
||||
new_manifest[pypkg] = collections.OrderedDict()
|
||||
new_manifest[pypkg]['summary'] = old_manifest[pypkg]['summary']
|
||||
new_manifest[pypkg]['rdepends'] = []
|
||||
new_manifest[pypkg]['files'] = []
|
||||
new_manifest[pypkg]['cached'] = old_manifest[pypkg]['cached']
|
||||
|
||||
# All packages should depend on core
|
||||
if pypkg != 'core':
|
||||
new_manifest[pypkg]['rdepends'].append('core')
|
||||
new_manifest[pypkg]['cached'] = []
|
||||
|
||||
print('\n')
|
||||
print('--------------------------')
|
||||
print('Handling package %s' % pypkg)
|
||||
print('--------------------------')
|
||||
|
||||
# Handle special cases, we assume that when they were manually added
|
||||
# to the manifest we knew what we were doing.
|
||||
special_packages = ['misc', 'modules', 'dev', 'tests']
|
||||
if pypkg in special_packages or 'staticdev' in pypkg:
|
||||
print_indent('Passing %s package directly' % pypkg, 2)
|
||||
new_manifest[pypkg] = old_manifest[pypkg]
|
||||
continue
|
||||
|
||||
for filedep in old_manifest[pypkg]['files']:
|
||||
# We already handled core on the first pass, we can ignore it now
|
||||
if pypkg == 'core':
|
||||
if filedep not in new_manifest[pypkg]['files']:
|
||||
new_manifest[pypkg]['files'].append(filedep)
|
||||
continue
|
||||
|
||||
# Handle/ignore what we cant import
|
||||
if isFolder(filedep):
|
||||
new_manifest[pypkg]['files'].append(filedep)
|
||||
# Asyncio (and others) are both the package and the folder name, we should not skip those...
|
||||
path,mod = os.path.split(filedep)
|
||||
if mod != pypkg:
|
||||
continue
|
||||
if '${bindir}' in filedep:
|
||||
if filedep not in new_manifest[pypkg]['files']:
|
||||
new_manifest[pypkg]['files'].append(filedep)
|
||||
continue
|
||||
if filedep == '':
|
||||
continue
|
||||
if '${includedir}' in filedep:
|
||||
if filedep not in new_manifest[pypkg]['files']:
|
||||
new_manifest[pypkg]['files'].append(filedep)
|
||||
continue
|
||||
|
||||
# Get actual module name , shouldnt be affected by libdir/bindir, etc.
|
||||
# We need to check if the imported module comes from another (e.g. sqlite3.dump)
|
||||
path, pymodule = os.path.split(filedep)
|
||||
path = os.path.basename(path)
|
||||
pymodule = os.path.splitext(os.path.basename(pymodule))[0]
|
||||
|
||||
# If this condition is met, it means we need to import it from another module
|
||||
# or its the folder itself (e.g. unittest)
|
||||
if path == pypkg:
|
||||
if pymodule:
|
||||
pymodule = path + '.' + pymodule
|
||||
else:
|
||||
pymodule = path
|
||||
|
||||
|
||||
|
||||
# We now know that were dealing with a python module, so we can import it
|
||||
# and check what its dependencies are.
|
||||
# We launch a separate task for each module for deterministic behavior.
|
||||
# Each module will only import what is necessary for it to work in specific.
|
||||
# The output of each task will contain each module's dependencies
|
||||
|
||||
print_indent('\nGetting dependencies for module: %s' % pymodule, 2)
|
||||
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule, '%s' % debugFlag]).decode('utf8')
|
||||
print_indent('The following dependencies were found for module %s:\n' % pymodule, 4)
|
||||
print_indent(output, 6)
|
||||
|
||||
reportFILES = []
|
||||
reportRDEPS = []
|
||||
|
||||
for pymodule_dep in output.split():
|
||||
|
||||
# Warning: This first part is ugly
|
||||
# One of the dependencies that was found, could be inside of one of the folders included by another package
|
||||
# We need to check if this happens so we can add the package containing the folder as an rdependency
|
||||
# e.g. Folder encodings contained in codecs
|
||||
# This would be solved if no packages included any folders
|
||||
|
||||
# This can be done in two ways:
|
||||
# 1 - We assume that if we take out the filename from the path we would get
|
||||
# the folder string, then we would check if folder string is in the list of folders
|
||||
# This would not work if a package contains a folder which contains another folder
|
||||
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
|
||||
# folder_string would not match any value contained in the list of folders
|
||||
#
|
||||
# 2 - We do it the other way around, checking if the folder is contained in the path
|
||||
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
|
||||
# is folder_string inside path/folder1/folder2/filename?,
|
||||
# Yes, it works, but we waste a couple of milliseconds.
|
||||
|
||||
pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
|
||||
inFolders = False
|
||||
for folder in allfolders:
|
||||
# The module could have a directory named after it, e.g. xml, if we take out the filename from the path
|
||||
# we'll end up with ${libdir}, and we want ${libdir}/xml
|
||||
if isFolder(pymodule_dep):
|
||||
check_path = pymodule_dep
|
||||
else:
|
||||
check_path = os.path.dirname(pymodule_dep)
|
||||
if folder in check_path :
|
||||
inFolders = True # Did we find a folder?
|
||||
folderFound = False # Second flag to break inner for
|
||||
# Loop only through packages which contain folders
|
||||
for pypkg_with_folder in hasfolders:
|
||||
if (folderFound == False):
|
||||
# print('Checking folder %s on package %s' % (pymodule_dep,pypkg_with_folder))
|
||||
for folder_dep in old_manifest[pypkg_with_folder]['files'] or folder_dep in old_manifest[pypkg_with_folder]['cached']:
|
||||
if folder_dep == folder:
|
||||
print ('%s directory found in %s' % (folder, pypkg_with_folder))
|
||||
folderFound = True
|
||||
if pypkg_with_folder not in new_manifest[pypkg]['rdepends'] and pypkg_with_folder != pypkg:
|
||||
new_manifest[pypkg]['rdepends'].append(pypkg_with_folder)
|
||||
else:
|
||||
break
|
||||
|
||||
# A folder was found so we're done with this item, we can go on
|
||||
if inFolders:
|
||||
continue
|
||||
|
||||
|
||||
|
||||
# No directories beyond this point
|
||||
# We might already have this module on the dictionary since it could depend on a (previously checked) module
|
||||
if pymodule_dep not in new_manifest[pypkg]['files'] and pymodule_dep not in new_manifest[pypkg]['cached']:
|
||||
# Handle core as a special package, we already did it so we pass it to NEW data structure directly
|
||||
if pypkg == 'core':
|
||||
print('Adding %s to %s FILES' % (pymodule_dep, pypkg))
|
||||
if pymodule_dep.endswith('*'):
|
||||
wildcards.append(pymodule_dep)
|
||||
if isCached(pymodule_dep):
|
||||
new_manifest[pypkg]['cached'].append(pymodule_dep)
|
||||
else:
|
||||
new_manifest[pypkg]['files'].append(pymodule_dep)
|
||||
|
||||
# Check for repeated files
|
||||
if pymodule_dep not in allfiles:
|
||||
allfiles.append(pymodule_dep)
|
||||
else:
|
||||
if pymodule_dep not in repeated:
|
||||
repeated.append(pymodule_dep)
|
||||
else:
|
||||
|
||||
|
||||
# Last step: Figure out if we this belongs to FILES or RDEPENDS
|
||||
# We check if this module is already contained on another package, so we add that one
|
||||
# as an RDEPENDS, or if its not, it means it should be contained on the current
|
||||
# package, and we should add it to FILES
|
||||
for possible_rdep in old_manifest:
|
||||
# Debug
|
||||
# print('Checking %s ' % pymodule_dep + ' in %s' % possible_rdep)
|
||||
if pymodule_dep in old_manifest[possible_rdep]['files'] or pymodule_dep in old_manifest[possible_rdep]['cached']:
|
||||
# Since were nesting, we need to check its not the same pypkg
|
||||
if(possible_rdep != pypkg):
|
||||
if possible_rdep not in new_manifest[pypkg]['rdepends']:
|
||||
# Add it to the new manifest data struct as RDEPENDS since it contains something this module needs
|
||||
reportRDEPS.append('Adding %s to %s RDEPENDS, because it contains %s\n' % (possible_rdep, pypkg, pymodule_dep))
|
||||
new_manifest[pypkg]['rdepends'].append(possible_rdep)
|
||||
break
|
||||
else:
|
||||
|
||||
# Since this module wasnt found on another package, it is not an RDEP,
|
||||
# so we add it to FILES for this package.
|
||||
# A module shouldn't contain itself (${libdir}/python3/sqlite3 shouldnt be on sqlite3 files)
|
||||
if os.path.basename(pymodule_dep) != pypkg:
|
||||
reportFILES.append(('Adding %s to %s FILES\n' % (pymodule_dep, pypkg)))
|
||||
if isCached(pymodule_dep):
|
||||
new_manifest[pypkg]['cached'].append(pymodule_dep)
|
||||
else:
|
||||
new_manifest[pypkg]['files'].append(pymodule_dep)
|
||||
if pymodule_dep.endswith('*'):
|
||||
wildcards.append(pymodule_dep)
|
||||
if pymodule_dep not in allfiles:
|
||||
allfiles.append(pymodule_dep)
|
||||
else:
|
||||
if pymodule_dep not in repeated:
|
||||
repeated.append(pymodule_dep)
|
||||
|
||||
print('\n')
|
||||
print('#################################')
|
||||
print('Summary for module %s' % pymodule)
|
||||
print('FILES found for module %s:' % pymodule)
|
||||
print(''.join(reportFILES))
|
||||
print('RDEPENDS found for module %s:' % pymodule)
|
||||
print(''.join(reportRDEPS))
|
||||
print('#################################')
|
||||
|
||||
print('The following FILES contain wildcards, please check if they are necessary')
|
||||
print(wildcards)
|
||||
print('The following FILES contain folders, please check if they are necessary')
|
||||
print(hasfolders)
|
||||
|
||||
|
||||
# Sort it just so it looks nicer
|
||||
for pypkg in new_manifest:
|
||||
new_manifest[pypkg]['files'].sort()
|
||||
new_manifest[pypkg]['cached'].sort()
|
||||
new_manifest[pypkg]['rdepends'].sort()
|
||||
|
||||
# Create the manifest from the data structure that was built
|
||||
with open('python3-manifest.json.new','w') as outfile:
|
||||
json.dump(new_manifest,outfile, indent=4)
|
||||
outfile.write('\n')
|
||||
|
||||
prepend_comments(comments,'python3-manifest.json.new')
|
||||
|
||||
if (repeated):
|
||||
error_msg = '\n\nERROR:\n'
|
||||
error_msg += 'The following files were found in more than one package),\n'
|
||||
error_msg += 'this is likely to happen when new files are introduced after an upgrade,\n'
|
||||
error_msg += 'please check which package should get it,\n modify the manifest accordingly and re-run the create_manifest task:\n'
|
||||
error_msg += '\n'.join(repeated)
|
||||
error_msg += '\n'
|
||||
sys.exit(error_msg)
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
From 0bcdb84db7801507b155a40db2228ba516edeb73 Mon Sep 17 00:00:00 2001
|
||||
From: Ricardo Ribalda <ricardo@ribalda.com>
|
||||
Date: Tue, 18 Nov 2014 03:35:33 -0500
|
||||
Subject: [PATCH] configure.ac: add CROSSPYTHONPATH into PYTHONPATH for
|
||||
PYTHON_FOR_BUILD
|
||||
|
||||
When building x86->x86 the system will try to execute .so and related items
|
||||
from the default PYTHONPATH. This will fail if the target CPU contains
|
||||
instructions that the host CPU does not have, add CROSSPYTHONPATH
|
||||
into PYTHONPATH so we can prepend the list to find correct libs.
|
||||
|
||||
Upstream-Status: Inappropriate [OE-Core integration specific]
|
||||
|
||||
Credits-to: Mark Hatle <mark.hatle@windriver.com>
|
||||
Credits-to: Jackie Huang <jackie.huang@windriver.com>
|
||||
Signed-off-by: Ricardo Ribalda <ricardo@ribalda.com>
|
||||
---
|
||||
configure.ac | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/configure.ac b/configure.ac
|
||||
index 955daad..6e465a4 100644
|
||||
--- a/configure.ac
|
||||
+++ b/configure.ac
|
||||
@@ -165,7 +165,7 @@ AC_ARG_WITH([build-python],
|
||||
dnl Build Python interpreter is used for regeneration and freezing.
|
||||
ac_cv_prog_PYTHON_FOR_REGEN=$with_build_python
|
||||
PYTHON_FOR_FREEZE="$with_build_python"
|
||||
- PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$with_build_python
|
||||
+ PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(CROSSPYTHONPATH):$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$with_build_python
|
||||
AC_MSG_RESULT([$with_build_python])
|
||||
], [
|
||||
AS_VAR_IF([cross_compiling], [yes],
|
||||
@@ -0,0 +1,39 @@
|
||||
From 1d6f0f5f8a1279fc9bc06266caa3f3b6f234c4cb Mon Sep 17 00:00:00 2001
|
||||
From: Richard Purdie <richard.purdie@linuxfoundation.org>
|
||||
Date: Fri, 27 May 2022 17:05:44 +0100
|
||||
Subject: [PATCH] python3: Ensure stale empty python module directories don't
|
||||
|
||||
There are two issues here. Firstly, the modules are accessed in on disk order. This
|
||||
means behaviour seen on one system might not reproduce on another and is a real headache.
|
||||
|
||||
Secondly, empty directories left behind by previous modules might be looked at. This
|
||||
has caused a long string of different issues for us.
|
||||
|
||||
As a result, patch this to a behaviour which works for us.
|
||||
|
||||
Upstream-Status: Submitted [https://github.com/python/cpython/issues/120492; need to first talk to upstream to see if they'll take one or both fixes]
|
||||
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
|
||||
---
|
||||
Lib/importlib/metadata/__init__.py | 9 ++++++++-
|
||||
1 file changed, 8 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py
|
||||
index e6ca178..ac5a75b 100644
|
||||
--- a/Lib/importlib/metadata/__init__.py
|
||||
+++ b/Lib/importlib/metadata/__init__.py
|
||||
@@ -710,7 +710,14 @@ class Lookup:
|
||||
self.infos = FreezableDefaultDict(list)
|
||||
self.eggs = FreezableDefaultDict(list)
|
||||
|
||||
- for child in path.children():
|
||||
+ for child in sorted(path.children()):
|
||||
+ childpath = pathlib.Path(path.root, child)
|
||||
+ try:
|
||||
+ if childpath.is_dir() and not any(childpath.iterdir()):
|
||||
+ # Empty directories aren't interesting
|
||||
+ continue
|
||||
+ except PermissionError:
|
||||
+ continue
|
||||
low = child.lower()
|
||||
if low.endswith((".dist-info", ".egg-info")):
|
||||
# rpartition is faster than splitext and suitable for this purpose.
|
||||
@@ -0,0 +1,174 @@
|
||||
# This script is launched on separate task for each python module
|
||||
# It checks for dependencies for that specific module and prints
|
||||
# them out, the output of this execution will have all dependencies
|
||||
# for a specific module, which will be parsed an dealt on create_manifest.py
|
||||
#
|
||||
# Author: Alejandro Enedino Hernandez Samaniego <alejandro at enedino dot org>
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# We can get a log per module, for all the dependencies that were found, but its messy.
|
||||
if '-d' in sys.argv:
|
||||
debug = True
|
||||
else:
|
||||
debug = False
|
||||
|
||||
# We can get a list of the modules which are currently required to run python
|
||||
# so we run python-core and get its modules, we then import what we need
|
||||
# and check what modules are currently running, if we substract them from the
|
||||
# modules we had initially, we get the dependencies for the module we imported.
|
||||
|
||||
# We use importlib to achieve this, so we also need to know what modules importlib needs
|
||||
import importlib
|
||||
|
||||
core_deps = set(sys.modules)
|
||||
|
||||
def fix_path(dep_path):
|
||||
import os
|
||||
# We DONT want the path on our HOST system
|
||||
pivot = 'recipe-sysroot-native'
|
||||
dep_path = dep_path[dep_path.find(pivot)+len(pivot):]
|
||||
|
||||
if '/usr/bin' in dep_path:
|
||||
dep_path = dep_path.replace('/usr/bin','${bindir}')
|
||||
|
||||
# Handle multilib, is there a better way?
|
||||
if '/usr/lib32' in dep_path:
|
||||
dep_path = dep_path.replace('/usr/lib32','${libdir}')
|
||||
if '/usr/lib64' in dep_path:
|
||||
dep_path = dep_path.replace('/usr/lib64','${libdir}')
|
||||
if '/usr/lib' in dep_path:
|
||||
dep_path = dep_path.replace('/usr/lib','${libdir}')
|
||||
if '/usr/include' in dep_path:
|
||||
dep_path = dep_path.replace('/usr/include','${includedir}')
|
||||
if '__init__.' in dep_path:
|
||||
dep_path = os.path.split(dep_path)[0]
|
||||
return dep_path
|
||||
|
||||
|
||||
# Module to import was passed as an argument
|
||||
current_module = str(sys.argv[1]).rstrip()
|
||||
if debug == True:
|
||||
log = open('temp/log_%s' % current_module.strip('.*'),'w')
|
||||
log.write('Module %s generated the following dependencies:\n' % current_module)
|
||||
try:
|
||||
m = importlib.import_module(current_module)
|
||||
# handle python packages which may not include all modules in the __init__
|
||||
if hasattr(m, '__file__') and os.path.basename(m.__file__) == "__init__.py":
|
||||
modulepath = os.path.dirname(m.__file__)
|
||||
for i in os.listdir(modulepath):
|
||||
if i.startswith("_") or not(i.endswith(".py")):
|
||||
continue
|
||||
submodule = "{}.{}".format(current_module, i[:-3])
|
||||
try:
|
||||
importlib.import_module(submodule)
|
||||
except:
|
||||
pass # ignore all import or other exceptions raised during import
|
||||
except ImportError as e:
|
||||
if debug == True:
|
||||
log.write('Module was not found\n')
|
||||
pass
|
||||
|
||||
|
||||
# Get current module dependencies, dif will contain a list of specific deps for this module
|
||||
module_deps = set(sys.modules)
|
||||
|
||||
# We handle the core package (1st pass on create_manifest.py) as a special case
|
||||
if current_module == 'python-core-package':
|
||||
dif = core_deps
|
||||
else:
|
||||
# We know this is not the core package, so there must be a difference.
|
||||
dif = module_deps-core_deps
|
||||
|
||||
|
||||
# Check where each dependency came from
|
||||
for item in dif:
|
||||
# Main module returns script filename, __main matches mp_main__ as well
|
||||
if 'main__' in item:
|
||||
continue
|
||||
|
||||
dep_path = ''
|
||||
try:
|
||||
if debug == True:
|
||||
log.write('\nCalling: sys.modules[' + '%s' % item + '].__file__\n')
|
||||
dep_path = sys.modules['%s' % item].__file__
|
||||
except AttributeError as e:
|
||||
# Deals with thread (builtin module) not having __file__ attribute
|
||||
if debug == True:
|
||||
log.write(item + ' ')
|
||||
log.write(str(e))
|
||||
log.write('\n')
|
||||
pass
|
||||
except NameError as e:
|
||||
# Deals with NameError: name 'dep_path' is not defined
|
||||
# because module is not found (wasn't compiled?), e.g. bddsm
|
||||
if debug == True:
|
||||
log.write(item+' ')
|
||||
log.write(str(e))
|
||||
pass
|
||||
|
||||
if dep_path == '':
|
||||
continue
|
||||
if debug == True:
|
||||
log.write('Dependency path found:\n%s\n' % dep_path)
|
||||
|
||||
# Site-customize is a special case since we (OpenEmbedded) put it there manually
|
||||
if 'sitecustomize' in dep_path:
|
||||
dep_path = '${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py'
|
||||
# Prints out result, which is what will be used by create_manifest
|
||||
print (dep_path)
|
||||
continue
|
||||
|
||||
dep_path = fix_path(dep_path)
|
||||
|
||||
import sysconfig
|
||||
soabi = sysconfig.get_config_var('SOABI')
|
||||
# Check if its a shared library and deconstruct it
|
||||
if soabi in dep_path:
|
||||
if debug == True:
|
||||
log.write('Shared library found in %s\n' % dep_path)
|
||||
dep_path = dep_path.replace(soabi,'*')
|
||||
print (dep_path)
|
||||
continue
|
||||
if "_sysconfigdata" in dep_path:
|
||||
dep_path = dep_path.replace(sysconfig._get_sysconfigdata_name(), "_sysconfigdata*")
|
||||
|
||||
if debug == True:
|
||||
log.write(dep_path+'\n')
|
||||
# Prints out result, which is what will be used by create_manifest
|
||||
print (dep_path)
|
||||
|
||||
|
||||
cpython_tag = sys.implementation.cache_tag
|
||||
cached = ''
|
||||
# Theres no naive way to find *.pyc files on python3
|
||||
try:
|
||||
if debug == True:
|
||||
log.write('\nCalling: sys.modules[' + '%s' % item + '].__cached__\n')
|
||||
cached = sys.modules['%s' % item].__cached__
|
||||
except AttributeError as e:
|
||||
# Deals with thread (builtin module) not having __cached__ attribute
|
||||
if debug == True:
|
||||
log.write(item + ' ')
|
||||
log.write(str(e))
|
||||
log.write('\n')
|
||||
pass
|
||||
except NameError as e:
|
||||
# Deals with NameError: name 'cached' is not defined
|
||||
if debug == True:
|
||||
log.write(item+' ')
|
||||
log.write(str(e))
|
||||
pass
|
||||
if cached is not None:
|
||||
if debug == True:
|
||||
log.write(cached + '\n')
|
||||
cached = fix_path(cached)
|
||||
cached = cached.replace(cpython_tag,'*')
|
||||
if "_sysconfigdata" in cached:
|
||||
cached = cached.replace(sysconfig._get_sysconfigdata_name(), "_sysconfigdata*")
|
||||
print (cached)
|
||||
|
||||
if debug == True:
|
||||
log.close()
|
||||
@@ -0,0 +1,31 @@
|
||||
From be22dd9b091af8f971f924fdbce5b439d9b2e850 Mon Sep 17 00:00:00 2001
|
||||
From: Richard Purdie <richard.purdie@linuxfoundation.org>
|
||||
Date: Tue, 13 Jul 2021 23:19:29 +0100
|
||||
Subject: [PATCH] python3: Fix make race
|
||||
|
||||
libainstall installs python-config.py but the .pyc cache files are generated
|
||||
by the libinstall target. This means some builds may not generate the pyc files
|
||||
for python-config.py depending on the order things happen in. As a result, builds
are not always reproducible.
|
||||
|
||||
Add a dependency to avoid the race.
|
||||
|
||||
Upstream-Status: Pending
|
||||
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
|
||||
---
|
||||
Makefile.pre.in | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index dce36a5..2d235d2 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -2267,7 +2267,7 @@ COMPILEALL_OPTS=-j0
|
||||
TEST_MODULES=@TEST_MODULES@
|
||||
|
||||
.PHONY: libinstall
|
||||
-libinstall: all $(srcdir)/Modules/xxmodule.c
|
||||
+libinstall: all $(srcdir)/Modules/xxmodule.c libainstall
|
||||
@for i in $(SCRIPTDIR) $(LIBDEST); \
|
||||
do \
|
||||
if test ! -d $(DESTDIR)$$i; then \
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,21 @@
|
||||
#! /usr/bin/env python3
#
# SPDX-License-Identifier: MIT
#
# Copyright 2019 by Garmin Ltd. or its subsidiaries
#
# A script to reformat python sysconfig

import pprint
import sys


def reformat_sysconfig(path):
    """Rewrite the sysconfig data file at *path* in a deterministic form.

    The file is executed as Python source to collect its top-level
    variables, then rewritten with those variables sorted by name and
    pretty-printed one element per line (``width=1``), so the output is
    reproducible regardless of the order the build originally emitted
    the entries.

    :param path: path to the ``_sysconfigdata*.py`` file to rewrite in place
    """
    local_vars = {}
    global_vars = {}
    # NOTE: exec() on the file is intentional — the input is generated
    # by the build itself, not untrusted data.  Top-level assignments in
    # the exec'd source land in local_vars.
    with open(path, 'r') as f:
        exec(f.read(), global_vars, local_vars)

    with open(path, 'w') as f:
        for name in sorted(local_vars.keys()):
            f.write('%s = ' % name)
            # width=1 forces one element per line for stable diffs
            pprint.pprint(local_vars[name], stream=f, width=1)
            f.write('\n')


if __name__ == "__main__":
    reformat_sysconfig(sys.argv[1])
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
#!/bin/sh
# Run the Python regression test suite as a ptest and normalize its
# output to the standard ptest result format (PASS:/FAIL:/SKIP: prefixes).

# Space-separated test exclusions passed through to "python3 -m test";
# empty means run the full suite.
SKIPPED_TESTS=

# If the test runner itself exits non-zero, emit a single "FAIL: python3"
# line so the failure is still recorded in the ptest log.
# NOTE(review): SETUPTOOLS_USE_DISTUTILS=nonlocal presumably steers
# setuptools' distutils shim during the tests — confirm against the
# setuptools documentation for the pinned version.
# The sed pipeline rewrites unittest's trailing "... ok/FAIL/ERROR/skipped"
# verdicts into leading "PASS:/FAIL:/SKIP:" prefixes and then strips the
# trailing verdict text; -u keeps sed unbuffered so results stream live.
{ SETUPTOOLS_USE_DISTUTILS=nonlocal python3 -m test $SKIPPED_TESTS -v -j 4 || echo "FAIL: python3" ; } | sed -u -e '/\.\.\. ok/ s/^/PASS: /g' -r -e '/\.\.\. (ERROR|FAIL)/ s/^/FAIL: /g' -e '/\.\.\. skipped/ s/^/SKIP: /g' -e 's/ \.\.\. ok//g' -e 's/ \.\.\. ERROR//g' -e 's/ \.\.\. FAIL//g' -e 's/ \.\.\. skipped//g'
|
||||
Reference in New Issue
Block a user