diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f33196bb0eb168..0f5d830b512865 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -298,8 +298,7 @@ InternalDocs/jit.md @brandtbucher @savannahostrowski @diegorusso @AA-T # Lazy imports (PEP 810) Objects/lazyimportobject.c @yhg1s @DinoV @pablogsal Include/internal/pycore_lazyimportobject.h @yhg1s @DinoV @pablogsal -Lib/test/test_import/test_lazy_imports.py @yhg1s @DinoV @pablogsal -Lib/test/test_import/data/lazy_imports/ @yhg1s @DinoV @pablogsal +Lib/test/test_lazy_import @yhg1s @DinoV @pablogsal # Micro-op / μop / Tier 2 Optimiser Python/optimizer.c @markshannon @Fidget-Spinner diff --git a/Grammar/python.gram b/Grammar/python.gram index 1212e8640a1a9c..3a91d426c36501 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -1447,6 +1447,8 @@ invalid_import_from_targets: RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") } invalid_with_stmt: + | ['async'] 'with' ','.(expression ['as' star_target])+ trailing=',' ':' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(trailing, "the last 'with' item has a trailing comma") } | ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } | ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? 
')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } invalid_with_stmt_indent: diff --git a/Lib/profiling/sampling/sample.py b/Lib/profiling/sampling/sample.py index c6abfb1c8ee885..6a76bbeeb24ee3 100644 --- a/Lib/profiling/sampling/sample.py +++ b/Lib/profiling/sampling/sample.py @@ -164,7 +164,8 @@ def sample(self, collector, duration_sec=None, *, async_aware=False): # Don't print stats for live mode (curses is handling display) is_live_mode = LiveStatsCollector is not None and isinstance(collector, LiveStatsCollector) if not is_live_mode: - print(f"Captured {num_samples:n} samples in {fmt(running_time_sec, 2)} seconds") + s = "" if num_samples == 1 else "s" + print(f"Captured {num_samples:n} sample{s} in {fmt(running_time_sec, 2)} seconds") print(f"Sample rate: {fmt(sample_rate, 2)} samples/sec") print(f"Error rate: {fmt(error_rate, 2)}") diff --git a/Lib/profiling/sampling/stack_collector.py b/Lib/profiling/sampling/stack_collector.py index 4e213cfe41ca24..5a3497a5408414 100644 --- a/Lib/profiling/sampling/stack_collector.py +++ b/Lib/profiling/sampling/stack_collector.py @@ -144,9 +144,13 @@ def export(self, filename): num_functions = len(flamegraph_data.get("children", [])) total_time = flamegraph_data.get("value", 0) string_count = len(self._string_table) + s1 = "" if num_functions == 1 else "s" + s2 = "" if total_time == 1 else "s" + s3 = "" if string_count == 1 else "s" print( - f"Flamegraph data: {num_functions} root functions, total samples: {total_time}, " - f"{string_count} unique strings" + f"Flamegraph data: {num_functions} root function{s1}, " + f"{total_time} total sample{s2}, " + f"{string_count} unique string{s3}" ) if num_functions == 0: diff --git a/Lib/test/.ruff.toml b/Lib/test/.ruff.toml index b5be4c3afaf958..f3e6a46663e100 100644 --- a/Lib/test/.ruff.toml +++ b/Lib/test/.ruff.toml @@ -15,8 +15,9 @@ extend-exclude = [ # and tests re-use the same names as only the grammar is being checked. 
"test_grammar.py", # Lazy import syntax (PEP 810) is not yet supported by Ruff - "test_import/data/lazy_imports/*.py", - "test_import/data/lazy_imports/**/*.py", + "test_lazy_import/__init__.py", + "test_lazy_import/data/*.py", + "test_lazy_import/data/**/*.py", ] [lint] diff --git a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode.py b/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode.py deleted file mode 100644 index 5076fa4894ebd6..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode.py +++ /dev/null @@ -1,2 +0,0 @@ -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -import test.test_import.data.lazy_imports.basic2 diff --git a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_relative.py b/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_relative.py deleted file mode 100644 index e37759348f3e91..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_relative.py +++ /dev/null @@ -1,2 +0,0 @@ -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -lazy from .basic2 import f diff --git a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_used.py b/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_used.py deleted file mode 100644 index 64f36645f68790..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_compatibility_mode_used.py +++ /dev/null @@ -1,3 +0,0 @@ -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -import test.test_import.data.lazy_imports.basic2 -test.test_import.data.lazy_imports.basic2.f() diff --git a/Lib/test/test_import/data/lazy_imports/basic_dir.py b/Lib/test/test_import/data/lazy_imports/basic_dir.py deleted file mode 100644 index ca9e29d3d9962e..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_dir.py +++ /dev/null @@ -1,2 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 -x = dir() diff --git 
a/Lib/test/test_import/data/lazy_imports/basic_from_unused.py b/Lib/test/test_import/data/lazy_imports/basic_from_unused.py deleted file mode 100644 index 686caa86a6caa7..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_from_unused.py +++ /dev/null @@ -1 +0,0 @@ -lazy from test.test_import.data.lazy_imports import basic2 diff --git a/Lib/test/test_import/data/lazy_imports/basic_unused.py b/Lib/test/test_import/data/lazy_imports/basic_unused.py deleted file mode 100644 index bf8ae4613e4478..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_unused.py +++ /dev/null @@ -1 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 diff --git a/Lib/test/test_import/data/lazy_imports/basic_used.py b/Lib/test/test_import/data/lazy_imports/basic_used.py deleted file mode 100644 index 84e354750f8ea2..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/basic_used.py +++ /dev/null @@ -1,3 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 as basic2 - -basic2.f() diff --git a/Lib/test/test_import/data/lazy_imports/compatibility_mode_func.py b/Lib/test/test_import/data/lazy_imports/compatibility_mode_func.py deleted file mode 100644 index 307338a0886ac3..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/compatibility_mode_func.py +++ /dev/null @@ -1,5 +0,0 @@ -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -def f(): - import test.test_import.data.lazy_imports.basic2 - -f() diff --git a/Lib/test/test_import/data/lazy_imports/compatibility_mode_try_except.py b/Lib/test/test_import/data/lazy_imports/compatibility_mode_try_except.py deleted file mode 100644 index 6d54e69a9a4268..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/compatibility_mode_try_except.py +++ /dev/null @@ -1,5 +0,0 @@ -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -try: - import test.test_import.data.lazy_imports.basic2 -except: - pass diff --git 
a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import.py b/Lib/test/test_import/data/lazy_imports/dunder_lazy_import.py deleted file mode 100644 index 1a8a19c3c90814..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import.py +++ /dev/null @@ -1 +0,0 @@ -basic = __lazy_import__('test.test_import.data.lazy_imports.basic2') diff --git a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import_used.py b/Lib/test/test_import/data/lazy_imports/dunder_lazy_import_used.py deleted file mode 100644 index 2432ca17b16287..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import_used.py +++ /dev/null @@ -1,3 +0,0 @@ -basic = __lazy_import__('test.test_import.data.lazy_imports', - fromlist=("basic2", )) -basic diff --git a/Lib/test/test_import/data/lazy_imports/eager_import_func.py b/Lib/test/test_import/data/lazy_imports/eager_import_func.py deleted file mode 100644 index 89e643ac183e9b..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/eager_import_func.py +++ /dev/null @@ -1,3 +0,0 @@ -def f(): - import test.test_import.data.lazy_imports.basic2 as basic2 - return basic2 diff --git a/Lib/test/test_import/data/lazy_imports/global_off.py b/Lib/test/test_import/data/lazy_imports/global_off.py deleted file mode 100644 index 4f202744a9ed42..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/global_off.py +++ /dev/null @@ -1,5 +0,0 @@ -import sys - -sys.set_lazy_imports("none") - -lazy import test.test_import.data.lazy_imports.basic2 as basic2 diff --git a/Lib/test/test_import/data/lazy_imports/global_on.py b/Lib/test/test_import/data/lazy_imports/global_on.py deleted file mode 100644 index 3f8e1d2aa01380..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/global_on.py +++ /dev/null @@ -1,5 +0,0 @@ -import sys - -sys.set_lazy_imports("all") - -import test.test_import.data.lazy_imports.basic2 as basic2 diff --git a/Lib/test/test_import/data/lazy_imports/lazy_compat_from.py 
b/Lib/test/test_import/data/lazy_imports/lazy_compat_from.py deleted file mode 100644 index f887f47b92c3f4..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/lazy_compat_from.py +++ /dev/null @@ -1,6 +0,0 @@ -# Test __lazy_modules__ with from imports -__lazy_modules__ = ['test.test_import.data.lazy_imports.basic2'] -from test.test_import.data.lazy_imports.basic2 import x, f - -def get_x(): - return x diff --git a/Lib/test/test_import/data/lazy_imports/lazy_import_pkg.py b/Lib/test/test_import/data/lazy_imports/lazy_import_pkg.py deleted file mode 100644 index 79aa9a567398bb..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/lazy_import_pkg.py +++ /dev/null @@ -1,2 +0,0 @@ -lazy import test.test_import.data.lazy_imports.pkg.bar -x = test.test_import.data.lazy_imports.pkg.bar.f diff --git a/Lib/test/test_import/data/lazy_imports/lazy_with.py b/Lib/test/test_import/data/lazy_imports/lazy_with.py deleted file mode 100644 index b383879936a219..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/lazy_with.py +++ /dev/null @@ -1,3 +0,0 @@ -import contextlib -with contextlib.nullcontext(): - lazy import test.test_import.data.lazy_imports.basic2 diff --git a/Lib/test/test_import/data/lazy_imports/lazy_with_from.py b/Lib/test/test_import/data/lazy_imports/lazy_with_from.py deleted file mode 100644 index 7936326a9e3d35..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/lazy_with_from.py +++ /dev/null @@ -1,3 +0,0 @@ -import contextlib -with contextlib.nullcontext(): - lazy import test.test_import.data.lazy_imports.basic2 as basic2 diff --git a/Lib/test/test_import/data/lazy_imports/modules_dict.py b/Lib/test/test_import/data/lazy_imports/modules_dict.py deleted file mode 100644 index 327f866398c86d..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/modules_dict.py +++ /dev/null @@ -1,5 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 as basic2 - -import sys -mod = sys.modules[__name__] -x = mod.__dict__ diff --git 
a/Lib/test/test_import/data/lazy_imports/modules_getattr.py b/Lib/test/test_import/data/lazy_imports/modules_getattr.py deleted file mode 100644 index ae1d4bb3f976ea..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/modules_getattr.py +++ /dev/null @@ -1,5 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 as basic2 - -import sys -mod = sys.modules[__name__] -x = mod.basic2 diff --git a/Lib/test/test_import/data/lazy_imports/modules_getattr_other.py b/Lib/test/test_import/data/lazy_imports/modules_getattr_other.py deleted file mode 100644 index e4d83e6336db72..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/modules_getattr_other.py +++ /dev/null @@ -1,5 +0,0 @@ -lazy import test.test_import.data.lazy_imports.basic2 as basic2 - -import sys -mod = sys.modules[__name__] -x = mod.__name__ diff --git a/Lib/test/test_import/data/lazy_imports/try_except_eager.py b/Lib/test/test_import/data/lazy_imports/try_except_eager.py deleted file mode 100644 index 4cdaa9a9b48eae..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/try_except_eager.py +++ /dev/null @@ -1,4 +0,0 @@ -try: - import test.test_import.data.lazy_imports.basic2 -except: - pass diff --git a/Lib/test/test_import/data/lazy_imports/try_except_eager_from.py b/Lib/test/test_import/data/lazy_imports/try_except_eager_from.py deleted file mode 100644 index 6eadaaa71ca454..00000000000000 --- a/Lib/test/test_import/data/lazy_imports/try_except_eager_from.py +++ /dev/null @@ -1,4 +0,0 @@ -try: - from test.test_import.data.lazy_imports.basic2 import f -except: - pass diff --git a/Lib/test/test_import/test_lazy_imports.py b/Lib/test/test_lazy_import/__init__.py similarity index 82% rename from Lib/test/test_import/test_lazy_imports.py rename to Lib/test/test_lazy_import/__init__.py index d4df772d2034d9..df19af05246dcd 100644 --- a/Lib/test/test_import/test_lazy_imports.py +++ b/Lib/test/test_lazy_import/__init__.py @@ -11,6 +11,8 @@ import tempfile import os +from test import 
support + try: import _testcapi except ImportError: @@ -23,7 +25,7 @@ class LazyImportTests(unittest.TestCase): def tearDown(self): """Clean up any test modules from sys.modules.""" for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -32,58 +34,58 @@ def tearDown(self): def test_basic_unused(self): """Lazy imported module should not be loaded if never accessed.""" - import test.test_import.data.lazy_imports.basic_unused - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) - self.assertIn("test.test_import.data.lazy_imports", sys.lazy_modules) - self.assertEqual(sys.lazy_modules["test.test_import.data.lazy_imports"], {"basic2"}) + import test.test_lazy_import.data.basic_unused + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) + self.assertIn("test.test_lazy_import.data", sys.lazy_modules) + self.assertEqual(sys.lazy_modules["test.test_lazy_import.data"], {"basic2"}) def test_sys_lazy_modules(self): try: - import test.test_import.data.lazy_imports.basic_from_unused + import test.test_lazy_import.data.basic_from_unused except ImportError as e: self.fail('lazy import failed') - self.assertFalse("test.test_import.data.lazy_imports.basic2" in sys.modules) - self.assertIn("test.test_import.data.lazy_imports", sys.lazy_modules) - self.assertEqual(sys.lazy_modules["test.test_import.data.lazy_imports"], {"basic2"}) - test.test_import.data.lazy_imports.basic_from_unused.basic2 + self.assertFalse("test.test_lazy_import.data.basic2" in sys.modules) + self.assertIn("test.test_lazy_import.data", sys.lazy_modules) + self.assertEqual(sys.lazy_modules["test.test_lazy_import.data"], {"basic2"}) + test.test_lazy_import.data.basic_from_unused.basic2 - self.assertNotIn("test.test_import.data", sys.lazy_modules) + self.assertNotIn("test.test_lazy_import.data", sys.lazy_modules) def test_basic_unused_use_externally(self): """Lazy import should load module when
accessed from outside.""" - from test.test_import.data.lazy_imports import basic_unused + from test.test_lazy_import.data import basic_unused - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) - x = basic_unused.test.test_import.data.lazy_imports.basic2 - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) + x = basic_unused.test.test_lazy_import.data.basic2 + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_basic_from_unused_use_externally(self): """Lazy 'from' import should load when accessed from outside.""" - from test.test_import.data.lazy_imports import basic_from_unused + from test.test_lazy_import.data import basic_from_unused - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) x = basic_from_unused.basic2 - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_basic_unused_dir(self): """dir() on module should not trigger lazy import reification.""" - import test.test_import.data.lazy_imports.basic_unused + import test.test_lazy_import.data.basic_unused - x = dir(test.test_import.data.lazy_imports.basic_unused) + x = dir(test.test_lazy_import.data.basic_unused) self.assertIn("test", x) - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_basic_dir(self): """dir() at module scope should not trigger lazy import reification.""" - from test.test_import.data.lazy_imports import basic_dir + from test.test_lazy_import.data import basic_dir self.assertIn("test", basic_dir.x) - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_basic_used(self): 
"""Lazy import should load when accessed within the module.""" - import test.test_import.data.lazy_imports.basic_used - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.basic_used + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) class GlobalLazyImportModeTests(unittest.TestCase): @@ -91,7 +93,7 @@ class GlobalLazyImportModeTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -99,33 +101,33 @@ def tearDown(self): def test_global_off(self): """Mode 'none' should disable lazy imports entirely.""" - import test.test_import.data.lazy_imports.global_off - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_off + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_global_on(self): """Mode 'all' should make regular imports lazy.""" - import test.test_import.data.lazy_imports.global_on - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_on + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_global_filter(self): """Filter returning False should prevent lazy loading.""" - import test.test_import.data.lazy_imports.global_filter - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_filter + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_global_filter_true(self): """Filter returning True should allow lazy loading.""" - import test.test_import.data.lazy_imports.global_filter_true - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_filter_true + 
self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_global_filter_from(self): """Filter should work with 'from' imports.""" - import test.test_import.data.lazy_imports.global_filter_from - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_filter_from + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_global_filter_from_true(self): """Filter returning True should allow lazy 'from' imports.""" - import test.test_import.data.lazy_imports.global_filter_from_true - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.global_filter_from_true + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) class CompatibilityModeTests(unittest.TestCase): @@ -133,7 +135,7 @@ class CompatibilityModeTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -141,28 +143,28 @@ def tearDown(self): def test_compatibility_mode(self): """__lazy_modules__ should enable lazy imports for listed modules.""" - import test.test_import.data.lazy_imports.basic_compatibility_mode - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.basic_compatibility_mode + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_compatibility_mode_used(self): """Using a lazy import from __lazy_modules__ should load the module.""" - import test.test_import.data.lazy_imports.basic_compatibility_mode_used - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.basic_compatibility_mode_used + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_compatibility_mode_func(self): """Imports inside 
functions should be eager even in compatibility mode.""" - import test.test_import.data.lazy_imports.compatibility_mode_func - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.compatibility_mode_func + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_compatibility_mode_try_except(self): """Imports in try/except should be eager even in compatibility mode.""" - import test.test_import.data.lazy_imports.compatibility_mode_try_except - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.compatibility_mode_try_except + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_compatibility_mode_relative(self): """__lazy_modules__ should work with relative imports.""" - import test.test_import.data.lazy_imports.basic_compatibility_mode_relative - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.basic_compatibility_mode_relative + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) class ModuleIntrospectionTests(unittest.TestCase): @@ -170,7 +172,7 @@ class ModuleIntrospectionTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -178,18 +180,18 @@ def tearDown(self): def test_modules_dict(self): """Accessing module.__dict__ should not trigger reification.""" - import test.test_import.data.lazy_imports.modules_dict - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.modules_dict + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_modules_getattr(self): """Module __getattr__ for lazy import name should trigger reification.""" - import 
test.test_import.data.lazy_imports.modules_getattr - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.modules_getattr + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_modules_getattr_other(self): """Module __getattr__ for other names should not trigger reification.""" - import test.test_import.data.lazy_imports.modules_getattr_other - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.modules_getattr_other + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) class LazyImportTypeTests(unittest.TestCase): @@ -197,7 +199,7 @@ class LazyImportTypeTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -205,8 +207,8 @@ def tearDown(self): def test_lazy_value_resolve(self): """resolve() method should force the lazy import to load.""" - import test.test_import.data.lazy_imports.lazy_get_value - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.lazy_get_value + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_lazy_import_type_exposed(self): """LazyImportType should be exposed in types module.""" @@ -223,7 +225,7 @@ class SyntaxRestrictionTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -232,22 +234,22 @@ def tearDown(self): def test_lazy_try_except(self): """lazy import inside try/except should raise SyntaxError.""" with self.assertRaises(SyntaxError): - import test.test_import.data.lazy_imports.badsyntax.lazy_try_except + import 
test.test_lazy_import.data.badsyntax.lazy_try_except def test_lazy_try_except_from(self): """lazy from import inside try/except should raise SyntaxError.""" with self.assertRaises(SyntaxError): - import test.test_import.data.lazy_imports.badsyntax.lazy_try_except_from + import test.test_lazy_import.data.badsyntax.lazy_try_except_from def test_lazy_try_except_from_star(self): """lazy from import * should raise SyntaxError.""" with self.assertRaises(SyntaxError): - import test.test_import.data.lazy_imports.badsyntax.lazy_try_except_from_star + import test.test_lazy_import.data.badsyntax.lazy_try_except_from_star def test_lazy_future_import(self): """lazy from __future__ import should raise SyntaxError.""" with self.assertRaises(SyntaxError) as cm: - import test.test_import.data.lazy_imports.badsyntax.lazy_future_import + import test.test_lazy_import.data.badsyntax.lazy_future_import # Check we highlight 'lazy' (column offset 0, end offset 4) self.assertEqual(cm.exception.offset, 1) self.assertEqual(cm.exception.end_offset, 5) @@ -255,7 +257,7 @@ def test_lazy_future_import(self): def test_lazy_import_func(self): """lazy import inside function should raise SyntaxError.""" with self.assertRaises(SyntaxError): - import test.test_import.data.lazy_imports.badsyntax.lazy_import_func + import test.test_lazy_import.data.badsyntax.lazy_import_func def test_lazy_import_exec_in_function(self): """lazy import via exec() inside a function should raise SyntaxError.""" @@ -268,6 +270,7 @@ def f(): f() self.assertIn("only allowed at module level", str(cm.exception)) + @support.requires_subprocess() def test_lazy_import_exec_at_module_level(self): """lazy import via exec() at module level should work.""" # exec() at module level (globals == locals) should allow lazy imports @@ -292,7 +295,7 @@ class EagerImportInLazyModeTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if 
key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -301,21 +304,21 @@ def tearDown(self): def test_try_except_eager(self): """Imports in try/except should be eager even with mode='all'.""" sys.set_lazy_imports("all") - import test.test_import.data.lazy_imports.try_except_eager - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.try_except_eager + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_try_except_eager_from(self): """From imports in try/except should be eager even with mode='all'.""" sys.set_lazy_imports("all") - import test.test_import.data.lazy_imports.try_except_eager_from - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.try_except_eager_from + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_eager_import_func(self): """Imports inside functions should return modules, not proxies.""" sys.set_lazy_imports("all") - import test.test_import.data.lazy_imports.eager_import_func + import test.test_lazy_import.data.eager_import_func - f = test.test_import.data.lazy_imports.eager_import_func.f + f = test.test_lazy_import.data.eager_import_func.f self.assertEqual(type(f()), type(sys)) @@ -324,7 +327,7 @@ class WithStatementTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -332,13 +335,13 @@ def tearDown(self): def test_lazy_with(self): """lazy import with 'with' statement should work.""" - import test.test_import.data.lazy_imports.lazy_with - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.lazy_with + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def 
test_lazy_with_from(self): """lazy from import with 'with' statement should work.""" - import test.test_import.data.lazy_imports.lazy_with_from - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.lazy_with_from + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) class PackageTests(unittest.TestCase): @@ -346,7 +349,7 @@ class PackageTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -354,20 +357,20 @@ def tearDown(self): def test_lazy_import_pkg(self): """lazy import of package submodule should load the package.""" - import test.test_import.data.lazy_imports.lazy_import_pkg + import test.test_lazy_import.data.lazy_import_pkg - self.assertIn("test.test_import.data.lazy_imports.pkg", sys.modules) - self.assertIn("test.test_import.data.lazy_imports.pkg.bar", sys.modules) + self.assertIn("test.test_lazy_import.data.pkg", sys.modules) + self.assertIn("test.test_lazy_import.data.pkg.bar", sys.modules) def test_lazy_import_pkg_cross_import(self): """Cross-imports within package should preserve lazy imports.""" - import test.test_import.data.lazy_imports.pkg.c + import test.test_lazy_import.data.pkg.c - self.assertIn("test.test_import.data.lazy_imports.pkg", sys.modules) - self.assertIn("test.test_import.data.lazy_imports.pkg.c", sys.modules) - self.assertNotIn("test.test_import.data.lazy_imports.pkg.b", sys.modules) + self.assertIn("test.test_lazy_import.data.pkg", sys.modules) + self.assertIn("test.test_lazy_import.data.pkg.c", sys.modules) + self.assertNotIn("test.test_lazy_import.data.pkg.b", sys.modules) - g = test.test_import.data.lazy_imports.pkg.c.get_globals() + g = test.test_lazy_import.data.pkg.c.get_globals() self.assertEqual(type(g["x"]), int) self.assertEqual(type(g["b"]), 
types.LazyImportType) @@ -377,7 +380,7 @@ class DunderLazyImportTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -385,13 +388,13 @@ def tearDown(self): def test_dunder_lazy_import(self): """__lazy_import__ should create lazy import proxy.""" - import test.test_import.data.lazy_imports.dunder_lazy_import - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.dunder_lazy_import + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_dunder_lazy_import_used(self): """Using __lazy_import__ result should trigger module load.""" - import test.test_import.data.lazy_imports.dunder_lazy_import_used - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + import test.test_lazy_import.data.dunder_lazy_import_used + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_dunder_lazy_import_invalid_arguments(self): """__lazy_import__ should reject invalid arguments.""" @@ -406,9 +409,9 @@ def test_dunder_lazy_import_invalid_arguments(self): def test_dunder_lazy_import_builtins(self): """__lazy_import__ should use module's __builtins__ for __import__.""" - from test.test_import.data.lazy_imports import dunder_lazy_import_builtins + from test.test_lazy_import.data import dunder_lazy_import_builtins - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) self.assertEqual(dunder_lazy_import_builtins.basic.basic2, 42) @@ -417,7 +420,7 @@ class SysLazyImportsAPITests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] 
sys.set_lazy_imports_filter(None) @@ -469,14 +472,15 @@ def test_lazy_modules_attribute_is_set(self): - """sys.lazy_modules should be a set per PEP 810.""" + """sys.lazy_modules should be a dict per PEP 810.""" self.assertIsInstance(sys.lazy_modules, dict) + @support.requires_subprocess() def test_lazy_modules_tracks_lazy_imports(self): """sys.lazy_modules should track lazily imported module names.""" code = textwrap.dedent(""" import sys initial_count = len(sys.lazy_modules) - import test.test_import.data.lazy_imports.basic_unused - assert "test.test_import.data.lazy_imports" in sys.lazy_modules - assert sys.lazy_modules["test.test_import.data.lazy_imports"] == {"basic2"} + import test.test_lazy_import.data.basic_unused + assert "test.test_lazy_import.data" in sys.lazy_modules + assert sys.lazy_modules["test.test_lazy_import.data"] == {"basic2"} assert len(sys.lazy_modules) > initial_count print("OK") """) @@ -489,6 +493,7 @@ def test_lazy_modules_tracks_lazy_imports(self): self.assertIn("OK", result.stdout) +@support.requires_subprocess() class ErrorHandlingTests(unittest.TestCase): """Tests for error handling during lazy import reification.
@@ -498,7 +503,7 @@ class ErrorHandlingTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -510,10 +515,10 @@ def test_import_error_shows_chained_traceback(self): # AND where the access happened, per PEP 810 "Reification" section code = textwrap.dedent(""" import sys - lazy import test.test_import.data.lazy_imports.nonexistent_module + lazy import test.test_lazy_import.data.nonexistent_module try: - x = test.test_import.data.lazy_imports.nonexistent_module + x = test.test_lazy_import.data.nonexistent_module except ImportError as e: # Should have __cause__ showing the original error # The exception chain shows both where import was defined and where access happened @@ -533,7 +538,7 @@ def test_attribute_error_on_from_import_shows_chained_traceback(self): # Tests 'lazy from module import nonexistent' behavior code = textwrap.dedent(""" import sys - lazy from test.test_import.data.lazy_imports.basic2 import nonexistent_name + lazy from test.test_lazy_import.data.basic2 import nonexistent_name try: x = nonexistent_name @@ -560,11 +565,11 @@ def test_reification_retries_on_failure(self): import sys import types - lazy import test.test_import.data.lazy_imports.broken_module + lazy import test.test_lazy_import.data.broken_module # First access - should fail try: - x = test.test_import.data.lazy_imports.broken_module + x = test.test_lazy_import.data.broken_module except ValueError: pass @@ -574,7 +579,7 @@ def test_reification_retries_on_failure(self): # The root 'test' binding should still allow retry # Second access - should also fail (retry the import) try: - x = test.test_import.data.lazy_imports.broken_module + x = test.test_lazy_import.data.broken_module except ValueError: print("OK - retry worked") """) @@ -591,10 +596,10 @@ def 
test_error_during_module_execution_propagates(self): # Module that raises during import should propagate with chaining code = textwrap.dedent(""" import sys - lazy import test.test_import.data.lazy_imports.broken_module + lazy import test.test_lazy_import.data.broken_module try: - _ = test.test_import.data.lazy_imports.broken_module + _ = test.test_lazy_import.data.broken_module print("FAIL - should have raised") except ValueError as e: # The ValueError from the module should be the cause @@ -645,6 +650,7 @@ def hello(): self.assertIn("Error", result.stderr) +@support.requires_subprocess() class GlobalsAndDictTests(unittest.TestCase): """Tests for globals() and __dict__ behavior with lazy imports. @@ -655,7 +661,7 @@ class GlobalsAndDictTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -672,10 +678,10 @@ def test_globals_returns_lazy_proxy_when_accessed_from_function(self): import sys import types - lazy from test.test_import.data.lazy_imports.basic2 import x + lazy from test.test_lazy_import.data.basic2 import x # Check that module is not yet loaded - assert 'test.test_import.data.lazy_imports.basic2' not in sys.modules + assert 'test.test_lazy_import.data.basic2' not in sys.modules def check_lazy(): # Access through globals() from inside a function @@ -688,7 +694,7 @@ def check_lazy(): assert is_lazy, "Expected LazyImportType from function scope" # Module should STILL not be loaded - assert 'test.test_import.data.lazy_imports.basic2' not in sys.modules + assert 'test.test_lazy_import.data.basic2' not in sys.modules print("OK") """) result = subprocess.run( @@ -724,25 +730,25 @@ def test_globals_dict_access_returns_lazy_proxy_inline(self): def test_module_dict_returns_lazy_proxy_without_reifying(self): """module.__dict__ access should not trigger reification.""" - 
import test.test_import.data.lazy_imports.globals_access + import test.test_lazy_import.data.globals_access # Module not loaded yet via direct dict access - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) # Access via get_from_globals should return lazy proxy - lazy_obj = test.test_import.data.lazy_imports.globals_access.get_from_globals() + lazy_obj = test.test_lazy_import.data.globals_access.get_from_globals() self.assertEqual(type(lazy_obj), types.LazyImportType) - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) def test_direct_access_triggers_reification(self): """Direct name access (not through globals()) should trigger reification.""" - import test.test_import.data.lazy_imports.globals_access + import test.test_lazy_import.data.globals_access - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) # Direct access should reify - result = test.test_import.data.lazy_imports.globals_access.get_direct() - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + result = test.test_lazy_import.data.globals_access.get_direct() + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_resolve_method_forces_reification(self): """Calling resolve() on lazy proxy should force reification. 
@@ -754,9 +760,9 @@ def test_resolve_method_forces_reification(self): import sys import types - lazy from test.test_import.data.lazy_imports.basic2 import x + lazy from test.test_lazy_import.data.basic2 import x - assert 'test.test_import.data.lazy_imports.basic2' not in sys.modules + assert 'test.test_lazy_import.data.basic2' not in sys.modules def test_resolve(): g = globals() @@ -766,7 +772,7 @@ def test_resolve(): resolved = lazy_obj.resolve() # Now module should be loaded - assert 'test.test_import.data.lazy_imports.basic2' in sys.modules + assert 'test.test_lazy_import.data.basic2' in sys.modules assert resolved == 42 # x is 42 in basic2.py return True @@ -786,9 +792,9 @@ def test_add_lazy_to_globals(self): import sys import types - lazy from test.test_import.data.lazy_imports import basic2 + lazy from test.test_lazy_import.data import basic2 - assert 'test.test_import.data.lazy_imports.basic2' not in sys.modules + assert 'test.test_lazy_import.data.basic2' not in sys.modules class C: pass sneaky = C() @@ -814,6 +820,7 @@ def f(): self.assertIn("OK", result.stdout) +@support.requires_subprocess() class MultipleNameFromImportTests(unittest.TestCase): """Tests for lazy from ... import with multiple names. 
@@ -825,7 +832,7 @@ class MultipleNameFromImportTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -837,10 +844,10 @@ def test_accessing_one_name_leaves_others_as_proxies(self): import sys import types - lazy from test.test_import.data.lazy_imports.basic2 import f, x + lazy from test.test_lazy_import.data.basic2 import f, x # Neither should be loaded yet - assert 'test.test_import.data.lazy_imports.basic2' not in sys.modules + assert 'test.test_lazy_import.data.basic2' not in sys.modules g = globals() assert type(g['f']) is types.LazyImportType @@ -851,7 +858,7 @@ def test_accessing_one_name_leaves_others_as_proxies(self): assert value == 42 # Module is now loaded - assert 'test.test_import.data.lazy_imports.basic2' in sys.modules + assert 'test.test_lazy_import.data.basic2' in sys.modules # 'x' should be reified (int), 'f' should still be lazy proxy assert type(g['x']) is int, f"Expected int, got {type(g['x'])}" @@ -872,7 +879,7 @@ def test_all_names_reified_after_all_accessed(self): import sys import types - lazy from test.test_import.data.lazy_imports.basic2 import f, x + lazy from test.test_lazy_import.data.basic2 import f, x g = globals() @@ -894,6 +901,7 @@ def test_all_names_reified_after_all_accessed(self): self.assertIn("OK", result.stdout) +@support.requires_subprocess() class SysLazyModulesTrackingTests(unittest.TestCase): """Tests for sys.lazy_modules tracking behavior. 
@@ -902,7 +910,7 @@ class SysLazyModulesTrackingTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -918,18 +926,18 @@ def test_module_added_to_lazy_modules_on_lazy_import(self): initial_count = len(sys.lazy_modules) - lazy import test.test_import.data.lazy_imports.basic2 + lazy import test.test_lazy_import.data.basic2 # Should be in lazy_modules after lazy import - assert "test.test_import.data.lazy_imports" in sys.lazy_modules - assert sys.lazy_modules["test.test_import.data.lazy_imports"] == {"basic2"} + assert "test.test_lazy_import.data" in sys.lazy_modules + assert sys.lazy_modules["test.test_lazy_import.data"] == {"basic2"} assert len(sys.lazy_modules) > initial_count # Trigger reification - _ = test.test_import.data.lazy_imports.basic2.x + _ = test.test_lazy_import.data.basic2.x # Module should still be tracked (for diagnostics per PEP 810) - assert "test.test_import.data.lazy_imports" not in sys.lazy_modules + assert "test.test_lazy_import.data" not in sys.lazy_modules print("OK") """) result = subprocess.run( @@ -946,6 +954,7 @@ def test_lazy_modules_is_per_interpreter(self): self.assertIsInstance(sys.lazy_modules, dict) +@support.requires_subprocess() class CommandLineAndEnvVarTests(unittest.TestCase): """Tests for command-line and environment variable control. 
@@ -1000,11 +1009,11 @@ def test_cli_lazy_imports_normal_respects_lazy_keyword_only(self): # modules already loaded by the interpreter startup code = textwrap.dedent(""" import sys - import test.test_import.data.lazy_imports.basic2 # Should be eager - lazy import test.test_import.data.lazy_imports.pkg.b # Should be lazy + import test.test_lazy_import.data.basic2 # Should be eager + lazy import test.test_lazy_import.data.pkg.b # Should be lazy - eager_loaded = 'test.test_import.data.lazy_imports.basic2' in sys.modules - lazy_loaded = 'test.test_import.data.lazy_imports.pkg.b' in sys.modules + eager_loaded = 'test.test_lazy_import.data.basic2' in sys.modules + lazy_loaded = 'test.test_lazy_import.data.pkg.b' in sys.modules if eager_loaded and not lazy_loaded: print("OK") @@ -1107,6 +1116,7 @@ def test_sys_set_lazy_imports_overrides_cli(self): self.assertIn("EAGER", result.stdout) +@support.requires_subprocess() class FilterFunctionSignatureTests(unittest.TestCase): """Tests for the filter function signature per PEP 810. 
@@ -1115,7 +1125,7 @@ class FilterFunctionSignatureTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1212,7 +1222,7 @@ class AdditionalSyntaxRestrictionTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1223,9 +1233,10 @@ def test_lazy_import_inside_class_raises_syntax_error(self): # PEP 810: "The soft keyword is only allowed at the global (module) level, # not inside functions, class bodies, try blocks, or import *" with self.assertRaises(SyntaxError): - import test.test_import.data.lazy_imports.badsyntax.lazy_class_body + import test.test_lazy_import.data.badsyntax.lazy_class_body +@support.requires_subprocess() class MixedLazyEagerImportTests(unittest.TestCase): """Tests for mixing lazy and eager imports of the same module. @@ -1236,7 +1247,7 @@ class MixedLazyEagerImportTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1292,7 +1303,7 @@ class RelativeImportTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1300,26 +1311,26 @@ def tearDown(self): def test_relative_lazy_import(self): """lazy from . 
import submodule should work.""" - from test.test_import.data.lazy_imports import relative_lazy + from test.test_lazy_import.data import relative_lazy # basic2 should not be loaded yet - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) # Access triggers reification result = relative_lazy.get_basic2() - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) def test_relative_lazy_from_import(self): """lazy from .module import name should work.""" - from test.test_import.data.lazy_imports import relative_lazy_from + from test.test_lazy_import.data import relative_lazy_from # basic2 should not be loaded yet - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) # Access triggers reification result = relative_lazy_from.get_x() self.assertEqual(result, 42) - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) class LazyModulesCompatibilityFromImportTests(unittest.TestCase): @@ -1331,7 +1342,7 @@ class LazyModulesCompatibilityFromImportTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1339,17 +1350,18 @@ def tearDown(self): def test_lazy_modules_makes_from_imports_lazy(self): """__lazy_modules__ should make from imports of listed modules lazy.""" - from test.test_import.data.lazy_imports import lazy_compat_from + from test.test_lazy_import.data import lazy_compat_from # basic2 should not be loaded yet because it's in __lazy_modules__ - self.assertNotIn("test.test_import.data.lazy_imports.basic2", sys.modules) + 
self.assertNotIn("test.test_lazy_import.data.basic2", sys.modules) # Access triggers reification result = lazy_compat_from.get_x() self.assertEqual(result, 42) - self.assertIn("test.test_import.data.lazy_imports.basic2", sys.modules) + self.assertIn("test.test_lazy_import.data.basic2", sys.modules) +@support.requires_subprocess() class ImportStateAtReificationTests(unittest.TestCase): """Tests for import system state at reification time. @@ -1361,7 +1373,7 @@ class ImportStateAtReificationTests(unittest.TestCase): def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1408,12 +1420,13 @@ def test_sys_path_at_reification_time_is_used(self): self.assertIn("OK", result.stdout) +@support.requires_subprocess() class ThreadSafetyTests(unittest.TestCase): """Tests for thread-safety of lazy imports.""" def tearDown(self): for key in list(sys.modules.keys()): - if key.startswith('test.test_import.data.lazy_imports'): + if key.startswith('test.test_lazy_import.data'): del sys.modules[key] sys.set_lazy_imports_filter(None) @@ -1421,7 +1434,7 @@ def tearDown(self): def test_concurrent_lazy_import_reification(self): """Multiple threads racing to reify the same lazy import should succeed.""" - from test.test_import.data.lazy_imports import basic_unused + from test.test_lazy_import.data import basic_unused num_threads = 10 results = [None] * num_threads @@ -1431,7 +1444,7 @@ def test_concurrent_lazy_import_reification(self): def access_lazy_import(idx): try: barrier.wait() - module = basic_unused.test.test_import.data.lazy_imports.basic2 + module = basic_unused.test.test_lazy_import.data.basic2 results[idx] = module except Exception as e: errors.append((idx, e)) diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_class_body.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_class_body.py 
similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_class_body.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_class_body.py diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_future_import.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_future_import.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_future_import.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_future_import.py diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_import_func.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_import_func.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_import_func.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_import_func.py diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_try_except.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_try_except.py diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except_from.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_try_except_from.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except_from.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_try_except_from.py diff --git a/Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except_from_star.py b/Lib/test/test_lazy_import/data/badsyntax/lazy_try_except_from_star.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/badsyntax/lazy_try_except_from_star.py rename to Lib/test/test_lazy_import/data/badsyntax/lazy_try_except_from_star.py diff --git a/Lib/test/test_import/data/lazy_imports/basic2.py b/Lib/test/test_lazy_import/data/basic2.py similarity index 100% rename from 
Lib/test/test_import/data/lazy_imports/basic2.py rename to Lib/test/test_lazy_import/data/basic2.py diff --git a/Lib/test/test_lazy_import/data/basic_compatibility_mode.py b/Lib/test/test_lazy_import/data/basic_compatibility_mode.py new file mode 100644 index 00000000000000..33eeab8879c394 --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_compatibility_mode.py @@ -0,0 +1,2 @@ +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +import test.test_lazy_import.data.basic2 diff --git a/Lib/test/test_lazy_import/data/basic_compatibility_mode_relative.py b/Lib/test/test_lazy_import/data/basic_compatibility_mode_relative.py new file mode 100644 index 00000000000000..15c1ea77630b31 --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_compatibility_mode_relative.py @@ -0,0 +1,2 @@ +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +lazy from .basic2 import f diff --git a/Lib/test/test_lazy_import/data/basic_compatibility_mode_used.py b/Lib/test/test_lazy_import/data/basic_compatibility_mode_used.py new file mode 100644 index 00000000000000..78bb4ffa52314b --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_compatibility_mode_used.py @@ -0,0 +1,3 @@ +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +import test.test_lazy_import.data.basic2 +test.test_lazy_import.data.basic2.f() diff --git a/Lib/test/test_lazy_import/data/basic_dir.py b/Lib/test/test_lazy_import/data/basic_dir.py new file mode 100644 index 00000000000000..a88c8ed80a1dcf --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_dir.py @@ -0,0 +1,2 @@ +lazy import test.test_lazy_import.data.basic2 +x = dir() diff --git a/Lib/test/test_lazy_import/data/basic_from_unused.py b/Lib/test/test_lazy_import/data/basic_from_unused.py new file mode 100644 index 00000000000000..62fa04bc8faefd --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_from_unused.py @@ -0,0 +1 @@ +lazy from test.test_lazy_import.data import basic2 diff --git a/Lib/test/test_lazy_import/data/basic_unused.py 
b/Lib/test/test_lazy_import/data/basic_unused.py new file mode 100644 index 00000000000000..f3e502c59e7cdf --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_unused.py @@ -0,0 +1 @@ +lazy import test.test_lazy_import.data.basic2 diff --git a/Lib/test/test_lazy_import/data/basic_used.py b/Lib/test/test_lazy_import/data/basic_used.py new file mode 100644 index 00000000000000..7234bd2fe02a80 --- /dev/null +++ b/Lib/test/test_lazy_import/data/basic_used.py @@ -0,0 +1,3 @@ +lazy import test.test_lazy_import.data.basic2 as basic2 + +basic2.f() diff --git a/Lib/test/test_import/data/lazy_imports/broken_attr_module.py b/Lib/test/test_lazy_import/data/broken_attr_module.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/broken_attr_module.py rename to Lib/test/test_lazy_import/data/broken_attr_module.py diff --git a/Lib/test/test_import/data/lazy_imports/broken_module.py b/Lib/test/test_lazy_import/data/broken_module.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/broken_module.py rename to Lib/test/test_lazy_import/data/broken_module.py diff --git a/Lib/test/test_lazy_import/data/compatibility_mode_func.py b/Lib/test/test_lazy_import/data/compatibility_mode_func.py new file mode 100644 index 00000000000000..9405d2f832b8ee --- /dev/null +++ b/Lib/test/test_lazy_import/data/compatibility_mode_func.py @@ -0,0 +1,5 @@ +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +def f(): + import test.test_lazy_import.data.basic2 + +f() diff --git a/Lib/test/test_lazy_import/data/compatibility_mode_try_except.py b/Lib/test/test_lazy_import/data/compatibility_mode_try_except.py new file mode 100644 index 00000000000000..a746c4a34a50c9 --- /dev/null +++ b/Lib/test/test_lazy_import/data/compatibility_mode_try_except.py @@ -0,0 +1,5 @@ +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +try: + import test.test_lazy_import.data.basic2 +except: + pass diff --git a/Lib/test/test_lazy_import/data/dunder_lazy_import.py 
b/Lib/test/test_lazy_import/data/dunder_lazy_import.py new file mode 100644 index 00000000000000..c4f4d61623f956 --- /dev/null +++ b/Lib/test/test_lazy_import/data/dunder_lazy_import.py @@ -0,0 +1 @@ +basic = __lazy_import__('test.test_lazy_import.data.basic2') diff --git a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import_builtins.py b/Lib/test/test_lazy_import/data/dunder_lazy_import_builtins.py similarity index 76% rename from Lib/test/test_import/data/lazy_imports/dunder_lazy_import_builtins.py rename to Lib/test/test_lazy_import/data/dunder_lazy_import_builtins.py index f1fed0fc31768e..8b6dbd7e3f684c 100644 --- a/Lib/test/test_import/data/lazy_imports/dunder_lazy_import_builtins.py +++ b/Lib/test/test_lazy_import/data/dunder_lazy_import_builtins.py @@ -9,6 +9,6 @@ def myimport(*args): "__import__": myimport, } basic2 = 42 -basic = __lazy_import__("test.test_import.data.lazy_imports.basic2", +basic = __lazy_import__("test.test_lazy_import.data.basic2", globals=new_globals) basic diff --git a/Lib/test/test_lazy_import/data/dunder_lazy_import_used.py b/Lib/test/test_lazy_import/data/dunder_lazy_import_used.py new file mode 100644 index 00000000000000..42c026994b1094 --- /dev/null +++ b/Lib/test/test_lazy_import/data/dunder_lazy_import_used.py @@ -0,0 +1,3 @@ +basic = __lazy_import__('test.test_lazy_import.data', + fromlist=("basic2", )) +basic diff --git a/Lib/test/test_lazy_import/data/eager_import_func.py b/Lib/test/test_lazy_import/data/eager_import_func.py new file mode 100644 index 00000000000000..1f7130209d4b56 --- /dev/null +++ b/Lib/test/test_lazy_import/data/eager_import_func.py @@ -0,0 +1,3 @@ +def f(): + import test.test_lazy_import.data.basic2 as basic2 + return basic2 diff --git a/Lib/test/test_import/data/lazy_imports/global_filter.py b/Lib/test/test_lazy_import/data/global_filter.py similarity index 52% rename from Lib/test/test_import/data/lazy_imports/global_filter.py rename to Lib/test/test_lazy_import/data/global_filter.py index 
72cb5f2ef5a02b..0e2311842dfcbd 100644 --- a/Lib/test/test_import/data/lazy_imports/global_filter.py +++ b/Lib/test/test_lazy_import/data/global_filter.py @@ -2,9 +2,9 @@ def filter(module_name, imported_name, from_list): assert module_name == __name__ - assert imported_name == "test.test_import.data.lazy_imports.basic2" + assert imported_name == "test.test_lazy_import.data.basic2" return False sys.set_lazy_imports_filter(filter) -lazy import test.test_import.data.lazy_imports.basic2 as basic2 +lazy import test.test_lazy_import.data.basic2 as basic2 diff --git a/Lib/test/test_import/data/lazy_imports/global_filter_from.py b/Lib/test/test_lazy_import/data/global_filter_from.py similarity index 58% rename from Lib/test/test_import/data/lazy_imports/global_filter_from.py rename to Lib/test/test_lazy_import/data/global_filter_from.py index 93a1280292f8af..da9d22e87277f8 100644 --- a/Lib/test/test_import/data/lazy_imports/global_filter_from.py +++ b/Lib/test/test_lazy_import/data/global_filter_from.py @@ -2,10 +2,10 @@ def filter(module_name, imported_name, from_list): assert module_name == __name__ - assert imported_name == "test.test_import.data.lazy_imports.basic2" + assert imported_name == "test.test_lazy_import.data.basic2" assert from_list == ('f',) return False sys.set_lazy_imports_filter(filter) -lazy from test.test_import.data.lazy_imports.basic2 import f +lazy from test.test_lazy_import.data.basic2 import f diff --git a/Lib/test/test_import/data/lazy_imports/global_filter_from_true.py b/Lib/test/test_lazy_import/data/global_filter_from_true.py similarity index 61% rename from Lib/test/test_import/data/lazy_imports/global_filter_from_true.py rename to Lib/test/test_lazy_import/data/global_filter_from_true.py index bc51215ae26ce4..2d8b1de4c7c400 100644 --- a/Lib/test/test_import/data/lazy_imports/global_filter_from_true.py +++ b/Lib/test/test_lazy_import/data/global_filter_from_true.py @@ -2,11 +2,11 @@ def filter(module_name, imported_name, from_list): assert 
module_name == __name__ - assert imported_name == "test.test_import.data.lazy_imports.basic2" + assert imported_name == "test.test_lazy_import.data.basic2" assert from_list == ('f',) return True sys.set_lazy_imports("normal") sys.set_lazy_imports_filter(filter) -lazy from test.test_import.data.lazy_imports.basic2 import f +lazy from test.test_lazy_import.data.basic2 import f diff --git a/Lib/test/test_import/data/lazy_imports/global_filter_true.py b/Lib/test/test_lazy_import/data/global_filter_true.py similarity index 57% rename from Lib/test/test_import/data/lazy_imports/global_filter_true.py rename to Lib/test/test_lazy_import/data/global_filter_true.py index 4881b30fb02409..da4abeacf87cda 100644 --- a/Lib/test/test_import/data/lazy_imports/global_filter_true.py +++ b/Lib/test/test_lazy_import/data/global_filter_true.py @@ -2,10 +2,10 @@ def filter(module_name, imported_name, from_list): assert module_name == __name__ - assert imported_name == "test.test_import.data.lazy_imports.basic2" + assert imported_name == "test.test_lazy_import.data.basic2" return True sys.set_lazy_imports("normal") sys.set_lazy_imports_filter(filter) -lazy import test.test_import.data.lazy_imports.basic2 as basic2 +lazy import test.test_lazy_import.data.basic2 as basic2 diff --git a/Lib/test/test_lazy_import/data/global_off.py b/Lib/test/test_lazy_import/data/global_off.py new file mode 100644 index 00000000000000..95d1511dd93223 --- /dev/null +++ b/Lib/test/test_lazy_import/data/global_off.py @@ -0,0 +1,5 @@ +import sys + +sys.set_lazy_imports("none") + +lazy import test.test_lazy_import.data.basic2 as basic2 diff --git a/Lib/test/test_lazy_import/data/global_on.py b/Lib/test/test_lazy_import/data/global_on.py new file mode 100644 index 00000000000000..ddacab5468bab9 --- /dev/null +++ b/Lib/test/test_lazy_import/data/global_on.py @@ -0,0 +1,5 @@ +import sys + +sys.set_lazy_imports("all") + +import test.test_lazy_import.data.basic2 as basic2 diff --git 
a/Lib/test/test_import/data/lazy_imports/globals_access.py b/Lib/test/test_lazy_import/data/globals_access.py similarity index 72% rename from Lib/test/test_import/data/lazy_imports/globals_access.py rename to Lib/test/test_lazy_import/data/globals_access.py index c12c6a029c2b81..af6fc40115b21c 100644 --- a/Lib/test/test_import/data/lazy_imports/globals_access.py +++ b/Lib/test/test_lazy_import/data/globals_access.py @@ -1,5 +1,5 @@ # Test that globals() returns lazy proxy objects without reifying -lazy import test.test_import.data.lazy_imports.basic2 as basic2 +lazy import test.test_lazy_import.data.basic2 as basic2 def get_from_globals(): g = globals() diff --git a/Lib/test/test_lazy_import/data/lazy_compat_from.py b/Lib/test/test_lazy_import/data/lazy_compat_from.py new file mode 100644 index 00000000000000..d6f7ed9a78538b --- /dev/null +++ b/Lib/test/test_lazy_import/data/lazy_compat_from.py @@ -0,0 +1,6 @@ +# Test __lazy_modules__ with from imports +__lazy_modules__ = ['test.test_lazy_import.data.basic2'] +from test.test_lazy_import.data.basic2 import x, f + +def get_x(): + return x diff --git a/Lib/test/test_import/data/lazy_imports/lazy_get_value.py b/Lib/test/test_lazy_import/data/lazy_get_value.py similarity index 50% rename from Lib/test/test_import/data/lazy_imports/lazy_get_value.py rename to Lib/test/test_lazy_import/data/lazy_get_value.py index 0ff572fa1e398a..f216dc52f292fa 100644 --- a/Lib/test/test_import/data/lazy_imports/lazy_get_value.py +++ b/Lib/test/test_lazy_import/data/lazy_get_value.py @@ -1,4 +1,4 @@ -lazy import test.test_import.data.lazy_imports.basic2 as basic2 +lazy import test.test_lazy_import.data.basic2 as basic2 def f(): x = globals() diff --git a/Lib/test/test_lazy_import/data/lazy_import_pkg.py b/Lib/test/test_lazy_import/data/lazy_import_pkg.py new file mode 100644 index 00000000000000..f237b50fd422ff --- /dev/null +++ b/Lib/test/test_lazy_import/data/lazy_import_pkg.py @@ -0,0 +1,2 @@ +lazy import 
test.test_lazy_import.data.pkg.bar +x = test.test_lazy_import.data.pkg.bar.f diff --git a/Lib/test/test_lazy_import/data/lazy_with.py b/Lib/test/test_lazy_import/data/lazy_with.py new file mode 100644 index 00000000000000..30cf9d377624da --- /dev/null +++ b/Lib/test/test_lazy_import/data/lazy_with.py @@ -0,0 +1,3 @@ +import contextlib +with contextlib.nullcontext(): + lazy import test.test_lazy_import.data.basic2 diff --git a/Lib/test/test_lazy_import/data/lazy_with_from.py b/Lib/test/test_lazy_import/data/lazy_with_from.py new file mode 100644 index 00000000000000..794ba33f828596 --- /dev/null +++ b/Lib/test/test_lazy_import/data/lazy_with_from.py @@ -0,0 +1,3 @@ +import contextlib +with contextlib.nullcontext(): + lazy import test.test_lazy_import.data.basic2 as basic2 diff --git a/Lib/test/test_lazy_import/data/modules_dict.py b/Lib/test/test_lazy_import/data/modules_dict.py new file mode 100644 index 00000000000000..562a3b9a62f8d3 --- /dev/null +++ b/Lib/test/test_lazy_import/data/modules_dict.py @@ -0,0 +1,5 @@ +lazy import test.test_lazy_import.data.basic2 as basic2 + +import sys +mod = sys.modules[__name__] +x = mod.__dict__ diff --git a/Lib/test/test_lazy_import/data/modules_getattr.py b/Lib/test/test_lazy_import/data/modules_getattr.py new file mode 100644 index 00000000000000..f0b8e2bd0613b8 --- /dev/null +++ b/Lib/test/test_lazy_import/data/modules_getattr.py @@ -0,0 +1,5 @@ +lazy import test.test_lazy_import.data.basic2 as basic2 + +import sys +mod = sys.modules[__name__] +x = mod.basic2 diff --git a/Lib/test/test_lazy_import/data/modules_getattr_other.py b/Lib/test/test_lazy_import/data/modules_getattr_other.py new file mode 100644 index 00000000000000..642840c28722b3 --- /dev/null +++ b/Lib/test/test_lazy_import/data/modules_getattr_other.py @@ -0,0 +1,5 @@ +lazy import test.test_lazy_import.data.basic2 as basic2 + +import sys +mod = sys.modules[__name__] +x = mod.__name__ diff --git a/Lib/test/test_import/data/lazy_imports/multi_from_import.py 
b/Lib/test/test_lazy_import/data/multi_from_import.py similarity index 64% rename from Lib/test/test_import/data/lazy_imports/multi_from_import.py rename to Lib/test/test_lazy_import/data/multi_from_import.py index 96dc9757500549..dfd875975dd2f7 100644 --- a/Lib/test/test_import/data/lazy_imports/multi_from_import.py +++ b/Lib/test/test_lazy_import/data/multi_from_import.py @@ -1,5 +1,5 @@ # Test that lazy from import with multiple names only reifies accessed names -lazy from test.test_import.data.lazy_imports.basic2 import f, x +lazy from test.test_lazy_import.data.basic2 import f, x def get_globals(): return globals() diff --git a/Lib/test/test_import/data/lazy_imports/pkg/__init__.py b/Lib/test/test_lazy_import/data/pkg/__init__.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/pkg/__init__.py rename to Lib/test/test_lazy_import/data/pkg/__init__.py diff --git a/Lib/test/test_import/data/lazy_imports/pkg/b.py b/Lib/test/test_lazy_import/data/pkg/b.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/pkg/b.py rename to Lib/test/test_lazy_import/data/pkg/b.py diff --git a/Lib/test/test_import/data/lazy_imports/pkg/bar.py b/Lib/test/test_lazy_import/data/pkg/bar.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/pkg/bar.py rename to Lib/test/test_lazy_import/data/pkg/bar.py diff --git a/Lib/test/test_import/data/lazy_imports/pkg/c.py b/Lib/test/test_lazy_import/data/pkg/c.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/pkg/c.py rename to Lib/test/test_lazy_import/data/pkg/c.py diff --git a/Lib/test/test_import/data/lazy_imports/relative_lazy.py b/Lib/test/test_lazy_import/data/relative_lazy.py similarity index 100% rename from Lib/test/test_import/data/lazy_imports/relative_lazy.py rename to Lib/test/test_lazy_import/data/relative_lazy.py diff --git a/Lib/test/test_import/data/lazy_imports/relative_lazy_from.py b/Lib/test/test_lazy_import/data/relative_lazy_from.py 
similarity index 100% rename from Lib/test/test_import/data/lazy_imports/relative_lazy_from.py rename to Lib/test/test_lazy_import/data/relative_lazy_from.py diff --git a/Lib/test/test_lazy_import/data/try_except_eager.py b/Lib/test/test_lazy_import/data/try_except_eager.py new file mode 100644 index 00000000000000..90b4bc62898d2a --- /dev/null +++ b/Lib/test/test_lazy_import/data/try_except_eager.py @@ -0,0 +1,4 @@ +try: + import test.test_lazy_import.data.basic2 +except: + pass diff --git a/Lib/test/test_lazy_import/data/try_except_eager_from.py b/Lib/test/test_lazy_import/data/try_except_eager_from.py new file mode 100644 index 00000000000000..1e6c650d0d1c7d --- /dev/null +++ b/Lib/test/test_lazy_import/data/try_except_eager_from.py @@ -0,0 +1,4 @@ +try: + from test.test_lazy_import.data.basic2 import f +except: + pass diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index e48749626fccad..a3d485c998ac91 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -2061,6 +2061,30 @@ Traceback (most recent call last): SyntaxError: trailing comma not allowed without surrounding parentheses +>>> with item,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + +>>> with item as x,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + +>>> with item1, item2,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + +>>> with item1 as x, item2,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + +>>> with item1 as x, item2 as y,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + +>>> with item1, item2 as y,: pass +Traceback (most recent call last): +SyntaxError: the last 'with' item has a trailing comma + >>> import a from b Traceback (most recent call last): SyntaxError: Did you mean to use 'from ... import ...' 
instead? diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 3896f34a34c8d6..2fbc2a041269f4 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -5383,8 +5383,8 @@ def test_attribute_error_does_not_reify_lazy_imports(self): # pkg.bar prints "BAR_MODULE_LOADED" when imported. # If lazy import is reified during suggestion computation, we'll see it. code = textwrap.dedent(""" - lazy import test.test_import.data.lazy_imports.pkg.bar - test.test_import.data.lazy_imports.pkg.nonexistent + lazy import test.test_lazy_import.data.pkg.bar + test.test_lazy_import.data.pkg.nonexistent """) rc, stdout, stderr = assert_python_failure('-c', code) self.assertNotIn(b"BAR_MODULE_LOADED", stdout) @@ -5393,9 +5393,9 @@ def test_traceback_formatting_does_not_reify_lazy_imports(self): """Formatting a traceback should not trigger lazy import reification.""" code = textwrap.dedent(""" import traceback - lazy import test.test_import.data.lazy_imports.pkg.bar + lazy import test.test_lazy_import.data.pkg.bar try: - test.test_import.data.lazy_imports.pkg.nonexistent + test.test_lazy_import.data.pkg.nonexistent except AttributeError: traceback.format_exc() print("OK") @@ -5407,9 +5407,9 @@ def test_traceback_formatting_does_not_reify_lazy_imports(self): def test_suggestion_still_works_for_non_lazy_attributes(self): """Suggestions should still work for non-lazy module attributes.""" code = textwrap.dedent(""" - lazy import test.test_import.data.lazy_imports.pkg.bar + lazy import test.test_lazy_import.data.pkg.bar # Typo for __name__ - test.test_import.data.lazy_imports.pkg.__nme__ + test.test_lazy_import.data.pkg.__nme__ """) rc, stdout, stderr = assert_python_failure('-c', code) self.assertIn(b"__name__", stderr) diff --git a/Makefile.pre.in b/Makefile.pre.in index aba92666720d7d..da8d5483fd32e8 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2682,9 +2682,6 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_import/data/package3 \ 
test/test_import/data/package4 \ test/test_import/data/unwritable \ - test/test_import/data/lazy_imports \ - test/test_import/data/lazy_imports/pkg \ - test/test_import/data/lazy_imports/badsyntax \ test/test_importlib \ test/test_importlib/builtin \ test/test_importlib/extension \ @@ -2725,6 +2722,10 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_interpreters \ test/test_io \ test/test_json \ + test/test_lazy_import \ + test/test_lazy_import/data \ + test/test_lazy_import/data/pkg \ + test/test_lazy_import/data/badsyntax \ test/test_module \ test/test_multiprocessing_fork \ test/test_multiprocessing_forkserver \ diff --git a/Misc/NEWS.d/next/Build/2026-02-27-18-10-02.gh-issue-144533.21fk9L.rst b/Misc/NEWS.d/next/Build/2026-02-27-18-10-02.gh-issue-144533.21fk9L.rst new file mode 100644 index 00000000000000..d6e0201b90c550 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2026-02-27-18-10-02.gh-issue-144533.21fk9L.rst @@ -0,0 +1 @@ +Use wasmtime's ``--argv0`` to auto-discover sysconfig in WASI builds diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2026-02-26-18-00-00.gh-issue-145241.hL2k9Q.rst b/Misc/NEWS.d/next/Core_and_Builtins/2026-02-26-18-00-00.gh-issue-145241.hL2k9Q.rst new file mode 100644 index 00000000000000..a3253132a577ba --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2026-02-26-18-00-00.gh-issue-145241.hL2k9Q.rst @@ -0,0 +1,3 @@ +Specialized the parser error for when ``with`` items are followed +by a trailing comma (for example, ``with item,:``), raising a clearer +:exc:`SyntaxError` message. Patch by Pablo Galindo and Bartosz Sławecki. 
diff --git a/Parser/parser.c b/Parser/parser.c index 37c19c4c9020c8..f8d6d1ce89b54d 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -22,28 +22,28 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 695}, - {"as", 693}, - {"in", 708}, + {"if", 698}, + {"as", 696}, + {"in", 711}, {"or", 589}, {"is", 597}, {NULL, -1}, }, (KeywordToken[]) { {"del", 634}, - {"def", 712}, - {"for", 707}, - {"try", 669}, + {"def", 715}, + {"for", 710}, + {"try", 672}, {"and", 590}, - {"not", 716}, + {"not", 719}, {NULL, -1}, }, (KeywordToken[]) { {"from", 646}, {"pass", 527}, - {"with", 660}, - {"elif", 700}, - {"else", 699}, + {"with", 663}, + {"elif", 703}, + {"else", 702}, {"None", 628}, {"True", 627}, {NULL, -1}, @@ -52,9 +52,9 @@ static KeywordToken *reserved_keywords[] = { {"raise", 632}, {"yield", 588}, {"break", 528}, - {"async", 711}, - {"class", 714}, - {"while", 702}, + {"async", 714}, + {"class", 717}, + {"while", 705}, {"False", 629}, {"await", 598}, {NULL, -1}, @@ -64,12 +64,12 @@ static KeywordToken *reserved_keywords[] = { {"return", 522}, {"assert", 638}, {"global", 530}, - {"except", 690}, + {"except", 693}, {"lambda", 622}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 686}, + {"finally", 689}, {NULL, -1}, }, (KeywordToken[]) { @@ -1964,7 +1964,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 695) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 698) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2048,7 +2048,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 669) // token='try' + 
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 672) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2069,7 +2069,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 702) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 705) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -4525,7 +4525,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 714)) // token='class' + (_keyword = _PyPegen_expect_token(p, 717)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4692,7 +4692,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 712)) // token='def' + (_keyword = _PyPegen_expect_token(p, 715)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4753,9 +4753,9 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 712)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 715)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -6093,7 +6093,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6138,7 +6138,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6233,7 +6233,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - 
(_keyword = _PyPegen_expect_token(p, 700)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 703)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6278,7 +6278,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 700)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 703)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6359,7 +6359,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword = _PyPegen_expect_token(p, 702)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6438,7 +6438,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 702)) // token='while' + (_keyword = _PyPegen_expect_token(p, 705)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6538,11 +6538,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 711)) // token='in' && (_cut_var = 1) && @@ -6600,13 +6600,13 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 710)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 711)) // token='in' && (_cut_var = 1) && @@ -6735,7 +6735,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = 
_PyPegen_expect_token(p, 660)) // token='with' + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6786,7 +6786,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' && (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+ && @@ -6835,9 +6835,9 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 663)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6887,9 +6887,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 663)) // token='with' && (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+ && @@ -6975,7 +6975,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -7100,7 +7100,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7144,7 +7144,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // 
token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7192,7 +7192,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7291,7 +7291,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (e = expression_rule(p)) // expression && @@ -7335,11 +7335,11 @@ except_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7381,7 +7381,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (e = expressions_rule(p)) // expressions && @@ -7422,7 +7422,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7534,7 +7534,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7581,13 +7581,13 @@ except_star_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // 
token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7630,7 +7630,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7730,7 +7730,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 689)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -8038,7 +8038,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -8233,7 +8233,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -11647,11 +11647,11 @@ if_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && (c = expression_rule(p)) // expression ) @@ -12600,7 +12600,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if 
( - (_keyword = _PyPegen_expect_token(p, 716)) // token='not' + (_keyword = _PyPegen_expect_token(p, 719)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -13254,9 +13254,9 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 716)) // token='not' + (_keyword = _PyPegen_expect_token(p, 719)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 711)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13302,7 +13302,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword = _PyPegen_expect_token(p, 711)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13351,7 +13351,7 @@ isnot_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 597)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 716)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 719)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -18010,13 +18010,13 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 710)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 711)) // token='in' && (_cut_var = 1) && @@ -18055,11 +18055,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 711)) // token='in' 
&& (_cut_var = 1) && @@ -21386,11 +21386,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && (c = expression_rule(p)) // expression ) @@ -21690,7 +21690,7 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && @@ -21723,11 +21723,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && _PyPegen_lookahead_for_expr(0, expression_rule, p) ) @@ -21759,11 +21759,11 @@ invalid_expression_rule(Parser *p) if ( (a = (stmt_ty)_tmp_118_rule(p)) // pass_stmt | break_stmt | continue_stmt && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && (c = simple_stmt_rule(p)) // simple_stmt ) @@ -21882,11 +21882,11 @@ invalid_if_expression_rule(Parser *p) if ( (disjunction_var = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = 
_PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && (a = _PyPegen_expect_token(p, 16)) // token='*' ) @@ -21918,11 +21918,11 @@ invalid_if_expression_rule(Parser *p) if ( (disjunction_var = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 702)) // token='else' && (a = _PyPegen_expect_token(p, 35)) // token='**' ) @@ -24050,7 +24050,7 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (a = expression_rule(p)) // expression && @@ -24100,13 +24100,13 @@ invalid_for_if_clause_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_tmp_136_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' && (_tmp_136_var = _tmp_136_rule(p)) // bitwise_or ((',' bitwise_or))* ','? && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 708) // token='in' + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 711) // token='in' ) { D(fprintf(stderr, "%*c+ invalid_for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'")); @@ -24152,9 +24152,9 @@ invalid_for_target_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? 
+ (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -24364,7 +24364,7 @@ invalid_dotted_as_name_rule(Parser *p) if ( (dotted_name_var = dotted_name_rule(p)) // dotted_name && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && _PyPegen_lookahead(0, _tmp_139_rule, p) && @@ -24415,7 +24415,7 @@ invalid_import_from_as_name_rule(Parser *p) if ( (name_var = _PyPegen_name_token(p)) // NAME && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && _PyPegen_lookahead(0, _tmp_139_rule, p) && @@ -24515,6 +24515,7 @@ invalid_import_from_targets_rule(Parser *p) } // invalid_with_stmt: +// | 'async'? 'with' ','.(expression ['as' star_target])+ ',' ':' // | 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE // | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE static void * @@ -24529,6 +24530,43 @@ invalid_with_stmt_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; + { // 'async'? 'with' ','.(expression ['as' star_target])+ ',' ':' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ',' ':'")); + asdl_seq * _gather_141_var; + Token * _keyword; + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + Token * trailing; + if ( + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? 
+ && + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' + && + (_gather_141_var = _gather_141_rule(p)) // ','.(expression ['as' star_target])+ + && + (trailing = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ',' ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( trailing , "the last 'with' item has a trailing comma" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ',' ':'")); + } { // 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE if (p->error_indicator) { p->level--; @@ -24541,9 +24579,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' && (_gather_141_var = _gather_141_rule(p)) // ','.(expression ['as' star_target])+ && @@ -24579,9 +24617,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? 
&& - (_keyword = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -24641,9 +24679,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 660)) // token='with' + (a = _PyPegen_expect_token(p, 663)) // token='with' && (_gather_141_var = _gather_141_rule(p)) // ','.(expression ['as' star_target])+ && @@ -24684,9 +24722,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 660)) // token='with' + (a = _PyPegen_expect_token(p, 663)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -24749,7 +24787,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 669)) // token='try' + (a = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24781,7 +24819,7 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24820,7 +24858,7 @@ invalid_try_stmt_rule(Parser *p) Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24828,7 +24866,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_36_var = _loop1_36_rule(p)) // 
except_block+ && - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24867,7 +24905,7 @@ invalid_try_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='try' + (_keyword = _PyPegen_expect_token(p, 672)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24875,7 +24913,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_37_var = _loop1_37_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (_opt_var = _tmp_146_rule(p), !p->error_indicator) // [expression ['as' NAME]] && @@ -24932,7 +24970,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (a = expression_rule(p)) // expression && @@ -24940,7 +24978,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24972,7 +25010,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -25003,7 +25041,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -25034,11 +25072,11 @@ invalid_except_stmt_rule(Parser *p) 
asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (a = expression_rule(p)) // expression && @@ -25098,7 +25136,7 @@ invalid_except_star_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -25108,7 +25146,7 @@ invalid_except_star_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25141,7 +25179,7 @@ invalid_except_star_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -25175,7 +25213,7 @@ invalid_except_star_stmt_rule(Parser *p) void *_tmp_147_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -25209,13 +25247,13 @@ invalid_except_star_stmt_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 693)) // token='as' + 
(_keyword_1 = _PyPegen_expect_token(p, 696)) // token='as' && (a = expression_rule(p)) // expression && @@ -25266,7 +25304,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='finally' + (a = _PyPegen_expect_token(p, 689)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25322,7 +25360,7 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -25358,7 +25396,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25414,7 +25452,7 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 690)) // token='except' + (a = _PyPegen_expect_token(p, 693)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -25691,7 +25729,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -25721,7 +25759,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (a = expression_rule(p)) // expression ) @@ -25937,7 +25975,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + 
(_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25968,7 +26006,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 695)) // token='if' + (a = _PyPegen_expect_token(p, 698)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -26023,7 +26061,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 700)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 703)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -26054,7 +26092,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 700)) // token='elif' + (a = _PyPegen_expect_token(p, 703)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -26107,7 +26145,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 699)) // token='else' + (a = _PyPegen_expect_token(p, 702)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -26140,13 +26178,13 @@ invalid_else_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword = _PyPegen_expect_token(p, 702)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - (_keyword_1 = _PyPegen_expect_token(p, 700)) // token='elif' + (_keyword_1 = _PyPegen_expect_token(p, 703)) // token='elif' ) { D(fprintf(stderr, "%*c+ invalid_else_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block 'elif'")); @@ -26193,7 +26231,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = 
_PyPegen_expect_token(p, 702)) // token='while' + (_keyword = _PyPegen_expect_token(p, 705)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -26224,7 +26262,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 702)) // token='while' + (a = _PyPegen_expect_token(p, 705)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -26283,13 +26321,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 711)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -26324,13 +26362,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 707)) // token='for' + (a = _PyPegen_expect_token(p, 710)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 708)) // token='in' + (_keyword = _PyPegen_expect_token(p, 711)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -26396,9 +26434,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? 
+ (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 712)) // token='def' + (a = _PyPegen_expect_token(p, 715)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26455,9 +26493,9 @@ invalid_def_raw_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty name_var; if ( - (_opt_var = _PyPegen_expect_token(p, 711), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 714), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 712)) // token='def' + (_keyword = _PyPegen_expect_token(p, 715)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26521,7 +26559,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 714)) // token='class' + (_keyword = _PyPegen_expect_token(p, 717)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -26560,7 +26598,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 714)) // token='class' + (a = _PyPegen_expect_token(p, 717)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -28196,7 +28234,7 @@ invalid_arithmetic_rule(Parser *p) && (_tmp_157_var = _tmp_157_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@' && - (a = _PyPegen_expect_token(p, 716)) // token='not' + (a = _PyPegen_expect_token(p, 719)) // token='not' && (b = inversion_rule(p)) // inversion ) @@ -28245,7 +28283,7 @@ invalid_factor_rule(Parser *p) if ( (_tmp_158_var = _tmp_158_rule(p)) // '+' | '-' | '~' && - (a = _PyPegen_expect_token(p, 716)) // token='not' + (a = _PyPegen_expect_token(p, 719)) // token='not' && (b = factor_rule(p)) // factor ) @@ -28668,7 +28706,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 712)) // token='def' + (_keyword = 
_PyPegen_expect_token(p, 715)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -28706,7 +28744,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -28744,7 +28782,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 714)) // token='class' + (_keyword = _PyPegen_expect_token(p, 717)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -28801,7 +28839,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='with' + (_keyword = _PyPegen_expect_token(p, 663)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -28820,7 +28858,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -28858,7 +28896,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 707)) // token='for' + (_keyword = _PyPegen_expect_token(p, 710)) // token='for' ) { 
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -28877,7 +28915,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 711)) // token='async' + (_keyword = _PyPegen_expect_token(p, 714)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -29578,7 +29616,7 @@ _tmp_21_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -35550,7 +35588,7 @@ _tmp_117_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 699)) // token='else' + (_keyword = _PyPegen_expect_token(p, 702)) // token='else' ) { D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); @@ -37213,7 +37251,7 @@ _tmp_144_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 690)) // token='except' + (_keyword = _PyPegen_expect_token(p, 693)) // token='except' ) { D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); @@ -37232,7 +37270,7 @@ _tmp_144_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 689)) // token='finally' ) { D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); @@ -38534,7 +38572,7 @@ _tmp_166_rule(Parser *p) Token * _keyword; expr_ty 
z; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='if' + (_keyword = _PyPegen_expect_token(p, 698)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -39270,7 +39308,7 @@ _tmp_180_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 693)) // token='as' + (_keyword = _PyPegen_expect_token(p, 696)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) diff --git a/Platforms/WASI/__main__.py b/Platforms/WASI/__main__.py index 8302432fd2f106..471ac3297b2702 100644 --- a/Platforms/WASI/__main__.py +++ b/Platforms/WASI/__main__.py @@ -317,21 +317,8 @@ def configure_wasi_python(context, working_dir): wasi_build_dir = working_dir.relative_to(CHECKOUT) - python_build_dir = BUILD_DIR / "build" - lib_dirs = list(python_build_dir.glob("lib.*")) - assert len(lib_dirs) == 1, ( - f"Expected a single lib.* directory in {python_build_dir}" - ) - lib_dir = os.fsdecode(lib_dirs[0]) - python_version = lib_dir.rpartition("-")[-1] - sysconfig_data_dir = ( - f"{wasi_build_dir}/build/lib.wasi-wasm32-{python_version}" - ) - - # Use PYTHONPATH to include sysconfig data which must be anchored to the - # WASI guest's `/` directory. args = { - "PYTHONPATH": f"/{sysconfig_data_dir}", + "ARGV0": f"/{wasi_build_dir}/python.wasm", "PYTHON_WASM": working_dir / "python.wasm", } # Check dynamically for wasmtime in case it was specified manually via @@ -421,8 +408,8 @@ def main(): default_host_triple = config["targets"]["host-triple"] default_host_runner = ( f"{WASMTIME_HOST_RUNNER_VAR} run " - # For setting PYTHONPATH to the sysconfig data directory. - "--env PYTHONPATH={PYTHONPATH} " + # Set argv0 so that getpath.py can auto-discover the sysconfig data directory + "--argv0 {ARGV0} " # Map the checkout to / to load the stdlib from /Lib. f"--dir {os.fsdecode(CHECKOUT)}::/ " # Flags involving --optimize, --codegen, --debug, --wasm, and --wasi can be kept