
Commit

Merge branch 'main' of https://github.com/python/cpython
Yhg1s committed Mar 12, 2024
2 parents 128fbdf + a53cc3f commit 3f54d1c
Showing 11 changed files with 239 additions and 162 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/stale.yml
@@ -2,7 +2,7 @@ name: Mark stale pull requests

on:
schedule:
- cron: "0 0 * * *"
- cron: "0 */12 * * *"

permissions:
pull-requests: write
9 changes: 5 additions & 4 deletions Doc/glossary.rst
@@ -841,10 +841,11 @@ Glossary
Some named tuples are built-in types (such as the above examples).
Alternatively, a named tuple can be created from a regular class
definition that inherits from :class:`tuple` and that defines named
- fields. Such a class can be written by hand or it can be created with
- the factory function :func:`collections.namedtuple`. The latter
- technique also adds some extra methods that may not be found in
- hand-written or built-in named tuples.
+ fields. Such a class can be written by hand, or it can be created by
+ inheriting :class:`typing.NamedTuple`, or with the factory function
+ :func:`collections.namedtuple`. The latter techniques also add some
+ extra methods that may not be found in hand-written or built-in named
+ tuples.
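For illustration only (not part of the diff), both creation styles mentioned above give the same named-field behaviour; the class and field names here are invented:

    from collections import namedtuple
    from typing import NamedTuple

    # Factory-function style:
    Point = namedtuple('Point', ['x', 'y'])

    # Class-definition style, inheriting typing.NamedTuple:
    class Pixel(NamedTuple):
        x: int
        y: int

    p, q = Point(1, 2), Pixel(1, 2)
    print(p.x, q.y)        # named field access on both
    print(p._asdict())     # one of the extra methods added by these techniques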

namespace
The place where a variable is stored. Namespaces are implemented as
112 changes: 103 additions & 9 deletions Doc/library/itertools.rst
@@ -998,7 +998,7 @@ The following recipes have a more mathematical flavor:

def sum_of_squares(it):
"Add up the squares of the input values."
- # sum_of_squares([10, 20, 30]) -> 1400
+ # sum_of_squares([10, 20, 30]) --> 1400
return math.sumprod(*tee(it))
def reshape(matrix, cols):
@@ -1019,17 +1019,16 @@ The following recipes have a more mathematical flavor:

def convolve(signal, kernel):
"""Discrete linear convolution of two iterables.
+ Equivalent to polynomial multiplication.

- The kernel is fully consumed before the calculations begin.
- The signal is consumed lazily and can be infinite.

- Convolutions are mathematically commutative.
- If the signal and kernel are swapped,
- the output will be the same.
+ Convolutions are mathematically commutative; however, the inputs are
+ evaluated differently. The signal is consumed lazily and can be
+ infinite. The kernel is fully consumed before the calculations begin.

Article: https://betterexplained.com/articles/intuitive-convolution/
Video: https://www.youtube.com/watch?v=KuXjwB4LzSA
"""
# convolve([1, -1, -20], [1, -3]) --> 1 -4 -17 60
# convolve(data, [0.25, 0.25, 0.25, 0.25]) --> Moving average (blur)
# convolve(data, [1/2, 0, -1/2]) --> 1st derivative estimate
# convolve(data, [1, -2, 1]) --> 2nd derivative estimate
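The recipe body itself is collapsed in this view. The following is a minimal sketch consistent with the docstring and the doctests added below, not the verbatim recipe; it assumes Python 3.12+ for math.sumprod:

    import collections
    import math
    from itertools import chain, islice, repeat

    def convolve(signal, kernel):
        # The kernel is consumed greedily, at call time, and reversed for the
        # sliding dot product.
        kernel = tuple(kernel)[::-1]
        n = len(kernel)

        def outputs():
            # Zero-pad the signal on both sides so every partial overlap is
            # emitted; the signal itself is read lazily, one element per
            # output value.
            padded = chain(repeat(0, n - 1), signal, repeat(0, n - 1))
            window = collections.deque(islice(padded, n - 1), maxlen=n)
            for x in padded:
                window.append(x)
                yield math.sumprod(kernel, window)

        return outputs()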
@@ -1067,7 +1066,7 @@ The following recipes have a more mathematical flavor:
f(x) = x³ -4x² -17x + 60
f'(x) = 3x² -8x -17
"""
- # polynomial_derivative([1, -4, -17, 60]) -> [3, -8, -17]
+ # polynomial_derivative([1, -4, -17, 60]) --> [3, -8, -17]
n = len(coefficients)
powers = reversed(range(1, n))
return list(map(operator.mul, coefficients, powers))
@@ -1169,6 +1168,12 @@ The following recipes have a more mathematical flavor:

>>> take(10, count())
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> # Verify that the input is consumed lazily
>>> it = iter('abcdef')
>>> take(3, it)
['a', 'b', 'c']
>>> list(it)
['d', 'e', 'f']

>>> list(prepend(1, [2, 3, 4]))
[1, 2, 3, 4]
@@ -1181,25 +1186,45 @@

>>> list(tail(3, 'ABCDEFG'))
['E', 'F', 'G']
>>> # Verify the input is consumed greedily
>>> input_iterator = iter('ABCDEFG')
>>> output_iterator = tail(3, input_iterator)
>>> list(input_iterator)
[]
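The take and tail definitions are not shown in this hunk; these are sketches matching the lazy/greedy behaviour verified above, consistent with the documented recipes:

    import collections
    from itertools import islice

    def take(n, iterable):
        # Lazy: islice() reads only the first n items of the input.
        return list(islice(iterable, n))

    def tail(n, iterable):
        # Greedy: the deque must see every item to know which n came last,
        # so the whole input is consumed as soon as tail() is called.
        return iter(collections.deque(iterable, maxlen=n))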

>>> it = iter(range(10))
>>> consume(it, 3)
>>> # Verify the input is consumed lazily
>>> next(it)
3
>>> # Verify the input is consumed completely
>>> consume(it)
>>> next(it, 'Done')
'Done'

>>> nth('abcde', 3)
'd'

>>> nth('abcde', 9) is None
True
>>> # Verify that the input is consumed lazily
>>> it = iter('abcde')
>>> nth(it, 2)
'c'
>>> list(it)
['d', 'e']
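Likewise, sketches of consume and nth consistent with the behaviour checked above:

    import collections
    from itertools import islice

    def consume(iterator, n=None):
        # Advance the iterator n steps ahead; if n is None, consume it entirely.
        if n is None:
            collections.deque(iterator, maxlen=0)   # exhaust into a zero-length deque
        else:
            next(islice(iterator, n, n), None)      # skip exactly n items

    def nth(iterable, n, default=None):
        # Return the nth item (zero-based) or a default; reads at most n+1 items.
        return next(islice(iterable, n, None), default)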

>>> [all_equal(s) for s in ('', 'A', 'AAAA', 'AAAB', 'AAABA')]
[True, True, True, False, False]
>>> [all_equal(s, key=str.casefold) for s in ('', 'A', 'AaAa', 'AAAB', 'AAABA')]
[True, True, True, False, False]
>>> # Verify that the input is consumed lazily and that only
>>> # one element of a second equivalence class is used to disprove
>>> # the assertion that all elements are equal.
>>> it = iter('aaabbbccc')
>>> all_equal(it)
False
>>> ''.join(it)
'bbccc'
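A sketch in the spirit of the all_equal recipe, showing why the scan stops at the first differing element:

    from itertools import groupby, islice

    def all_equal(iterable, key=None):
        # groupby() collapses consecutive runs of equal (keyed) values, so a
        # second group can only appear if some element differs; slicing to at
        # most two groups stops the scan right there.
        return len(list(islice(groupby(iterable, key), 2))) <= 1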

>>> quantify(range(99), lambda x: x%2==0)
50
@@ -1222,6 +1247,11 @@

>>> list(ncycles('abc', 3))
['a', 'b', 'c', 'a', 'b', 'c', 'a', 'b', 'c']
>>> # Verify greedy consumption of input iterator
>>> input_iterator = iter('abc')
>>> output_iterator = ncycles(input_iterator, 3)
>>> list(input_iterator)
[]
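A sketch of ncycles consistent with the greedy consumption verified above:

    from itertools import chain, repeat

    def ncycles(iterable, n):
        # tuple() consumes the whole input up front (hence the greedy behaviour);
        # the saved copy is then chained n times, lazily on the output side.
        return chain.from_iterable(repeat(tuple(iterable), n))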

>>> sum_of_squares([10, 20, 30])
1400
@@ -1248,19 +1278,41 @@

>>> list(transpose([(1, 2, 3), (11, 22, 33)]))
[(1, 11), (2, 22), (3, 33)]
>>> # Verify that the inputs are consumed lazily
>>> input1 = iter([1, 2, 3])
>>> input2 = iter([11, 22, 33])
>>> output_iterator = transpose([input1, input2])
>>> next(output_iterator)
(1, 11)
>>> list(zip(input1, input2))
[(2, 22), (3, 33)]
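A sketch of transpose consistent with the lazy behaviour verified above:

    def transpose(matrix):
        # zip() is lazy: producing one output row pulls exactly one element
        # from each input row; strict=True (Python 3.10+) rejects ragged input.
        return zip(*matrix, strict=True)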

>>> list(matmul([(7, 5), (3, 5)], [[2, 5], [7, 9]]))
[(49, 80), (41, 60)]
>>> list(matmul([[2, 5], [7, 9], [3, 4]], [[7, 11, 5, 4, 9], [3, 5, 2, 6, 3]]))
[(29, 47, 20, 38, 33), (76, 122, 53, 82, 90), (33, 53, 23, 36, 39)]

>>> list(convolve([1, -1, -20], [1, -3])) == [1, -4, -17, 60]
True
>>> data = [20, 40, 24, 32, 20, 28, 16]
>>> list(convolve(data, [0.25, 0.25, 0.25, 0.25]))
[5.0, 15.0, 21.0, 29.0, 29.0, 26.0, 24.0, 16.0, 11.0, 4.0]
>>> list(convolve(data, [1, -1]))
[20, 20, -16, 8, -12, 8, -12, -16]
>>> list(convolve(data, [1, -2, 1]))
[20, 0, -36, 24, -20, 20, -20, -4, 16]
>>> # Verify signal is consumed lazily and the kernel greedily
>>> signal_iterator = iter([10, 20, 30, 40, 50])
>>> kernel_iterator = iter([1, 2, 3])
>>> output_iterator = convolve(signal_iterator, kernel_iterator)
>>> list(kernel_iterator)
[]
>>> next(output_iterator)
10
>>> next(output_iterator)
40
>>> list(signal_iterator)
[30, 40, 50]

>>> from fractions import Fraction
>>> from decimal import Decimal
@@ -1348,6 +1400,17 @@ The following recipes have a more mathematical flavor:
>>> # Test list input. Lists do not support None for the stop argument
>>> list(iter_index(list('AABCADEAF'), 'A'))
[0, 1, 4, 7]
>>> # Verify that input is consumed lazily
>>> input_iterator = iter('AABCADEAF')
>>> output_iterator = iter_index(input_iterator, 'A')
>>> next(output_iterator)
0
>>> next(output_iterator)
1
>>> next(output_iterator)
4
>>> ''.join(input_iterator)
'DEAF'
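A sketch of the iterator path of iter_index, consistent with the laziness shown above. The full recipe also has a faster branch that uses a sequence's .index() method, which is why the list test above notes that lists do not accept None for the stop argument:

    from itertools import islice

    def iter_index(iterable, value, start=0, stop=None):
        # Scan lazily, yielding each position where value occurs; nothing past
        # the most recently reported index has been read.
        for i, element in enumerate(islice(iterable, start, stop), start):
            if element is value or element == value:
                yield i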

>>> list(sieve(30))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
@@ -1499,6 +1562,17 @@
[0, 2, 4, 6, 8]
>>> list(odds)
[1, 3, 5, 7, 9]
>>> # Verify that the input is consumed lazily
>>> input_iterator = iter(range(10))
>>> evens, odds = partition(is_odd, input_iterator)
>>> next(odds)
1
>>> next(odds)
3
>>> next(evens)
0
>>> list(input_iterator)
[4, 5, 6, 7, 8, 9]
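A sketch of partition consistent with the lazy behaviour verified above:

    from itertools import filterfalse, tee

    def partition(pred, iterable):
        # tee() shares a single pass over the input between the two streams, so
        # source elements are pulled only as either output iterator advances.
        false_items, true_items = tee(iterable)
        return filterfalse(pred, false_items), filter(pred, true_items)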

>>> list(subslices('ABCD'))
['A', 'AB', 'ABC', 'ABCD', 'B', 'BC', 'BCD', 'C', 'CD', 'D']
@@ -1518,13 +1592,27 @@
['A', 'B', 'C', 'D']
>>> list(unique_everseen('ABBcCAD', str.casefold))
['A', 'B', 'c', 'D']
>>> # Verify that the input is consumed lazily
>>> input_iterator = iter('AAAABBBCCDAABBB')
>>> output_iterator = unique_everseen(input_iterator)
>>> next(output_iterator)
'A'
>>> ''.join(input_iterator)
'AAABBBCCDAABBB'
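A sketch of unique_everseen consistent with the lazy behaviour verified above:

    from itertools import filterfalse

    def unique_everseen(iterable, key=None):
        # Yield elements in order of first appearance, remembering everything
        # seen so far; nothing beyond the last yielded element has been read.
        seen = set()
        if key is None:
            for element in filterfalse(seen.__contains__, iterable):
                seen.add(element)
                yield element
        else:
            for element in iterable:
                k = key(element)
                if k not in seen:
                    seen.add(k)
                    yield element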

>>> list(unique_justseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D', 'A', 'B']
>>> list(unique_justseen('ABBCcAD', str.casefold))
['A', 'B', 'C', 'A', 'D']
>>> list(unique_justseen('ABBcCAD', str.casefold))
['A', 'B', 'c', 'A', 'D']
>>> # Verify that the input is consumed lazily
>>> input_iterator = iter('AAAABBBCCDAABBB')
>>> output_iterator = unique_justseen(input_iterator)
>>> next(output_iterator)
'A'
>>> ''.join(input_iterator)
'AAABBBCCDAABBB'

>>> d = dict(a=1, b=2, c=3)
>>> it = iter_except(d.popitem, KeyError)
@@ -1545,6 +1633,12 @@

>>> first_true('ABC0DEF1', '9', str.isdigit)
'0'
>>> # Verify that inputs are consumed lazily
>>> it = iter('ABC0DEF1')
>>> first_true(it, predicate=str.isdigit)
'0'
>>> ''.join(it)
'DEF1'
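A sketch of first_true consistent with the lazy behaviour verified above (the parameter name predicate is taken from the doctest):

    def first_true(iterable, default=False, predicate=None):
        # filter() is lazy, so the scan stops at the first true (or matching)
        # value; the default is returned only if the input runs out first.
        return next(filter(predicate, iterable), default)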


.. testcode::
8 changes: 8 additions & 0 deletions Doc/library/typing.rst
@@ -1233,6 +1233,10 @@ These can be used as types in annotations. They all support subscription using

.. versionadded:: 3.5.3

.. versionchanged:: 3.13

:data:`ClassVar` can now be nested in :data:`Final` and vice versa.

.. data:: Final

Special typing construct to indicate final names to type checkers.
@@ -1256,6 +1260,10 @@ These can be used as types in annotations. They all support subscription using

.. versionadded:: 3.8

.. versionchanged:: 3.13

:data:`Final` can now be nested in :data:`ClassVar` and vice versa.
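An illustrative snippet of the newly allowed nesting (class and attribute names invented for the example):

    from typing import ClassVar, Final

    class Settings:
        # Both nesting orders are accepted from 3.13 on:
        MAX_RETRIES: ClassVar[Final[int]] = 3
        TIMEOUT: Final[ClassVar[float]] = 1.5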

.. data:: Required

Special typing construct to mark a :class:`TypedDict` key as required.
12 changes: 12 additions & 0 deletions Lib/test/support/import_helper.py
@@ -268,6 +268,18 @@ def modules_cleanup(oldmodules):
sys.modules.update(oldmodules)


@contextlib.contextmanager
def isolated_modules():
"""
Save modules on entry and cleanup on exit.
"""
(saved,) = modules_setup()
try:
yield
finally:
modules_cleanup(saved)
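A usage sketch (the module imported here is arbitrary, chosen only for illustration):

    import sys
    from test.support import import_helper

    with import_helper.isolated_modules():
        import json.tool                   # imported only inside the block
        assert 'json.tool' in sys.modules
    # On exit, sys.modules is restored to the snapshot taken on entry, so the
    # import above does not leak into later tests (unless already loaded before).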


def mock_register_at_fork(func):
# bpo-30599: Mock os.register_at_fork() when importing the random module,
# since this function doesn't allow to unregister callbacks and would leak
65 changes: 0 additions & 65 deletions Lib/test/test_frame.py
@@ -293,71 +293,6 @@ def gen():
""")
assert_python_ok("-c", code)

@support.cpython_only
@unittest.skipIf(Py_GIL_DISABLED, "test requires precise GC scheduling")
def test_sneaky_frame_object(self):

def trace(frame, event, arg):
"""
Don't actually do anything, just force a frame object to be created.
"""

def callback(phase, info):
"""
Yo dawg, I heard you like frames, so I'm allocating a frame while
you're allocating a frame, so you can have a frame while you have a
frame!
"""
nonlocal sneaky_frame_object
sneaky_frame_object = sys._getframe().f_back.f_back
# We're done here:
gc.callbacks.remove(callback)

def f():
while True:
yield

old_threshold = gc.get_threshold()
old_callbacks = gc.callbacks[:]
old_enabled = gc.isenabled()
old_trace = sys.gettrace()
try:
# Stop the GC for a second while we set things up:
gc.disable()
# Create a paused generator:
g = f()
next(g)
# Move all objects to the oldest generation, and tell the GC to run
# on the *very next* allocation:
gc.collect()
gc.set_threshold(1, 0, 0)
sys._clear_internal_caches()
# Okay, so here's the nightmare scenario:
# - We're tracing the resumption of a generator, which creates a new
# frame object.
# - The allocation of this frame object triggers a collection
# *before* the frame object is actually created.
# - During the collection, we request the exact same frame object.
# This test does it with a GC callback, but in real code it would
# likely be a trace function, weakref callback, or finalizer.
# - The collection finishes, and the original frame object is
# created. We now have two frame objects fighting over ownership
# of the same interpreter frame!
sys.settrace(trace)
gc.callbacks.append(callback)
sneaky_frame_object = None
gc.enable()
next(g)
# g.gi_frame should be the frame object from the callback (the
# one that was *requested* second, but *created* first):
self.assertIs(g.gi_frame, sneaky_frame_object)
finally:
gc.set_threshold(*old_threshold)
gc.callbacks[:] = old_callbacks
sys.settrace(old_trace)
if old_enabled:
gc.enable()

@support.cpython_only
@threading_helper.requires_working_threading()
def test_sneaky_frame_object_teardown(self):
2 changes: 1 addition & 1 deletion Lib/test/test_importlib/resources/test_files.py
@@ -70,7 +70,7 @@ def setUp(self):
self.addCleanup(self.fixtures.close)
self.site_dir = self.fixtures.enter_context(os_helper.temp_dir())
self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir))
- self.fixtures.enter_context(import_helper.CleanImport())
+ self.fixtures.enter_context(import_helper.isolated_modules())


class ModulesFilesTests(SiteDir, unittest.TestCase):
@@ -0,0 +1,3 @@
Added import helper ``isolated_modules`` as ``CleanImport`` does not remove
modules imported during the context. Use it in importlib.resources tests to
avoid leaving ``mod`` around to impede importlib.metadata tests.
