diff --git a/.github/workflows/CI_build.yml b/.github/workflows/CI_build.yml index ab9edaf9..93156ae9 100644 --- a/.github/workflows/CI_build.yml +++ b/.github/workflows/CI_build.yml @@ -43,8 +43,8 @@ jobs: if: matrix.build_configuration == 'Release' working-directory: installer run: | - $env:PYTHONBUILDDIR_X64='..\packages\python.3.12.2\tools' - $env:PYTHONBUILDDIR='..\packages\pythonx86.3.12.2\tools' + $env:PYTHONBUILDDIR_X64='..\packages\python.3.12.3\tools' + $env:PYTHONBUILDDIR='..\packages\pythonx86.3.12.3\tools' Rename-Item -Path ".\buildPaths.bat.orig" -NewName "buildPaths.bat" $env:WIX_PATH="C:\Program Files (x86)\WiX Toolset v3.11\bin" $env:PATH = $env:PATH + ';' + $env:WIX_PATH diff --git a/PythonLib/full/_pyio.py b/PythonLib/full/_pyio.py index 9641d431..687076fb 100644 --- a/PythonLib/full/_pyio.py +++ b/PythonLib/full/_pyio.py @@ -1209,7 +1209,8 @@ def _readinto(self, buf, read1): return written def tell(self): - return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos + # GH-95782: Keep return value non-negative + return max(_BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos, 0) def seek(self, pos, whence=0): if whence not in valid_seek_flags: diff --git a/PythonLib/full/argparse.py b/PythonLib/full/argparse.py index 484a1efd..120cb6c8 100644 --- a/PythonLib/full/argparse.py +++ b/PythonLib/full/argparse.py @@ -225,7 +225,8 @@ def format_help(self): # add the heading if the section was non-empty if self.heading is not SUPPRESS and self.heading is not None: current_indent = self.formatter._current_indent - heading = '%*s%s:\n' % (current_indent, '', self.heading) + heading_text = _('%(heading)s:') % dict(heading=self.heading) + heading = '%*s%s\n' % (current_indent, '', heading_text) else: heading = '' @@ -415,6 +416,8 @@ def _format_actions_usage(self, actions, groups): suppressed_actions_count += 1 exposed_actions_count = group_action_count - suppressed_actions_count + if not exposed_actions_count: + continue if not group.required: if start in inserts: @@ -720,7 +723,7 @@ def _get_help_string(self, action): if action.default is not SUPPRESS: defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] if action.option_strings or action.nargs in defaulting_nargs: - help += ' (default: %(default)s)' + help += _(' (default: %(default)s)') return help @@ -1149,7 +1152,9 @@ def __init__(self, version=None, dest=SUPPRESS, default=SUPPRESS, - help="show program's version number and exit"): + help=None): + if help is None: + help = _("show program's version number and exit") super(_VersionAction, self).__init__( option_strings=option_strings, dest=dest, @@ -2004,7 +2009,7 @@ def consume_optional(start_index): # get the optional identified at this index option_tuple = option_string_indices[start_index] - action, option_string, explicit_arg = option_tuple + action, option_string, sep, explicit_arg = option_tuple # identify additional optionals in the same arg string # (e.g. 
-xyz is the same as -x -y -z if no args are required) @@ -2031,18 +2036,27 @@ def consume_optional(start_index): and option_string[1] not in chars and explicit_arg != '' ): + if sep or explicit_arg[0] in chars: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] - new_explicit_arg = explicit_arg[1:] or None optionals_map = self._option_string_actions if option_string in optionals_map: action = optionals_map[option_string] - explicit_arg = new_explicit_arg + explicit_arg = explicit_arg[1:] + if not explicit_arg: + sep = explicit_arg = None + elif explicit_arg[0] == '=': + sep = '=' + explicit_arg = explicit_arg[1:] + else: + sep = '' else: - msg = _('ignored explicit argument %r') - raise ArgumentError(action, msg % explicit_arg) - + extras.append(char + explicit_arg) + stop = start_index + 1 + break # if the action expect exactly one argument, we've # successfully matched the option; exit the loop elif arg_count == 1: @@ -2262,18 +2276,17 @@ def _parse_optional(self, arg_string): # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] - return action, arg_string, None + return action, arg_string, None, None # if it's just a single character, it was meant to be positional if len(arg_string) == 1: return None # if the option string before the "=" is present, return the action - if '=' in arg_string: - option_string, explicit_arg = arg_string.split('=', 1) - if option_string in self._option_string_actions: - action = self._option_string_actions[option_string] - return action, option_string, explicit_arg + option_string, sep, explicit_arg = arg_string.partition('=') + if sep and option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, sep, explicit_arg # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations @@ -2282,7 +2295,7 @@ def _parse_optional(self, arg_string): # if multiple actions match, the option string was ambiguous if len(option_tuples) > 1: options = ', '.join([option_string - for action, option_string, explicit_arg in option_tuples]) + for action, option_string, sep, explicit_arg in option_tuples]) args = {'option': arg_string, 'matches': options} msg = _('ambiguous option: %(option)s could match %(matches)s') self.error(msg % args) @@ -2306,7 +2319,7 @@ def _parse_optional(self, arg_string): # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) - return None, arg_string, None + return None, arg_string, None, None def _get_option_tuples(self, option_string): result = [] @@ -2316,15 +2329,13 @@ def _get_option_tuples(self, option_string): chars = self.prefix_chars if option_string[0] in chars and option_string[1] in chars: if self.allow_abbrev: - if '=' in option_string: - option_prefix, explicit_arg = option_string.split('=', 1) - else: - option_prefix = option_string - explicit_arg = None + option_prefix, sep, explicit_arg = option_string.partition('=') + if not sep: + sep = explicit_arg = None for option_string in self._option_string_actions: if option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, 
sep, explicit_arg result.append(tup) # single character options can be concatenated with their arguments @@ -2332,18 +2343,17 @@ def _get_option_tuples(self, option_string): # separate elif option_string[0] in chars and option_string[1] not in chars: option_prefix = option_string - explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] for option_string in self._option_string_actions: if option_string == short_option_prefix: action = self._option_string_actions[option_string] - tup = action, option_string, short_explicit_arg + tup = action, option_string, '', short_explicit_arg result.append(tup) elif option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, None, None result.append(tup) # shouldn't ever get here diff --git a/PythonLib/full/ast.py b/PythonLib/full/ast.py index de940d2e..b0995fa7 100644 --- a/PythonLib/full/ast.py +++ b/PythonLib/full/ast.py @@ -1268,14 +1268,18 @@ def visit_JoinedStr(self, node): quote_type = quote_types[0] self.write(f"{quote_type}{value}{quote_type}") - def _write_fstring_inner(self, node, scape_newlines=False): + def _write_fstring_inner(self, node, is_format_spec=False): if isinstance(node, JoinedStr): # for both the f-string itself, and format_spec for value in node.values: - self._write_fstring_inner(value, scape_newlines=scape_newlines) + self._write_fstring_inner(value, is_format_spec=is_format_spec) elif isinstance(node, Constant) and isinstance(node.value, str): value = node.value.replace("{", "{{").replace("}", "}}") - if scape_newlines: + + if is_format_spec: + value = value.replace("\\", "\\\\") + value = value.replace("'", "\\'") + value = value.replace('"', '\\"') value = value.replace("\n", "\\n") self.write(value) elif isinstance(node, FormattedValue): @@ -1299,10 +1303,7 @@ def unparse_inner(inner): self.write(f"!{chr(node.conversion)}") if node.format_spec: self.write(":") - self._write_fstring_inner( - node.format_spec, - scape_newlines=True - ) + self._write_fstring_inner(node.format_spec, is_format_spec=True) def visit_Name(self, node): self.write(node.id) diff --git a/PythonLib/full/asyncio/base_events.py b/PythonLib/full/asyncio/base_events.py index c16c445b..29eff049 100644 --- a/PythonLib/full/asyncio/base_events.py +++ b/PythonLib/full/asyncio/base_events.py @@ -45,6 +45,7 @@ from . import sslproto from . import staggered from . import tasks +from . import timeouts from . import transports from . 
import trsock from .log import logger @@ -596,23 +597,24 @@ async def shutdown_default_executor(self, timeout=None): thread = threading.Thread(target=self._do_shutdown, args=(future,)) thread.start() try: - await future - finally: - thread.join(timeout) - - if thread.is_alive(): + async with timeouts.timeout(timeout): + await future + except TimeoutError: warnings.warn("The executor did not finishing joining " - f"its threads within {timeout} seconds.", - RuntimeWarning, stacklevel=2) + f"its threads within {timeout} seconds.", + RuntimeWarning, stacklevel=2) self._default_executor.shutdown(wait=False) + else: + thread.join() def _do_shutdown(self, future): try: self._default_executor.shutdown(wait=True) if not self.is_closed(): - self.call_soon_threadsafe(future.set_result, None) + self.call_soon_threadsafe(futures._set_result_unless_cancelled, + future, None) except Exception as ex: - if not self.is_closed(): + if not self.is_closed() and not future.cancelled(): self.call_soon_threadsafe(future.set_exception, ex) def _check_running(self): diff --git a/PythonLib/full/asyncio/tasks.py b/PythonLib/full/asyncio/tasks.py index 65f2a6ef..0b22e28d 100644 --- a/PythonLib/full/asyncio/tasks.py +++ b/PythonLib/full/asyncio/tasks.py @@ -480,7 +480,7 @@ async def wait_for(fut, timeout): If the wait is cancelled, the task is also cancelled. - If the task supresses the cancellation and returns a value instead, + If the task suppresses the cancellation and returns a value instead, that value is returned. This function is a coroutine. diff --git a/PythonLib/full/asyncio/windows_events.py b/PythonLib/full/asyncio/windows_events.py index c9a5fb84..cb613451 100644 --- a/PythonLib/full/asyncio/windows_events.py +++ b/PythonLib/full/asyncio/windows_events.py @@ -8,6 +8,7 @@ import _overlapped import _winapi import errno +from functools import partial import math import msvcrt import socket @@ -323,13 +324,13 @@ def run_forever(self): if self._self_reading_future is not None: ov = self._self_reading_future._ov self._self_reading_future.cancel() - # self_reading_future was just cancelled so if it hasn't been - # finished yet, it never will be (it's possible that it has - # already finished and its callback is waiting in the queue, - # where it could still happen if the event loop is restarted). - # Unregister it otherwise IocpProactor.close will wait for it - # forever - if ov is not None: + # self_reading_future always uses IOCP, so even though it's + # been cancelled, we need to make sure that the IOCP message + # is received so that the kernel is not holding on to the + # memory, possibly causing memory corruption later. Only + # unregister it if IO is complete in all respects. Otherwise + # we need another _poll() later to complete the IO. + if ov is not None and not ov.pending: self._proactor._unregister(ov) self._self_reading_future = None @@ -466,6 +467,18 @@ def finish_socket_func(trans, key, ov): else: raise + @classmethod + def _finish_recvfrom(cls, trans, key, ov, *, empty_result): + try: + return cls.finish_socket_func(trans, key, ov) + except OSError as exc: + # WSARecvFrom will report ERROR_PORT_UNREACHABLE when the same + # socket is used to send to an address that is not listening. 
+ if exc.winerror == _overlapped.ERROR_PORT_UNREACHABLE: + return empty_result, None + else: + raise + def recv(self, conn, nbytes, flags=0): self._register_with_iocp(conn) ov = _overlapped.Overlapped(NULL) @@ -500,7 +513,8 @@ def recvfrom(self, conn, nbytes, flags=0): except BrokenPipeError: return self._result((b'', None)) - return self._register(ov, conn, self.finish_socket_func) + return self._register(ov, conn, partial(self._finish_recvfrom, + empty_result=b'')) def recvfrom_into(self, conn, buf, flags=0): self._register_with_iocp(conn) @@ -510,17 +524,8 @@ def recvfrom_into(self, conn, buf, flags=0): except BrokenPipeError: return self._result((0, None)) - def finish_recv(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_recv) + return self._register(ov, conn, partial(self._finish_recvfrom, + empty_result=0)) def sendto(self, conn, buf, flags=0, addr=None): self._register_with_iocp(conn) diff --git a/PythonLib/full/collections/__init__.py b/PythonLib/full/collections/__init__.py index 8652dc8a..5f000b5f 100644 --- a/PythonLib/full/collections/__init__.py +++ b/PythonLib/full/collections/__init__.py @@ -638,7 +638,8 @@ def elements(self): >>> sorted(c.elements()) ['A', 'A', 'B', 'B', 'C', 'C'] - # Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + Knuth's example for prime factors of 1836: 2**2 * 3**3 * 17**1 + >>> import math >>> prime_factors = Counter({2: 2, 3: 3, 17: 1}) >>> math.prod(prime_factors.elements()) @@ -679,7 +680,7 @@ def update(self, iterable=None, /, **kwds): ''' # The regular dict.update() operation makes no sense here because the - # replace behavior results in the some of original untouched counts + # replace behavior results in some of the original untouched counts # being mixed-in with all of the other counts for a mismash that # doesn't have a straight-forward interpretation in most counting # contexts. Instead, we implement straight-addition. 
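For context, a minimal sketch of the straight-addition semantics this comment describes (illustrative only; it is not part of the patch and the variable names are arbitrary):

    # Illustrative example: Counter.update() adds counts rather than
    # replacing them, matching the "straight-addition" described above.
    from collections import Counter

    c = Counter(a=3, b=1)
    c.update(Counter(a=1, b=2))
    assert c == Counter(a=4, b=3)

    # Knuth's prime-factor example from the elements() docstring:
    import math
    prime_factors = Counter({2: 2, 3: 3, 17: 1})
    assert math.prod(prime_factors.elements()) == 1836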
Both the inputs diff --git a/PythonLib/full/configparser.py b/PythonLib/full/configparser.py index e8aae217..f96704eb 100644 --- a/PythonLib/full/configparser.py +++ b/PythonLib/full/configparser.py @@ -995,100 +995,102 @@ def _read(self, fp, fpname): lineno = 0 indent_level = 0 e = None # None, or an exception - for lineno, line in enumerate(fp, start=1): - comment_start = sys.maxsize - # strip inline comments - inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} - while comment_start == sys.maxsize and inline_prefixes: - next_prefixes = {} - for prefix, index in inline_prefixes.items(): - index = line.find(prefix, index+1) - if index == -1: - continue - next_prefixes[prefix] = index - if index == 0 or (index > 0 and line[index-1].isspace()): - comment_start = min(comment_start, index) - inline_prefixes = next_prefixes - # strip full line comments - for prefix in self._comment_prefixes: - if line.strip().startswith(prefix): - comment_start = 0 - break - if comment_start == sys.maxsize: - comment_start = None - value = line[:comment_start].strip() - if not value: - if self._empty_lines_in_values: - # add empty line to the value, but only if there was no - # comment on the line - if (comment_start is None and - cursect is not None and - optname and - cursect[optname] is not None): - cursect[optname].append('') # newlines added at join - else: - # empty line marks end of value - indent_level = sys.maxsize - continue - # continuation line? - first_nonspace = self.NONSPACECRE.search(line) - cur_indent_level = first_nonspace.start() if first_nonspace else 0 - if (cursect is not None and optname and - cur_indent_level > indent_level): - cursect[optname].append(value) - # a section header or option header? - else: - indent_level = cur_indent_level - # is it a section header? - mo = self.SECTCRE.match(value) - if mo: - sectname = mo.group('header') - if sectname in self._sections: - if self._strict and sectname in elements_added: - raise DuplicateSectionError(sectname, fpname, - lineno) - cursect = self._sections[sectname] - elements_added.add(sectname) - elif sectname == self.default_section: - cursect = self._defaults + try: + for lineno, line in enumerate(fp, start=1): + comment_start = sys.maxsize + # strip inline comments + inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} + while comment_start == sys.maxsize and inline_prefixes: + next_prefixes = {} + for prefix, index in inline_prefixes.items(): + index = line.find(prefix, index+1) + if index == -1: + continue + next_prefixes[prefix] = index + if index == 0 or (index > 0 and line[index-1].isspace()): + comment_start = min(comment_start, index) + inline_prefixes = next_prefixes + # strip full line comments + for prefix in self._comment_prefixes: + if line.strip().startswith(prefix): + comment_start = 0 + break + if comment_start == sys.maxsize: + comment_start = None + value = line[:comment_start].strip() + if not value: + if self._empty_lines_in_values: + # add empty line to the value, but only if there was no + # comment on the line + if (comment_start is None and + cursect is not None and + optname and + cursect[optname] is not None): + cursect[optname].append('') # newlines added at join else: - cursect = self._dict() - self._sections[sectname] = cursect - self._proxies[sectname] = SectionProxy(self, sectname) - elements_added.add(sectname) - # So sections can't start with a continuation line - optname = None - # no section header in the file? 
- elif cursect is None: - raise MissingSectionHeaderError(fpname, lineno, line) - # an option line? + # empty line marks end of value + indent_level = sys.maxsize + continue + # continuation line? + first_nonspace = self.NONSPACECRE.search(line) + cur_indent_level = first_nonspace.start() if first_nonspace else 0 + if (cursect is not None and optname and + cur_indent_level > indent_level): + cursect[optname].append(value) + # a section header or option header? else: - mo = self._optcre.match(value) + indent_level = cur_indent_level + # is it a section header? + mo = self.SECTCRE.match(value) if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') - if not optname: - e = self._handle_error(e, fpname, lineno, line) - optname = self.optionxform(optname.rstrip()) - if (self._strict and - (sectname, optname) in elements_added): - raise DuplicateOptionError(sectname, optname, - fpname, lineno) - elements_added.add((sectname, optname)) - # This check is fine because the OPTCRE cannot - # match if it would set optval to None - if optval is not None: - optval = optval.strip() - cursect[optname] = [optval] + sectname = mo.group('header') + if sectname in self._sections: + if self._strict and sectname in elements_added: + raise DuplicateSectionError(sectname, fpname, + lineno) + cursect = self._sections[sectname] + elements_added.add(sectname) + elif sectname == self.default_section: + cursect = self._defaults else: - # valueless option handling - cursect[optname] = None + cursect = self._dict() + self._sections[sectname] = cursect + self._proxies[sectname] = SectionProxy(self, sectname) + elements_added.add(sectname) + # So sections can't start with a continuation line + optname = None + # no section header in the file? + elif cursect is None: + raise MissingSectionHeaderError(fpname, lineno, line) + # an option line? else: - # a non-fatal parsing error occurred. set up the - # exception but keep going. the exception will be - # raised at the end of the file and will contain a - # list of all bogus lines - e = self._handle_error(e, fpname, lineno, line) - self._join_multiline_values() + mo = self._optcre.match(value) + if mo: + optname, vi, optval = mo.group('option', 'vi', 'value') + if not optname: + e = self._handle_error(e, fpname, lineno, line) + optname = self.optionxform(optname.rstrip()) + if (self._strict and + (sectname, optname) in elements_added): + raise DuplicateOptionError(sectname, optname, + fpname, lineno) + elements_added.add((sectname, optname)) + # This check is fine because the OPTCRE cannot + # match if it would set optval to None + if optval is not None: + optval = optval.strip() + cursect[optname] = [optval] + else: + # valueless option handling + cursect[optname] = None + else: + # a non-fatal parsing error occurred. set up the + # exception but keep going. 
the exception will be + # raised at the end of the file and will contain a + # list of all bogus lines + e = self._handle_error(e, fpname, lineno, line) + finally: + self._join_multiline_values() # if any parsing errors occurred, raise an exception if e: raise e diff --git a/PythonLib/full/dataclasses.py b/PythonLib/full/dataclasses.py index 3eacba84..12b2dfd1 100644 --- a/PythonLib/full/dataclasses.py +++ b/PythonLib/full/dataclasses.py @@ -1168,8 +1168,10 @@ def _dataclass_setstate(self, state): def _get_slots(cls): match cls.__dict__.get('__slots__'): + # A class which does not define __slots__ at all is equivalent + # to a class defining __slots__ = ('__dict__', '__weakref__') case None: - return + yield from ('__dict__', '__weakref__') case str(slot): yield slot # Slots may be any iterable, but we cannot handle an iterator diff --git a/PythonLib/full/doctest.py b/PythonLib/full/doctest.py index 4630e400..696bb966 100644 --- a/PythonLib/full/doctest.py +++ b/PythonLib/full/doctest.py @@ -1124,7 +1124,7 @@ def _find_lineno(self, obj, source_lines): obj = obj.fget if inspect.isfunction(obj) and getattr(obj, '__doc__', None): # We don't use `docstring` var here, because `obj` can be changed. - obj = obj.__code__ + obj = inspect.unwrap(obj).__code__ if inspect.istraceback(obj): obj = obj.tb_frame if inspect.isframe(obj): obj = obj.f_code if inspect.iscode(obj): @@ -2203,13 +2203,13 @@ def __init__(self, test, optionflags=0, setUp=None, tearDown=None, unittest.TestCase.__init__(self) self._dt_optionflags = optionflags self._dt_checker = checker - self._dt_globs = test.globs.copy() self._dt_test = test self._dt_setUp = setUp self._dt_tearDown = tearDown def setUp(self): test = self._dt_test + self._dt_globs = test.globs.copy() if self._dt_setUp is not None: self._dt_setUp(test) diff --git a/PythonLib/full/email/_header_value_parser.py b/PythonLib/full/email/_header_value_parser.py index 5b653f66..e4a342d4 100644 --- a/PythonLib/full/email/_header_value_parser.py +++ b/PythonLib/full/email/_header_value_parser.py @@ -949,6 +949,7 @@ class _InvalidEwError(errors.HeaderParseError): # up other parse trees. Maybe should have tests for that, too. DOT = ValueTerminal('.', 'dot') ListSeparator = ValueTerminal(',', 'list-separator') +ListSeparator.as_ew_allowed = False RouteComponentMarker = ValueTerminal('@', 'route-component-marker') # @@ -2022,7 +2023,7 @@ def get_address_list(value): address_list.defects.append(errors.InvalidHeaderDefect( "invalid address in address-list")) if value: # Must be a , at this point. - address_list.append(ValueTerminal(',', 'list-separator')) + address_list.append(ListSeparator) value = value[1:] return address_list, value diff --git a/PythonLib/full/email/generator.py b/PythonLib/full/email/generator.py index 7ccbe10e..c8056ad4 100644 --- a/PythonLib/full/email/generator.py +++ b/PythonLib/full/email/generator.py @@ -243,7 +243,7 @@ def _handle_text(self, msg): # existing message. 
msg = deepcopy(msg) del msg['content-transfer-encoding'] - msg.set_payload(payload, charset) + msg.set_payload(msg._payload, charset) payload = msg.get_payload() self._munge_cte = (msg['content-transfer-encoding'], msg['content-type']) diff --git a/PythonLib/full/email/message.py b/PythonLib/full/email/message.py index fe769580..a14cca56 100644 --- a/PythonLib/full/email/message.py +++ b/PythonLib/full/email/message.py @@ -340,7 +340,7 @@ def set_payload(self, payload, charset=None): return if not isinstance(charset, Charset): charset = Charset(charset) - payload = payload.encode(charset.output_charset) + payload = payload.encode(charset.output_charset, 'surrogateescape') if hasattr(payload, 'decode'): self._payload = payload.decode('ascii', 'surrogateescape') else: diff --git a/PythonLib/full/enum.py b/PythonLib/full/enum.py index 1502bfe9..af561383 100644 --- a/PythonLib/full/enum.py +++ b/PythonLib/full/enum.py @@ -166,6 +166,11 @@ def _dedent(text): lines[j] = l[i:] return '\n'.join(lines) +class _not_given: + def __repr__(self): + return('') +_not_given = _not_given() + class _auto_null: def __repr__(self): return '_auto_null' @@ -283,9 +288,10 @@ def __set_name__(self, enum_class, member_name): enum_member._sort_order_ = len(enum_class._member_names_) if Flag is not None and issubclass(enum_class, Flag): - enum_class._flag_mask_ |= value - if _is_single_bit(value): - enum_class._singles_mask_ |= value + if isinstance(value, int): + enum_class._flag_mask_ |= value + if _is_single_bit(value): + enum_class._singles_mask_ |= value enum_class._all_bits_ = 2 ** ((enum_class._flag_mask_).bit_length()) - 1 # If another member with the same value was already defined, the @@ -313,6 +319,7 @@ def __set_name__(self, enum_class, member_name): elif ( Flag is not None and issubclass(enum_class, Flag) + and isinstance(value, int) and _is_single_bit(value) ): # no other instances found, record this member in _member_names_ @@ -457,10 +464,11 @@ def __setitem__(self, key, value): if isinstance(value, auto): single = True value = (value, ) - if type(value) is tuple and any(isinstance(v, auto) for v in value): + if isinstance(value, tuple) and any(isinstance(v, auto) for v in value): # insist on an actual tuple, no subclasses, in keeping with only supporting # top-level auto() usage (not contained in any other data structure) auto_valued = [] + t = type(value) for v in value: if isinstance(v, auto): non_auto_store = False @@ -475,7 +483,12 @@ def __setitem__(self, key, value): if single: value = auto_valued[0] else: - value = tuple(auto_valued) + try: + # accepts iterable as multiple arguments? + value = t(auto_valued) + except TypeError: + # then pass them in singlely + value = t(*auto_valued) self._member_names[key] = None if non_auto_store: self._last_values.append(value) @@ -710,7 +723,7 @@ def __bool__(cls): """ return True - def __call__(cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None): + def __call__(cls, value, names=_not_given, *values, module=None, qualname=None, type=None, start=1, boundary=None): """ Either returns an existing member, or creates a new enum class. 
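As a rough illustration of the two call modes this docstring refers to (member lookup on an existing enum versus creating a new enum class through the functional API), assuming a plain Enum subclass — the example is not taken from this patch:

    # Illustrative sketch of EnumType.__call__'s two modes; Color and Mood
    # are arbitrary example names.
    from enum import Enum

    class Color(Enum):
        RED = 1
        GREEN = 2

    assert Color(1) is Color.RED           # value lookup on an existing enum

    Mood = Enum('Mood', ['HAPPY', 'SAD'])  # functional API creates a new enum class
    assert Mood.HAPPY.value == 1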
@@ -739,18 +752,18 @@ def __call__(cls, value, names=None, *values, module=None, qualname=None, type=N """ if cls._member_map_: # simple value lookup if members exist - if names: + if names is not _not_given: value = (value, names) + values return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type - if names is None and type is None: + if names is _not_given and type is None: # no body? no data-type? possibly wrong usage raise TypeError( f"{cls} has no members; specify `names=()` if you meant to create a new, empty, enum" ) return cls._create_( class_name=value, - names=names, + names=None if names is _not_given else names, module=module, qualname=qualname, type=type, @@ -1528,37 +1541,50 @@ def __str__(self): def __bool__(self): return bool(self._value_) + def _get_value(self, flag): + if isinstance(flag, self.__class__): + return flag._value_ + elif self._member_type_ is not object and isinstance(flag, self._member_type_): + return flag + return NotImplemented + def __or__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with |") value = self._value_ - return self.__class__(value | other) + return self.__class__(value | other_value) def __and__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with &") value = self._value_ - return self.__class__(value & other) + return self.__class__(value & other_value) def __xor__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with ^") value = self._value_ - return self.__class__(value ^ other) + return self.__class__(value ^ other_value) def __invert__(self): + if self._get_value(self) is None: + raise TypeError(f"'{self}' cannot be inverted") + if self._inverted_ is None: if self._boundary_ in (EJECT, KEEP): self._inverted_ = self.__class__(~self._value_) @@ -1625,7 +1651,7 @@ def global_flag_repr(self): cls_name = self.__class__.__name__ if self._name_ is None: return "%s.%s(%r)" % (module, cls_name, self._value_) - if _is_single_bit(self): + if _is_single_bit(self._value_): return '%s.%s' % (module, self._name_) if self._boundary_ is not FlagBoundary.KEEP: return '|'.join(['%s.%s' % (module, name) for name in self.name.split('|')]) diff --git a/PythonLib/full/glob.py b/PythonLib/full/glob.py index a7256422..50beef37 100644 --- a/PythonLib/full/glob.py +++ b/PythonLib/full/glob.py @@ -132,7 +132,8 @@ def glob1(dirname, pattern): def _glob2(dirname, pattern, dir_fd, dironly, include_hidden=False): assert _isrecursive(pattern) - yield pattern[:0] + 
if not dirname or _isdir(dirname, dir_fd): + yield pattern[:0] yield from _rlistdir(dirname, dir_fd, dironly, include_hidden=include_hidden) diff --git a/PythonLib/full/http/client.py b/PythonLib/full/http/client.py index 5eebfcca..a353716a 100644 --- a/PythonLib/full/http/client.py +++ b/PythonLib/full/http/client.py @@ -936,17 +936,23 @@ def _get_hostport(self, host, port): host = host[:i] else: port = self.default_port - if host and host[0] == '[' and host[-1] == ']': - host = host[1:-1] + if host and host[0] == '[' and host[-1] == ']': + host = host[1:-1] return (host, port) def set_debuglevel(self, level): self.debuglevel = level + def _wrap_ipv6(self, ip): + if b':' in ip and ip[0] != b'['[0]: + return b"[" + ip + b"]" + return ip + def _tunnel(self): connect = b"CONNECT %s:%d %s\r\n" % ( - self._tunnel_host.encode("idna"), self._tunnel_port, + self._wrap_ipv6(self._tunnel_host.encode("idna")), + self._tunnel_port, self._http_vsn_str.encode("ascii")) headers = [connect] for header, value in self._tunnel_headers.items(): @@ -1221,9 +1227,8 @@ def putrequest(self, method, url, skip_host=False, # As per RFC 273, IPv6 address should be wrapped with [] # when used as Host header - + host_enc = self._wrap_ipv6(host_enc) if ":" in host: - host_enc = b'[' + host_enc + b']' host_enc = _strip_ipv6_iface(host_enc) if port == self.default_port: diff --git a/PythonLib/full/idlelib/editor.py b/PythonLib/full/idlelib/editor.py index 8ee8eba6..7bfa0932 100644 --- a/PythonLib/full/idlelib/editor.py +++ b/PythonLib/full/idlelib/editor.py @@ -1044,7 +1044,9 @@ def open_recent_file(fn_closure=file_name): def saved_change_hook(self): short = self.short_title() long = self.long_title() - if short and long: + if short and long and not macosx.isCocoaTk(): + # Don't use both values on macOS because + # that doesn't match platform conventions. 
title = short + " - " + long + _py_version elif short: title = short @@ -1059,6 +1061,13 @@ def saved_change_hook(self): self.top.wm_title(title) self.top.wm_iconname(icon) + if macosx.isCocoaTk(): + # Add a proxy icon to the window title + self.top.wm_attributes("-titlepath", long) + + # Maintain the modification status for the window + self.top.wm_attributes("-modified", not self.get_saved()) + def get_saved(self): return self.undo.get_saved() diff --git a/PythonLib/full/importlib/_bootstrap_external.py b/PythonLib/full/importlib/_bootstrap_external.py index e6f75a9f..61dafc0f 100644 --- a/PythonLib/full/importlib/_bootstrap_external.py +++ b/PythonLib/full/importlib/_bootstrap_external.py @@ -1450,6 +1450,9 @@ def invalidate_caches(): # https://bugs.python.org/issue45703 _NamespacePath._epoch += 1 + from importlib.metadata import MetadataPathFinder + MetadataPathFinder.invalidate_caches() + @staticmethod def _path_hooks(path): """Search sys.path_hooks for a finder for 'path'.""" diff --git a/PythonLib/full/importlib/metadata/__init__.py b/PythonLib/full/importlib/metadata/__init__.py index 82e0ce1b..54156e93 100644 --- a/PythonLib/full/importlib/metadata/__init__.py +++ b/PythonLib/full/importlib/metadata/__init__.py @@ -795,6 +795,7 @@ def _search_paths(cls, name, paths): path.search(prepared) for path in map(FastPath, paths) ) + @classmethod def invalidate_caches(cls): FastPath.__new__.cache_clear() diff --git a/PythonLib/full/importlib/resources/simple.py b/PythonLib/full/importlib/resources/simple.py index 7770c922..96f117fe 100644 --- a/PythonLib/full/importlib/resources/simple.py +++ b/PythonLib/full/importlib/resources/simple.py @@ -88,7 +88,7 @@ def is_dir(self): def open(self, mode='r', *args, **kwargs): stream = self.parent.reader.open_binary(self.name) if 'b' not in mode: - stream = io.TextIOWrapper(*args, **kwargs) + stream = io.TextIOWrapper(stream, *args, **kwargs) return stream def joinpath(self, name): diff --git a/PythonLib/full/importlib/util.py b/PythonLib/full/importlib/util.py index f4d6e823..3743e6aa 100644 --- a/PythonLib/full/importlib/util.py +++ b/PythonLib/full/importlib/util.py @@ -13,6 +13,7 @@ import _imp import sys +import threading import types @@ -145,7 +146,7 @@ class _incompatible_extension_module_restrictions: You can get the same effect as this function by implementing the basic interface of multi-phase init (PEP 489) and lying about - support for mulitple interpreters (or per-interpreter GIL). + support for multiple interpreters (or per-interpreter GIL). """ def __init__(self, *, disable_check): @@ -171,36 +172,53 @@ class _LazyModule(types.ModuleType): def __getattribute__(self, attr): """Trigger the load of the module and return the attribute.""" - # All module metadata must be garnered from __spec__ in order to avoid - # using mutated values. - # Stop triggering this method. - self.__class__ = types.ModuleType - # Get the original name to make sure no object substitution occurred - # in sys.modules. - original_name = self.__spec__.name - # Figure out exactly what attributes were mutated between the creation - # of the module and now. - attrs_then = self.__spec__.loader_state['__dict__'] - attrs_now = self.__dict__ - attrs_updated = {} - for key, value in attrs_now.items(): - # Code that set the attribute may have kept a reference to the - # assigned object, making identity more important than equality. 
- if key not in attrs_then: - attrs_updated[key] = value - elif id(attrs_now[key]) != id(attrs_then[key]): - attrs_updated[key] = value - self.__spec__.loader.exec_module(self) - # If exec_module() was used directly there is no guarantee the module - # object was put into sys.modules. - if original_name in sys.modules: - if id(self) != id(sys.modules[original_name]): - raise ValueError(f"module object for {original_name!r} " - "substituted in sys.modules during a lazy " - "load") - # Update after loading since that's what would happen in an eager - # loading situation. - self.__dict__.update(attrs_updated) + __spec__ = object.__getattribute__(self, '__spec__') + loader_state = __spec__.loader_state + with loader_state['lock']: + # Only the first thread to get the lock should trigger the load + # and reset the module's class. The rest can now getattr(). + if object.__getattribute__(self, '__class__') is _LazyModule: + # Reentrant calls from the same thread must be allowed to proceed without + # triggering the load again. + # exec_module() and self-referential imports are the primary ways this can + # happen, but in any case we must return something to avoid deadlock. + if loader_state['is_loading']: + return object.__getattribute__(self, attr) + loader_state['is_loading'] = True + + __dict__ = object.__getattribute__(self, '__dict__') + + # All module metadata must be gathered from __spec__ in order to avoid + # using mutated values. + # Get the original name to make sure no object substitution occurred + # in sys.modules. + original_name = __spec__.name + # Figure out exactly what attributes were mutated between the creation + # of the module and now. + attrs_then = loader_state['__dict__'] + attrs_now = __dict__ + attrs_updated = {} + for key, value in attrs_now.items(): + # Code that set an attribute may have kept a reference to the + # assigned object, making identity more important than equality. + if key not in attrs_then: + attrs_updated[key] = value + elif id(attrs_now[key]) != id(attrs_then[key]): + attrs_updated[key] = value + __spec__.loader.exec_module(self) + # If exec_module() was used directly there is no guarantee the module + # object was put into sys.modules. + if original_name in sys.modules: + if id(self) != id(sys.modules[original_name]): + raise ValueError(f"module object for {original_name!r} " + "substituted in sys.modules during a lazy " + "load") + # Update after loading since that's what would happen in an eager + # loading situation. + __dict__.update(attrs_updated) + # Finally, stop triggering this method. + self.__class__ = types.ModuleType + return getattr(self, attr) def __delattr__(self, attr): @@ -244,5 +262,7 @@ def exec_module(self, module): loader_state = {} loader_state['__dict__'] = module.__dict__.copy() loader_state['__class__'] = module.__class__ + loader_state['lock'] = threading.RLock() + loader_state['is_loading'] = False module.__spec__.loader_state = loader_state module.__class__ = _LazyModule diff --git a/PythonLib/full/inspect.py b/PythonLib/full/inspect.py index a550202b..819ce940 100644 --- a/PythonLib/full/inspect.py +++ b/PythonLib/full/inspect.py @@ -760,18 +760,14 @@ def unwrap(func, *, stop=None): :exc:`ValueError` is raised if a cycle is encountered. 
""" - if stop is None: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') - else: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') and not stop(f) f = func # remember the original func for error reporting # Memoise by id to tolerate non-hashable objects, but store objects to # ensure they aren't destroyed, which would allow their IDs to be reused. memo = {id(f): f} recursion_limit = sys.getrecursionlimit() - while _is_wrapper(func): + while not isinstance(func, type) and hasattr(func, '__wrapped__'): + if stop is not None and stop(func): + break func = func.__wrapped__ id_func = id(func) if (id_func in memo) or (len(memo) >= recursion_limit): @@ -2007,15 +2003,17 @@ def _signature_get_user_defined_method(cls, method_name): named ``method_name`` and returns it only if it is a pure python function. """ - try: - meth = getattr(cls, method_name) - except AttributeError: - return + if method_name == '__new__': + meth = getattr(cls, method_name, None) else: - if not isinstance(meth, _NonUserDefinedCallables): - # Once '__signature__' will be added to 'C'-level - # callables, this check won't be necessary - return meth + meth = getattr_static(cls, method_name, None) + if meth is None or isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return None + if method_name != '__new__': + meth = _descriptor_get(meth, cls) + return meth def _signature_get_partial(wrapped_sig, partial, extra_args=()): @@ -2460,6 +2458,15 @@ def _signature_from_function(cls, func, skip_bound_arg=True, __validate_parameters__=is_duck_function) +def _descriptor_get(descriptor, obj): + if isclass(descriptor): + return descriptor + get = getattr(type(descriptor), '__get__', _sentinel) + if get is _sentinel: + return descriptor + return get(descriptor, obj, type(obj)) + + def _signature_from_callable(obj, *, follow_wrapper_chains=True, skip_bound_arg=True, @@ -2568,7 +2575,6 @@ def _signature_from_callable(obj, *, wrapped_sig = _get_signature_of(obj.func) return _signature_get_partial(wrapped_sig, obj) - sig = None if isinstance(obj, type): # obj is a class or a metaclass @@ -2576,88 +2582,65 @@ def _signature_from_callable(obj, *, # in its metaclass call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: - sig = _get_signature_of(call) - else: - factory_method = None - new = _signature_get_user_defined_method(obj, '__new__') - init = _signature_get_user_defined_method(obj, '__init__') - - # Go through the MRO and see if any class has user-defined - # pure Python __new__ or __init__ method - for base in obj.__mro__: - # Now we check if the 'obj' class has an own '__new__' method - if new is not None and '__new__' in base.__dict__: - factory_method = new - break - # or an own '__init__' method - elif init is not None and '__init__' in base.__dict__: - factory_method = init - break + return _get_signature_of(call) - if factory_method is not None: - sig = _get_signature_of(factory_method) - - if sig is None: - # At this point we know, that `obj` is a class, with no user- - # defined '__init__', '__new__', or class-level '__call__' - - for base in obj.__mro__[:-1]: - # Since '__text_signature__' is implemented as a - # descriptor that extracts text signature from the - # class docstring, if 'obj' is derived from a builtin - # class, its own '__text_signature__' may be 'None'. 
- # Therefore, we go through the MRO (except the last - # class in there, which is 'object') to find the first - # class with non-empty text signature. - try: - text_sig = base.__text_signature__ - except AttributeError: - pass - else: - if text_sig: - # If 'base' class has a __text_signature__ attribute: - # return a signature based on it - return _signature_fromstr(sigcls, base, text_sig) - - # No '__text_signature__' was found for the 'obj' class. - # Last option is to check if its '__init__' is - # object.__init__ or type.__init__. - if type not in obj.__mro__: - # We have a class (not metaclass), but no user-defined - # __init__ or __new__ for it - if (obj.__init__ is object.__init__ and - obj.__new__ is object.__new__): - # Return a signature of 'object' builtin. - return sigcls.from_callable(object) - else: - raise ValueError( - 'no signature found for builtin type {!r}'.format(obj)) + new = _signature_get_user_defined_method(obj, '__new__') + init = _signature_get_user_defined_method(obj, '__init__') - elif not isinstance(obj, _NonUserDefinedCallables): - # An object with __call__ - # We also check that the 'obj' is not an instance of - # types.WrapperDescriptorType or types.MethodWrapperType to avoid - # infinite recursion (and even potential segfault) - call = _signature_get_user_defined_method(type(obj), '__call__') - if call is not None: + # Go through the MRO and see if any class has user-defined + # pure Python __new__ or __init__ method + for base in obj.__mro__: + # Now we check if the 'obj' class has an own '__new__' method + if new is not None and '__new__' in base.__dict__: + sig = _get_signature_of(new) + if skip_bound_arg: + sig = _signature_bound_method(sig) + return sig + # or an own '__init__' method + elif init is not None and '__init__' in base.__dict__: + return _get_signature_of(init) + + # At this point we know, that `obj` is a class, with no user- + # defined '__init__', '__new__', or class-level '__call__' + + for base in obj.__mro__[:-1]: + # Since '__text_signature__' is implemented as a + # descriptor that extracts text signature from the + # class docstring, if 'obj' is derived from a builtin + # class, its own '__text_signature__' may be 'None'. + # Therefore, we go through the MRO (except the last + # class in there, which is 'object') to find the first + # class with non-empty text signature. try: - sig = _get_signature_of(call) - except ValueError as ex: - msg = 'no signature found for {!r}'.format(obj) - raise ValueError(msg) from ex - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - if skip_bound_arg: - return _signature_bound_method(sig) - else: - return sig + text_sig = base.__text_signature__ + except AttributeError: + pass + else: + if text_sig: + # If 'base' class has a __text_signature__ attribute: + # return a signature based on it + return _signature_fromstr(sigcls, base, text_sig) + + # No '__text_signature__' was found for the 'obj' class. + # Last option is to check if its '__init__' is + # object.__init__ or type.__init__. + if type not in obj.__mro__: + # We have a class (not metaclass), but no user-defined + # __init__ or __new__ for it + if (obj.__init__ is object.__init__ and + obj.__new__ is object.__new__): + # Return a signature of 'object' builtin. 
+ return sigcls.from_callable(object) + else: + raise ValueError( + 'no signature found for builtin type {!r}'.format(obj)) - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = 'no signature found for builtin function {!r}'.format(obj) - raise ValueError(msg) + else: + # An object with __call__ + call = getattr_static(type(obj), '__call__', None) + if call is not None: + call = _descriptor_get(call, obj) + return _get_signature_of(call) raise ValueError('callable {!r} is not supported by signature'.format(obj)) diff --git a/PythonLib/full/json/encoder.py b/PythonLib/full/json/encoder.py index 45f54774..597849ec 100644 --- a/PythonLib/full/json/encoder.py +++ b/PythonLib/full/json/encoder.py @@ -174,7 +174,7 @@ def default(self, o): else: return list(iterable) # Let the base class default method raise the TypeError - return JSONEncoder.default(self, o) + return super().default(o) """ raise TypeError(f'Object of type {o.__class__.__name__} ' diff --git a/PythonLib/full/linecache.py b/PythonLib/full/linecache.py index 97644a8e..ed4c9700 100644 --- a/PythonLib/full/linecache.py +++ b/PythonLib/full/linecache.py @@ -166,13 +166,11 @@ def lazycache(filename, module_globals): return False # Try for a __loader__, if available if module_globals and '__name__' in module_globals: - name = module_globals['__name__'] - if (loader := module_globals.get('__loader__')) is None: - if spec := module_globals.get('__spec__'): - try: - loader = spec.loader - except AttributeError: - pass + spec = module_globals.get('__spec__') + name = getattr(spec, 'name', None) or module_globals['__name__'] + loader = getattr(spec, 'loader', None) + if loader is None: + loader = module_globals.get('__loader__') get_source = getattr(loader, 'get_source', None) if name and get_source: diff --git a/PythonLib/full/logging/__init__.py b/PythonLib/full/logging/__init__.py index 056380fb..22d31983 100644 --- a/PythonLib/full/logging/__init__.py +++ b/PythonLib/full/logging/__init__.py @@ -1521,7 +1521,7 @@ def debug(self, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.debug("Houston, we have a %s", "thorny problem", exc_info=1) + logger.debug("Houston, we have a %s", "thorny problem", exc_info=True) """ if self.isEnabledFor(DEBUG): self._log(DEBUG, msg, args, **kwargs) @@ -1533,7 +1533,7 @@ def info(self, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.info("Houston, we have a %s", "notable problem", exc_info=1) + logger.info("Houston, we have a %s", "notable problem", exc_info=True) """ if self.isEnabledFor(INFO): self._log(INFO, msg, args, **kwargs) @@ -1545,7 +1545,7 @@ def warning(self, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1) + logger.warning("Houston, we have a %s", "bit of a problem", exc_info=True) """ if self.isEnabledFor(WARNING): self._log(WARNING, msg, args, **kwargs) @@ -1562,7 +1562,7 @@ def error(self, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. 
- logger.error("Houston, we have a %s", "major problem", exc_info=1) + logger.error("Houston, we have a %s", "major problem", exc_info=True) """ if self.isEnabledFor(ERROR): self._log(ERROR, msg, args, **kwargs) @@ -1580,7 +1580,7 @@ def critical(self, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.critical("Houston, we have a %s", "major disaster", exc_info=1) + logger.critical("Houston, we have a %s", "major disaster", exc_info=True) """ if self.isEnabledFor(CRITICAL): self._log(CRITICAL, msg, args, **kwargs) @@ -1598,7 +1598,7 @@ def log(self, level, msg, *args, **kwargs): To pass exception information, use the keyword argument exc_info with a true value, e.g. - logger.log(level, "We have a %s", "mysterious problem", exc_info=1) + logger.log(level, "We have a %s", "mysterious problem", exc_info=True) """ if not isinstance(level, int): if raiseExceptions: @@ -1985,18 +1985,11 @@ def hasHandlers(self): """ return self.logger.hasHandlers() - def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False): + def _log(self, level, msg, args, **kwargs): """ Low-level log implementation, proxied to allow nested logger adapters. """ - return self.logger._log( - level, - msg, - args, - exc_info=exc_info, - extra=extra, - stack_info=stack_info, - ) + return self.logger._log(level, msg, args, **kwargs) @property def manager(self): @@ -2056,7 +2049,7 @@ def basicConfig(**kwargs): that this argument is incompatible with 'filename' - if both are present, 'stream' is ignored. handlers If specified, this should be an iterable of already created - handlers, which will be added to the root handler. Any handler + handlers, which will be added to the root logger. Any handler in the list which does not have a formatter assigned will be assigned the formatter created in this function. force If this keyword is specified as true, any existing handlers diff --git a/PythonLib/full/logging/handlers.py b/PythonLib/full/logging/handlers.py index 6e88184b..1ae6bb84 100644 --- a/PythonLib/full/logging/handlers.py +++ b/PythonLib/full/logging/handlers.py @@ -232,19 +232,19 @@ def __init__(self, filename, when='h', interval=1, backupCount=0, if self.when == 'S': self.interval = 1 # one second self.suffix = "%Y-%m-%d_%H-%M-%S" - self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}(\.\w+)?$" + extMatch = r"(? (plen + 1) and not fileName[plen+1].isdigit()): - continue - - if fileName[:plen] == prefix: - suffix = fileName[plen:] - # See bpo-45628: The date/time suffix could be anywhere in the - # filename - parts = suffix.split('.') - for part in parts: - if self.extMatch.match(part): + if self.namer is None: + prefix = baseName + '.' + plen = len(prefix) + for fileName in fileNames: + if fileName[:plen] == prefix: + suffix = fileName[plen:] + if self.extMatch.fullmatch(suffix): + result.append(os.path.join(dirName, fileName)) + else: + for fileName in fileNames: + # Our files could be just about anything after custom naming, + # but they should contain the datetime suffix. + # Try to find the datetime suffix in the file name and verify + # that the file name can be generated by this handler. + m = self.extMatch.search(fileName) + while m: + dfn = self.namer(self.baseFilename + "." 
+ m[0]) + if os.path.basename(dfn) == fileName: result.append(os.path.join(dirName, fileName)) break + m = self.extMatch.search(fileName, m.start() + 1) + if len(result) < self.backupCount: result = [] else: @@ -410,17 +416,14 @@ def doRollover(self): then we have to get a list of matching filenames, sort them and remove the one with the oldest suffix. """ - if self.stream: - self.stream.close() - self.stream = None # get the time that this sequence started at and make it a TimeTuple currentTime = int(time.time()) - dstNow = time.localtime(currentTime)[-1] t = self.rolloverAt - self.interval if self.utc: timeTuple = time.gmtime(t) else: timeTuple = time.localtime(t) + dstNow = time.localtime(currentTime)[-1] dstThen = timeTuple[-1] if dstNow != dstThen: if dstNow: @@ -431,26 +434,19 @@ def doRollover(self): dfn = self.rotation_filename(self.baseFilename + "." + time.strftime(self.suffix, timeTuple)) if os.path.exists(dfn): - os.remove(dfn) + # Already rolled over. + return + + if self.stream: + self.stream.close() + self.stream = None self.rotate(self.baseFilename, dfn) if self.backupCount > 0: for s in self.getFilesToDelete(): os.remove(s) if not self.delay: self.stream = self._open() - newRolloverAt = self.computeRollover(currentTime) - while newRolloverAt <= currentTime: - newRolloverAt = newRolloverAt + self.interval - #If DST changes and midnight or weekly rollover, adjust for this. - if (self.when == 'MIDNIGHT' or self.when.startswith('W')) and not self.utc: - dstAtRollover = time.localtime(newRolloverAt)[-1] - if dstNow != dstAtRollover: - if not dstNow: # DST kicks in before next rollover, so we need to deduct an hour - addend = -3600 - else: # DST bows out before next rollover, so we need to add an hour - addend = 3600 - newRolloverAt += addend - self.rolloverAt = newRolloverAt + self.rolloverAt = self.computeRollover(currentTime) class WatchedFileHandler(logging.FileHandler): """ diff --git a/PythonLib/full/mailbox.py b/PythonLib/full/mailbox.py index c8b3444f..c7f060ce 100644 --- a/PythonLib/full/mailbox.py +++ b/PythonLib/full/mailbox.py @@ -698,9 +698,13 @@ def flush(self): _sync_close(new_file) # self._file is about to get replaced, so no need to sync. self._file.close() - # Make sure the new file's mode is the same as the old file's - mode = os.stat(self._path).st_mode - os.chmod(new_file.name, mode) + # Make sure the new file's mode and owner are the same as the old file's + info = os.stat(self._path) + os.chmod(new_file.name, info.st_mode) + try: + os.chown(new_file.name, info.st_uid, info.st_gid) + except (AttributeError, OSError): + pass try: os.rename(new_file.name, self._path) except FileExistsError: diff --git a/PythonLib/full/mimetypes.py b/PythonLib/full/mimetypes.py index 37228de4..3cc027aa 100644 --- a/PythonLib/full/mimetypes.py +++ b/PythonLib/full/mimetypes.py @@ -120,7 +120,13 @@ def guess_type(self, url, strict=True): but non-standard types. 
""" url = os.fspath(url) - scheme, url = urllib.parse._splittype(url) + p = urllib.parse.urlparse(url) + if p.scheme and len(p.scheme) > 1: + scheme = p.scheme + url = p.path + else: + scheme = None + url = os.path.splitdrive(url)[1] if scheme == 'data': # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data diff --git a/PythonLib/full/multiprocessing/connection.py b/PythonLib/full/multiprocessing/connection.py index dbbf106f..d0582e3c 100644 --- a/PythonLib/full/multiprocessing/connection.py +++ b/PythonLib/full/multiprocessing/connection.py @@ -476,8 +476,9 @@ def accept(self): ''' if self._listener is None: raise OSError('listener is closed') + c = self._listener.accept() - if self._authkey: + if self._authkey is not None: deliver_challenge(c, self._authkey) answer_challenge(c, self._authkey) return c diff --git a/PythonLib/full/os.py b/PythonLib/full/os.py index 598c9e50..7ee7d695 100644 --- a/PythonLib/full/os.py +++ b/PythonLib/full/os.py @@ -473,7 +473,7 @@ def fwalk(top=".", topdown=True, onerror=None, *, follow_symlinks=False, dir_fd= # lstat()/open()/fstat() trick. if not follow_symlinks: orig_st = stat(top, follow_symlinks=False, dir_fd=dir_fd) - topfd = open(top, O_RDONLY, dir_fd=dir_fd) + topfd = open(top, O_RDONLY | O_NONBLOCK, dir_fd=dir_fd) try: if (follow_symlinks or (st.S_ISDIR(orig_st.st_mode) and path.samestat(orig_st, stat(topfd)))): @@ -522,7 +522,7 @@ def _fwalk(topfd, toppath, isbytes, topdown, onerror, follow_symlinks): assert entries is not None name, entry = name orig_st = entry.stat(follow_symlinks=False) - dirfd = open(name, O_RDONLY, dir_fd=topfd) + dirfd = open(name, O_RDONLY | O_NONBLOCK, dir_fd=topfd) except OSError as err: if onerror is not None: onerror(err) diff --git a/PythonLib/full/pdb.py b/PythonLib/full/pdb.py index a838a26b..225c9f25 100644 --- a/PythonLib/full/pdb.py +++ b/PythonLib/full/pdb.py @@ -154,6 +154,7 @@ def namespace(self): __name__='__main__', __file__=self, __builtins__=__builtins__, + __spec__=None, ) @property @@ -298,26 +299,13 @@ def setup(self, f, tb): # cache it here to ensure that modifications are not overwritten. 
self.curframe_locals = self.curframe.f_locals self.set_convenience_variable(self.curframe, '_frame', self.curframe) - return self.execRcLines() - # Can be executed earlier than 'setup' if desired - def execRcLines(self): - if not self.rcLines: - return - # local copy because of recursion - rcLines = self.rcLines - rcLines.reverse() - # execute every line only once - self.rcLines = [] - while rcLines: - line = rcLines.pop().strip() - if line and line[0] != '#': - if self.onecmd(line): - # if onecmd returns True, the command wants to exit - # from the interaction, save leftover rc lines - # to execute before next interaction - self.rcLines += reversed(rcLines) - return True + if self.rcLines: + self.cmdqueue = [ + line for line in self.rcLines + if line.strip() and not line.strip().startswith("#") + ] + self.rcLines = [] # Override Bdb methods @@ -430,12 +418,10 @@ def interaction(self, frame, traceback): pass else: Pdb._previous_sigint_handler = None - if self.setup(frame, traceback): - # no interaction desired at this time (happens if .pdbrc contains - # a command like "continue") - self.forget() - return - self.print_stack_entry(self.stack[self.curindex]) + self.setup(frame, traceback) + # if we have more commands to process, do not show the stack entry + if not self.cmdqueue: + self.print_stack_entry(self.stack[self.curindex]) self._cmdloop() self.forget() @@ -522,7 +508,7 @@ def precmd(self, line): if marker >= 0: # queue up everything after marker next = line[marker+2:].lstrip() - self.cmdqueue.append(next) + self.cmdqueue.insert(0, next) line = line[:marker].rstrip() # Replace all the convenience variables @@ -546,13 +532,12 @@ def handle_command_def(self, line): """Handles one command line during command list definition.""" cmd, arg, line = self.parseline(line) if not cmd: - return + return False if cmd == 'silent': self.commands_silent[self.commands_bnum] = True - return # continue to handle other cmd def in the cmd list + return False # continue to handle other cmd def in the cmd list elif cmd == 'end': - self.cmdqueue = [] - return 1 # end of cmd list + return True # end of cmd list cmdlist = self.commands[self.commands_bnum] if arg: cmdlist.append(cmd+' '+arg) @@ -566,9 +551,8 @@ def handle_command_def(self, line): # one of the resuming commands if func.__name__ in self.commands_resuming: self.commands_doprompt[self.commands_bnum] = False - self.cmdqueue = [] - return 1 - return + return True + return False # interface abstraction functions diff --git a/PythonLib/full/pickletools.py b/PythonLib/full/pickletools.py index 95a77aeb..51ee4a7a 100644 --- a/PythonLib/full/pickletools.py +++ b/PythonLib/full/pickletools.py @@ -1253,7 +1253,7 @@ def __init__(self, name, code, arg, stack_before=[], stack_after=[pyint], proto=2, - doc="""Long integer using found-byte length. + doc="""Long integer using four-byte length. A more efficient encoding of a Python long; the long4 encoding says it all."""), diff --git a/PythonLib/full/pydoc.py b/PythonLib/full/pydoc.py index 84bbf588..9a881239 100644 --- a/PythonLib/full/pydoc.py +++ b/PythonLib/full/pydoc.py @@ -204,6 +204,19 @@ def classname(object, modname): name = object.__module__ + '.' + name return name +def parentname(object, modname): + """Get a name of the enclosing class (qualified it with a module name + if necessary) or module.""" + if '.' in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + return object.__module__ + '.' 
+ name + else: + return name + else: + if object.__module__ != modname: + return object.__module__ + def isdata(object): """Check if an object is of a type that probably means it's data.""" return not (inspect.ismodule(object) or inspect.isclass(object) or @@ -298,13 +311,15 @@ def visiblename(name, all=None, obj=None): return not name.startswith('_') def classify_class_attrs(object): - """Wrap inspect.classify_class_attrs, with fixup for data descriptors.""" + """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods.""" results = [] for (name, kind, cls, value) in inspect.classify_class_attrs(object): if inspect.isdatadescriptor(value): kind = 'data descriptor' if isinstance(value, property) and value.fset is None: kind = 'readonly property' + elif kind == 'method' and _is_bound_method(value): + kind = 'static method' results.append((name, kind, cls, value)) return results @@ -514,7 +529,7 @@ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')): '_thread', 'zipimport') or (file.startswith(basedir) and not file.startswith(os.path.join(basedir, 'site-packages')))) and - object.__name__ not in ('xml.etree', 'test.pydoc_mod')): + object.__name__ not in ('xml.etree', 'test.test_pydoc.pydoc_mod')): if docloc.startswith(("http://", "https://")): docloc = "{}/{}.html".format(docloc.rstrip("/"), object.__name__.lower()) else: @@ -658,6 +673,25 @@ def classlink(self, object, modname): module.__name__, name, classname(object, modname)) return classname(object, modname) + def parentlink(self, object, modname): + """Make a link for the enclosing class or module.""" + link = None + name, module = object.__name__, sys.modules.get(object.__module__) + if hasattr(module, name) and getattr(module, name) is object: + if '.' 
in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + link = '%s.html#%s' % (module.__name__, name) + else: + link = '#%s' % name + else: + if object.__module__ != modname: + link = '%s.html' % module.__name__ + if link: + return '%s' % (link, parentname(object, modname)) + else: + return parentname(object, modname) + def modulelink(self, object): """Make a link for a module.""" return '%s' % (object.__name__, object.__name__) @@ -902,7 +936,7 @@ def spill(msg, attrs, predicate): push(self.docdata(value, name, mod)) else: push(self.document(value, name, mod, - funcs, classes, mdict, object)) + funcs, classes, mdict, object, homecls)) push('\n') return attrs @@ -1025,24 +1059,44 @@ def formatvalue(self, object): return self.grey('=' + self.repr(object)) def docroutine(self, object, name=None, mod=None, - funcs={}, classes={}, methods={}, cl=None): + funcs={}, classes={}, methods={}, cl=None, homecls=None): """Produce HTML documentation for a function or method object.""" realname = object.__name__ name = name or realname - anchor = (cl and cl.__name__ or '') + '-' + name + if homecls is None: + homecls = cl + anchor = ('' if cl is None else cl.__name__) + '-' + name note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + self.classlink(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % self.classlink(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % self.classlink( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % self.classlink(imclass,mod) + note = ' method of %s instance' % self.classlink( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % self.classlink(objclass, mod) + elif objclass is not homecls: + note = ' from ' + self.classlink(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = self.parentlink(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1053,10 +1107,13 @@ def docroutine(self, object, name=None, mod=None, if name == realname: title = '%s' % (anchor, realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): reallink = '%s' % ( cl.__name__ + '-' + realname, realname) - skipdocs = 1 + skipdocs = True + if note.startswith(' from '): + note = '' else: reallink = realname title = '%s = %s' % ( @@ -1074,7 +1131,8 @@ def docroutine(self, object, name=None, mod=None, # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. - argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' @@ -1089,7 +1147,7 @@ def docroutine(self, object, name=None, mod=None, doc = doc and '
<dd><span class="code">%s</span></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>
\n' % (decl, doc) - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce html documentation for a data descriptor.""" results = [] push = results.append @@ -1200,7 +1258,7 @@ def formattree(self, tree, modname, parent=None, prefix=''): entry, modname, c, prefix + ' ') return result - def docmodule(self, object, name=None, mod=None): + def docmodule(self, object, name=None, mod=None, *ignored): """Produce text documentation for a given module object.""" name = object.__name__ # ignore the passed-in name synop, desc = splitdoc(getdoc(object)) @@ -1384,7 +1442,7 @@ def spill(msg, attrs, predicate): push(self.docdata(value, name, mod)) else: push(self.document(value, - name, mod, object)) + name, mod, object, homecls)) return attrs def spilldescriptors(msg, attrs, predicate): @@ -1459,23 +1517,43 @@ def formatvalue(self, object): """Format an argument default value as text.""" return '=' + self.repr(object) - def docroutine(self, object, name=None, mod=None, cl=None): + def docroutine(self, object, name=None, mod=None, cl=None, homecls=None): """Produce text documentation for a function or method object.""" realname = object.__name__ name = name or realname + if homecls is None: + homecls = cl note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + classname(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % classname(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % classname( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % classname(imclass,mod) + note = ' method of %s instance' % classname( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % classname(objclass, mod) + elif objclass is not homecls: + note = ' from ' + classname(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = parentname(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1486,8 +1564,11 @@ def docroutine(self, object, name=None, mod=None, cl=None): if name == realname: title = self.bold(realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: - skipdocs = 1 + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): + skipdocs = True + if note.startswith(' from '): + note = '' title = self.bold(name) + ' = ' + realname argspec = None @@ -1503,7 +1584,8 @@ def docroutine(self, object, name=None, mod=None, cl=None): # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. 
- argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' decl = asyncqualifier + title + argspec + note @@ -1514,7 +1596,7 @@ def docroutine(self, object, name=None, mod=None, cl=None): doc = getdoc(object) or '' return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n') - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce text documentation for a data descriptor.""" results = [] push = results.append @@ -1530,7 +1612,8 @@ def docdata(self, object, name=None, mod=None, cl=None): docproperty = docdata - def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None): + def docother(self, object, name=None, mod=None, parent=None, *ignored, + maxlen=None, doc=None): """Produce text documentation for a data object.""" repr = self.repr(object) if maxlen: @@ -2410,6 +2493,7 @@ def __init__(self, urlhandler, host, port): threading.Thread.__init__(self) self.serving = False self.error = None + self.docserver = None def run(self): """Start the server.""" @@ -2442,9 +2526,9 @@ def stop(self): thread = ServerThread(urlhandler, hostname, port) thread.start() - # Wait until thread.serving is True to make sure we are - # really up before returning. - while not thread.error and not thread.serving: + # Wait until thread.serving is True and thread.docserver is set + # to make sure we are really up before returning. + while not thread.error and not (thread.serving and thread.docserver): time.sleep(.01) return thread diff --git a/PythonLib/full/pydoc_data/topics.py b/PythonLib/full/pydoc_data/topics.py index e97e543a..0eb0e7dc 100644 --- a/PythonLib/full/pydoc_data/topics.py +++ b/PythonLib/full/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Feb 6 21:16:37 2024 +# Autogenerated by Sphinx on Tue Apr 9 09:17:41 2024 # as part of the release process. topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -722,9 +722,9 @@ '\n' 'object.__dir__(self)\n' '\n' - ' Called when "dir()" is called on the object. A ' - 'sequence must be\n' - ' returned. "dir()" converts the returned sequence to a ' + ' Called when "dir()" is called on the object. An ' + 'iterable must be\n' + ' returned. "dir()" converts the returned iterable to a ' 'list and\n' ' sorts it.\n' '\n' @@ -751,8 +751,8 @@ 'returned.\n' '\n' 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible ' + 'return an\n' + 'iterable of strings that represents the names accessible ' 'on module. If\n' 'present, this function overrides the standard "dir()" ' 'search on a\n' @@ -4724,7 +4724,7 @@ 'reflection,\n' ' and "__eq__()" and "__ne__()" are their own reflection. 
' 'If the\n' - ' operands are of different types, and right operand’s ' + ' operands are of different types, and the right operand’s ' 'type is a\n' ' direct or indirect subclass of the left operand’s type, ' 'the\n' @@ -4734,6 +4734,11 @@ 'is not\n' ' considered.\n' '\n' + ' When no appropriate method returns any value other than\n' + ' "NotImplemented", the "==" and "!=" operators will fall ' + 'back to\n' + ' "is" and "is not", respectively.\n' + '\n' 'object.__hash__(self)\n' '\n' ' Called by built-in function "hash()" and for operations ' @@ -5212,22 +5217,23 @@ 'the\n' 'current directory, it is read with "\'utf-8\'" encoding and ' 'executed as\n' - 'if it had been typed at the debugger prompt. This is ' - 'particularly\n' - 'useful for aliases. If both files exist, the one in the home\n' - 'directory is read first and aliases defined there can be ' - 'overridden by\n' - 'the local file.\n' - '\n' - 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' - 'encoding.\n' - 'Previously, it was read with the system locale encoding.\n' + 'if it had been typed at the debugger prompt, with the exception ' + 'that\n' + 'empty lines and lines starting with "#" are ignored. This is\n' + 'particularly useful for aliases. If both files exist, the one ' + 'in the\n' + 'home directory is read first and aliases defined there can be\n' + 'overridden by the local file.\n' '\n' 'Changed in version 3.2: ".pdbrc" can now contain commands that\n' 'continue debugging, such as "continue" or "next". Previously, ' 'these\n' 'commands had no effect.\n' '\n' + 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' + 'encoding.\n' + 'Previously, it was read with the system locale encoding.\n' + '\n' 'h(elp) [command]\n' '\n' ' Without argument, print the list of available commands. With ' @@ -8559,32 +8565,36 @@ '\n' ' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n' '\n' - 'The "nonlocal" statement causes the listed identifiers to refer ' - 'to\n' - 'previously bound variables in the nearest enclosing scope ' - 'excluding\n' - 'globals. This is important because the default behavior for ' - 'binding is\n' - 'to search the local namespace first. The statement allows\n' - 'encapsulated code to rebind variables outside of the local ' - 'scope\n' - 'besides the global (module) scope.\n' - '\n' - 'Names listed in a "nonlocal" statement, unlike those listed in ' - 'a\n' - '"global" statement, must refer to pre-existing bindings in an\n' - 'enclosing scope (the scope in which a new binding should be ' - 'created\n' - 'cannot be determined unambiguously).\n' - '\n' - 'Names listed in a "nonlocal" statement must not collide with ' - 'pre-\n' - 'existing bindings in the local scope.\n' + 'When the definition of a function or class is nested (enclosed) ' + 'within\n' + 'the definitions of other functions, its nonlocal scopes are the ' + 'local\n' + 'scopes of the enclosing functions. The "nonlocal" statement ' + 'causes the\n' + 'listed identifiers to refer to names previously bound in ' + 'nonlocal\n' + 'scopes. It allows encapsulated code to rebind such nonlocal\n' + 'identifiers. If a name is bound in more than one nonlocal ' + 'scope, the\n' + 'nearest binding is used. If a name is not bound in any nonlocal ' + 'scope,\n' + 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n' + '\n' + 'The nonlocal statement applies to the entire scope of a function ' + 'or\n' + 'class body. 
A "SyntaxError" is raised if a variable is used or\n' + 'assigned to prior to its nonlocal declaration in the scope.\n' '\n' 'See also:\n' '\n' ' **PEP 3104** - Access to Names in Outer Scopes\n' - ' The specification for the "nonlocal" statement.\n', + ' The specification for the "nonlocal" statement.\n' + '\n' + '**Programmer’s note:** "nonlocal" is a directive to the parser ' + 'and\n' + 'applies only to code parsed along with it. See the note for ' + 'the\n' + '"global" statement.\n', 'numbers': 'Numeric literals\n' '****************\n' '\n' @@ -8680,7 +8690,7 @@ '"__rsub__()"\n' ' method, "type(y).__rsub__(y, x)" is called if ' '"type(x).__sub__(x,\n' - ' y)" returns *NotImplemented*.\n' + ' y)" returns "NotImplemented".\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -8723,14 +8733,18 @@ 'the result\n' ' (which could be, but does not have to be, *self*). If a ' 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of ' - 'a class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to ' - '"x =\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' + ' method is not defined, or if that method returns ' + '"NotImplemented",\n' + ' the augmented assignment falls back to the normal ' + 'methods. For\n' + ' instance, if *x* is an instance of a class with an ' + '"__iadd__()"\n' + ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' + 'If\n' + ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' + 'returns\n' + ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" ' + 'are\n' ' considered, as with the evaluation of "x + y". In ' 'certain\n' ' situations, augmented assignment can result in ' @@ -8811,7 +8825,7 @@ 'Every object has an identity, a type and a value. An object’s\n' '*identity* never changes once it has been created; you may think ' 'of it\n' - 'as the object’s address in memory. The ‘"is"’ operator compares ' + 'as the object’s address in memory. The "is" operator compares ' 'the\n' 'identity of two objects; the "id()" function returns an integer\n' 'representing its identity.\n' @@ -8876,7 +8890,7 @@ 'Note that the use of the implementation’s tracing or debugging\n' 'facilities may keep objects alive that would normally be ' 'collectable.\n' - 'Also note that catching an exception with a ‘"try"…"except"’ ' + 'Also note that catching an exception with a "try"…"except" ' 'statement\n' 'may keep objects alive.\n' '\n' @@ -8891,8 +8905,9 @@ 'release the external resource, usually a "close()" method. ' 'Programs\n' 'are strongly recommended to explicitly close such objects. The\n' - '‘"try"…"finally"’ statement and the ‘"with"’ statement provide\n' - 'convenient ways to do this.\n' + '"try"…"finally" statement and the "with" statement provide ' + 'convenient\n' + 'ways to do this.\n' '\n' 'Some objects contain references to other objects; these are ' 'called\n' @@ -9269,10 +9284,7 @@ 'The try statement.\n' '\n' 'Changed in version 3.3: "None" is now permitted as "Y" in "raise X\n' - 'from Y".\n' - '\n' - 'New in version 3.3: The "__suppress_context__" attribute to ' - 'suppress\n' + 'from Y".Added the "__suppress_context__" attribute to suppress\n' 'automatic display of the exception context.\n' '\n' 'Changed in version 3.11: If the traceback of the active exception ' @@ -10057,8 +10069,8 @@ 'reflection,\n' ' and "__eq__()" and "__ne__()" are their own reflection. 
' 'If the\n' - ' operands are of different types, and right operand’s type ' - 'is a\n' + ' operands are of different types, and the right operand’s ' + 'type is a\n' ' direct or indirect subclass of the left operand’s type, ' 'the\n' ' reflected method of the right operand has priority, ' @@ -10067,6 +10079,11 @@ 'is not\n' ' considered.\n' '\n' + ' When no appropriate method returns any value other than\n' + ' "NotImplemented", the "==" and "!=" operators will fall ' + 'back to\n' + ' "is" and "is not", respectively.\n' + '\n' 'object.__hash__(self)\n' '\n' ' Called by built-in function "hash()" and for operations ' @@ -10308,9 +10325,9 @@ '\n' 'object.__dir__(self)\n' '\n' - ' Called when "dir()" is called on the object. A sequence ' + ' Called when "dir()" is called on the object. An iterable ' 'must be\n' - ' returned. "dir()" converts the returned sequence to a ' + ' returned. "dir()" converts the returned iterable to a ' 'list and\n' ' sorts it.\n' '\n' @@ -10337,8 +10354,8 @@ 'returned.\n' '\n' 'The "__dir__" function should accept no arguments, and ' - 'return a\n' - 'sequence of strings that represents the names accessible on ' + 'return an\n' + 'iterable of strings that represents the names accessible on ' 'module. If\n' 'present, this function overrides the standard "dir()" search ' 'on a\n' @@ -11606,7 +11623,7 @@ '"__rsub__()"\n' ' method, "type(y).__rsub__(y, x)" is called if ' '"type(x).__sub__(x,\n' - ' y)" returns *NotImplemented*.\n' + ' y)" returns "NotImplemented".\n' '\n' ' Note that ternary "pow()" will not try calling ' '"__rpow__()" (the\n' @@ -11649,14 +11666,17 @@ 'the result\n' ' (which could be, but does not have to be, *self*). If a ' 'specific\n' - ' method is not defined, the augmented assignment falls ' - 'back to the\n' - ' normal methods. For instance, if *x* is an instance of a ' - 'class\n' - ' with an "__iadd__()" method, "x += y" is equivalent to "x ' - '=\n' - ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' - '"y.__radd__(x)" are\n' + ' method is not defined, or if that method returns ' + '"NotImplemented",\n' + ' the augmented assignment falls back to the normal ' + 'methods. For\n' + ' instance, if *x* is an instance of a class with an ' + '"__iadd__()"\n' + ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' + 'If\n' + ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' + 'returns\n' + ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are\n' ' considered, as with the evaluation of "x + y". In ' 'certain\n' ' situations, augmented assignment can result in unexpected ' @@ -12998,9 +13018,8 @@ '\n' 'New in version 3.3: The "\'rb\'" prefix of raw bytes literals has ' 'been\n' - 'added as a synonym of "\'br\'".\n' - '\n' - 'New in version 3.3: Support for the unicode legacy literal\n' + 'added as a synonym of "\'br\'".Support for the unicode legacy ' + 'literal\n' '("u\'value\'") was reintroduced to simplify the maintenance of ' 'dual\n' 'Python 2.x and 3.x codebases. See **PEP 414** for more ' @@ -13709,14 +13728,18 @@ 'contains\n' 'the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected ' 'by\n' - '"a[i]".\n' + '"a[i]". Some sequences, including built-in sequences, interpret\n' + 'negative subscripts by adding the sequence length. For example,\n' + '"a[-2]" equals "a[n-2]", the second to last item of sequence a ' + 'with\n' + 'length "n".\n' '\n' 'Sequences also support slicing: "a[i:j]" selects all items with ' 'index\n' '*k* such that *i* "<=" *k* "<" *j*. 
When used as an expression, a\n' - 'slice is a sequence of the same type. This implies that the index ' - 'set\n' - 'is renumbered so that it starts at 0.\n' + 'slice is a sequence of the same type. The comment above about ' + 'negative\n' + 'indexes also applies to negative slice positions.\n' '\n' 'Some sequences also support “extended slicing” with a third “step”\n' 'parameter: "a[i:j:k]" selects all items of *a* with index *x* where ' @@ -14461,7 +14484,9 @@ 'name |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_qualname | The fully ' - 'qualified function name |\n' + 'qualified function name New in version |\n' + '| | ' + '3.11. |\n' '+----------------------------------------------------+----------------------------------------------------+\n' '| codeobject.co_argcount | The total ' 'number of positional *parameters* |\n' @@ -14681,6 +14706,14 @@ 'tools.\n' ' The PEP that introduced the "co_lines()" method.\n' '\n' + 'codeobject.replace(**kwargs)\n' + '\n' + ' Return a copy of the code object with new values for the ' + 'specified\n' + ' fields.\n' + '\n' + ' New in version 3.8.\n' + '\n' '\n' 'Frame objects\n' '-------------\n' @@ -16019,7 +16052,7 @@ '\n' ' For sorting examples and a brief sorting tutorial, see ' 'Sorting\n' - ' HOW TO.\n' + ' Techniques.\n' '\n' ' **CPython implementation detail:** While a list is being ' 'sorted,\n' @@ -16234,9 +16267,8 @@ 'objects\n' 'based on the sequence of values they define (instead of ' 'comparing\n' - 'based on object identity).\n' - '\n' - 'New in version 3.3: The "start", "stop" and "step" attributes.\n' + 'based on object identity).Added the "start", "stop" and "step"\n' + 'attributes.\n' '\n' 'See also:\n' '\n' diff --git a/PythonLib/full/shutil.py b/PythonLib/full/shutil.py index 96463007..3a2b6be3 100644 --- a/PythonLib/full/shutil.py +++ b/PythonLib/full/shutil.py @@ -676,7 +676,7 @@ def _rmtree_safe_fd(topfd, path, onexc): continue if is_dir: try: - dirfd = os.open(entry.name, os.O_RDONLY, dir_fd=topfd) + dirfd = os.open(entry.name, os.O_RDONLY | os.O_NONBLOCK, dir_fd=topfd) dirfd_closed = False except OSError as err: onexc(os.open, fullname, err) @@ -775,7 +775,7 @@ def onexc(*args): onexc(os.lstat, path, err) return try: - fd = os.open(path, os.O_RDONLY, dir_fd=dir_fd) + fd = os.open(path, os.O_RDONLY | os.O_NONBLOCK, dir_fd=dir_fd) fd_closed = False except Exception as err: onexc(os.open, path, err) diff --git a/PythonLib/full/subprocess.py b/PythonLib/full/subprocess.py index 3264d9af..1d17ae36 100644 --- a/PythonLib/full/subprocess.py +++ b/PythonLib/full/subprocess.py @@ -1581,6 +1581,8 @@ def _wait(self, timeout): """Internal implementation of wait() on Windows.""" if timeout is None: timeout_millis = _winapi.INFINITE + elif timeout <= 0: + timeout_millis = 0 else: timeout_millis = int(timeout * 1000) if self.returncode is None: diff --git a/PythonLib/full/tokenize.py b/PythonLib/full/tokenize.py index 49e8144e..7af7a5cc 100644 --- a/PythonLib/full/tokenize.py +++ b/PythonLib/full/tokenize.py @@ -170,6 +170,7 @@ def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 + self.prev_type = None self.encoding = None def add_whitespace(self, start): @@ -185,6 +186,29 @@ def add_whitespace(self, start): if col_offset: self.tokens.append(" " * col_offset) + def escape_brackets(self, token): + characters = [] + consume_until_next_bracket = False + for character in token: + if character == "}": + if consume_until_next_bracket: + 
consume_until_next_bracket = False + else: + characters.append(character) + if character == "{": + n_backslashes = sum( + 1 for char in _itertools.takewhile( + "\\".__eq__, + characters[-2::-1] + ) + ) + if n_backslashes % 2 == 0: + characters.append(character) + else: + consume_until_next_bracket = True + characters.append(character) + return "".join(characters) + def untokenize(self, iterable): it = iter(iterable) indents = [] @@ -216,11 +240,13 @@ def untokenize(self, iterable): startline = False elif tok_type == FSTRING_MIDDLE: if '{' in token or '}' in token: + token = self.escape_brackets(token) + last_line = token.splitlines()[-1] end_line, end_col = end - end = (end_line, end_col + token.count('{') + token.count('}')) - token = re.sub('{', '{{', token) - token = re.sub('}', '}}', token) - + extra_chars = last_line.count("{{") + last_line.count("}}") + end = (end_line, end_col + extra_chars) + elif tok_type in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): + self.tokens.append(" ") self.add_whitespace(start) self.tokens.append(token) @@ -228,6 +254,7 @@ def untokenize(self, iterable): if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 + self.prev_type = tok_type return "".join(self.tokens) def compat(self, token, iterable): @@ -235,6 +262,7 @@ def compat(self, token, iterable): toks_append = self.tokens.append startline = token[0] in (NEWLINE, NL) prevstring = False + in_fstring = 0 for tok in _itertools.chain([token], iterable): toknum, tokval = tok[:2] @@ -253,6 +281,10 @@ def compat(self, token, iterable): else: prevstring = False + if toknum == FSTRING_START: + in_fstring += 1 + elif toknum == FSTRING_END: + in_fstring -= 1 if toknum == INDENT: indents.append(tokval) continue @@ -265,11 +297,18 @@ def compat(self, token, iterable): toks_append(indents[-1]) startline = False elif toknum == FSTRING_MIDDLE: - if '{' in tokval or '}' in tokval: - tokval = re.sub('{', '{{', tokval) - tokval = re.sub('}', '}}', tokval) + tokval = self.escape_brackets(tokval) + + # Insert a space between two consecutive brackets if we are in an f-string + if tokval in {"{", "}"} and self.tokens and self.tokens[-1] == tokval and in_fstring: + tokval = ' ' + tokval + + # Insert a space between two consecutive f-strings + if toknum in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): + self.tokens.append(" ") toks_append(tokval) + self.prev_type = toknum def untokenize(iterable): diff --git a/PythonLib/full/typing.py b/PythonLib/full/typing.py index ffe7ce8d..b58c2d30 100644 --- a/PythonLib/full/typing.py +++ b/PythonLib/full/typing.py @@ -314,19 +314,33 @@ def _unpack_args(args): newargs.append(arg) return newargs -def _deduplicate(params): +def _deduplicate(params, *, unhashable_fallback=False): # Weed out strict duplicates, preserving the first of each occurrence. 
- all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - return params - + try: + return dict.fromkeys(params) + except TypeError: + if not unhashable_fallback: + raise + # Happens for cases like `Annotated[dict, {'x': IntValidator()}]` + return _deduplicate_unhashable(params) + +def _deduplicate_unhashable(unhashable_params): + new_unhashable = [] + for t in unhashable_params: + if t not in new_unhashable: + new_unhashable.append(t) + return new_unhashable + +def _compare_args_orderless(first_args, second_args): + first_unhashable = _deduplicate_unhashable(first_args) + second_unhashable = _deduplicate_unhashable(second_args) + t = list(second_unhashable) + try: + for elem in first_unhashable: + t.remove(elem) + except ValueError: + return False + return not t def _remove_dups_flatten(parameters): """Internal helper for Union creation and substitution. @@ -341,7 +355,7 @@ def _remove_dups_flatten(parameters): else: params.append(p) - return tuple(_deduplicate(params)) + return tuple(_deduplicate(params, unhashable_fallback=True)) def _flatten_literal_params(parameters): @@ -530,7 +544,7 @@ class Any(metaclass=_AnyMeta): def __new__(cls, *args, **kwargs): if cls is Any: raise TypeError("Any cannot be instantiated") - return super().__new__(cls, *args, **kwargs) + return super().__new__(cls) @_SpecialForm @@ -832,22 +846,25 @@ def TypeGuard(self, parameters): 2. If the return value is ``True``, the type of its argument is the type inside ``TypeGuard``. - For example:: + For example:: + + def is_str_list(val: list[object]) -> TypeGuard[list[str]]: + '''Determines whether all objects in the list are strings''' + return all(isinstance(x, str) for x in val) - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... + def func1(val: list[object]): + if is_str_list(val): + # Type of ``val`` is narrowed to ``list[str]``. + print(" ".join(val)) + else: + # Type of ``val`` remains as ``list[object]``. + print("Not a list of strings!") Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower form of ``TypeA`` (it can even be a wider form) and this may lead to type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of + narrowing ``list[object]`` to ``list[str]`` even though the latter is not + a subtype of the former, since ``list`` is invariant. The responsibility of writing type-safe type guards is left to the user. ``TypeGuard`` also works with type variables. For more information, see @@ -872,7 +889,7 @@ def __init__(self, arg, is_argument=True, module=None, *, is_class=False): # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`. # Unfortunately, this isn't a valid expression on its own, so we # do the unpacking manually. - if arg[0] == '*': + if arg.startswith('*'): arg_to_compile = f'({arg},)[0]' # E.g. 
(*Ts,)[0] or (*tuple[int, int],)[0] else: arg_to_compile = arg @@ -1140,7 +1157,9 @@ def __call__(self, *args, **kwargs): result = self.__origin__(*args, **kwargs) try: result.__orig_class__ = self - except AttributeError: + # Some objects raise TypeError (or something even more exotic) + # if you try to set attributes on them; we guard against that here + except Exception: pass return result @@ -1546,7 +1565,10 @@ def copy_with(self, params): def __eq__(self, other): if not isinstance(other, (_UnionGenericAlias, types.UnionType)): return NotImplemented - return set(self.__args__) == set(other.__args__) + try: # fast path + return set(self.__args__) == set(other.__args__) + except TypeError: # not hashable, slow path + return _compare_args_orderless(self.__args__, other.__args__) def __hash__(self): return hash(frozenset(self.__args__)) @@ -3254,11 +3276,11 @@ def __enter__(self) -> 'TextIO': class _DeprecatedType(type): def __getattribute__(cls, name): - if name not in ("__dict__", "__module__") and name in cls.__dict__: + if name not in {"__dict__", "__module__", "__doc__"} and name in cls.__dict__: warnings.warn( f"{cls.__name__} is deprecated, import directly " f"from typing instead. {cls.__name__} will be removed " - "in Python 3.12.", + "in Python 3.13.", DeprecationWarning, stacklevel=2, ) diff --git a/PythonLib/full/unittest/mock.py b/PythonLib/full/unittest/mock.py index a2187580..0e1b9ace 100644 --- a/PythonLib/full/unittest/mock.py +++ b/PythonLib/full/unittest/mock.py @@ -543,7 +543,7 @@ def __get_return_value(self): if self._mock_delegate is not None: ret = self._mock_delegate.return_value - if ret is DEFAULT: + if ret is DEFAULT and self._mock_wraps is None: ret = self._get_child_mock( _new_parent=self, _new_name='()' ) @@ -1204,6 +1204,9 @@ def _execute_mock_call(self, /, *args, **kwargs): if self._mock_return_value is not DEFAULT: return self.return_value + if self._mock_delegate and self._mock_delegate.return_value is not DEFAULT: + return self.return_value + if self._mock_wraps is not None: return self._mock_wraps(*args, **kwargs) @@ -2754,9 +2757,12 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, if _parent is not None and not instance: _parent._mock_children[_name] = mock + wrapped = kwargs.get('wraps') + if is_type and not instance and 'return_value' not in kwargs: mock.return_value = create_autospec(spec, spec_set, instance=True, - _name='()', _parent=mock) + _name='()', _parent=mock, + wraps=wrapped) for entry in dir(spec): if _is_magic(entry): @@ -2778,6 +2784,9 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, continue kwargs = {'spec': original} + # Wrap child attributes also. + if wrapped and hasattr(wrapped, entry): + kwargs.update(wraps=original) if spec_set: kwargs = {'spec_set': original} diff --git a/PythonLib/full/urllib/parse.py b/PythonLib/full/urllib/parse.py index c129b0d7..fc9e7c99 100644 --- a/PythonLib/full/urllib/parse.py +++ b/PythonLib/full/urllib/parse.py @@ -763,42 +763,48 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, Returns a list, as G-d intended. 
""" - qs, _coerce_result = _coerce_args(qs) - separator, _ = _coerce_args(separator) - if not separator or (not isinstance(separator, (str, bytes))): + if not separator or not isinstance(separator, (str, bytes)): raise ValueError("Separator must be of type string or bytes.") + if isinstance(qs, str): + if not isinstance(separator, str): + separator = str(separator, 'ascii') + eq = '=' + def _unquote(s): + return unquote_plus(s, encoding=encoding, errors=errors) + else: + if not qs: + return [] + # Use memoryview() to reject integers and iterables, + # acceptable by the bytes constructor. + qs = bytes(memoryview(qs)) + if isinstance(separator, str): + separator = bytes(separator, 'ascii') + eq = b'=' + def _unquote(s): + return unquote_to_bytes(s.replace(b'+', b' ')) + + if not qs: + return [] # If max_num_fields is defined then check that the number of fields # is less than max_num_fields. This prevents a memory exhaustion DOS # attack via post bodies with many fields. if max_num_fields is not None: - num_fields = 1 + qs.count(separator) if qs else 0 + num_fields = 1 + qs.count(separator) if max_num_fields < num_fields: raise ValueError('Max number of fields exceeded') r = [] - query_args = qs.split(separator) if qs else [] - for name_value in query_args: - if not name_value and not strict_parsing: - continue - nv = name_value.split('=', 1) - if len(nv) != 2: - if strict_parsing: + for name_value in qs.split(separator): + if name_value or strict_parsing: + name, has_eq, value = name_value.partition(eq) + if not has_eq and strict_parsing: raise ValueError("bad query field: %r" % (name_value,)) - # Handle case of a control-name with no equal sign - if keep_blank_values: - nv.append('') - else: - continue - if len(nv[1]) or keep_blank_values: - name = nv[0].replace('+', ' ') - name = unquote(name, encoding=encoding, errors=errors) - name = _coerce_result(name) - value = nv[1].replace('+', ' ') - value = unquote(value, encoding=encoding, errors=errors) - value = _coerce_result(value) - r.append((name, value)) + if value or keep_blank_values: + name = _unquote(name) + value = _unquote(value) + r.append((name, value)) return r def unquote_plus(string, encoding='utf-8', errors='replace'): diff --git a/PythonLib/full/urllib/request.py b/PythonLib/full/urllib/request.py index 5314b3f2..7228a355 100644 --- a/PythonLib/full/urllib/request.py +++ b/PythonLib/full/urllib/request.py @@ -2589,6 +2589,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings): } """ from fnmatch import fnmatch + from ipaddress import AddressValueError, IPv4Address hostonly, port = _splitport(host) @@ -2605,20 +2606,17 @@ def ip2num(ipAddr): return True hostIP = None + try: + hostIP = int(IPv4Address(hostonly)) + except AddressValueError: + pass for value in proxy_settings.get('exceptions', ()): # Items in the list are strings like these: *.local, 169.254/16 if not value: continue m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value) - if m is not None: - if hostIP is None: - try: - hostIP = socket.gethostbyname(hostonly) - hostIP = ip2num(hostIP) - except OSError: - continue - + if m is not None and hostIP is not None: base = ip2num(m.group(1)) mask = m.group(2) if mask is None: @@ -2641,6 +2639,31 @@ def ip2num(ipAddr): return False +# Same as _proxy_bypass_macosx_sysconf, testable on all platforms +def _proxy_bypass_winreg_override(host, override): + """Return True if the host should bypass the proxy server. + + The proxy override list is obtained from the Windows + Internet settings proxy override registry value. 
+ + An example of a proxy override value is: + "www.example.com;*.example.net; 192.168.0.1" + """ + from fnmatch import fnmatch + + host, _ = _splitport(host) + proxy_override = override.split(';') + for test in proxy_override: + test = test.strip() + # "" should bypass the proxy server for all intranet addresses + if test == '': + if '.' not in host: + return True + elif fnmatch(host, test): + return True + return False + + if sys.platform == 'darwin': from _scproxy import _get_proxy_settings, _get_proxies @@ -2739,7 +2762,7 @@ def proxy_bypass_registry(host): import winreg except ImportError: # Std modules, so should be around - but you never know! - return 0 + return False try: internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') @@ -2749,40 +2772,10 @@ def proxy_bypass_registry(host): 'ProxyOverride')[0]) # ^^^^ Returned as Unicode but problems if not converted to ASCII except OSError: - return 0 + return False if not proxyEnable or not proxyOverride: - return 0 - # try to make a host list from name and IP address. - rawHost, port = _splitport(host) - host = [rawHost] - try: - addr = socket.gethostbyname(rawHost) - if addr != rawHost: - host.append(addr) - except OSError: - pass - try: - fqdn = socket.getfqdn(rawHost) - if fqdn != rawHost: - host.append(fqdn) - except OSError: - pass - # make a check value list from the registry entry: replace the - # '' string by the localhost entry and the corresponding - # canonical entry. - proxyOverride = proxyOverride.split(';') - # now check if we match one of the registry values. - for test in proxyOverride: - if test == '': - if '.' not in rawHost: - return 1 - test = test.replace(".", r"\.") # mask dots - test = test.replace("*", r".*") # change glob sequence - test = test.replace("?", r".") # change glob char - for val in host: - if re.match(test, val, re.I): - return 1 - return 0 + return False + return _proxy_bypass_winreg_override(host, proxyOverride) def proxy_bypass(host): """Return True, if host should be bypassed. 
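The urllib.request hunk above replaces the old regex-based ProxyOverride matching in proxy_bypass_registry() with the platform-independent helper _proxy_bypass_winreg_override(), which splits the override value on ';', strips each entry, and matches the host against it with fnmatch. A minimal sketch of how the new helper behaves, reusing the override string from its docstring (the host names below are illustrative only, not part of the patch):

    from urllib.request import _proxy_bypass_winreg_override

    # Semicolon-separated value of the Windows "ProxyOverride" registry entry;
    # each entry is stripped of surrounding whitespace before matching.
    override = "www.example.com;*.example.net; 192.168.0.1"

    print(_proxy_bypass_winreg_override("www.example.com", override))  # True  (exact match)
    print(_proxy_bypass_winreg_override("ftp.example.net", override))  # True  (wildcard match)
    print(_proxy_bypass_winreg_override("python.org", override))       # False (no entry matches)
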
diff --git a/PythonLib/full/venv/scripts/common/Activate.ps1 b/PythonLib/full/venv/scripts/common/Activate.ps1 index 18fae967..d7fe51b1 100644 --- a/PythonLib/full/venv/scripts/common/Activate.ps1 +++ b/PythonLib/full/venv/scripts/common/Activate.ps1 @@ -247,7 +247,7 @@ Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH $Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" # SIG # Begin signature block -# MIIvJAYJKoZIhvcNAQcCoIIvFTCCLxECAQExDzANBglghkgBZQMEAgEFADB5Bgor +# MIIvIwYJKoZIhvcNAQcCoIIvFDCCLxACAQExDzANBglghkgBZQMEAgEFADB5Bgor # BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG # KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCBnL745ElCYk8vk # dBtMuQhLeWJ3ZGfzKW4DHCYzAn+QB6CCE8MwggWQMIIDeKADAgECAhAFmxtXno4h @@ -356,147 +356,147 @@ $Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" # rRNtnV8UfidPBL4ZHkTcClQbCoz0UbLhkiDvIS00Dn+BBcxw/TKqVL4Oaz3bkMSs # M46LciTeucHY9ExRVt3zy7i149sd+F4QozPqn7FrSVHXmem3r7bjyHTxOgqxRCVa # 18Vtx7P/8bYSBeS+WHCKcliFCecspusCDSlnRUjZwyPdP0VHxaZg2unjHY3rMYIa -# tzCCGrMCAQEwfTBpMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIElu +# tjCCGrICAQEwfTBpMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIElu # Yy4xQTA/BgNVBAMTOERpZ2lDZXJ0IFRydXN0ZWQgRzQgQ29kZSBTaWduaW5nIFJT # QTQwOTYgU0hBMzg0IDIwMjEgQ0ExAhAHHxQbizANJfMU6yMM0NHdMA0GCWCGSAFl # AwQCAQUAoIHIMBkGCSqGSIb3DQEJAzEMBgorBgEEAYI3AgEEMBwGCisGAQQBgjcC # AQsxDjAMBgorBgEEAYI3AgEVMC8GCSqGSIb3DQEJBDEiBCBnAZ6P7YvTwq0fbF62 # o7E75R0LxsW5OtyYiFESQckLhjBcBgorBgEEAYI3AgEMMU4wTKBGgEQAQgB1AGkA -# bAB0ADoAIABSAGUAbABlAGEAcwBlAF8AdgAzAC4AMQAyAC4AMgBfADIAMAAyADQA -# MAAyADAANgAuADAAMaECgAAwDQYJKoZIhvcNAQEBBQAEggIATbW6c9buReeKQQso -# g6hMn1qdYYpVw4mI50/iuRhryAKIbOF/nXFzfU2O2v7lf7eDemq47UrEagsI/fkz -# jHQCeGD49o9yUP2oWxswpBZnuo4KIVl+rd8i3bA8YwEyxcSseoiNAVJVB/x8irzu -# RXQj8bC7Cq2OG3VTyTX4m2ZGUHqZ4FdykziD771ljrl9EoJMsamH6u6nvX8D7u3D -# sX5hdLLfkO8NIca0NN6b+S0jOQk5Mds4h7XUbpre2QCn9h62TEMf+VWExSst3Wrr -# 0dZmzO76EO/4HiYqPdbvwkFjNddIwRUC/jHWr0/fWWutRPb+91M0U5L/uvaqz39/ -# cteqXMzVilOqcMLdRY2i8VbEEF97xADWTjU1fg1ANEiYHOAzR1/BP9OtZPP/TDo6 -# OKzZrJT6mJ07MgevqTLHSmSWZMQc+cWar8HiWSacGcY+DXhMUYaa6EdvITvKlPkE -# RO87xlQ3xmV2d2D3Vz98bU42FY7PKejRJQgClPLtW3fA7EC7o5sn8E2XgOVjLP5W -# 8IYgr0UUSJI4odUtuzXbdxjc0MUZedlX2RufM7x/OScv2CSSBMuByJ1Z4j1qQylm -# M0j/MjVyhMr4c8TH5z77Ec1HPc79dwCKTmrI9ze2mNzb7a7abG59mWmyuenIlu3r -# 1L9agonr/oNDuIpkw3GhQRMcIMahghdAMIIXPAYKKwYBBAGCNwMDATGCFywwghco -# BgkqhkiG9w0BBwKgghcZMIIXFQIBAzEPMA0GCWCGSAFlAwQCAQUAMHgGCyqGSIb3 -# DQEJEAEEoGkEZzBlAgEBBglghkgBhv1sBwEwMTANBglghkgBZQMEAgEFAAQg05m0 -# ZnzFt5OfLCqM3dUoLNWuyDSET+7H8CVzSSScfIcCEQD8wmvVfDaiineK4eXUaB6n -# GA8yMDI0MDIwNjIyMDg1OFqgghMJMIIGwjCCBKqgAwIBAgIQBUSv85SdCDmmv9s/ -# X+VhFjANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGln -# aUNlcnQsIEluYy4xOzA5BgNVBAMTMkRpZ2lDZXJ0IFRydXN0ZWQgRzQgUlNBNDA5 -# NiBTSEEyNTYgVGltZVN0YW1waW5nIENBMB4XDTIzMDcxNDAwMDAwMFoXDTM0MTAx -# MzIzNTk1OVowSDELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJbmMu -# MSAwHgYDVQQDExdEaWdpQ2VydCBUaW1lc3RhbXAgMjAyMzCCAiIwDQYJKoZIhvcN -# AQEBBQADggIPADCCAgoCggIBAKNTRYcdg45brD5UsyPgz5/X5dLnXaEOCdwvSKOX -# ejsqnGfcYhVYwamTEafNqrJq3RApih5iY2nTWJw1cb86l+uUUI8cIOrHmjsvlmbj -# aedp/lvD1isgHMGXlLSlUIHyz8sHpjBoyoNC2vx/CSSUpIIa2mq62DvKXd4ZGIX7 -# ReoNYWyd/nFexAaaPPDFLnkPG2ZS48jWPl/aQ9OE9dDH9kgtXkV1lnX+3RChG4PB -# uOZSlbVH13gpOWvgeFmX40QrStWVzu8IF+qCZE3/I+PKhu60pCFkcOvV5aDaY7Mu -# 6QXuqvYk9R28mxyyt1/f8O52fTGZZUdVnUokL6wrl76f5P17cz4y7lI0+9S769Sg -# LDSb495uZBkHNwGRDxy1Uc2qTGaDiGhiu7xBG3gZbeTZD+BYQfvYsSzhUa+0rRUG -# 
FOpiCBPTaR58ZE2dD9/O0V6MqqtQFcmzyrzXxDtoRKOlO0L9c33u3Qr/eTQQfqZc -# ClhMAD6FaXXHg2TWdc2PEnZWpST618RrIbroHzSYLzrqawGw9/sqhux7UjipmAmh -# cbJsca8+uG+W1eEQE/5hRwqM/vC2x9XH3mwk8L9CgsqgcT2ckpMEtGlwJw1Pt7U2 -# 0clfCKRwo+wK8REuZODLIivK8SgTIUlRfgZm0zu++uuRONhRB8qUt+JQofM604qD -# y0B7AgMBAAGjggGLMIIBhzAOBgNVHQ8BAf8EBAMCB4AwDAYDVR0TAQH/BAIwADAW -# BgNVHSUBAf8EDDAKBggrBgEFBQcDCDAgBgNVHSAEGTAXMAgGBmeBDAEEAjALBglg -# hkgBhv1sBwEwHwYDVR0jBBgwFoAUuhbZbU2FL3MpdpovdYxqII+eyG8wHQYDVR0O -# BBYEFKW27xPn783QZKHVVqllMaPe1eNJMFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6 -# Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydFRydXN0ZWRHNFJTQTQwOTZTSEEy -# NTZUaW1lU3RhbXBpbmdDQS5jcmwwgZAGCCsGAQUFBwEBBIGDMIGAMCQGCCsGAQUF -# BzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wWAYIKwYBBQUHMAKGTGh0dHA6 -# Ly9jYWNlcnRzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydFRydXN0ZWRHNFJTQTQwOTZT -# SEEyNTZUaW1lU3RhbXBpbmdDQS5jcnQwDQYJKoZIhvcNAQELBQADggIBAIEa1t6g -# qbWYF7xwjU+KPGic2CX/yyzkzepdIpLsjCICqbjPgKjZ5+PF7SaCinEvGN1Ott5s -# 1+FgnCvt7T1IjrhrunxdvcJhN2hJd6PrkKoS1yeF844ektrCQDifXcigLiV4JZ0q -# BXqEKZi2V3mP2yZWK7Dzp703DNiYdk9WuVLCtp04qYHnbUFcjGnRuSvExnvPnPp4 -# 4pMadqJpddNQ5EQSviANnqlE0PjlSXcIWiHFtM+YlRpUurm8wWkZus8W8oM3NG6w -# QSbd3lqXTzON1I13fXVFoaVYJmoDRd7ZULVQjK9WvUzF4UbFKNOt50MAcN7MmJ4Z -# iQPq1JE3701S88lgIcRWR+3aEUuMMsOI5ljitts++V+wQtaP4xeR0arAVeOGv6wn -# LEHQmjNKqDbUuXKWfpd5OEhfysLcPTLfddY2Z1qJ+Panx+VPNTwAvb6cKmx5Adza -# ROY63jg7B145WPR8czFVoIARyxQMfq68/qTreWWqaNYiyjvrmoI1VygWy2nyMpqy -# 0tg6uLFGhmu6F/3Ed2wVbK6rr3M66ElGt9V/zLY4wNjsHPW2obhDLN9OTH0eaHDA -# dwrUAuBcYLso/zjlUlrWrBciI0707NMX+1Br/wd3H3GXREHJuEbTbDJ8WC9nR2Xl -# G3O2mflrLAZG70Ee8PBf4NvZrZCARK+AEEGKMIIGrjCCBJagAwIBAgIQBzY3tyRU -# fNhHrP0oZipeWzANBgkqhkiG9w0BAQsFADBiMQswCQYDVQQGEwJVUzEVMBMGA1UE -# ChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSEwHwYD -# VQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwHhcNMjIwMzIzMDAwMDAwWhcN -# MzcwMzIyMjM1OTU5WjBjMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -# IEluYy4xOzA5BgNVBAMTMkRpZ2lDZXJ0IFRydXN0ZWQgRzQgUlNBNDA5NiBTSEEy -# NTYgVGltZVN0YW1waW5nIENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC -# AgEAxoY1BkmzwT1ySVFVxyUDxPKRN6mXUaHW0oPRnkyibaCwzIP5WvYRoUQVQl+k -# iPNo+n3znIkLf50fng8zH1ATCyZzlm34V6gCff1DtITaEfFzsbPuK4CEiiIY3+va -# PcQXf6sZKz5C3GeO6lE98NZW1OcoLevTsbV15x8GZY2UKdPZ7Gnf2ZCHRgB720RB -# idx8ald68Dd5n12sy+iEZLRS8nZH92GDGd1ftFQLIWhuNyG7QKxfst5Kfc71ORJn -# 7w6lY2zkpsUdzTYNXNXmG6jBZHRAp8ByxbpOH7G1WE15/tePc5OsLDnipUjW8LAx -# E6lXKZYnLvWHpo9OdhVVJnCYJn+gGkcgQ+NDY4B7dW4nJZCYOjgRs/b2nuY7W+yB -# 3iIU2YIqx5K/oN7jPqJz+ucfWmyU8lKVEStYdEAoq3NDzt9KoRxrOMUp88qqlnNC -# aJ+2RrOdOqPVA+C/8KI8ykLcGEh/FDTP0kyr75s9/g64ZCr6dSgkQe1CvwWcZklS -# UPRR8zZJTYsg0ixXNXkrqPNFYLwjjVj33GHek/45wPmyMKVM1+mYSlg+0wOI/rOP -# 015LdhJRk8mMDDtbiiKowSYI+RQQEgN9XyO7ZONj4KbhPvbCdLI/Hgl27KtdRnXi -# YKNYCQEoAA6EVO7O6V3IXjASvUaetdN2udIOa5kM0jO0zbECAwEAAaOCAV0wggFZ -# MBIGA1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFLoW2W1NhS9zKXaaL3WMaiCP -# nshvMB8GA1UdIwQYMBaAFOzX44LScV1kTN8uZz/nupiuHA9PMA4GA1UdDwEB/wQE -# AwIBhjATBgNVHSUEDDAKBggrBgEFBQcDCDB3BggrBgEFBQcBAQRrMGkwJAYIKwYB -# BQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBBBggrBgEFBQcwAoY1aHR0 -# cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0VHJ1c3RlZFJvb3RHNC5j -# cnQwQwYDVR0fBDwwOjA4oDagNIYyaHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0Rp -# Z2lDZXJ0VHJ1c3RlZFJvb3RHNC5jcmwwIAYDVR0gBBkwFzAIBgZngQwBBAIwCwYJ -# YIZIAYb9bAcBMA0GCSqGSIb3DQEBCwUAA4ICAQB9WY7Ak7ZvmKlEIgF+ZtbYIULh -# sBguEE0TzzBTzr8Y+8dQXeJLKftwig2qKWn8acHPHQfpPmDI2AvlXFvXbYf6hCAl -# NDFnzbYSlm/EUExiHQwIgqgWvalWzxVzjQEiJc6VaT9Hd/tydBTX/6tPiix6q4XN -# Q1/tYLaqT5Fmniye4Iqs5f2MvGQmh2ySvZ180HAKfO+ovHVPulr3qRCyXen/KFSJ -# 
8NWKcXZl2szwcqMj+sAngkSumScbqyQeJsG33irr9p6xeZmBo1aGqwpFyd/EjaDn -# mPv7pp1yr8THwcFqcdnGE4AJxLafzYeHJLtPo0m5d2aR8XKc6UsCUqc3fpNTrDsd -# CEkPlM05et3/JWOZJyw9P2un8WbDQc1PtkCbISFA0LcTJM3cHXg65J6t5TRxktcm -# a+Q4c6umAU+9Pzt4rUyt+8SVe+0KXzM5h0F4ejjpnOHdI/0dKNPH+ejxmF/7K9h+ -# 8kaddSweJywm228Vex4Ziza4k9Tm8heZWcpw8De/mADfIBZPJ/tgZxahZrrdVcA6 -# KYawmKAr7ZVBtzrVFZgxtGIJDwq9gdkT/r+k0fNX2bwE+oLeMt8EifAAzV3C+dAj -# fwAL5HYCJtnwZXZCpimHCUcr5n8apIUP/JiW9lVUKx+A+sDyDivl1vupL0QVSucT -# Dh3bNzgaoSv27dZ8/DCCBY0wggR1oAMCAQICEA6bGI750C3n79tQ4ghAGFowDQYJ -# KoZIhvcNAQEMBQAwZTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IElu -# YzEZMBcGA1UECxMQd3d3LmRpZ2ljZXJ0LmNvbTEkMCIGA1UEAxMbRGlnaUNlcnQg -# QXNzdXJlZCBJRCBSb290IENBMB4XDTIyMDgwMTAwMDAwMFoXDTMxMTEwOTIzNTk1 -# OVowYjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UE -# CxMQd3d3LmRpZ2ljZXJ0LmNvbTEhMB8GA1UEAxMYRGlnaUNlcnQgVHJ1c3RlZCBS -# b290IEc0MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAv+aQc2jeu+Rd -# SjwwIjBpM+zCpyUuySE98orYWcLhKac9WKt2ms2uexuEDcQwH/MbpDgW61bGl20d -# q7J58soR0uRf1gU8Ug9SH8aeFaV+vp+pVxZZVXKvaJNwwrK6dZlqczKU0RBEEC7f -# gvMHhOZ0O21x4i0MG+4g1ckgHWMpLc7sXk7Ik/ghYZs06wXGXuxbGrzryc/NrDRA -# X7F6Zu53yEioZldXn1RYjgwrt0+nMNlW7sp7XeOtyU9e5TXnMcvak17cjo+A2raR -# mECQecN4x7axxLVqGDgDEI3Y1DekLgV9iPWCPhCRcKtVgkEy19sEcypukQF8IUzU -# vK4bA3VdeGbZOjFEmjNAvwjXWkmkwuapoGfdpCe8oU85tRFYF/ckXEaPZPfBaYh2 -# mHY9WV1CdoeJl2l6SPDgohIbZpp0yt5LHucOY67m1O+SkjqePdwA5EUlibaaRBkr -# fsCUtNJhbesz2cXfSwQAzH0clcOP9yGyshG3u3/y1YxwLEFgqrFjGESVGnZifvaA -# sPvoZKYz0YkH4b235kOkGLimdwHhD5QMIR2yVCkliWzlDlJRR3S+Jqy2QXXeeqxf -# jT/JvNNBERJb5RBQ6zHFynIWIgnffEx1P2PsIV/EIFFrb7GrhotPwtZFX50g/KEe -# xcCPorF+CiaZ9eRpL5gdLfXZqbId5RsCAwEAAaOCATowggE2MA8GA1UdEwEB/wQF -# MAMBAf8wHQYDVR0OBBYEFOzX44LScV1kTN8uZz/nupiuHA9PMB8GA1UdIwQYMBaA -# FEXroq/0ksuCMS1Ri6enIZ3zbcgPMA4GA1UdDwEB/wQEAwIBhjB5BggrBgEFBQcB -# AQRtMGswJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBDBggr -# BgEFBQcwAoY3aHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0QXNz -# dXJlZElEUm9vdENBLmNydDBFBgNVHR8EPjA8MDqgOKA2hjRodHRwOi8vY3JsMy5k -# aWdpY2VydC5jb20vRGlnaUNlcnRBc3N1cmVkSURSb290Q0EuY3JsMBEGA1UdIAQK -# MAgwBgYEVR0gADANBgkqhkiG9w0BAQwFAAOCAQEAcKC/Q1xV5zhfoKN0Gz22Ftf3 -# v1cHvZqsoYcs7IVeqRq7IviHGmlUIu2kiHdtvRoU9BNKei8ttzjv9P+Aufih9/Jy -# 3iS8UgPITtAq3votVs/59PesMHqai7Je1M/RQ0SbQyHrlnKhSLSZy51PpwYDE3cn -# RNTnf+hZqPC/Lwum6fI0POz3A8eHqNJMQBk1RmppVLC4oVaO7KTVPeix3P0c2PR3 -# WlxUjG/voVA9/HYJaISfb8rbII01YBwCA8sgsKxYoA5AY8WYIsGyWfVVa88nq2x2 -# zm8jLfR+cWojayL/ErhULSd+2DrZ8LaHlv1b0VysGMNNn3O3AamfV6peKOK5lDGC -# A3YwggNyAgEBMHcwYzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -# bmMuMTswOQYDVQQDEzJEaWdpQ2VydCBUcnVzdGVkIEc0IFJTQTQwOTYgU0hBMjU2 -# IFRpbWVTdGFtcGluZyBDQQIQBUSv85SdCDmmv9s/X+VhFjANBglghkgBZQMEAgEF -# AKCB0TAaBgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwHAYJKoZIhvcNAQkFMQ8X -# DTI0MDIwNjIyMDg1OFowKwYLKoZIhvcNAQkQAgwxHDAaMBgwFgQUZvArMsLCyQ+C -# Xc6qisnGTxmcz0AwLwYJKoZIhvcNAQkEMSIEINMdi4rFvW7VPP2AXVn0aK7rkofX -# zdeHGr26+9NSIEg6MDcGCyqGSIb3DQEJEAIvMSgwJjAkMCIEINL25G3tdCLM0dRA -# V2hBNm+CitpVmq4zFq9NGprUDHgoMA0GCSqGSIb3DQEBAQUABIICAEa0h9uNUeCI -# slg/9MrVGvyDPHdViMo+hEB+YgYnYy3yiMQr2BfKvKGcEK9+cbFZoW5F1wM+3Kry -# Gd+fpUCiF2MzvgxYjipgyHmHi4fMhBKF5vC8923FiwyQawZmxiPU/N9pOKEf8MHD -# ggpT91iWxHLgIX8fklieNBBbZxtGPXoyZTDaqeaJhBBJtT1FDZ7nKJbA5sp+1QQj -# nKnjpeCbAD73IgJ/ZayCGmgYwiPhY68j5+pRQt0KGp2+e1pLS9TuEbS2QNIfZEmv -# hFklZEF1P9w9U+Wr/1vnPafeNELPFTaFrELMgQL7wWCM+6uNdZQgT+2DlMWKWWFt -# kunVOD0m1SURurCEHGRi+Y8w0A22uOYnMbItMGVXbgESTwi7yDMCqrrKEkBeJXqr -# Ig8RVOmDAH2a7LPO0cUzE1Ql2oJypJH7wl8338QzVBvRYNJIJ8ISDPn80u2lFWIK -# 
RFwglrbMcj0b6i+g8J0dMRSMBxkg9hhS922sOLwGNUJ/pJ5IwxFBqf6pdp+WFOA8 -# IQ6ynnSSDm82O6giiEL+v2iukua9VG/IKpJMccUKwal+yM4o097MjzzfjdS4oRWJ -# pMz2Wu3m0Tg4o5S09KR7zCHWqlutSriuC2nirqNE+onlbxYUx2JrXk5V+LrfNDAQ -# 0MPcQ1A5d6j7kNjY2262Nh6dU6fPNnUT +# bAB0ADoAIABSAGUAbABlAGEAcwBlAF8AdgAzAC4AMQAyAC4AMwBfADIAMAAyADQA +# MAA0ADAAOQAuADAAMaECgAAwDQYJKoZIhvcNAQEBBQAEggIAhvmA8nPNyn7jYkuO +# UobHjUZGgjE7xB8BnrDg5IErORsyIbCSQpdJ4oJ4cnEiVFElql5hVsEY6JWfguaj +# zELkcgfK5etJxPtNSua/FiBbXqitnDNyHtReLTb+u/T5110GDiTMzpytPorKc21h +# 6VRXrPmjSTp+Nuji0lulPR9KN6Lxua72t/LZYfT83iZD0eP7rnDoLHqoslb5llEV +# CaKgLZ8ap8l5uDwHqMm4FYvU9BswEcTQ75wD7jFQd5yGpOzht0hWjAgq9C+nI4Uu +# nygUpyj1XUAw9FaeRYiVtI0+hkmLNSkUELScNsyFSpZEOJWii3nKkRcupSp8YORJ +# d24/DjkUuNQMzlHDoVX07E8MFnzzGtgftoRilePSS6qer/RC+SHb18ioY4F0l/EU +# A8xb+ztHHdeR7OuThn4S/1TMCYJdmqUYpIkmAt1dG/eUlkp+R1DY1KdpT71L828L +# XgWipnS0PJuHSrWb428U7uF439jB2HjI35a5QclApg2UTeDPQvE0GhLPfJyJCYZ8 +# UYLtbOnd82M4pUJvU6NKmfxZuT2OhsXxQPDTwaqdKaVaBupPbsSTC0o58uarJSSi +# G5pj4NQyyt1IvZmM2ESV/FufDG5/zSoJ+HO5Ah/AV5ikPlgGwzjdY8vG2CSYJ9os +# kU9LigwvXmSH/4QYoWN7NtCTCAuhghc/MIIXOwYKKwYBBAGCNwMDATGCFyswghcn +# BgkqhkiG9w0BBwKgghcYMIIXFAIBAzEPMA0GCWCGSAFlAwQCAQUAMHcGCyqGSIb3 +# DQEJEAEEoGgEZjBkAgEBBglghkgBhv1sBwEwMTANBglghkgBZQMEAgEFAAQgOm4M +# dN4NqIa6dQy8bSjVKj542yV4A8OJPLti6aa7ZLUCEGqMmddBL+N1YSiZhxqcmY0Y +# DzIwMjQwNDA5MTQ0MTE2WqCCEwkwggbCMIIEqqADAgECAhAFRK/zlJ0IOaa/2z9f +# 5WEWMA0GCSqGSIb3DQEBCwUAMGMxCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5EaWdp +# Q2VydCwgSW5jLjE7MDkGA1UEAxMyRGlnaUNlcnQgVHJ1c3RlZCBHNCBSU0E0MDk2 +# IFNIQTI1NiBUaW1lU3RhbXBpbmcgQ0EwHhcNMjMwNzE0MDAwMDAwWhcNMzQxMDEz +# MjM1OTU5WjBIMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4x +# IDAeBgNVBAMTF0RpZ2lDZXJ0IFRpbWVzdGFtcCAyMDIzMIICIjANBgkqhkiG9w0B +# AQEFAAOCAg8AMIICCgKCAgEAo1NFhx2DjlusPlSzI+DPn9fl0uddoQ4J3C9Io5d6 +# OyqcZ9xiFVjBqZMRp82qsmrdECmKHmJjadNYnDVxvzqX65RQjxwg6seaOy+WZuNp +# 52n+W8PWKyAcwZeUtKVQgfLPywemMGjKg0La/H8JJJSkghraarrYO8pd3hkYhftF +# 6g1hbJ3+cV7EBpo88MUueQ8bZlLjyNY+X9pD04T10Mf2SC1eRXWWdf7dEKEbg8G4 +# 5lKVtUfXeCk5a+B4WZfjRCtK1ZXO7wgX6oJkTf8j48qG7rSkIWRw69XloNpjsy7p +# Be6q9iT1HbybHLK3X9/w7nZ9MZllR1WdSiQvrCuXvp/k/XtzPjLuUjT71Lvr1KAs +# NJvj3m5kGQc3AZEPHLVRzapMZoOIaGK7vEEbeBlt5NkP4FhB+9ixLOFRr7StFQYU +# 6mIIE9NpHnxkTZ0P387RXoyqq1AVybPKvNfEO2hEo6U7Qv1zfe7dCv95NBB+plwK +# WEwAPoVpdceDZNZ1zY8SdlalJPrXxGshuugfNJgvOuprAbD3+yqG7HtSOKmYCaFx +# smxxrz64b5bV4RAT/mFHCoz+8LbH1cfebCTwv0KCyqBxPZySkwS0aXAnDU+3tTbR +# yV8IpHCj7ArxES5k4MsiK8rxKBMhSVF+BmbTO77665E42FEHypS34lCh8zrTioPL +# QHsCAwEAAaOCAYswggGHMA4GA1UdDwEB/wQEAwIHgDAMBgNVHRMBAf8EAjAAMBYG +# A1UdJQEB/wQMMAoGCCsGAQUFBwMIMCAGA1UdIAQZMBcwCAYGZ4EMAQQCMAsGCWCG +# SAGG/WwHATAfBgNVHSMEGDAWgBS6FtltTYUvcyl2mi91jGogj57IbzAdBgNVHQ4E +# FgQUpbbvE+fvzdBkodVWqWUxo97V40kwWgYDVR0fBFMwUTBPoE2gS4ZJaHR0cDov +# L2NybDMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0VHJ1c3RlZEc0UlNBNDA5NlNIQTI1 +# NlRpbWVTdGFtcGluZ0NBLmNybDCBkAYIKwYBBQUHAQEEgYMwgYAwJAYIKwYBBQUH +# MAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBYBggrBgEFBQcwAoZMaHR0cDov +# L2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0VHJ1c3RlZEc0UlNBNDA5NlNI +# QTI1NlRpbWVTdGFtcGluZ0NBLmNydDANBgkqhkiG9w0BAQsFAAOCAgEAgRrW3qCp +# tZgXvHCNT4o8aJzYJf/LLOTN6l0ikuyMIgKpuM+AqNnn48XtJoKKcS8Y3U623mzX +# 4WCcK+3tPUiOuGu6fF29wmE3aEl3o+uQqhLXJ4Xzjh6S2sJAOJ9dyKAuJXglnSoF +# eoQpmLZXeY/bJlYrsPOnvTcM2Jh2T1a5UsK2nTipgedtQVyMadG5K8TGe8+c+nji +# kxp2oml101DkRBK+IA2eqUTQ+OVJdwhaIcW0z5iVGlS6ubzBaRm6zxbygzc0brBB +# Jt3eWpdPM43UjXd9dUWhpVgmagNF3tlQtVCMr1a9TMXhRsUo063nQwBw3syYnhmJ +# A+rUkTfvTVLzyWAhxFZH7doRS4wyw4jmWOK22z75X7BC1o/jF5HRqsBV44a/rCcs +# 
QdCaM0qoNtS5cpZ+l3k4SF/Kwtw9Mt911jZnWon49qfH5U81PAC9vpwqbHkB3NpE +# 5jreODsHXjlY9HxzMVWggBHLFAx+rrz+pOt5Zapo1iLKO+uagjVXKBbLafIymrLS +# 2Dq4sUaGa7oX/cR3bBVsrquvczroSUa31X/MtjjA2Owc9bahuEMs305MfR5ocMB3 +# CtQC4Fxguyj/OOVSWtasFyIjTvTs0xf7UGv/B3cfcZdEQcm4RtNsMnxYL2dHZeUb +# c7aZ+WssBkbvQR7w8F/g29mtkIBEr4AQQYowggauMIIElqADAgECAhAHNje3JFR8 +# 2Ees/ShmKl5bMA0GCSqGSIb3DQEBCwUAMGIxCzAJBgNVBAYTAlVTMRUwEwYDVQQK +# EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xITAfBgNV +# BAMTGERpZ2lDZXJ0IFRydXN0ZWQgUm9vdCBHNDAeFw0yMjAzMjMwMDAwMDBaFw0z +# NzAzMjIyMzU5NTlaMGMxCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5EaWdpQ2VydCwg +# SW5jLjE7MDkGA1UEAxMyRGlnaUNlcnQgVHJ1c3RlZCBHNCBSU0E0MDk2IFNIQTI1 +# NiBUaW1lU3RhbXBpbmcgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +# AQDGhjUGSbPBPXJJUVXHJQPE8pE3qZdRodbSg9GeTKJtoLDMg/la9hGhRBVCX6SI +# 82j6ffOciQt/nR+eDzMfUBMLJnOWbfhXqAJ9/UO0hNoR8XOxs+4rgISKIhjf69o9 +# xBd/qxkrPkLcZ47qUT3w1lbU5ygt69OxtXXnHwZljZQp09nsad/ZkIdGAHvbREGJ +# 3HxqV3rwN3mfXazL6IRktFLydkf3YYMZ3V+0VAshaG43IbtArF+y3kp9zvU5Emfv +# DqVjbOSmxR3NNg1c1eYbqMFkdECnwHLFuk4fsbVYTXn+149zk6wsOeKlSNbwsDET +# qVcplicu9Yemj052FVUmcJgmf6AaRyBD40NjgHt1biclkJg6OBGz9vae5jtb7IHe +# IhTZgirHkr+g3uM+onP65x9abJTyUpURK1h0QCirc0PO30qhHGs4xSnzyqqWc0Jo +# n7ZGs506o9UD4L/wojzKQtwYSH8UNM/STKvvmz3+DrhkKvp1KCRB7UK/BZxmSVJQ +# 9FHzNklNiyDSLFc1eSuo80VgvCONWPfcYd6T/jnA+bIwpUzX6ZhKWD7TA4j+s4/T +# Xkt2ElGTyYwMO1uKIqjBJgj5FBASA31fI7tk42PgpuE+9sJ0sj8eCXbsq11GdeJg +# o1gJASgADoRU7s7pXcheMBK9Rp6103a50g5rmQzSM7TNsQIDAQABo4IBXTCCAVkw +# EgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQUuhbZbU2FL3MpdpovdYxqII+e +# yG8wHwYDVR0jBBgwFoAU7NfjgtJxXWRM3y5nP+e6mK4cD08wDgYDVR0PAQH/BAQD +# AgGGMBMGA1UdJQQMMAoGCCsGAQUFBwMIMHcGCCsGAQUFBwEBBGswaTAkBggrBgEF +# BQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29tMEEGCCsGAQUFBzAChjVodHRw +# Oi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNlcnRUcnVzdGVkUm9vdEc0LmNy +# dDBDBgNVHR8EPDA6MDigNqA0hjJodHRwOi8vY3JsMy5kaWdpY2VydC5jb20vRGln +# aUNlcnRUcnVzdGVkUm9vdEc0LmNybDAgBgNVHSAEGTAXMAgGBmeBDAEEAjALBglg +# hkgBhv1sBwEwDQYJKoZIhvcNAQELBQADggIBAH1ZjsCTtm+YqUQiAX5m1tghQuGw +# GC4QTRPPMFPOvxj7x1Bd4ksp+3CKDaopafxpwc8dB+k+YMjYC+VcW9dth/qEICU0 +# MWfNthKWb8RQTGIdDAiCqBa9qVbPFXONASIlzpVpP0d3+3J0FNf/q0+KLHqrhc1D +# X+1gtqpPkWaeLJ7giqzl/Yy8ZCaHbJK9nXzQcAp876i8dU+6WvepELJd6f8oVInw +# 1YpxdmXazPByoyP6wCeCRK6ZJxurJB4mwbfeKuv2nrF5mYGjVoarCkXJ38SNoOeY +# +/umnXKvxMfBwWpx2cYTgAnEtp/Nh4cku0+jSbl3ZpHxcpzpSwJSpzd+k1OsOx0I +# SQ+UzTl63f8lY5knLD0/a6fxZsNBzU+2QJshIUDQtxMkzdwdeDrknq3lNHGS1yZr +# 5Dhzq6YBT70/O3itTK37xJV77QpfMzmHQXh6OOmc4d0j/R0o08f56PGYX/sr2H7y +# Rp11LB4nLCbbbxV7HhmLNriT1ObyF5lZynDwN7+YAN8gFk8n+2BnFqFmut1VwDop +# hrCYoCvtlUG3OtUVmDG0YgkPCr2B2RP+v6TR81fZvAT6gt4y3wSJ8ADNXcL50CN/ +# AAvkdgIm2fBldkKmKYcJRyvmfxqkhQ/8mJb2VVQrH4D6wPIOK+XW+6kvRBVK5xMO +# Hds3OBqhK/bt1nz8MIIFjTCCBHWgAwIBAgIQDpsYjvnQLefv21DiCEAYWjANBgkq +# hkiG9w0BAQwFADBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5j +# MRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBB +# c3N1cmVkIElEIFJvb3QgQ0EwHhcNMjIwODAxMDAwMDAwWhcNMzExMTA5MjM1OTU5 +# WjBiMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQL +# ExB3d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJv +# b3QgRzQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1K +# PDAiMGkz7MKnJS7JIT3yithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2r +# snnyyhHS5F/WBTxSD1Ifxp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C +# 8weE5nQ7bXHiLQwb7iDVySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBf +# sXpm7nfISKhmV1efVFiODCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGY +# QJB5w3jHtrHEtWoYOAMQjdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8 +# 
rhsDdV14Ztk6MUSaM0C/CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaY +# dj1ZXUJ2h4mXaXpI8OCiEhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+ +# wJS00mFt6zPZxd9LBADMfRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw +# ++hkpjPRiQfhvbfmQ6QYuKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+N +# P8m800ERElvlEFDrMcXKchYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7F +# wI+isX4KJpn15GkvmB0t9dmpsh3lGwIDAQABo4IBOjCCATYwDwYDVR0TAQH/BAUw +# AwEB/zAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wHwYDVR0jBBgwFoAU +# Reuir/SSy4IxLVGLp6chnfNtyA8wDgYDVR0PAQH/BAQDAgGGMHkGCCsGAQUFBwEB +# BG0wazAkBggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29tMEMGCCsG +# AQUFBzAChjdodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNlcnRBc3N1 +# cmVkSURSb290Q0EuY3J0MEUGA1UdHwQ+MDwwOqA4oDaGNGh0dHA6Ly9jcmwzLmRp +# Z2ljZXJ0LmNvbS9EaWdpQ2VydEFzc3VyZWRJRFJvb3RDQS5jcmwwEQYDVR0gBAow +# CDAGBgRVHSAAMA0GCSqGSIb3DQEBDAUAA4IBAQBwoL9DXFXnOF+go3QbPbYW1/e/ +# Vwe9mqyhhyzshV6pGrsi+IcaaVQi7aSId229GhT0E0p6Ly23OO/0/4C5+KH38nLe +# JLxSA8hO0Cre+i1Wz/n096wwepqLsl7Uz9FDRJtDIeuWcqFItJnLnU+nBgMTdydE +# 1Od/6Fmo8L8vC6bp8jQ87PcDx4eo0kxAGTVGamlUsLihVo7spNU96LHc/RzY9Hda +# XFSMb++hUD38dglohJ9vytsgjTVgHAIDyyCwrFigDkBjxZgiwbJZ9VVrzyerbHbO +# byMt9H5xaiNrIv8SuFQtJ37YOtnwtoeW/VvRXKwYw02fc7cBqZ9Xql4o4rmUMYID +# djCCA3ICAQEwdzBjMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIElu +# Yy4xOzA5BgNVBAMTMkRpZ2lDZXJ0IFRydXN0ZWQgRzQgUlNBNDA5NiBTSEEyNTYg +# VGltZVN0YW1waW5nIENBAhAFRK/zlJ0IOaa/2z9f5WEWMA0GCWCGSAFlAwQCAQUA +# oIHRMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAcBgkqhkiG9w0BCQUxDxcN +# MjQwNDA5MTQ0MTE2WjArBgsqhkiG9w0BCRACDDEcMBowGDAWBBRm8CsywsLJD4Jd +# zqqKycZPGZzPQDAvBgkqhkiG9w0BCQQxIgQgZm7KOZQwUzSPGy0QMkwOOcWaG+w5 +# Nm03VWlT93Aec3QwNwYLKoZIhvcNAQkQAi8xKDAmMCQwIgQg0vbkbe10IszR1EBX +# aEE2b4KK2lWarjMWr00amtQMeCgwDQYJKoZIhvcNAQEBBQAEggIATk+R26sfZdRy +# JMi4Ei4Xaj5TwNTbaUxG2P3ggvT5YpIxZobFK2OohvHFyr8g3ipihMeFc4LdruGS +# y/8GNua9BnUIYHBXMaxEtxYtiIs32a1R73VSoL/RHVXt/kNI0818/wVOI1aCZdt5 +# MJpjiopPNuneZqH5iym6URk1bvrKlWMFhgA8VggbqQwKiqEtVM7Fy744tuDQjqvE +# 5TG0Ts298gWzsDnwknFei8RKrbCsr+TRIQ5mWvs9zIBo6d2Rs+JMclGjTwJ71o50 +# Tcq7LgVDldWXARib9mQJmagw+y5LkG66W8I+i1mwQ/9Ur2Lrwnophg7F1HkODXI+ +# 7i3qeNDmnZLXiQM2WTsZ8OlEygAbFtE+ue4ucwFAMbh+1DRCOuwQoN2JMFGNtcj5 +# cbhBBb7am6mYGzg7sGvM7inLEKRV+pqJx1gV/SG+DIVQrHzdlvWTdRmJHIDBMqxr +# fpnulV80FMuiBJXvKU0RYEgQHw9QDyUNhdQeXSjBd9myt0EW2WyJLY3cYzjmZE1A +# AcQePJfMbMfYuIBtz7rIXZKNL7/wznLalKxS4aulS7chi5qRiaPJG3lFhlMx//cD +# 91DxPlbbpCymSjO+3aMRAZyEXjmpHPbROZDSkqLXFf8OYcw4uUZLPLxjbmbZM1hH +# dz3vPpscd6J1Tz93GwJu/8fZvMEYDjo= # SIG # End signature block diff --git a/PythonLib/full/xml/etree/ElementTree.py b/PythonLib/full/xml/etree/ElementTree.py index bb7362d1..fd2cc870 100644 --- a/PythonLib/full/xml/etree/ElementTree.py +++ b/PythonLib/full/xml/etree/ElementTree.py @@ -1313,6 +1313,11 @@ def read_events(self): else: yield event + def flush(self): + if self._parser is None: + raise ValueError("flush() called after end of stream") + self._parser.flush() + def XML(text, parser=None): """Parse XML document from string constant. 
@@ -1719,6 +1724,15 @@ def close(self): del self.parser, self._parser del self.target, self._target + def flush(self): + was_enabled = self.parser.GetReparseDeferralEnabled() + try: + self.parser.SetReparseDeferralEnabled(False) + self.parser.Parse(b"", False) + except self._error as v: + self._raiseerror(v) + finally: + self.parser.SetReparseDeferralEnabled(was_enabled) # -------------------------------------------------------------------- # C14N 2.0 diff --git a/PythonLib/full/xml/sax/expatreader.py b/PythonLib/full/xml/sax/expatreader.py index b9ad5269..ba3c1e98 100644 --- a/PythonLib/full/xml/sax/expatreader.py +++ b/PythonLib/full/xml/sax/expatreader.py @@ -214,6 +214,20 @@ def feed(self, data, isFinal=False): # FIXME: when to invoke error()? self._err_handler.fatalError(exc) + def flush(self): + if self._parser is None: + return + + was_enabled = self._parser.GetReparseDeferralEnabled() + try: + self._parser.SetReparseDeferralEnabled(False) + self._parser.Parse(b"", False) + except expat.error as e: + exc = SAXParseException(expat.ErrorString(e.code), e, self) + self._err_handler.fatalError(exc) + finally: + self._parser.SetReparseDeferralEnabled(was_enabled) + def _close_source(self): source = self._source try: diff --git a/PythonLib/full/zipfile/__init__.py b/PythonLib/full/zipfile/__init__.py index 89184842..91358156 100644 --- a/PythonLib/full/zipfile/__init__.py +++ b/PythonLib/full/zipfile/__init__.py @@ -582,7 +582,15 @@ def from_file(cls, filename, arcname=None, *, strict_timestamps=True): def is_dir(self): """Return True if this archive member is a directory.""" - return self.filename.endswith('/') + if self.filename.endswith('/'): + return True + # The ZIP format specification requires to use forward slashes + # as the directory separator, but in practice some ZIP files + # created on Windows can use backward slashes. For compatibility + # with the extraction code which already handles this: + if os.path.altsep: + return self.filename.endswith((os.path.sep, os.path.altsep)) + return False # ZIP encryption uses the CRC32 one-byte primitive for scrambling some @@ -1554,7 +1562,8 @@ def comment(self, comment): self._didModify = True def read(self, name, pwd=None): - """Return file bytes for name.""" + """Return file bytes for name. 'pwd' is the password to decrypt + encrypted files.""" with self.open(name, "r", pwd) as fp: return fp.read() @@ -1706,7 +1715,8 @@ def extract(self, member, path=None, pwd=None): """Extract a member from the archive to the current working directory, using its full name. Its file information is extracted as accurately as possible. `member' may be a filename or a ZipInfo object. You can - specify a different directory using `path'. + specify a different directory using `path'. You can specify the + password to decrypt the file using 'pwd'. """ if path is None: path = os.getcwd() @@ -1719,7 +1729,8 @@ def extractall(self, path=None, members=None, pwd=None): """Extract all members from the archive to the current working directory. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned - by namelist(). + by namelist(). You can specify the password to decrypt all files + using 'pwd'. 
""" if members is None: members = self.namelist() diff --git a/PythonLib/full_dll/_asyncio.pyd b/PythonLib/full_dll/_asyncio.pyd index 3ba47752..61c18bd4 100644 Binary files a/PythonLib/full_dll/_asyncio.pyd and b/PythonLib/full_dll/_asyncio.pyd differ diff --git a/PythonLib/full_dll/_bz2.pyd b/PythonLib/full_dll/_bz2.pyd index fe9f8c17..ba6ee43d 100644 Binary files a/PythonLib/full_dll/_bz2.pyd and b/PythonLib/full_dll/_bz2.pyd differ diff --git a/PythonLib/full_dll/_ctypes.pyd b/PythonLib/full_dll/_ctypes.pyd index e06132b1..6cfcf8ca 100644 Binary files a/PythonLib/full_dll/_ctypes.pyd and b/PythonLib/full_dll/_ctypes.pyd differ diff --git a/PythonLib/full_dll/_decimal.pyd b/PythonLib/full_dll/_decimal.pyd index 936b0f27..201be093 100644 Binary files a/PythonLib/full_dll/_decimal.pyd and b/PythonLib/full_dll/_decimal.pyd differ diff --git a/PythonLib/full_dll/_elementtree.pyd b/PythonLib/full_dll/_elementtree.pyd index cfd08534..bd35e8c0 100644 Binary files a/PythonLib/full_dll/_elementtree.pyd and b/PythonLib/full_dll/_elementtree.pyd differ diff --git a/PythonLib/full_dll/_hashlib.pyd b/PythonLib/full_dll/_hashlib.pyd index ebc1bc56..7820aae6 100644 Binary files a/PythonLib/full_dll/_hashlib.pyd and b/PythonLib/full_dll/_hashlib.pyd differ diff --git a/PythonLib/full_dll/_lzma.pyd b/PythonLib/full_dll/_lzma.pyd index 965567d3..a0c25bc6 100644 Binary files a/PythonLib/full_dll/_lzma.pyd and b/PythonLib/full_dll/_lzma.pyd differ diff --git a/PythonLib/full_dll/_msi.pyd b/PythonLib/full_dll/_msi.pyd index a81db7ac..7e5ecf25 100644 Binary files a/PythonLib/full_dll/_msi.pyd and b/PythonLib/full_dll/_msi.pyd differ diff --git a/PythonLib/full_dll/_multiprocessing.pyd b/PythonLib/full_dll/_multiprocessing.pyd index 04cc2c9b..7e0c148b 100644 Binary files a/PythonLib/full_dll/_multiprocessing.pyd and b/PythonLib/full_dll/_multiprocessing.pyd differ diff --git a/PythonLib/full_dll/_overlapped.pyd b/PythonLib/full_dll/_overlapped.pyd index 93baa89c..6a6a2857 100644 Binary files a/PythonLib/full_dll/_overlapped.pyd and b/PythonLib/full_dll/_overlapped.pyd differ diff --git a/PythonLib/full_dll/_queue.pyd b/PythonLib/full_dll/_queue.pyd index 69fccd26..000e6d54 100644 Binary files a/PythonLib/full_dll/_queue.pyd and b/PythonLib/full_dll/_queue.pyd differ diff --git a/PythonLib/full_dll/_socket.pyd b/PythonLib/full_dll/_socket.pyd index 81f9448c..ad00a25c 100644 Binary files a/PythonLib/full_dll/_socket.pyd and b/PythonLib/full_dll/_socket.pyd differ diff --git a/PythonLib/full_dll/_sqlite3.pyd b/PythonLib/full_dll/_sqlite3.pyd index 24866253..8b549e7a 100644 Binary files a/PythonLib/full_dll/_sqlite3.pyd and b/PythonLib/full_dll/_sqlite3.pyd differ diff --git a/PythonLib/full_dll/_ssl.pyd b/PythonLib/full_dll/_ssl.pyd index 2d64142d..3310ad94 100644 Binary files a/PythonLib/full_dll/_ssl.pyd and b/PythonLib/full_dll/_ssl.pyd differ diff --git a/PythonLib/full_dll/_uuid.pyd b/PythonLib/full_dll/_uuid.pyd index 1fa49afb..bf4a36c8 100644 Binary files a/PythonLib/full_dll/_uuid.pyd and b/PythonLib/full_dll/_uuid.pyd differ diff --git a/PythonLib/full_dll/_wmi.pyd b/PythonLib/full_dll/_wmi.pyd index 911c5610..6568d502 100644 Binary files a/PythonLib/full_dll/_wmi.pyd and b/PythonLib/full_dll/_wmi.pyd differ diff --git a/PythonLib/full_dll/_zoneinfo.pyd b/PythonLib/full_dll/_zoneinfo.pyd index 387ac805..1f2f6dec 100644 Binary files a/PythonLib/full_dll/_zoneinfo.pyd and b/PythonLib/full_dll/_zoneinfo.pyd differ diff --git a/PythonLib/full_dll/pyexpat.pyd b/PythonLib/full_dll/pyexpat.pyd index 
21a2a30d..4157f0af 100644 Binary files a/PythonLib/full_dll/pyexpat.pyd and b/PythonLib/full_dll/pyexpat.pyd differ diff --git a/PythonLib/full_dll/select.pyd b/PythonLib/full_dll/select.pyd index 1c7124c2..dfab1f91 100644 Binary files a/PythonLib/full_dll/select.pyd and b/PythonLib/full_dll/select.pyd differ diff --git a/PythonLib/full_dll/sqlite3.dll b/PythonLib/full_dll/sqlite3.dll index 255e4e73..c7ba339f 100644 Binary files a/PythonLib/full_dll/sqlite3.dll and b/PythonLib/full_dll/sqlite3.dll differ diff --git a/PythonLib/full_dll/unicodedata.pyd b/PythonLib/full_dll/unicodedata.pyd index 3b222998..118f0f15 100644 Binary files a/PythonLib/full_dll/unicodedata.pyd and b/PythonLib/full_dll/unicodedata.pyd differ diff --git a/PythonLib/full_dll/winsound.pyd b/PythonLib/full_dll/winsound.pyd index 9d5740ef..2d76d8c4 100644 Binary files a/PythonLib/full_dll/winsound.pyd and b/PythonLib/full_dll/winsound.pyd differ diff --git a/PythonLib/full_dll/zlib1.dll b/PythonLib/full_dll/zlib1.dll index 2bc705c6..e9edb906 100644 Binary files a/PythonLib/full_dll/zlib1.dll and b/PythonLib/full_dll/zlib1.dll differ diff --git a/PythonLib/full_dll_x64/_asyncio.pyd b/PythonLib/full_dll_x64/_asyncio.pyd index bfb63aec..a99d0a24 100644 Binary files a/PythonLib/full_dll_x64/_asyncio.pyd and b/PythonLib/full_dll_x64/_asyncio.pyd differ diff --git a/PythonLib/full_dll_x64/_bz2.pyd b/PythonLib/full_dll_x64/_bz2.pyd index 20df2beb..67bf7c2a 100644 Binary files a/PythonLib/full_dll_x64/_bz2.pyd and b/PythonLib/full_dll_x64/_bz2.pyd differ diff --git a/PythonLib/full_dll_x64/_ctypes.pyd b/PythonLib/full_dll_x64/_ctypes.pyd index ec6846b7..2f874ac0 100644 Binary files a/PythonLib/full_dll_x64/_ctypes.pyd and b/PythonLib/full_dll_x64/_ctypes.pyd differ diff --git a/PythonLib/full_dll_x64/_decimal.pyd b/PythonLib/full_dll_x64/_decimal.pyd index 42a55be3..6ad2cff1 100644 Binary files a/PythonLib/full_dll_x64/_decimal.pyd and b/PythonLib/full_dll_x64/_decimal.pyd differ diff --git a/PythonLib/full_dll_x64/_elementtree.pyd b/PythonLib/full_dll_x64/_elementtree.pyd index 79dce231..a4f39e94 100644 Binary files a/PythonLib/full_dll_x64/_elementtree.pyd and b/PythonLib/full_dll_x64/_elementtree.pyd differ diff --git a/PythonLib/full_dll_x64/_hashlib.pyd b/PythonLib/full_dll_x64/_hashlib.pyd index 45063e4b..59e583ec 100644 Binary files a/PythonLib/full_dll_x64/_hashlib.pyd and b/PythonLib/full_dll_x64/_hashlib.pyd differ diff --git a/PythonLib/full_dll_x64/_lzma.pyd b/PythonLib/full_dll_x64/_lzma.pyd index de405e49..4a9c6892 100644 Binary files a/PythonLib/full_dll_x64/_lzma.pyd and b/PythonLib/full_dll_x64/_lzma.pyd differ diff --git a/PythonLib/full_dll_x64/_msi.pyd b/PythonLib/full_dll_x64/_msi.pyd index 7da43e11..d0ec5395 100644 Binary files a/PythonLib/full_dll_x64/_msi.pyd and b/PythonLib/full_dll_x64/_msi.pyd differ diff --git a/PythonLib/full_dll_x64/_multiprocessing.pyd b/PythonLib/full_dll_x64/_multiprocessing.pyd index 323c3333..559d1676 100644 Binary files a/PythonLib/full_dll_x64/_multiprocessing.pyd and b/PythonLib/full_dll_x64/_multiprocessing.pyd differ diff --git a/PythonLib/full_dll_x64/_overlapped.pyd b/PythonLib/full_dll_x64/_overlapped.pyd index 2b6787a7..a7ee3868 100644 Binary files a/PythonLib/full_dll_x64/_overlapped.pyd and b/PythonLib/full_dll_x64/_overlapped.pyd differ diff --git a/PythonLib/full_dll_x64/_queue.pyd b/PythonLib/full_dll_x64/_queue.pyd index 4e9f3c7d..66791401 100644 Binary files a/PythonLib/full_dll_x64/_queue.pyd and b/PythonLib/full_dll_x64/_queue.pyd differ diff --git 
a/PythonLib/full_dll_x64/_socket.pyd b/PythonLib/full_dll_x64/_socket.pyd index 5e12bc34..a9ba93b5 100644 Binary files a/PythonLib/full_dll_x64/_socket.pyd and b/PythonLib/full_dll_x64/_socket.pyd differ diff --git a/PythonLib/full_dll_x64/_sqlite3.pyd b/PythonLib/full_dll_x64/_sqlite3.pyd index d0378648..64eb1dc6 100644 Binary files a/PythonLib/full_dll_x64/_sqlite3.pyd and b/PythonLib/full_dll_x64/_sqlite3.pyd differ diff --git a/PythonLib/full_dll_x64/_ssl.pyd b/PythonLib/full_dll_x64/_ssl.pyd index 2a10ae1f..33227dce 100644 Binary files a/PythonLib/full_dll_x64/_ssl.pyd and b/PythonLib/full_dll_x64/_ssl.pyd differ diff --git a/PythonLib/full_dll_x64/_uuid.pyd b/PythonLib/full_dll_x64/_uuid.pyd index 4f9e6668..ec243aee 100644 Binary files a/PythonLib/full_dll_x64/_uuid.pyd and b/PythonLib/full_dll_x64/_uuid.pyd differ diff --git a/PythonLib/full_dll_x64/_wmi.pyd b/PythonLib/full_dll_x64/_wmi.pyd index fa2c3739..0fd9e761 100644 Binary files a/PythonLib/full_dll_x64/_wmi.pyd and b/PythonLib/full_dll_x64/_wmi.pyd differ diff --git a/PythonLib/full_dll_x64/_zoneinfo.pyd b/PythonLib/full_dll_x64/_zoneinfo.pyd index 0d3e372b..ced6136d 100644 Binary files a/PythonLib/full_dll_x64/_zoneinfo.pyd and b/PythonLib/full_dll_x64/_zoneinfo.pyd differ diff --git a/PythonLib/full_dll_x64/pyexpat.pyd b/PythonLib/full_dll_x64/pyexpat.pyd index dd8e3a09..906673a3 100644 Binary files a/PythonLib/full_dll_x64/pyexpat.pyd and b/PythonLib/full_dll_x64/pyexpat.pyd differ diff --git a/PythonLib/full_dll_x64/select.pyd b/PythonLib/full_dll_x64/select.pyd index 938ea8fe..9b78fffa 100644 Binary files a/PythonLib/full_dll_x64/select.pyd and b/PythonLib/full_dll_x64/select.pyd differ diff --git a/PythonLib/full_dll_x64/sqlite3.dll b/PythonLib/full_dll_x64/sqlite3.dll index b00f0dc2..bcb2cc08 100644 Binary files a/PythonLib/full_dll_x64/sqlite3.dll and b/PythonLib/full_dll_x64/sqlite3.dll differ diff --git a/PythonLib/full_dll_x64/unicodedata.pyd b/PythonLib/full_dll_x64/unicodedata.pyd index f601fa6c..ad96372b 100644 Binary files a/PythonLib/full_dll_x64/unicodedata.pyd and b/PythonLib/full_dll_x64/unicodedata.pyd differ diff --git a/PythonLib/full_dll_x64/winsound.pyd b/PythonLib/full_dll_x64/winsound.pyd index 9036da5f..e3fc3b07 100644 Binary files a/PythonLib/full_dll_x64/winsound.pyd and b/PythonLib/full_dll_x64/winsound.pyd differ diff --git a/PythonLib/full_dll_x64/zlib1.dll b/PythonLib/full_dll_x64/zlib1.dll index c0174dbd..25d4c140 100644 Binary files a/PythonLib/full_dll_x64/zlib1.dll and b/PythonLib/full_dll_x64/zlib1.dll differ diff --git a/PythonLib/min/_pyio.py b/PythonLib/min/_pyio.py index 9641d431..687076fb 100644 --- a/PythonLib/min/_pyio.py +++ b/PythonLib/min/_pyio.py @@ -1209,7 +1209,8 @@ def _readinto(self, buf, read1): return written def tell(self): - return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos + # GH-95782: Keep return value non-negative + return max(_BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos, 0) def seek(self, pos, whence=0): if whence not in valid_seek_flags: diff --git a/PythonLib/min/argparse.py b/PythonLib/min/argparse.py index 484a1efd..120cb6c8 100644 --- a/PythonLib/min/argparse.py +++ b/PythonLib/min/argparse.py @@ -225,7 +225,8 @@ def format_help(self): # add the heading if the section was non-empty if self.heading is not SUPPRESS and self.heading is not None: current_indent = self.formatter._current_indent - heading = '%*s%s:\n' % (current_indent, '', self.heading) + heading_text = _('%(heading)s:') % dict(heading=self.heading) + 
heading = '%*s%s\n' % (current_indent, '', heading_text) else: heading = '' @@ -415,6 +416,8 @@ def _format_actions_usage(self, actions, groups): suppressed_actions_count += 1 exposed_actions_count = group_action_count - suppressed_actions_count + if not exposed_actions_count: + continue if not group.required: if start in inserts: @@ -720,7 +723,7 @@ def _get_help_string(self, action): if action.default is not SUPPRESS: defaulting_nargs = [OPTIONAL, ZERO_OR_MORE] if action.option_strings or action.nargs in defaulting_nargs: - help += ' (default: %(default)s)' + help += _(' (default: %(default)s)') return help @@ -1149,7 +1152,9 @@ def __init__(self, version=None, dest=SUPPRESS, default=SUPPRESS, - help="show program's version number and exit"): + help=None): + if help is None: + help = _("show program's version number and exit") super(_VersionAction, self).__init__( option_strings=option_strings, dest=dest, @@ -2004,7 +2009,7 @@ def consume_optional(start_index): # get the optional identified at this index option_tuple = option_string_indices[start_index] - action, option_string, explicit_arg = option_tuple + action, option_string, sep, explicit_arg = option_tuple # identify additional optionals in the same arg string # (e.g. -xyz is the same as -x -y -z if no args are required) @@ -2031,18 +2036,27 @@ def consume_optional(start_index): and option_string[1] not in chars and explicit_arg != '' ): + if sep or explicit_arg[0] in chars: + msg = _('ignored explicit argument %r') + raise ArgumentError(action, msg % explicit_arg) action_tuples.append((action, [], option_string)) char = option_string[0] option_string = char + explicit_arg[0] - new_explicit_arg = explicit_arg[1:] or None optionals_map = self._option_string_actions if option_string in optionals_map: action = optionals_map[option_string] - explicit_arg = new_explicit_arg + explicit_arg = explicit_arg[1:] + if not explicit_arg: + sep = explicit_arg = None + elif explicit_arg[0] == '=': + sep = '=' + explicit_arg = explicit_arg[1:] + else: + sep = '' else: - msg = _('ignored explicit argument %r') - raise ArgumentError(action, msg % explicit_arg) - + extras.append(char + explicit_arg) + stop = start_index + 1 + break # if the action expect exactly one argument, we've # successfully matched the option; exit the loop elif arg_count == 1: @@ -2262,18 +2276,17 @@ def _parse_optional(self, arg_string): # if the option string is present in the parser, return the action if arg_string in self._option_string_actions: action = self._option_string_actions[arg_string] - return action, arg_string, None + return action, arg_string, None, None # if it's just a single character, it was meant to be positional if len(arg_string) == 1: return None # if the option string before the "=" is present, return the action - if '=' in arg_string: - option_string, explicit_arg = arg_string.split('=', 1) - if option_string in self._option_string_actions: - action = self._option_string_actions[option_string] - return action, option_string, explicit_arg + option_string, sep, explicit_arg = arg_string.partition('=') + if sep and option_string in self._option_string_actions: + action = self._option_string_actions[option_string] + return action, option_string, sep, explicit_arg # search through all possible prefixes of the option string # and all actions in the parser for possible interpretations @@ -2282,7 +2295,7 @@ def _parse_optional(self, arg_string): # if multiple actions match, the option string was ambiguous if len(option_tuples) > 1: options = ', 
'.join([option_string - for action, option_string, explicit_arg in option_tuples]) + for action, option_string, sep, explicit_arg in option_tuples]) args = {'option': arg_string, 'matches': options} msg = _('ambiguous option: %(option)s could match %(matches)s') self.error(msg % args) @@ -2306,7 +2319,7 @@ def _parse_optional(self, arg_string): # it was meant to be an optional but there is no such option # in this parser (though it might be a valid option in a subparser) - return None, arg_string, None + return None, arg_string, None, None def _get_option_tuples(self, option_string): result = [] @@ -2316,15 +2329,13 @@ def _get_option_tuples(self, option_string): chars = self.prefix_chars if option_string[0] in chars and option_string[1] in chars: if self.allow_abbrev: - if '=' in option_string: - option_prefix, explicit_arg = option_string.split('=', 1) - else: - option_prefix = option_string - explicit_arg = None + option_prefix, sep, explicit_arg = option_string.partition('=') + if not sep: + sep = explicit_arg = None for option_string in self._option_string_actions: if option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, sep, explicit_arg result.append(tup) # single character options can be concatenated with their arguments @@ -2332,18 +2343,17 @@ def _get_option_tuples(self, option_string): # separate elif option_string[0] in chars and option_string[1] not in chars: option_prefix = option_string - explicit_arg = None short_option_prefix = option_string[:2] short_explicit_arg = option_string[2:] for option_string in self._option_string_actions: if option_string == short_option_prefix: action = self._option_string_actions[option_string] - tup = action, option_string, short_explicit_arg + tup = action, option_string, '', short_explicit_arg result.append(tup) elif option_string.startswith(option_prefix): action = self._option_string_actions[option_string] - tup = action, option_string, explicit_arg + tup = action, option_string, None, None result.append(tup) # shouldn't ever get here diff --git a/PythonLib/min/ast.py b/PythonLib/min/ast.py index de940d2e..b0995fa7 100644 --- a/PythonLib/min/ast.py +++ b/PythonLib/min/ast.py @@ -1268,14 +1268,18 @@ def visit_JoinedStr(self, node): quote_type = quote_types[0] self.write(f"{quote_type}{value}{quote_type}") - def _write_fstring_inner(self, node, scape_newlines=False): + def _write_fstring_inner(self, node, is_format_spec=False): if isinstance(node, JoinedStr): # for both the f-string itself, and format_spec for value in node.values: - self._write_fstring_inner(value, scape_newlines=scape_newlines) + self._write_fstring_inner(value, is_format_spec=is_format_spec) elif isinstance(node, Constant) and isinstance(node.value, str): value = node.value.replace("{", "{{").replace("}", "}}") - if scape_newlines: + + if is_format_spec: + value = value.replace("\\", "\\\\") + value = value.replace("'", "\\'") + value = value.replace('"', '\\"') value = value.replace("\n", "\\n") self.write(value) elif isinstance(node, FormattedValue): @@ -1299,10 +1303,7 @@ def unparse_inner(inner): self.write(f"!{chr(node.conversion)}") if node.format_spec: self.write(":") - self._write_fstring_inner( - node.format_spec, - scape_newlines=True - ) + self._write_fstring_inner(node.format_spec, is_format_spec=True) def visit_Name(self, node): self.write(node.id) diff --git a/PythonLib/min/configparser.py b/PythonLib/min/configparser.py index e8aae217..f96704eb 
100644 --- a/PythonLib/min/configparser.py +++ b/PythonLib/min/configparser.py @@ -995,100 +995,102 @@ def _read(self, fp, fpname): lineno = 0 indent_level = 0 e = None # None, or an exception - for lineno, line in enumerate(fp, start=1): - comment_start = sys.maxsize - # strip inline comments - inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} - while comment_start == sys.maxsize and inline_prefixes: - next_prefixes = {} - for prefix, index in inline_prefixes.items(): - index = line.find(prefix, index+1) - if index == -1: - continue - next_prefixes[prefix] = index - if index == 0 or (index > 0 and line[index-1].isspace()): - comment_start = min(comment_start, index) - inline_prefixes = next_prefixes - # strip full line comments - for prefix in self._comment_prefixes: - if line.strip().startswith(prefix): - comment_start = 0 - break - if comment_start == sys.maxsize: - comment_start = None - value = line[:comment_start].strip() - if not value: - if self._empty_lines_in_values: - # add empty line to the value, but only if there was no - # comment on the line - if (comment_start is None and - cursect is not None and - optname and - cursect[optname] is not None): - cursect[optname].append('') # newlines added at join - else: - # empty line marks end of value - indent_level = sys.maxsize - continue - # continuation line? - first_nonspace = self.NONSPACECRE.search(line) - cur_indent_level = first_nonspace.start() if first_nonspace else 0 - if (cursect is not None and optname and - cur_indent_level > indent_level): - cursect[optname].append(value) - # a section header or option header? - else: - indent_level = cur_indent_level - # is it a section header? - mo = self.SECTCRE.match(value) - if mo: - sectname = mo.group('header') - if sectname in self._sections: - if self._strict and sectname in elements_added: - raise DuplicateSectionError(sectname, fpname, - lineno) - cursect = self._sections[sectname] - elements_added.add(sectname) - elif sectname == self.default_section: - cursect = self._defaults + try: + for lineno, line in enumerate(fp, start=1): + comment_start = sys.maxsize + # strip inline comments + inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} + while comment_start == sys.maxsize and inline_prefixes: + next_prefixes = {} + for prefix, index in inline_prefixes.items(): + index = line.find(prefix, index+1) + if index == -1: + continue + next_prefixes[prefix] = index + if index == 0 or (index > 0 and line[index-1].isspace()): + comment_start = min(comment_start, index) + inline_prefixes = next_prefixes + # strip full line comments + for prefix in self._comment_prefixes: + if line.strip().startswith(prefix): + comment_start = 0 + break + if comment_start == sys.maxsize: + comment_start = None + value = line[:comment_start].strip() + if not value: + if self._empty_lines_in_values: + # add empty line to the value, but only if there was no + # comment on the line + if (comment_start is None and + cursect is not None and + optname and + cursect[optname] is not None): + cursect[optname].append('') # newlines added at join else: - cursect = self._dict() - self._sections[sectname] = cursect - self._proxies[sectname] = SectionProxy(self, sectname) - elements_added.add(sectname) - # So sections can't start with a continuation line - optname = None - # no section header in the file? - elif cursect is None: - raise MissingSectionHeaderError(fpname, lineno, line) - # an option line? 
+ # empty line marks end of value + indent_level = sys.maxsize + continue + # continuation line? + first_nonspace = self.NONSPACECRE.search(line) + cur_indent_level = first_nonspace.start() if first_nonspace else 0 + if (cursect is not None and optname and + cur_indent_level > indent_level): + cursect[optname].append(value) + # a section header or option header? else: - mo = self._optcre.match(value) + indent_level = cur_indent_level + # is it a section header? + mo = self.SECTCRE.match(value) if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') - if not optname: - e = self._handle_error(e, fpname, lineno, line) - optname = self.optionxform(optname.rstrip()) - if (self._strict and - (sectname, optname) in elements_added): - raise DuplicateOptionError(sectname, optname, - fpname, lineno) - elements_added.add((sectname, optname)) - # This check is fine because the OPTCRE cannot - # match if it would set optval to None - if optval is not None: - optval = optval.strip() - cursect[optname] = [optval] + sectname = mo.group('header') + if sectname in self._sections: + if self._strict and sectname in elements_added: + raise DuplicateSectionError(sectname, fpname, + lineno) + cursect = self._sections[sectname] + elements_added.add(sectname) + elif sectname == self.default_section: + cursect = self._defaults else: - # valueless option handling - cursect[optname] = None + cursect = self._dict() + self._sections[sectname] = cursect + self._proxies[sectname] = SectionProxy(self, sectname) + elements_added.add(sectname) + # So sections can't start with a continuation line + optname = None + # no section header in the file? + elif cursect is None: + raise MissingSectionHeaderError(fpname, lineno, line) + # an option line? else: - # a non-fatal parsing error occurred. set up the - # exception but keep going. the exception will be - # raised at the end of the file and will contain a - # list of all bogus lines - e = self._handle_error(e, fpname, lineno, line) - self._join_multiline_values() + mo = self._optcre.match(value) + if mo: + optname, vi, optval = mo.group('option', 'vi', 'value') + if not optname: + e = self._handle_error(e, fpname, lineno, line) + optname = self.optionxform(optname.rstrip()) + if (self._strict and + (sectname, optname) in elements_added): + raise DuplicateOptionError(sectname, optname, + fpname, lineno) + elements_added.add((sectname, optname)) + # This check is fine because the OPTCRE cannot + # match if it would set optval to None + if optval is not None: + optval = optval.strip() + cursect[optname] = [optval] + else: + # valueless option handling + cursect[optname] = None + else: + # a non-fatal parsing error occurred. set up the + # exception but keep going. 
the exception will be + # raised at the end of the file and will contain a + # list of all bogus lines + e = self._handle_error(e, fpname, lineno, line) + finally: + self._join_multiline_values() # if any parsing errors occurred, raise an exception if e: raise e diff --git a/PythonLib/min/dataclasses.py b/PythonLib/min/dataclasses.py index 3eacba84..12b2dfd1 100644 --- a/PythonLib/min/dataclasses.py +++ b/PythonLib/min/dataclasses.py @@ -1168,8 +1168,10 @@ def _dataclass_setstate(self, state): def _get_slots(cls): match cls.__dict__.get('__slots__'): + # A class which does not define __slots__ at all is equivalent + # to a class defining __slots__ = ('__dict__', '__weakref__') case None: - return + yield from ('__dict__', '__weakref__') case str(slot): yield slot # Slots may be any iterable, but we cannot handle an iterator diff --git a/PythonLib/min/doctest.py b/PythonLib/min/doctest.py index 4630e400..696bb966 100644 --- a/PythonLib/min/doctest.py +++ b/PythonLib/min/doctest.py @@ -1124,7 +1124,7 @@ def _find_lineno(self, obj, source_lines): obj = obj.fget if inspect.isfunction(obj) and getattr(obj, '__doc__', None): # We don't use `docstring` var here, because `obj` can be changed. - obj = obj.__code__ + obj = inspect.unwrap(obj).__code__ if inspect.istraceback(obj): obj = obj.tb_frame if inspect.isframe(obj): obj = obj.f_code if inspect.iscode(obj): @@ -2203,13 +2203,13 @@ def __init__(self, test, optionflags=0, setUp=None, tearDown=None, unittest.TestCase.__init__(self) self._dt_optionflags = optionflags self._dt_checker = checker - self._dt_globs = test.globs.copy() self._dt_test = test self._dt_setUp = setUp self._dt_tearDown = tearDown def setUp(self): test = self._dt_test + self._dt_globs = test.globs.copy() if self._dt_setUp is not None: self._dt_setUp(test) diff --git a/PythonLib/min/enum.py b/PythonLib/min/enum.py index 1502bfe9..af561383 100644 --- a/PythonLib/min/enum.py +++ b/PythonLib/min/enum.py @@ -166,6 +166,11 @@ def _dedent(text): lines[j] = l[i:] return '\n'.join(lines) +class _not_given: + def __repr__(self): + return('') +_not_given = _not_given() + class _auto_null: def __repr__(self): return '_auto_null' @@ -283,9 +288,10 @@ def __set_name__(self, enum_class, member_name): enum_member._sort_order_ = len(enum_class._member_names_) if Flag is not None and issubclass(enum_class, Flag): - enum_class._flag_mask_ |= value - if _is_single_bit(value): - enum_class._singles_mask_ |= value + if isinstance(value, int): + enum_class._flag_mask_ |= value + if _is_single_bit(value): + enum_class._singles_mask_ |= value enum_class._all_bits_ = 2 ** ((enum_class._flag_mask_).bit_length()) - 1 # If another member with the same value was already defined, the @@ -313,6 +319,7 @@ def __set_name__(self, enum_class, member_name): elif ( Flag is not None and issubclass(enum_class, Flag) + and isinstance(value, int) and _is_single_bit(value) ): # no other instances found, record this member in _member_names_ @@ -457,10 +464,11 @@ def __setitem__(self, key, value): if isinstance(value, auto): single = True value = (value, ) - if type(value) is tuple and any(isinstance(v, auto) for v in value): + if isinstance(value, tuple) and any(isinstance(v, auto) for v in value): # insist on an actual tuple, no subclasses, in keeping with only supporting # top-level auto() usage (not contained in any other data structure) auto_valued = [] + t = type(value) for v in value: if isinstance(v, auto): non_auto_store = False @@ -475,7 +483,12 @@ def __setitem__(self, key, value): if single: value = 
auto_valued[0] else: - value = tuple(auto_valued) + try: + # accepts iterable as multiple arguments? + value = t(auto_valued) + except TypeError: + # then pass them in singlely + value = t(*auto_valued) self._member_names[key] = None if non_auto_store: self._last_values.append(value) @@ -710,7 +723,7 @@ def __bool__(cls): """ return True - def __call__(cls, value, names=None, *values, module=None, qualname=None, type=None, start=1, boundary=None): + def __call__(cls, value, names=_not_given, *values, module=None, qualname=None, type=None, start=1, boundary=None): """ Either returns an existing member, or creates a new enum class. @@ -739,18 +752,18 @@ def __call__(cls, value, names=None, *values, module=None, qualname=None, type=N """ if cls._member_map_: # simple value lookup if members exist - if names: + if names is not _not_given: value = (value, names) + values return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type - if names is None and type is None: + if names is _not_given and type is None: # no body? no data-type? possibly wrong usage raise TypeError( f"{cls} has no members; specify `names=()` if you meant to create a new, empty, enum" ) return cls._create_( class_name=value, - names=names, + names=None if names is _not_given else names, module=module, qualname=qualname, type=type, @@ -1528,37 +1541,50 @@ def __str__(self): def __bool__(self): return bool(self._value_) + def _get_value(self, flag): + if isinstance(flag, self.__class__): + return flag._value_ + elif self._member_type_ is not object and isinstance(flag, self._member_type_): + return flag + return NotImplemented + def __or__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with |") value = self._value_ - return self.__class__(value | other) + return self.__class__(value | other_value) def __and__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with &") value = self._value_ - return self.__class__(value & other) + return self.__class__(value & other_value) def __xor__(self, other): - if isinstance(other, self.__class__): - other = other._value_ - elif self._member_type_ is not object and isinstance(other, self._member_type_): - other = other - else: + other_value = self._get_value(other) + if other_value is NotImplemented: return NotImplemented + + for flag in self, other: + if self._get_value(flag) is None: + raise TypeError(f"'{flag}' cannot be combined with other flags with ^") value = self._value_ - return self.__class__(value ^ other) + return self.__class__(value ^ other_value) def __invert__(self): + if self._get_value(self) is None: + raise TypeError(f"'{self}' cannot be inverted") + if self._inverted_ is None: if self._boundary_ in (EJECT, KEEP): self._inverted_ = self.__class__(~self._value_) @@ -1625,7 +1651,7 @@ def global_flag_repr(self): cls_name = 
self.__class__.__name__ if self._name_ is None: return "%s.%s(%r)" % (module, cls_name, self._value_) - if _is_single_bit(self): + if _is_single_bit(self._value_): return '%s.%s' % (module, self._name_) if self._boundary_ is not FlagBoundary.KEEP: return '|'.join(['%s.%s' % (module, name) for name in self.name.split('|')]) diff --git a/PythonLib/min/glob.py b/PythonLib/min/glob.py index a7256422..50beef37 100644 --- a/PythonLib/min/glob.py +++ b/PythonLib/min/glob.py @@ -132,7 +132,8 @@ def glob1(dirname, pattern): def _glob2(dirname, pattern, dir_fd, dironly, include_hidden=False): assert _isrecursive(pattern) - yield pattern[:0] + if not dirname or _isdir(dirname, dir_fd): + yield pattern[:0] yield from _rlistdir(dirname, dir_fd, dironly, include_hidden=include_hidden) diff --git a/PythonLib/min/inspect.py b/PythonLib/min/inspect.py index a550202b..819ce940 100644 --- a/PythonLib/min/inspect.py +++ b/PythonLib/min/inspect.py @@ -760,18 +760,14 @@ def unwrap(func, *, stop=None): :exc:`ValueError` is raised if a cycle is encountered. """ - if stop is None: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') - else: - def _is_wrapper(f): - return hasattr(f, '__wrapped__') and not stop(f) f = func # remember the original func for error reporting # Memoise by id to tolerate non-hashable objects, but store objects to # ensure they aren't destroyed, which would allow their IDs to be reused. memo = {id(f): f} recursion_limit = sys.getrecursionlimit() - while _is_wrapper(func): + while not isinstance(func, type) and hasattr(func, '__wrapped__'): + if stop is not None and stop(func): + break func = func.__wrapped__ id_func = id(func) if (id_func in memo) or (len(memo) >= recursion_limit): @@ -2007,15 +2003,17 @@ def _signature_get_user_defined_method(cls, method_name): named ``method_name`` and returns it only if it is a pure python function. 
""" - try: - meth = getattr(cls, method_name) - except AttributeError: - return + if method_name == '__new__': + meth = getattr(cls, method_name, None) else: - if not isinstance(meth, _NonUserDefinedCallables): - # Once '__signature__' will be added to 'C'-level - # callables, this check won't be necessary - return meth + meth = getattr_static(cls, method_name, None) + if meth is None or isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return None + if method_name != '__new__': + meth = _descriptor_get(meth, cls) + return meth def _signature_get_partial(wrapped_sig, partial, extra_args=()): @@ -2460,6 +2458,15 @@ def _signature_from_function(cls, func, skip_bound_arg=True, __validate_parameters__=is_duck_function) +def _descriptor_get(descriptor, obj): + if isclass(descriptor): + return descriptor + get = getattr(type(descriptor), '__get__', _sentinel) + if get is _sentinel: + return descriptor + return get(descriptor, obj, type(obj)) + + def _signature_from_callable(obj, *, follow_wrapper_chains=True, skip_bound_arg=True, @@ -2568,7 +2575,6 @@ def _signature_from_callable(obj, *, wrapped_sig = _get_signature_of(obj.func) return _signature_get_partial(wrapped_sig, obj) - sig = None if isinstance(obj, type): # obj is a class or a metaclass @@ -2576,88 +2582,65 @@ def _signature_from_callable(obj, *, # in its metaclass call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: - sig = _get_signature_of(call) - else: - factory_method = None - new = _signature_get_user_defined_method(obj, '__new__') - init = _signature_get_user_defined_method(obj, '__init__') - - # Go through the MRO and see if any class has user-defined - # pure Python __new__ or __init__ method - for base in obj.__mro__: - # Now we check if the 'obj' class has an own '__new__' method - if new is not None and '__new__' in base.__dict__: - factory_method = new - break - # or an own '__init__' method - elif init is not None and '__init__' in base.__dict__: - factory_method = init - break + return _get_signature_of(call) - if factory_method is not None: - sig = _get_signature_of(factory_method) - - if sig is None: - # At this point we know, that `obj` is a class, with no user- - # defined '__init__', '__new__', or class-level '__call__' - - for base in obj.__mro__[:-1]: - # Since '__text_signature__' is implemented as a - # descriptor that extracts text signature from the - # class docstring, if 'obj' is derived from a builtin - # class, its own '__text_signature__' may be 'None'. - # Therefore, we go through the MRO (except the last - # class in there, which is 'object') to find the first - # class with non-empty text signature. - try: - text_sig = base.__text_signature__ - except AttributeError: - pass - else: - if text_sig: - # If 'base' class has a __text_signature__ attribute: - # return a signature based on it - return _signature_fromstr(sigcls, base, text_sig) - - # No '__text_signature__' was found for the 'obj' class. - # Last option is to check if its '__init__' is - # object.__init__ or type.__init__. - if type not in obj.__mro__: - # We have a class (not metaclass), but no user-defined - # __init__ or __new__ for it - if (obj.__init__ is object.__init__ and - obj.__new__ is object.__new__): - # Return a signature of 'object' builtin. 
- return sigcls.from_callable(object) - else: - raise ValueError( - 'no signature found for builtin type {!r}'.format(obj)) + new = _signature_get_user_defined_method(obj, '__new__') + init = _signature_get_user_defined_method(obj, '__init__') - elif not isinstance(obj, _NonUserDefinedCallables): - # An object with __call__ - # We also check that the 'obj' is not an instance of - # types.WrapperDescriptorType or types.MethodWrapperType to avoid - # infinite recursion (and even potential segfault) - call = _signature_get_user_defined_method(type(obj), '__call__') - if call is not None: + # Go through the MRO and see if any class has user-defined + # pure Python __new__ or __init__ method + for base in obj.__mro__: + # Now we check if the 'obj' class has an own '__new__' method + if new is not None and '__new__' in base.__dict__: + sig = _get_signature_of(new) + if skip_bound_arg: + sig = _signature_bound_method(sig) + return sig + # or an own '__init__' method + elif init is not None and '__init__' in base.__dict__: + return _get_signature_of(init) + + # At this point we know, that `obj` is a class, with no user- + # defined '__init__', '__new__', or class-level '__call__' + + for base in obj.__mro__[:-1]: + # Since '__text_signature__' is implemented as a + # descriptor that extracts text signature from the + # class docstring, if 'obj' is derived from a builtin + # class, its own '__text_signature__' may be 'None'. + # Therefore, we go through the MRO (except the last + # class in there, which is 'object') to find the first + # class with non-empty text signature. try: - sig = _get_signature_of(call) - except ValueError as ex: - msg = 'no signature found for {!r}'.format(obj) - raise ValueError(msg) from ex - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - if skip_bound_arg: - return _signature_bound_method(sig) - else: - return sig + text_sig = base.__text_signature__ + except AttributeError: + pass + else: + if text_sig: + # If 'base' class has a __text_signature__ attribute: + # return a signature based on it + return _signature_fromstr(sigcls, base, text_sig) + + # No '__text_signature__' was found for the 'obj' class. + # Last option is to check if its '__init__' is + # object.__init__ or type.__init__. + if type not in obj.__mro__: + # We have a class (not metaclass), but no user-defined + # __init__ or __new__ for it + if (obj.__init__ is object.__init__ and + obj.__new__ is object.__new__): + # Return a signature of 'object' builtin. 
+ return sigcls.from_callable(object) + else: + raise ValueError( + 'no signature found for builtin type {!r}'.format(obj)) - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = 'no signature found for builtin function {!r}'.format(obj) - raise ValueError(msg) + else: + # An object with __call__ + call = getattr_static(type(obj), '__call__', None) + if call is not None: + call = _descriptor_get(call, obj) + return _get_signature_of(call) raise ValueError('callable {!r} is not supported by signature'.format(obj)) diff --git a/PythonLib/min/linecache.py b/PythonLib/min/linecache.py index 97644a8e..ed4c9700 100644 --- a/PythonLib/min/linecache.py +++ b/PythonLib/min/linecache.py @@ -166,13 +166,11 @@ def lazycache(filename, module_globals): return False # Try for a __loader__, if available if module_globals and '__name__' in module_globals: - name = module_globals['__name__'] - if (loader := module_globals.get('__loader__')) is None: - if spec := module_globals.get('__spec__'): - try: - loader = spec.loader - except AttributeError: - pass + spec = module_globals.get('__spec__') + name = getattr(spec, 'name', None) or module_globals['__name__'] + loader = getattr(spec, 'loader', None) + if loader is None: + loader = module_globals.get('__loader__') get_source = getattr(loader, 'get_source', None) if name and get_source: diff --git a/PythonLib/min/mailbox.py b/PythonLib/min/mailbox.py index c8b3444f..c7f060ce 100644 --- a/PythonLib/min/mailbox.py +++ b/PythonLib/min/mailbox.py @@ -698,9 +698,13 @@ def flush(self): _sync_close(new_file) # self._file is about to get replaced, so no need to sync. self._file.close() - # Make sure the new file's mode is the same as the old file's - mode = os.stat(self._path).st_mode - os.chmod(new_file.name, mode) + # Make sure the new file's mode and owner are the same as the old file's + info = os.stat(self._path) + os.chmod(new_file.name, info.st_mode) + try: + os.chown(new_file.name, info.st_uid, info.st_gid) + except (AttributeError, OSError): + pass try: os.rename(new_file.name, self._path) except FileExistsError: diff --git a/PythonLib/min/mimetypes.py b/PythonLib/min/mimetypes.py index 37228de4..3cc027aa 100644 --- a/PythonLib/min/mimetypes.py +++ b/PythonLib/min/mimetypes.py @@ -120,7 +120,13 @@ def guess_type(self, url, strict=True): but non-standard types. """ url = os.fspath(url) - scheme, url = urllib.parse._splittype(url) + p = urllib.parse.urlparse(url) + if p.scheme and len(p.scheme) > 1: + scheme = p.scheme + url = p.path + else: + scheme = None + url = os.path.splitdrive(url)[1] if scheme == 'data': # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data diff --git a/PythonLib/min/os.py b/PythonLib/min/os.py index 598c9e50..7ee7d695 100644 --- a/PythonLib/min/os.py +++ b/PythonLib/min/os.py @@ -473,7 +473,7 @@ def fwalk(top=".", topdown=True, onerror=None, *, follow_symlinks=False, dir_fd= # lstat()/open()/fstat() trick. 
if not follow_symlinks: orig_st = stat(top, follow_symlinks=False, dir_fd=dir_fd) - topfd = open(top, O_RDONLY, dir_fd=dir_fd) + topfd = open(top, O_RDONLY | O_NONBLOCK, dir_fd=dir_fd) try: if (follow_symlinks or (st.S_ISDIR(orig_st.st_mode) and path.samestat(orig_st, stat(topfd)))): @@ -522,7 +522,7 @@ def _fwalk(topfd, toppath, isbytes, topdown, onerror, follow_symlinks): assert entries is not None name, entry = name orig_st = entry.stat(follow_symlinks=False) - dirfd = open(name, O_RDONLY, dir_fd=topfd) + dirfd = open(name, O_RDONLY | O_NONBLOCK, dir_fd=topfd) except OSError as err: if onerror is not None: onerror(err) diff --git a/PythonLib/min/pdb.py b/PythonLib/min/pdb.py index a838a26b..225c9f25 100644 --- a/PythonLib/min/pdb.py +++ b/PythonLib/min/pdb.py @@ -154,6 +154,7 @@ def namespace(self): __name__='__main__', __file__=self, __builtins__=__builtins__, + __spec__=None, ) @property @@ -298,26 +299,13 @@ def setup(self, f, tb): # cache it here to ensure that modifications are not overwritten. self.curframe_locals = self.curframe.f_locals self.set_convenience_variable(self.curframe, '_frame', self.curframe) - return self.execRcLines() - # Can be executed earlier than 'setup' if desired - def execRcLines(self): - if not self.rcLines: - return - # local copy because of recursion - rcLines = self.rcLines - rcLines.reverse() - # execute every line only once - self.rcLines = [] - while rcLines: - line = rcLines.pop().strip() - if line and line[0] != '#': - if self.onecmd(line): - # if onecmd returns True, the command wants to exit - # from the interaction, save leftover rc lines - # to execute before next interaction - self.rcLines += reversed(rcLines) - return True + if self.rcLines: + self.cmdqueue = [ + line for line in self.rcLines + if line.strip() and not line.strip().startswith("#") + ] + self.rcLines = [] # Override Bdb methods @@ -430,12 +418,10 @@ def interaction(self, frame, traceback): pass else: Pdb._previous_sigint_handler = None - if self.setup(frame, traceback): - # no interaction desired at this time (happens if .pdbrc contains - # a command like "continue") - self.forget() - return - self.print_stack_entry(self.stack[self.curindex]) + self.setup(frame, traceback) + # if we have more commands to process, do not show the stack entry + if not self.cmdqueue: + self.print_stack_entry(self.stack[self.curindex]) self._cmdloop() self.forget() @@ -522,7 +508,7 @@ def precmd(self, line): if marker >= 0: # queue up everything after marker next = line[marker+2:].lstrip() - self.cmdqueue.append(next) + self.cmdqueue.insert(0, next) line = line[:marker].rstrip() # Replace all the convenience variables @@ -546,13 +532,12 @@ def handle_command_def(self, line): """Handles one command line during command list definition.""" cmd, arg, line = self.parseline(line) if not cmd: - return + return False if cmd == 'silent': self.commands_silent[self.commands_bnum] = True - return # continue to handle other cmd def in the cmd list + return False # continue to handle other cmd def in the cmd list elif cmd == 'end': - self.cmdqueue = [] - return 1 # end of cmd list + return True # end of cmd list cmdlist = self.commands[self.commands_bnum] if arg: cmdlist.append(cmd+' '+arg) @@ -566,9 +551,8 @@ def handle_command_def(self, line): # one of the resuming commands if func.__name__ in self.commands_resuming: self.commands_doprompt[self.commands_bnum] = False - self.cmdqueue = [] - return 1 - return + return True + return False # interface abstraction functions diff --git a/PythonLib/min/pickletools.py 
b/PythonLib/min/pickletools.py index 95a77aeb..51ee4a7a 100644 --- a/PythonLib/min/pickletools.py +++ b/PythonLib/min/pickletools.py @@ -1253,7 +1253,7 @@ def __init__(self, name, code, arg, stack_before=[], stack_after=[pyint], proto=2, - doc="""Long integer using found-byte length. + doc="""Long integer using four-byte length. A more efficient encoding of a Python long; the long4 encoding says it all."""), diff --git a/PythonLib/min/pydoc.py b/PythonLib/min/pydoc.py index 84bbf588..9a881239 100644 --- a/PythonLib/min/pydoc.py +++ b/PythonLib/min/pydoc.py @@ -204,6 +204,19 @@ def classname(object, modname): name = object.__module__ + '.' + name return name +def parentname(object, modname): + """Get a name of the enclosing class (qualified it with a module name + if necessary) or module.""" + if '.' in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + return object.__module__ + '.' + name + else: + return name + else: + if object.__module__ != modname: + return object.__module__ + def isdata(object): """Check if an object is of a type that probably means it's data.""" return not (inspect.ismodule(object) or inspect.isclass(object) or @@ -298,13 +311,15 @@ def visiblename(name, all=None, obj=None): return not name.startswith('_') def classify_class_attrs(object): - """Wrap inspect.classify_class_attrs, with fixup for data descriptors.""" + """Wrap inspect.classify_class_attrs, with fixup for data descriptors and bound methods.""" results = [] for (name, kind, cls, value) in inspect.classify_class_attrs(object): if inspect.isdatadescriptor(value): kind = 'data descriptor' if isinstance(value, property) and value.fset is None: kind = 'readonly property' + elif kind == 'method' and _is_bound_method(value): + kind = 'static method' results.append((name, kind, cls, value)) return results @@ -514,7 +529,7 @@ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')): '_thread', 'zipimport') or (file.startswith(basedir) and not file.startswith(os.path.join(basedir, 'site-packages')))) and - object.__name__ not in ('xml.etree', 'test.pydoc_mod')): + object.__name__ not in ('xml.etree', 'test.test_pydoc.pydoc_mod')): if docloc.startswith(("http://", "https://")): docloc = "{}/{}.html".format(docloc.rstrip("/"), object.__name__.lower()) else: @@ -658,6 +673,25 @@ def classlink(self, object, modname): module.__name__, name, classname(object, modname)) return classname(object, modname) + def parentlink(self, object, modname): + """Make a link for the enclosing class or module.""" + link = None + name, module = object.__name__, sys.modules.get(object.__module__) + if hasattr(module, name) and getattr(module, name) is object: + if '.' 
in object.__qualname__: + name = object.__qualname__.rpartition('.')[0] + if object.__module__ != modname: + link = '%s.html#%s' % (module.__name__, name) + else: + link = '#%s' % name + else: + if object.__module__ != modname: + link = '%s.html' % module.__name__ + if link: + return '<a href="%s">%s</a>' % (link, parentname(object, modname)) + else: + return parentname(object, modname) + def modulelink(self, object): """Make a link for a module.""" return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__) @@ -902,7 +936,7 @@ def spill(msg, attrs, predicate): push(self.docdata(value, name, mod)) else: push(self.document(value, name, mod, - funcs, classes, mdict, object)) + funcs, classes, mdict, object, homecls)) push('\n') return attrs @@ -1025,24 +1059,44 @@ def formatvalue(self, object): return self.grey('=' + self.repr(object)) def docroutine(self, object, name=None, mod=None, - funcs={}, classes={}, methods={}, cl=None): + funcs={}, classes={}, methods={}, cl=None, homecls=None): """Produce HTML documentation for a function or method object.""" realname = object.__name__ name = name or realname - anchor = (cl and cl.__name__ or '') + '-' + name + if homecls is None: + homecls = cl + anchor = ('' if cl is None else cl.__name__) + '-' + name note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + self.classlink(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % self.classlink(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % self.classlink( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % self.classlink(imclass,mod) + note = ' method of %s instance' % self.classlink( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % self.classlink(objclass, mod) + elif objclass is not homecls: + note = ' from ' + self.classlink(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = self.parentlink(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1053,10 +1107,13 @@ def docroutine(self, object, name=None, mod=None, if name == realname: title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): reallink = '<a href="#%s">%s</a>' % ( cl.__name__ + '-' + realname, realname) - skipdocs = 1 + skipdocs = True + if note.startswith(' from '): + note = '' else: reallink = realname title = '<a name="%s"><strong>%s</strong></a> = %s' % ( @@ -1074,7 +1131,8 @@ def docroutine(self, object, name=None, mod=None, # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. - argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' @@ -1089,7 +1147,7 @@ def docroutine(self, object, name=None, mod=None, doc = doc and '<dd><span class="code">%s</span></dd>' % doc return '<dl><dt>%s</dt>%s</dl>
\n' % (decl, doc) - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce html documentation for a data descriptor.""" results = [] push = results.append @@ -1200,7 +1258,7 @@ def formattree(self, tree, modname, parent=None, prefix=''): entry, modname, c, prefix + ' ') return result - def docmodule(self, object, name=None, mod=None): + def docmodule(self, object, name=None, mod=None, *ignored): """Produce text documentation for a given module object.""" name = object.__name__ # ignore the passed-in name synop, desc = splitdoc(getdoc(object)) @@ -1384,7 +1442,7 @@ def spill(msg, attrs, predicate): push(self.docdata(value, name, mod)) else: push(self.document(value, - name, mod, object)) + name, mod, object, homecls)) return attrs def spilldescriptors(msg, attrs, predicate): @@ -1459,23 +1517,43 @@ def formatvalue(self, object): """Format an argument default value as text.""" return '=' + self.repr(object) - def docroutine(self, object, name=None, mod=None, cl=None): + def docroutine(self, object, name=None, mod=None, cl=None, homecls=None): """Produce text documentation for a function or method object.""" realname = object.__name__ name = name or realname + if homecls is None: + homecls = cl note = '' - skipdocs = 0 + skipdocs = False + imfunc = None if _is_bound_method(object): - imclass = object.__self__.__class__ - if cl: - if imclass is not cl: - note = ' from ' + classname(imclass, mod) + imself = object.__self__ + if imself is cl: + imfunc = getattr(object, '__func__', None) + elif inspect.isclass(imself): + note = ' class method of %s' % classname(imself, mod) else: - if object.__self__ is not None: - note = ' method of %s instance' % classname( - object.__self__.__class__, mod) - else: - note = ' unbound %s method' % classname(imclass,mod) + note = ' method of %s instance' % classname( + imself.__class__, mod) + elif (inspect.ismethoddescriptor(object) or + inspect.ismethodwrapper(object)): + try: + objclass = object.__objclass__ + except AttributeError: + pass + else: + if cl is None: + note = ' unbound %s method' % classname(objclass, mod) + elif objclass is not homecls: + note = ' from ' + classname(objclass, mod) + else: + imfunc = object + if inspect.isfunction(imfunc) and homecls is not None and ( + imfunc.__module__ != homecls.__module__ or + imfunc.__qualname__ != homecls.__qualname__ + '.' + realname): + pname = parentname(imfunc, mod) + if pname: + note = ' from %s' % pname if (inspect.iscoroutinefunction(object) or inspect.isasyncgenfunction(object)): @@ -1486,8 +1564,11 @@ def docroutine(self, object, name=None, mod=None, cl=None): if name == realname: title = self.bold(realname) else: - if cl and inspect.getattr_static(cl, realname, []) is object: - skipdocs = 1 + if (cl is not None and + inspect.getattr_static(cl, realname, []) is object): + skipdocs = True + if note.startswith(' from '): + note = '' title = self.bold(name) + ' = ' + realname argspec = None @@ -1503,7 +1584,8 @@ def docroutine(self, object, name=None, mod=None, cl=None): # XXX lambda's won't usually have func_annotations['return'] # since the syntax doesn't support but it is possible. # So removing parentheses isn't truly safe. 
- argspec = argspec[1:-1] # remove parentheses + if not object.__annotations__: + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' decl = asyncqualifier + title + argspec + note @@ -1514,7 +1596,7 @@ def docroutine(self, object, name=None, mod=None, cl=None): doc = getdoc(object) or '' return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n') - def docdata(self, object, name=None, mod=None, cl=None): + def docdata(self, object, name=None, mod=None, cl=None, *ignored): """Produce text documentation for a data descriptor.""" results = [] push = results.append @@ -1530,7 +1612,8 @@ def docdata(self, object, name=None, mod=None, cl=None): docproperty = docdata - def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None): + def docother(self, object, name=None, mod=None, parent=None, *ignored, + maxlen=None, doc=None): """Produce text documentation for a data object.""" repr = self.repr(object) if maxlen: @@ -2410,6 +2493,7 @@ def __init__(self, urlhandler, host, port): threading.Thread.__init__(self) self.serving = False self.error = None + self.docserver = None def run(self): """Start the server.""" @@ -2442,9 +2526,9 @@ def stop(self): thread = ServerThread(urlhandler, hostname, port) thread.start() - # Wait until thread.serving is True to make sure we are - # really up before returning. - while not thread.error and not thread.serving: + # Wait until thread.serving is True and thread.docserver is set + # to make sure we are really up before returning. + while not thread.error and not (thread.serving and thread.docserver): time.sleep(.01) return thread diff --git a/PythonLib/min/shutil.py b/PythonLib/min/shutil.py index 96463007..3a2b6be3 100644 --- a/PythonLib/min/shutil.py +++ b/PythonLib/min/shutil.py @@ -676,7 +676,7 @@ def _rmtree_safe_fd(topfd, path, onexc): continue if is_dir: try: - dirfd = os.open(entry.name, os.O_RDONLY, dir_fd=topfd) + dirfd = os.open(entry.name, os.O_RDONLY | os.O_NONBLOCK, dir_fd=topfd) dirfd_closed = False except OSError as err: onexc(os.open, fullname, err) @@ -775,7 +775,7 @@ def onexc(*args): onexc(os.lstat, path, err) return try: - fd = os.open(path, os.O_RDONLY, dir_fd=dir_fd) + fd = os.open(path, os.O_RDONLY | os.O_NONBLOCK, dir_fd=dir_fd) fd_closed = False except Exception as err: onexc(os.open, path, err) diff --git a/PythonLib/min/subprocess.py b/PythonLib/min/subprocess.py index 3264d9af..1d17ae36 100644 --- a/PythonLib/min/subprocess.py +++ b/PythonLib/min/subprocess.py @@ -1581,6 +1581,8 @@ def _wait(self, timeout): """Internal implementation of wait() on Windows.""" if timeout is None: timeout_millis = _winapi.INFINITE + elif timeout <= 0: + timeout_millis = 0 else: timeout_millis = int(timeout * 1000) if self.returncode is None: diff --git a/PythonLib/min/tokenize.py b/PythonLib/min/tokenize.py index 49e8144e..7af7a5cc 100644 --- a/PythonLib/min/tokenize.py +++ b/PythonLib/min/tokenize.py @@ -170,6 +170,7 @@ def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 + self.prev_type = None self.encoding = None def add_whitespace(self, start): @@ -185,6 +186,29 @@ def add_whitespace(self, start): if col_offset: self.tokens.append(" " * col_offset) + def escape_brackets(self, token): + characters = [] + consume_until_next_bracket = False + for character in token: + if character == "}": + if consume_until_next_bracket: + consume_until_next_bracket = False + else: + characters.append(character) + if character == "{": + n_backslashes = sum( + 1 for char in _itertools.takewhile( + 
"\\".__eq__, + characters[-2::-1] + ) + ) + if n_backslashes % 2 == 0: + characters.append(character) + else: + consume_until_next_bracket = True + characters.append(character) + return "".join(characters) + def untokenize(self, iterable): it = iter(iterable) indents = [] @@ -216,11 +240,13 @@ def untokenize(self, iterable): startline = False elif tok_type == FSTRING_MIDDLE: if '{' in token or '}' in token: + token = self.escape_brackets(token) + last_line = token.splitlines()[-1] end_line, end_col = end - end = (end_line, end_col + token.count('{') + token.count('}')) - token = re.sub('{', '{{', token) - token = re.sub('}', '}}', token) - + extra_chars = last_line.count("{{") + last_line.count("}}") + end = (end_line, end_col + extra_chars) + elif tok_type in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): + self.tokens.append(" ") self.add_whitespace(start) self.tokens.append(token) @@ -228,6 +254,7 @@ def untokenize(self, iterable): if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 + self.prev_type = tok_type return "".join(self.tokens) def compat(self, token, iterable): @@ -235,6 +262,7 @@ def compat(self, token, iterable): toks_append = self.tokens.append startline = token[0] in (NEWLINE, NL) prevstring = False + in_fstring = 0 for tok in _itertools.chain([token], iterable): toknum, tokval = tok[:2] @@ -253,6 +281,10 @@ def compat(self, token, iterable): else: prevstring = False + if toknum == FSTRING_START: + in_fstring += 1 + elif toknum == FSTRING_END: + in_fstring -= 1 if toknum == INDENT: indents.append(tokval) continue @@ -265,11 +297,18 @@ def compat(self, token, iterable): toks_append(indents[-1]) startline = False elif toknum == FSTRING_MIDDLE: - if '{' in tokval or '}' in tokval: - tokval = re.sub('{', '{{', tokval) - tokval = re.sub('}', '}}', tokval) + tokval = self.escape_brackets(tokval) + + # Insert a space between two consecutive brackets if we are in an f-string + if tokval in {"{", "}"} and self.tokens and self.tokens[-1] == tokval and in_fstring: + tokval = ' ' + tokval + + # Insert a space between two consecutive f-strings + if toknum in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): + self.tokens.append(" ") toks_append(tokval) + self.prev_type = toknum def untokenize(iterable): diff --git a/PythonLib/min/typing.py b/PythonLib/min/typing.py index ffe7ce8d..b58c2d30 100644 --- a/PythonLib/min/typing.py +++ b/PythonLib/min/typing.py @@ -314,19 +314,33 @@ def _unpack_args(args): newargs.append(arg) return newargs -def _deduplicate(params): +def _deduplicate(params, *, unhashable_fallback=False): # Weed out strict duplicates, preserving the first of each occurrence. 
- all_params = set(params) - if len(all_params) < len(params): - new_params = [] - for t in params: - if t in all_params: - new_params.append(t) - all_params.remove(t) - params = new_params - assert not all_params, all_params - return params - + try: + return dict.fromkeys(params) + except TypeError: + if not unhashable_fallback: + raise + # Happens for cases like `Annotated[dict, {'x': IntValidator()}]` + return _deduplicate_unhashable(params) + +def _deduplicate_unhashable(unhashable_params): + new_unhashable = [] + for t in unhashable_params: + if t not in new_unhashable: + new_unhashable.append(t) + return new_unhashable + +def _compare_args_orderless(first_args, second_args): + first_unhashable = _deduplicate_unhashable(first_args) + second_unhashable = _deduplicate_unhashable(second_args) + t = list(second_unhashable) + try: + for elem in first_unhashable: + t.remove(elem) + except ValueError: + return False + return not t def _remove_dups_flatten(parameters): """Internal helper for Union creation and substitution. @@ -341,7 +355,7 @@ def _remove_dups_flatten(parameters): else: params.append(p) - return tuple(_deduplicate(params)) + return tuple(_deduplicate(params, unhashable_fallback=True)) def _flatten_literal_params(parameters): @@ -530,7 +544,7 @@ class Any(metaclass=_AnyMeta): def __new__(cls, *args, **kwargs): if cls is Any: raise TypeError("Any cannot be instantiated") - return super().__new__(cls, *args, **kwargs) + return super().__new__(cls) @_SpecialForm @@ -832,22 +846,25 @@ def TypeGuard(self, parameters): 2. If the return value is ``True``, the type of its argument is the type inside ``TypeGuard``. - For example:: + For example:: + + def is_str_list(val: list[object]) -> TypeGuard[list[str]]: + '''Determines whether all objects in the list are strings''' + return all(isinstance(x, str) for x in val) - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... + def func1(val: list[object]): + if is_str_list(val): + # Type of ``val`` is narrowed to ``list[str]``. + print(" ".join(val)) + else: + # Type of ``val`` remains as ``list[object]``. + print("Not a list of strings!") Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower form of ``TypeA`` (it can even be a wider form) and this may lead to type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of + narrowing ``list[object]`` to ``list[str]`` even though the latter is not + a subtype of the former, since ``list`` is invariant. The responsibility of writing type-safe type guards is left to the user. ``TypeGuard`` also works with type variables. For more information, see @@ -872,7 +889,7 @@ def __init__(self, arg, is_argument=True, module=None, *, is_class=False): # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`. # Unfortunately, this isn't a valid expression on its own, so we # do the unpacking manually. - if arg[0] == '*': + if arg.startswith('*'): arg_to_compile = f'({arg},)[0]' # E.g. 
(*Ts,)[0] or (*tuple[int, int],)[0] else: arg_to_compile = arg @@ -1140,7 +1157,9 @@ def __call__(self, *args, **kwargs): result = self.__origin__(*args, **kwargs) try: result.__orig_class__ = self - except AttributeError: + # Some objects raise TypeError (or something even more exotic) + # if you try to set attributes on them; we guard against that here + except Exception: pass return result @@ -1546,7 +1565,10 @@ def copy_with(self, params): def __eq__(self, other): if not isinstance(other, (_UnionGenericAlias, types.UnionType)): return NotImplemented - return set(self.__args__) == set(other.__args__) + try: # fast path + return set(self.__args__) == set(other.__args__) + except TypeError: # not hashable, slow path + return _compare_args_orderless(self.__args__, other.__args__) def __hash__(self): return hash(frozenset(self.__args__)) @@ -3254,11 +3276,11 @@ def __enter__(self) -> 'TextIO': class _DeprecatedType(type): def __getattribute__(cls, name): - if name not in ("__dict__", "__module__") and name in cls.__dict__: + if name not in {"__dict__", "__module__", "__doc__"} and name in cls.__dict__: warnings.warn( f"{cls.__name__} is deprecated, import directly " f"from typing instead. {cls.__name__} will be removed " - "in Python 3.12.", + "in Python 3.13.", DeprecationWarning, stacklevel=2, ) diff --git a/PythonLib/tcl/tkinter/__init__.py b/PythonLib/tcl/tkinter/__init__.py index 8b8fdfe3..43b0cbec 100644 --- a/PythonLib/tcl/tkinter/__init__.py +++ b/PythonLib/tcl/tkinter/__init__.py @@ -3074,11 +3074,16 @@ def __init__(self, master=None, cnf={}, **kw): Widget.__init__(self, master, 'checkbutton', cnf, kw) def _setup(self, master, cnf): + # Because Checkbutton defaults to a variable with the same name as + # the widget, Checkbutton default names must be globally unique, + # not just unique within the parent widget. if not cnf.get('name'): global _checkbutton_count name = self.__class__.__name__.lower() _checkbutton_count += 1 - cnf['name'] = f'!{name}{_checkbutton_count}' + # To avoid collisions with ttk.Checkbutton, use the different + # name template. 
+ cnf['name'] = f'!{name}-{_checkbutton_count}' super()._setup(master, cnf) def deselect(self): diff --git a/PythonLib/tcl_dll/_tkinter.pyd b/PythonLib/tcl_dll/_tkinter.pyd index b1b08428..e06a128a 100644 Binary files a/PythonLib/tcl_dll/_tkinter.pyd and b/PythonLib/tcl_dll/_tkinter.pyd differ diff --git a/PythonLib/tcl_dll/tcl86t.dll b/PythonLib/tcl_dll/tcl86t.dll index f0657a14..d3d64c92 100644 Binary files a/PythonLib/tcl_dll/tcl86t.dll and b/PythonLib/tcl_dll/tcl86t.dll differ diff --git a/PythonLib/tcl_dll/tk86t.dll b/PythonLib/tcl_dll/tk86t.dll index 3807aa7d..0e68552e 100644 Binary files a/PythonLib/tcl_dll/tk86t.dll and b/PythonLib/tcl_dll/tk86t.dll differ diff --git a/PythonLib/tcl_dll_x64/_tkinter.pyd b/PythonLib/tcl_dll_x64/_tkinter.pyd index a7b15371..64d70773 100644 Binary files a/PythonLib/tcl_dll_x64/_tkinter.pyd and b/PythonLib/tcl_dll_x64/_tkinter.pyd differ diff --git a/PythonLib/tcl_dll_x64/tcl86t.dll b/PythonLib/tcl_dll_x64/tcl86t.dll index b48c1178..ff52bea5 100644 Binary files a/PythonLib/tcl_dll_x64/tcl86t.dll and b/PythonLib/tcl_dll_x64/tcl86t.dll differ diff --git a/PythonLib/tcl_dll_x64/tk86t.dll b/PythonLib/tcl_dll_x64/tk86t.dll index 66b41a45..074f5697 100644 Binary files a/PythonLib/tcl_dll_x64/tk86t.dll and b/PythonLib/tcl_dll_x64/tk86t.dll differ diff --git a/PythonScript/project/PythonSettings.props b/PythonScript/project/PythonSettings.props index 537a6ce2..a6ab9a1e 100644 --- a/PythonScript/project/PythonSettings.props +++ b/PythonScript/project/PythonSettings.props @@ -4,8 +4,8 @@ - $(SolutionDir)packages\pythonx86.3.12.2\tools - $(SolutionDir)packages\python.3.12.2\tools + $(SolutionDir)packages\pythonx86.3.12.3\tools + $(SolutionDir)packages\python.3.12.3\tools $(PythonBase)\libs $(PythonBaseX64)\libs C:\Program Files (x86)\HTML Help Workshop diff --git a/PythonScript/project/packages.config b/PythonScript/project/packages.config index 2d84c1fd..f4e3c281 100644 --- a/PythonScript/project/packages.config +++ b/PythonScript/project/packages.config @@ -2,6 +2,6 @@ - - + + \ No newline at end of file diff --git a/appveyor.yml b/appveyor.yml index f7c815ca..a97f1a96 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -44,8 +44,8 @@ after_build: - cd "%APPVEYOR_BUILD_FOLDER%"\installer - set WIX_PATH="C:\Program Files (x86)\WiX Toolset v3.11\bin" - set PATH=%WIX_PATH%;%PATH% - - if "%platform_input%"=="x64" SET PYTHONBUILDDIR_X64="%APPVEYOR_BUILD_FOLDER%"\packages\python.3.12.2\tools - - if "%platform_input%"=="Win32" SET PYTHONBUILDDIR="%APPVEYOR_BUILD_FOLDER%"\packages\pythonx86.3.12.2\tools + - if "%platform_input%"=="x64" SET PYTHONBUILDDIR_X64="%APPVEYOR_BUILD_FOLDER%"\packages\python.3.12.3\tools + - if "%platform_input%"=="Win32" SET PYTHONBUILDDIR="%APPVEYOR_BUILD_FOLDER%"\packages\pythonx86.3.12.3\tools - copy "%APPVEYOR_BUILD_FOLDER%"\installer\buildPaths.bat.orig "%APPVEYOR_BUILD_FOLDER%"\installer\buildPaths.bat - if "%configuration%"=="Release" buildAll.bat %platform_input% - cd "%APPVEYOR_BUILD_FOLDER%"
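Aside: the typing.py hunk above swaps the set-based duplicate removal in _deduplicate for an order-preserving dict.fromkeys pass, with an equality-based fallback for unhashable parameters (such as Annotated metadata holding a dict). The standalone sketch below mirrors that strategy for illustration only; the helper name and the sample inputs are assumptions, not code from this patch.

# Minimal sketch of the order-preserving deduplication strategy used by the
# typing._deduplicate change above; illustrative only, not the patched code.
def dedupe_preserving_order(params, *, unhashable_fallback=False):
    try:
        # Fast path: dict.fromkeys keeps the first occurrence of each hashable item,
        # in insertion order.
        return list(dict.fromkeys(params))
    except TypeError:
        if not unhashable_fallback:
            raise
        # Slow path: equality-based scan, no hashing required
        # (covers cases like Annotated[dict, {"x": 1}] metadata).
        kept = []
        for param in params:
            if param not in kept:
                kept.append(param)
        return kept

print(dedupe_preserving_order([int, str, int]))            # [<class 'int'>, <class 'str'>]
print(dedupe_preserving_order([{"x": 1}, {"x": 1}, {"y": 2}],
                              unhashable_fallback=True))   # [{'x': 1}, {'y': 2}]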