test: use built-in collection types for type hints (Python 3.9 / PEP 585)

Since Python 3.9, type hinting has become a little less awkward, as for
collection types one doesn't need to import the corresponding
capitalized types (`Dict`, `List`, `Set`, `Tuple`, ...) anymore, but can
use the built-in types directly. [1] [2]
This commit applies the replacement for all Python scripts (i.e. in the
contrib and test folders) for the basic types:
    - typing.Dict  -> dict
    - typing.List  -> list
    - typing.Set   -> set
    - typing.Tuple -> tuple

[1] https://docs.python.org/3.9/whatsnew/3.9.html#type-hinting-generics-in-standard-collections
[2] https://peps.python.org/pep-0585/#implementation for a list of type aliases which can be replaced
Sebastian Falbesoner 2023-10-25 00:55:17 +02:00
parent d53400e75e
commit d516cf83ed
18 changed files with 76 additions and 88 deletions
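
For illustration, here is a minimal sketch (a hypothetical helper, not taken from any of the patched files) showing the same annotation written both ways; since Python 3.9 (PEP 585) the built-in containers are generic themselves, so the capitalized typing aliases are only needed on older interpreters:

    # Illustrative example only, not part of this commit.
    # Pre-3.9 style: capitalized aliases imported from the typing module.
    from typing import Dict, List

    def tally_old(words: List[str]) -> Dict[str, int]:
        counts: Dict[str, int] = {}
        for word in words:
            counts[word] = counts.get(word, 0) + 1
        return counts

    # Python 3.9+ style: built-in types used directly, no typing import needed.
    def tally_new(words: list[str]) -> dict[str, int]:
        counts: dict[str, int] = {}
        for word in words:
            counts[word] = counts.get(word, 0) + 1
        return counts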


@@ -5,7 +5,6 @@
 import sys
 import re
-from typing import Dict, List, Set
 MAPPING = {
     'core_read.cpp': 'core_io.cpp',
@@ -33,7 +32,7 @@ def module_name(path):
     return None
 files = dict()
-deps: Dict[str, Set[str]] = dict()
+deps: dict[str, set[str]] = dict()
 RE = re.compile("^#include <(.*)>")
@@ -65,7 +64,7 @@ while True:
     shortest_cycle = None
     for module in sorted(deps.keys()):
         # Build the transitive closure of dependencies of module
-        closure: Dict[str, List[str]] = dict()
+        closure: dict[str, list[str]] = dict()
         for dep in deps[module]:
             closure[dep] = []
         while True:


@@ -8,7 +8,6 @@ Exit status will be 0 if successful, and the program will be silent.
 Otherwise the exit status will be 1 and it will log which executables failed which checks.
 '''
 import sys
-from typing import List
 import lief
@@ -255,7 +254,7 @@ if __name__ == '__main__':
             retval = 1
             continue
-        failed: List[str] = []
+        failed: list[str] = []
         for (name, func) in CHECKS[etype][arch]:
             if not func(binary):
                 failed.append(name)


@@ -11,7 +11,6 @@ Example usage:
 find ../path/to/binaries -type f -executable | xargs python3 contrib/devtools/symbol-check.py
 '''
 import sys
-from typing import List, Dict
 import lief
@@ -53,7 +52,7 @@ IGNORE_EXPORTS = {
 # Expected linker-loader names can be found here:
 # https://sourceware.org/glibc/wiki/ABIList?action=recall&rev=16
-ELF_INTERPRETER_NAMES: Dict[lief.ELF.ARCH, Dict[lief.ENDIANNESS, str]] = {
+ELF_INTERPRETER_NAMES: dict[lief.ELF.ARCH, dict[lief.ENDIANNESS, str]] = {
     lief.ELF.ARCH.x86_64: {
         lief.ENDIANNESS.LITTLE: "/lib64/ld-linux-x86-64.so.2",
     },
@@ -72,7 +71,7 @@ ELF_INTERPRETER_NAMES: Dict[lief.ELF.ARCH, Dict[lief.ENDIANNESS, str]] = {
     },
 }
-ELF_ABIS: Dict[lief.ELF.ARCH, Dict[lief.ENDIANNESS, List[int]]] = {
+ELF_ABIS: dict[lief.ELF.ARCH, dict[lief.ENDIANNESS, list[int]]] = {
     lief.ELF.ARCH.x86_64: {
         lief.ENDIANNESS.LITTLE: [3,2,0],
     },
@@ -302,7 +301,7 @@ if __name__ == '__main__':
             retval = 1
             continue
-        failed: List[str] = []
+        failed: list[str] = []
         for (name, func) in CHECKS[etype]:
             if not func(binary):
                 failed.append(name)


@@ -8,7 +8,6 @@ Test script for security-check.py
 import lief
 import os
 import subprocess
-from typing import List
 import unittest
 from utils import determine_wellknown_cmd
@@ -34,7 +33,7 @@ def call_security_check(cc: str, source: str, executable: str, options) -> tuple
     #
     # See the definitions for ac_link in autoconf's lib/autoconf/c.m4 file for
     # reference.
-    env_flags: List[str] = []
+    env_flags: list[str] = []
     for var in ['CFLAGS', 'CPPFLAGS', 'LDFLAGS']:
         env_flags += filter(None, os.environ.get(var, '').split(' '))


@@ -7,18 +7,17 @@ Test script for symbol-check.py
 '''
 import os
 import subprocess
-from typing import List
 import unittest
 from utils import determine_wellknown_cmd
-def call_symbol_check(cc: List[str], source, executable, options):
+def call_symbol_check(cc: list[str], source, executable, options):
     # This should behave the same as AC_TRY_LINK, so arrange well-known flags
     # in the same order as autoconf would.
     #
     # See the definitions for ac_link in autoconf's lib/autoconf/c.m4 file for
     # reference.
-    env_flags: List[str] = []
+    env_flags: list[str] = []
     for var in ['CFLAGS', 'CPPFLAGS', 'LDFLAGS']:
         env_flags += filter(None, os.environ.get(var, '').split(' '))
@@ -28,7 +27,7 @@ def call_symbol_check(cc: List[str], source, executable, options):
     os.remove(executable)
     return (p.returncode, p.stdout.rstrip())
-def get_machine(cc: List[str]):
+def get_machine(cc: list[str]):
     p = subprocess.run([*cc,'-dumpmachine'], stdout=subprocess.PIPE, text=True)
     return p.stdout.rstrip()


@@ -8,10 +8,9 @@ Common utility functions
 import shutil
 import sys
 import os
-from typing import List
-def determine_wellknown_cmd(envvar, progname) -> List[str]:
+def determine_wellknown_cmd(envvar, progname) -> list[str]:
     maybe_env = os.getenv(envvar)
     maybe_which = shutil.which(progname)
     if maybe_env:


@@ -20,7 +20,7 @@ import sys, re, os, platform, shutil, stat, subprocess, os.path
 from argparse import ArgumentParser
 from pathlib import Path
 from subprocess import PIPE, run
-from typing import List, Optional
+from typing import Optional
 # This is ported from the original macdeployqt with modifications
@@ -181,7 +181,7 @@ class DeploymentInfo(object):
                 return True
         return False
-def getFrameworks(binaryPath: str, verbose: int) -> List[FrameworkInfo]:
+def getFrameworks(binaryPath: str, verbose: int) -> list[FrameworkInfo]:
     if verbose:
         print(f"Inspecting with otool: {binaryPath}")
     otoolbin=os.getenv("OTOOL", "otool")
@@ -285,7 +285,7 @@ def copyFramework(framework: FrameworkInfo, path: str, verbose: int) -> Optional
     return toPath
-def deployFrameworks(frameworks: List[FrameworkInfo], bundlePath: str, binaryPath: str, strip: bool, verbose: int, deploymentInfo: Optional[DeploymentInfo] = None) -> DeploymentInfo:
+def deployFrameworks(frameworks: list[FrameworkInfo], bundlePath: str, binaryPath: str, strip: bool, verbose: int, deploymentInfo: Optional[DeploymentInfo] = None) -> DeploymentInfo:
     if deploymentInfo is None:
         deploymentInfo = DeploymentInfo()


@@ -11,7 +11,7 @@ import sys
 from io import BytesIO
 import json
 from pathlib import Path
-from typing import Any, List, Optional
+from typing import Any, Optional
 sys.path.append(os.path.join(os.path.dirname(__file__), '../../test/functional'))
@@ -92,7 +92,7 @@ def to_jsonable(obj: Any) -> Any:
     return obj
-def process_file(path: str, messages: List[Any], recv: bool, progress_bar: Optional[ProgressBar]) -> None:
+def process_file(path: str, messages: list[Any], recv: bool, progress_bar: Optional[ProgressBar]) -> None:
     with open(path, 'rb') as f_in:
         if progress_bar:
             bytes_read = 0
@@ -188,7 +188,7 @@ def main():
     output = Path.cwd() / Path(args.output) if args.output else False
     use_progress_bar = (not args.no_progress_bar) and sys.stdout.isatty()
-    messages = []  # type: List[Any]
+    messages = []  # type: list[Any]
     if use_progress_bar:
         total_size = sum(capture.stat().st_size for capture in capturepaths)
         progress_bar = ProgressBar(total_size)


@@ -12,9 +12,9 @@ import random
 import unittest
 from enum import Enum
 from functools import total_ordering
-from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union, overload
+from typing import Callable, Iterable, Optional, Union, overload
-def net_to_prefix(net: Union[ipaddress.IPv4Network,ipaddress.IPv6Network]) -> List[bool]:
+def net_to_prefix(net: Union[ipaddress.IPv4Network,ipaddress.IPv6Network]) -> list[bool]:
     """
     Convert an IPv4 or IPv6 network to a prefix represented as a list of bits.
@@ -32,7 +32,7 @@ def net_to_prefix(net: Union[ipaddress.IPv4Network,ipaddress.IPv6Network]) -> Li
     assert (netrange & ((1 << (128 - num_bits)) - 1)) == 0
     return [((netrange >> (127 - i)) & 1) != 0 for i in range(num_bits)]
-def prefix_to_net(prefix: List[bool]) -> Union[ipaddress.IPv4Network,ipaddress.IPv6Network]:
+def prefix_to_net(prefix: list[bool]) -> Union[ipaddress.IPv4Network,ipaddress.IPv6Network]:
     """The reverse operation of net_to_prefix."""
     # Convert to number
     netrange = sum(b << (127 - i) for i, b in enumerate(prefix))
@@ -47,10 +47,10 @@ def prefix_to_net(prefix: List[bool]) -> Union[ipaddress.IPv4Network,ipaddress.I
     return ipaddress.IPv6Network((netrange, num_bits), True)
 # Shortcut for (prefix, ASN) entries.
-ASNEntry = Tuple[List[bool], int]
+ASNEntry = tuple[list[bool], int]
 # Shortcut for (prefix, old ASN, new ASN) entries.
-ASNDiff = Tuple[List[bool], int, int]
+ASNDiff = tuple[list[bool], int, int]
 class _VarLenCoder:
     """
@@ -75,7 +75,7 @@ class _VarLenCoder:
     other classes start one past the last element of the class before it.
     """
-    def __init__(self, minval: int, clsbits: List[int]):
+    def __init__(self, minval: int, clsbits: list[int]):
         """Construct a new _VarLenCoder."""
         self._minval = minval
         self._clsbits = clsbits
@@ -85,7 +85,7 @@ class _VarLenCoder:
         """Check whether value val is in the range this coder supports."""
         return self._minval <= val <= self._maxval
-    def encode(self, val: int, ret: List[int]) -> None:
+    def encode(self, val: int, ret: list[int]) -> None:
         """Append encoding of val onto integer list ret."""
         assert self._minval <= val <= self._maxval
@@ -120,7 +120,7 @@ class _VarLenCoder:
                 break
         return ret + bits
-    def decode(self, stream, bitpos) -> Tuple[int,int]:
+    def decode(self, stream, bitpos) -> tuple[int,int]:
         """Decode a number starting at bitpos in stream, returning value and new bitpos."""
         val = self._minval
         bits = 0
@@ -281,11 +281,11 @@ class ASMap:
     - mappings, represented by new trie nodes.
     """
-    def update(self, prefix: List[bool], asn: int) -> None:
+    def update(self, prefix: list[bool], asn: int) -> None:
         """Update this ASMap object to map prefix to the specified asn."""
         assert asn == 0 or _CODER_ASN.can_encode(asn)
-        def recurse(node: List, offset: int) -> None:
+        def recurse(node: list, offset: int) -> None:
             if offset == len(prefix):
                 # Reached the end of prefix; overwrite this node.
                 node.clear()
@@ -306,7 +306,7 @@ class ASMap:
                 node.append(oldasn)
         recurse(self._trie, 0)
-    def update_multi(self, entries: List[Tuple[List[bool], int]]) -> None:
+    def update_multi(self, entries: list[tuple[list[bool], int]]) -> None:
         """Apply multiple update operations, where longer prefixes take precedence."""
         entries.sort(key=lambda entry: len(entry[0]))
         for prefix, asn in entries:
@@ -314,7 +314,7 @@ class ASMap:
     def _set_trie(self, trie) -> None:
         """Set trie directly. Internal use only."""
-        def recurse(node: List) -> None:
+        def recurse(node: list) -> None:
             if len(node) < 2:
                 return
             recurse(node[0])
@@ -342,7 +342,7 @@ class ASMap:
             for prefix, asn in sorted(entries, key=entry_key):
                 self.update(prefix, asn)
-    def lookup(self, prefix: List[bool]) -> Optional[int]:
+    def lookup(self, prefix: list[bool]) -> Optional[int]:
         """Look up a prefix. Returns ASN, or 0 if unassigned, or None if indeterminate."""
         node = self._trie
         for bit in prefix:
@@ -353,11 +353,11 @@ class ASMap:
             return node[0]
         return None
-    def _to_entries_flat(self, fill: bool = False) -> List[ASNEntry]:
+    def _to_entries_flat(self, fill: bool = False) -> list[ASNEntry]:
         """Convert an ASMap object to a list of non-overlapping (prefix, asn) objects."""
-        prefix : List[bool] = []
+        prefix : list[bool] = []
-        def recurse(node: List) -> List[ASNEntry]:
+        def recurse(node: list) -> list[ASNEntry]:
             ret = []
             if len(node) == 1:
                 if node[0] > 0:
@@ -375,24 +375,24 @@ class ASMap:
             return ret
         return recurse(self._trie)
-    def _to_entries_minimal(self, fill: bool = False) -> List[ASNEntry]:
+    def _to_entries_minimal(self, fill: bool = False) -> list[ASNEntry]:
         """Convert a trie to a minimal list of ASNEntry objects, exploiting overlap."""
-        prefix : List[bool] = []
+        prefix : list[bool] = []
-        def recurse(node: List) -> (Tuple[Dict[Optional[int], List[ASNEntry]], bool]):
+        def recurse(node: list) -> (tuple[dict[Optional[int], list[ASNEntry]], bool]):
            if len(node) == 1 and node[0] == 0:
                return {None if fill else 0: []}, True
            if len(node) == 1:
                return {node[0]: [], None: [(list(prefix), node[0])]}, False
-           ret: Dict[Optional[int], List[ASNEntry]] = {}
+           ret: dict[Optional[int], list[ASNEntry]] = {}
            prefix.append(False)
            left, lhole = recurse(node[0])
            prefix[-1] = True
            right, rhole = recurse(node[1])
            prefix.pop()
            hole = not fill and (lhole or rhole)
-           def candidate(ctx: Optional[int], res0: Optional[List[ASNEntry]],
-                         res1: Optional[List[ASNEntry]]):
+           def candidate(ctx: Optional[int], res0: Optional[list[ASNEntry]],
+                         res1: Optional[list[ASNEntry]]):
                if res0 is not None and res1 is not None:
                    if ctx not in ret or len(res0) + len(res1) < len(ret[ctx]):
                        ret[ctx] = res0 + res1
@@ -417,7 +417,7 @@ class ASMap:
         """Convert this ASMap object to a string containing Python code constructing it."""
         return f"ASMap({self._trie})"
-    def to_entries(self, overlapping: bool = True, fill: bool = False) -> List[ASNEntry]:
+    def to_entries(self, overlapping: bool = True, fill: bool = False) -> list[ASNEntry]:
         """
         Convert the mappings in this ASMap object to a list of ASNEntry objects.
@@ -448,7 +448,7 @@ class ASMap:
         assert max_asn >= 1 or unassigned_prob == 1
         assert _CODER_ASN.can_encode(max_asn)
         assert 0.0 <= unassigned_prob <= 1.0
-        trie: List = []
+        trie: list = []
         leaves = [trie]
         ret = ASMap()
         for i in range(1, num_leaves):
@@ -472,12 +472,12 @@ class ASMap:
     def _to_binnode(self, fill: bool = False) -> _BinNode:
         """Convert a trie to a _BinNode object."""
-        def recurse(node: List) -> Tuple[Dict[Optional[int], _BinNode], bool]:
+        def recurse(node: list) -> tuple[dict[Optional[int], _BinNode], bool]:
            if len(node) == 1 and node[0] == 0:
                return {(None if fill else 0): _BinNode.make_end()}, True
            if len(node) == 1:
                return {None: _BinNode.make_leaf(node[0]), node[0]: _BinNode.make_end()}, False
-           ret: Dict[Optional[int], _BinNode] = {}
+           ret: dict[Optional[int], _BinNode] = {}
            left, lhole = recurse(node[0])
            right, rhole = recurse(node[1])
            hole = (lhole or rhole) and not fill
@@ -507,7 +507,7 @@ class ASMap:
     @staticmethod
     def _from_binnode(binnode: _BinNode) -> "ASMap":
         """Construct an ASMap object from a _BinNode. Internal use only."""
-        def recurse(node: _BinNode, default: int) -> List:
+        def recurse(node: _BinNode, default: int) -> list:
             if node.ins == _Instruction.RETURN:
                 return [node.arg1]
             if node.ins == _Instruction.JUMP:
@@ -542,7 +542,7 @@ class ASMap:
         Returns:
             A bytes object with the encoding of this ASMap object.
         """
-        bits: List[int] = []
+        bits: list[int] = []
         def recurse(node: _BinNode) -> None:
             _CODER_INS.encode(node.ins.value, bits)
@@ -582,11 +582,11 @@ class ASMap:
     def from_binary(bindata: bytes) -> Optional["ASMap"]:
         """Decode an ASMap object from the provided binary encoding."""
-        bits: List[int] = []
+        bits: list[int] = []
         for byte in bindata:
             bits.extend((byte >> i) & 1 for i in range(8))
-        def recurse(bitpos: int) -> Tuple[_BinNode, int]:
+        def recurse(bitpos: int) -> tuple[_BinNode, int]:
             insval, bitpos = _CODER_INS.decode(bits, bitpos)
             ins = _Instruction(insval)
             if ins == _Instruction.RETURN:
@@ -632,7 +632,7 @@ class ASMap:
     def extends(self, req: "ASMap") -> bool:
         """Determine whether this matches req for all subranges where req is assigned."""
-        def recurse(actual: List, require: List) -> bool:
+        def recurse(actual: list, require: list) -> bool:
             if len(require) == 1 and require[0] == 0:
                 return True
             if len(require) == 1:
@@ -646,20 +646,20 @@ class ASMap:
         #pylint: disable=protected-access
         return recurse(self._trie, req._trie)
-    def diff(self, other: "ASMap") -> List[ASNDiff]:
+    def diff(self, other: "ASMap") -> list[ASNDiff]:
         """Compute the diff from self to other."""
-        prefix: List[bool] = []
-        ret: List[ASNDiff] = []
+        prefix: list[bool] = []
+        ret: list[ASNDiff] = []
-        def recurse(old_node: List, new_node: List):
+        def recurse(old_node: list, new_node: list):
             if len(old_node) == 1 and len(new_node) == 1:
                 if old_node[0] != new_node[0]:
                     ret.append((list(prefix), old_node[0], new_node[0]))
             else:
-                old_left: List = old_node if len(old_node) == 1 else old_node[0]
-                old_right: List = old_node if len(old_node) == 1 else old_node[1]
-                new_left: List = new_node if len(new_node) == 1 else new_node[0]
-                new_right: List = new_node if len(new_node) == 1 else new_node[1]
+                old_left: list = old_node if len(old_node) == 1 else old_node[0]
+                old_right: list = old_node if len(old_node) == 1 else old_node[1]
+                new_left: list = new_node if len(new_node) == 1 else new_node[0]
+                new_right: list = new_node if len(new_node) == 1 else new_node[1]
                 prefix.append(False)
                 recurse(old_left, new_left)
                 prefix[-1] = True
@@ -760,7 +760,7 @@ class TestASMap(unittest.TestCase):
         # It starts off being equal to asmap.
         patched = copy.copy(asmap)
         # Keep a list of patches performed.
-        patches: List[ASNEntry] = []
+        patches: list[ASNEntry] = []
         # Initially there cannot be any difference.
         self.assertEqual(asmap.diff(patched), [])
         # Make 5 patches, each building on top of the previous ones.


@@ -11,7 +11,7 @@ import collections
 import ipaddress
 import re
 import sys
-from typing import List, Dict, Union
+from typing import Union
 from asmap import ASMap, net_to_prefix
@@ -117,14 +117,14 @@ def parseline(line: str) -> Union[dict, None]:
         'sortkey': sortkey,
     }
-def dedup(ips: List[Dict]) -> List[Dict]:
+def dedup(ips: list[dict]) -> list[dict]:
     """ Remove duplicates from `ips` where multiple ips share address and port. """
     d = {}
     for ip in ips:
         d[ip['ip'],ip['port']] = ip
     return list(d.values())
-def filtermultiport(ips: List[Dict]) -> List[Dict]:
+def filtermultiport(ips: list[dict]) -> list[dict]:
     """ Filter out hosts with more nodes per IP"""
     hist = collections.defaultdict(list)
     for ip in ips:
@@ -132,7 +132,7 @@ def filtermultiport(ips: List[Dict]) -> List[Dict]:
     return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
 # Based on Greg Maxwell's seed_filter.py
-def filterbyasn(asmap: ASMap, ips: List[Dict], max_per_asn: Dict, max_per_net: int) -> List[Dict]:
+def filterbyasn(asmap: ASMap, ips: list[dict], max_per_asn: dict, max_per_net: int) -> list[dict]:
     """ Prunes `ips` by
     (a) trimming ips to have at most `max_per_net` ips from each net (e.g. ipv4, ipv6); and
     (b) trimming ips to have at most `max_per_asn` ips from each asn in each net.
@@ -143,8 +143,8 @@ def filterbyasn(asmap: ASMap, ips: List[Dict], max_per_asn: Dict, max_per_net: i
     # Filter IPv46 by ASN, and limit to max_per_net per network
     result = []
-    net_count: Dict[str, int] = collections.defaultdict(int)
-    asn_count: Dict[int, int] = collections.defaultdict(int)
+    net_count: dict[str, int] = collections.defaultdict(int)
+    asn_count: dict[int, int] = collections.defaultdict(int)
     for i, ip in enumerate(ips_ipv46):
         if net_count[ip['net']] == max_per_net:
@@ -165,9 +165,9 @@ def filterbyasn(asmap: ASMap, ips: List[Dict], max_per_asn: Dict, max_per_net: i
     result.extend(ips_onion[0:max_per_net])
     return result
-def ip_stats(ips: List[Dict]) -> str:
+def ip_stats(ips: list[dict]) -> str:
     """ Format and return pretty string from `ips`. """
-    hist: Dict[str, int] = collections.defaultdict(int)
+    hist: dict[str, int] = collections.defaultdict(int)
     for ip in ips:
         if ip is not None:
             hist[ip['net']] += 1


@@ -65,7 +65,7 @@ class RESTTest (BitcoinTestFramework):
             body: str = '',
             status: int = 200,
             ret_type: RetType = RetType.JSON,
-            query_params: Optional[typing.Dict[str, typing.Any]] = None,
+            query_params: Optional[dict[str, typing.Any]] = None,
     ) -> typing.Union[http.client.HTTPResponse, bytes, str, None]:
         rest_uri = '/rest' + uri
         if req_type in ReqType:


@@ -10,7 +10,6 @@ This file is modified from python-bitcoinlib.
 from collections import namedtuple
 import struct
 import unittest
-from typing import List, Dict
 from .key import TaggedHash, tweak_add_pubkey, compute_xonly_pubkey
@@ -110,8 +109,8 @@ class CScriptOp(int):
             _opcode_instances.append(super().__new__(cls, n))
             return _opcode_instances[n]
-OPCODE_NAMES: Dict[CScriptOp, str] = {}
-_opcode_instances: List[CScriptOp] = []
+OPCODE_NAMES: dict[CScriptOp, str] = {}
+_opcode_instances: list[CScriptOp] = []
 # Populate opcode instance table
 for n in range(0xff + 1):


@@ -19,7 +19,6 @@ import sys
 import tempfile
 import time
-from typing import List
 from .address import create_deterministic_address_bcrt1_p2tr_op_true
 from .authproxy import JSONRPCException
 from . import coverage
@@ -96,7 +95,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
         """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
         self.chain: str = 'regtest'
         self.setup_clean_chain: bool = False
-        self.nodes: List[TestNode] = []
+        self.nodes: list[TestNode] = []
         self.extra_args = None
         self.network_thread = None
         self.rpc_timeout = 60  # Wait for up to 60 seconds for the RPC server to respond


@@ -20,7 +20,7 @@ import time
 from . import coverage
 from .authproxy import AuthServiceProxy, JSONRPCException
-from typing import Callable, Optional, Tuple
+from typing import Callable, Optional
 logger = logging.getLogger("TestFramework.utils")
@@ -416,7 +416,7 @@ def get_datadir_path(dirname, n):
     return pathlib.Path(dirname) / f"node{n}"
-def get_temp_default_datadir(temp_dir: pathlib.Path) -> Tuple[dict, pathlib.Path]:
+def get_temp_default_datadir(temp_dir: pathlib.Path) -> tuple[dict, pathlib.Path]:
     """Return os-specific environment variables that can be set to make the
     GetDefaultDataDir() function return a datadir path under the provided
     temp_dir, as well as the complete path it would return."""


@@ -9,7 +9,6 @@ from decimal import Decimal
 from enum import Enum
 from typing import (
     Any,
-    List,
     Optional,
 )
 from test_framework.address import (
@@ -284,7 +283,7 @@ class MiniWallet:
     def create_self_transfer_multi(
         self,
         *,
-        utxos_to_spend: Optional[List[dict]] = None,
+        utxos_to_spend: Optional[list[dict]] = None,
         num_outputs=1,
         amount_per_output=0,
         locktime=0,


@@ -4,8 +4,6 @@
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Test that fast rescan using block filters for descriptor wallets detects
 top-ups correctly and finds the same transactions than the slow variant."""
-from typing import List
 from test_framework.address import address_to_scriptpubkey
 from test_framework.descriptors import descsum_create
 from test_framework.test_framework import BitcoinTestFramework
@@ -32,7 +30,7 @@ class WalletFastRescanTest(BitcoinTestFramework):
         self.skip_if_no_wallet()
         self.skip_if_no_sqlite()
-    def get_wallet_txids(self, node: TestNode, wallet_name: str) -> List[str]:
+    def get_wallet_txids(self, node: TestNode, wallet_name: str) -> list[str]:
         w = node.get_wallet_rpc(wallet_name)
         txs = w.listtransactions('*', 1000000)
         return [tx['txid'] for tx in txs]


@@ -11,7 +11,7 @@ import os
 import re
 import sys
 from subprocess import check_output
-from typing import Dict, Optional, NoReturn
+from typing import Optional, NoReturn
 CMD_TOP_LEVEL = ["git", "rev-parse", "--show-toplevel"]
 CMD_ALL_FILES = ["git", "ls-files", "-z", "--full-name", "--stage"]
@@ -69,7 +69,7 @@ class FileMeta(object):
         return None
-def get_git_file_metadata() -> Dict[str, FileMeta]:
+def get_git_file_metadata() -> dict[str, FileMeta]:
     '''
     Return a dictionary mapping the name of all files in the repository to git tree metadata.
     '''


@@ -11,7 +11,6 @@ Check include guards.
 import re
 import sys
 from subprocess import check_output
-from typing import List
 HEADER_ID_PREFIX = 'BITCOIN_'
@@ -28,7 +27,7 @@ EXCLUDE_FILES_WITH_PREFIX = ['contrib/devtools/bitcoin-tidy',
                              'src/test/fuzz/FuzzedDataProvider.h']
-def _get_header_file_lst() -> List[str]:
+def _get_header_file_lst() -> list[str]:
     """ Helper function to get a list of header filepaths to be
     checked for include guards.
     """