Skip to content

Commit

Permalink
type hint: add type hint for checksum and xpak
Browse files Browse the repository at this point in the history
  • Loading branch information
ppphp committed May 10, 2024
1 parent 7c9bb2c commit dc12bd0
Show file tree
Hide file tree
Showing 3 changed files with 63 additions and 47 deletions.
6 changes: 4 additions & 2 deletions lib/portage/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,13 +226,15 @@ def _decode_argv(argv):
return [_unicode_decode(x.encode(fs_encoding, "surrogateescape")) for x in argv]


def _unicode_encode(
    s, encoding=_encodings["content"], errors="backslashreplace"
) -> bytes:
    """Encode *s* to bytes when it is a str; bytes input passes through unchanged."""
    if isinstance(s, str):
        return s.encode(encoding, errors)
    return s


def _unicode_decode(s, encoding=_encodings["content"], errors="replace") -> str:
    """Decode *s* to str when it is bytes; str input passes through unchanged."""
    return str(s, encoding=encoding, errors=errors) if isinstance(s, bytes) else s
Expand Down
51 changes: 31 additions & 20 deletions lib/portage/checksum.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import stat
import subprocess
import tempfile

from typing import Any
from portage import _encodings, _unicode_decode, _unicode_encode
from portage import os
from portage.const import HASHING_BLOCKSIZE, PRELINK_BINARY
Expand All @@ -34,11 +34,11 @@


# Dict of all available hash function objects, keyed by hash name.
hashfunc_map: dict[str, Any] = {}
# Origin of each hash implementation (e.g. "hashlib"), keyed by hash name.
# Annotated dict[str, str] for consistency with hashfunc_map above; values
# are strings (origins default to "unknown").
hashorigin_map: dict[str, str] = {}


def _open_file(filename):
def _open_file(filename: str | bytes):
try:
return open(
_unicode_encode(filename, encoding=_encodings["fs"], errors="strict"), "rb"
Expand All @@ -58,7 +58,7 @@ def _open_file(filename):
class _generate_hash_function:
__slots__ = ("_hashobject",)

def __init__(self, hashtype, hashobject, origin="unknown"):
def __init__(self, hashtype, hashobject, origin: str = "unknown"):
self._hashobject = hashobject
hashfunc_map[hashtype] = self
hashorigin_map[hashtype] = origin
Expand All @@ -75,7 +75,7 @@ def checksum_str(self, data):
checksum.update(data)
return checksum.hexdigest()

def checksum_file(self, filename):
def checksum_file(self, filename: str) -> tuple[Any, int]:
"""
Run a checksum against a file.
Expand Down Expand Up @@ -177,15 +177,15 @@ def checksum_file(self, filename):

# There is only one implementation for size
class SizeHash:
    """Trivial "hash" whose digest is simply the file size."""

    def checksum_file(self, filename: str) -> tuple[int, int]:
        """Return (size, size), mirroring the (digest, size) checksum tuple."""
        file_size = os.stat(filename).st_size
        return (file_size, file_size)


hashfunc_map["size"] = SizeHash()

# Cache all supported hash names in a frozenset. Annotated frozenset[str],
# not set[str]: frozenset is not a subclass of set, so the previous
# annotation did not match the assigned value.
hashfunc_keys: frozenset[str] = frozenset(hashfunc_map)

# end actual hash functions

Expand All @@ -202,7 +202,7 @@ def checksum_file(self, filename):
del cmd, proc, status


def is_prelinkable_elf(filename):
def is_prelinkable_elf(filename: bytes) -> bool:
with _open_file(filename) as f:
magic = f.read(17)
return (
Expand All @@ -212,26 +212,26 @@ def is_prelinkable_elf(filename):
) # 2=ET_EXEC, 3=ET_DYN


def perform_md5(x: str, calc_prelink: int = 0) -> bytes:
    """Return just the MD5 digest of file *x*.

    perform_checksum returns a (digest, size) tuple; the [0] index keeps only
    the digest, so the return annotation is the element type, not the tuple
    (the previous tuple[bytes, int] annotation was wrong).
    NOTE(review): bytes matches perform_checksum's declared tuple[bytes, int];
    hashlib hexdigest values are actually str — confirm against checksum_file.
    """
    return perform_checksum(x, "MD5", calc_prelink)[0]


def _perform_md5_merge(x: str, **kwargs) -> bytes:
    """MD5 of file *x*, encoding the path with the 'merge' encoding first."""
    merge_path = _unicode_encode(x, encoding=_encodings["merge"], errors="strict")
    return perform_md5(merge_path, **kwargs)


def perform_all(x: str, calc_prelink: int = 0) -> dict[str, bytes]:
    """Compute every supported checksum of file *x*, keyed by hash name."""
    return {
        hash_name: perform_checksum(x, hash_name, calc_prelink)[0]
        for hash_name in hashfunc_keys
    }


def get_valid_checksum_keys() -> frozenset[str]:
    """Return the frozenset of supported hash names.

    Annotated frozenset[str] rather than set[str]: hashfunc_keys is built
    with frozenset(), and frozenset is not a subclass of set.
    """
    return hashfunc_keys


def get_hash_origin(hashtype: str) -> str:
    """Return the recorded origin of *hashtype*'s implementation.

    Falls back to "unknown" when no origin was recorded; origins are always
    strings, hence the added -> str annotation (missing in this otherwise
    fully annotated module).

    Raises:
        KeyError: if *hashtype* is not a supported hash.
    """
    if hashtype not in hashfunc_keys:
        raise KeyError(hashtype)
    return hashorigin_map.get(hashtype, "unknown")
Expand Down Expand Up @@ -266,15 +266,15 @@ class _hash_filter:
"_tokens",
)

def __init__(self, filter_str):
def __init__(self, filter_str: str):
tokens = filter_str.upper().split()
if not tokens or tokens[-1] == "*":
del tokens[:]
self.transparent = not tokens
tokens.reverse()
self._tokens = tuple(tokens)

def __call__(self, hash_name):
def __call__(self, hash_name: str) -> bool:
if self.transparent:
return True
matches = ("*", hash_name)
Expand All @@ -286,7 +286,9 @@ def __call__(self, hash_name):
return False


def _apply_hash_filter(digests, hash_filter):
def _apply_hash_filter(
digests: dict[str, str], hash_filter: callable[[str], bool]
) -> dict[str, str]:
"""
Return a new dict containing the filtered digests, or the same
dict if no changes are necessary. This will always preserve at
Expand Down Expand Up @@ -321,7 +323,12 @@ def _apply_hash_filter(digests, hash_filter):
return digests


def verify_all(filename, mydict, calc_prelink=0, strict=0):
def verify_all(
filename: str | bytes,
mydict: dict[str, any],
calc_prelink: int = 0,
strict: int = 0,
):
"""
Verify all checksums against a file.
Expand Down Expand Up @@ -388,7 +395,9 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
return file_is_ok, reason


def perform_checksum(filename, hashname="MD5", calc_prelink=0):
def perform_checksum(
filename: str, hashname: str = "MD5", calc_prelink: int = 0
) -> tuple[bytes, int]:
"""
Run a specific checksum against a file. The filename can
be either unicode or an encoded byte string. If filename
Expand Down Expand Up @@ -450,7 +459,9 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0):
del e


def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
def perform_multiple_checksums(
filename: str, hashes: list[str] = ["MD5"], calc_prelink: int = 0
) -> dict[str, bytes]:
"""
Run a group of checksums against a file.
Expand All @@ -475,7 +486,7 @@ def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
return rVal


def checksum_str(data, hashname="MD5"):
def checksum_str(data: bytes, hashname: str = "MD5"):
"""
Run a specific checksum against a byte string.
Expand Down
Loading

0 comments on commit dc12bd0

Please sign in to comment.