perf: Use __slots__ for data classes on Python 3.10+
Signed-off-by: Dmitry Dygalo <[email protected]>
Stranger6667 committed Jun 29, 2024
1 parent 788622e commit b8f7612
Showing 5 changed files with 248 additions and 59 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,10 @@

## [Unreleased] - TBD

### Performance

- Use `__slots__` for data classes on Python 3.10+.

## [0.1.0] - 2024-06-29

- Initial public release
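For context, the speed-up comes from the `slots=True` option that `dataclasses.dataclass` gained in Python 3.10; on older interpreters the flag has to be omitted, so it is applied conditionally. A minimal sketch of the pattern (the `Record` model below is illustrative, not harfile's actual definition):

```python
from __future__ import annotations

import sys
from dataclasses import dataclass

# `slots=True` only exists on Python 3.10+, so build the decorator kwargs conditionally.
DATACLASS_KWARGS = {"slots": True} if sys.version_info >= (3, 10) else {}


@dataclass(**DATACLASS_KWARGS)
class Record:
    # Illustrative model; not harfile's actual `Record` definition.
    name: str
    value: str
    comment: str | None = None
```

With `__slots__`, instances no longer carry a per-instance `__dict__`, which lowers memory use and makes attribute access slightly faster.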
86 changes: 82 additions & 4 deletions benches/har.py
@@ -9,15 +9,93 @@
CURRENT_DIR = pathlib.Path(__file__).parent.absolute()

with open(CURRENT_DIR / "entries.json") as fd:
ENTRIES = json.load(fd)
RAW_ENTRIES = fd.read()
ENTRIES = json.loads(RAW_ENTRIES)

for entry in ENTRIES:
entry["startedDateTime"] = datetime.fromisoformat(entry["startedDateTime"])
entry["request"] = harfile.Request(**entry["request"])
entry["response"] = harfile.Response(**entry["response"])
entry["request"] = harfile.Request(
**{
"method": entry["request"]["method"],
"url": entry["request"]["url"],
"httpVersion": entry["request"]["httpVersion"],
"cookies": [harfile.Cookie(**cookie) for cookie in entry["request"]["cookies"]],
"headers": [harfile.Record(**header) for header in entry["request"]["headers"]],
"queryString": [harfile.Record(**query) for query in entry["request"]["queryString"]],
"headersSize": entry["request"]["headersSize"],
"bodySize": entry["request"]["bodySize"],
**({} if "comment" not in entry["request"] else {"comment": entry["request"]["comment"]}),
**(
{}
if "postData" not in entry["request"]
else {
"postData": harfile.PostData(
**{
"mimeType": entry["request"]["postData"]["mimeType"],
**(
{}
if "params" not in entry["request"]["postData"]
else {
"params": [
harfile.PostParameter(**param)
for param in entry["request"]["postData"]["params"]
]
}
),
**(
{}
if "comment" not in entry["request"]["postData"]
else {"comment": entry["request"]["postData"]["comment"]}
),
**(
{}
if "text" not in entry["request"]["postData"]
else {"text": entry["request"]["postData"]["text"]}
),
}
)
}
),
}
)
entry["response"] = harfile.Response(
**{
"status": entry["response"]["status"],
"statusText": entry["response"]["statusText"],
"httpVersion": entry["response"]["httpVersion"],
"cookies": [harfile.Cookie(**cookie) for cookie in entry["response"]["cookies"]],
"headers": [harfile.Record(**header) for header in entry["response"]["headers"]],
"content": harfile.Content(**entry["response"]["content"]),
"redirectURL": entry["response"]["redirectURL"],
"headersSize": entry["response"]["headersSize"],
"bodySize": entry["response"]["bodySize"],
**({} if "comment" not in entry["response"] else {"comment": entry["response"]["comment"]}),
}
)
entry["timings"] = harfile.Timings(**entry["timings"])
if "cache" in entry:
entry["cache"] = harfile.Cache(**entry["cache"])
entry["cache"] = harfile.Cache(
**{
**(
{}
if "beforeRequest" not in entry["cache"]
else {"beforeRequest": harfile.CacheEntry(**entry["cache"]["beforeRequest"])}
),
**(
{}
if "afterRequest" not in entry["cache"]
else {"afterRequest": harfile.CacheEntry(**entry["cache"]["afterRequest"])}
),
**({} if "comment" not in entry["cache"] else {"comment": entry["cache"]["comment"]}),
}
)

buffer = io.StringIO()
with harfile.open(buffer) as har:
for entry in ENTRIES:
har.add_entry(**entry)
loaded = json.dumps(json.loads(buffer.getvalue())["log"]["entries"])
assert len(loaded) == len(RAW_ENTRIES.strip())


@pytest.mark.benchmark
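The benchmark setup above expands each HAR entry into harfile's model objects by hand, passing optional keys (`comment`, `postData`, `text`, …) only when they are present in the source data, and finally asserts that serializing the entries back out produces the same amount of JSON as went in. A possible helper (hypothetical, not part of the benchmark) that captures the "include the key only if present" unpacking pattern more compactly:

```python
from typing import Any, Mapping


def optional(data: Mapping[str, Any], key: str) -> dict[str, Any]:
    # Return {key: value} if the key is present, else an empty dict,
    # so it can be splatted into a constructor call with **.
    return {key: data[key]} if key in data else {}


# Usage sketch, assuming `raw` is one request dict from ENTRIES:
# request = harfile.Request(
#     method=raw["method"],
#     url=raw["url"],
#     httpVersion=raw["httpVersion"],
#     cookies=[harfile.Cookie(**c) for c in raw["cookies"]],
#     headers=[harfile.Record(**h) for h in raw["headers"]],
#     queryString=[harfile.Record(**q) for q in raw["queryString"]],
#     headersSize=raw["headersSize"],
#     bodySize=raw["bodySize"],
#     **optional(raw, "comment"),
# )
```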
60 changes: 17 additions & 43 deletions src/harfile/__init__.py
@@ -4,15 +4,15 @@

import builtins
import json
from dataclasses import asdict
from datetime import datetime
from os import PathLike
from types import TracebackType
from typing import IO, Any
from typing import IO

from ._models import (
Browser,
Cache,
CacheEntry,
Content,
Cookie,
Creator,
@@ -31,6 +31,7 @@
"Browser",
"Creator",
"Cache",
"CacheEntry",
"Request",
"Response",
"Timings",
@@ -124,48 +125,25 @@ def add_entry(
if not self._has_preable:
self._write_preamble()
self._has_preable = True
self._write_entry(
startedDateTime=startedDateTime,
time=time,
request=request,
response=response,
cache=cache,
timings=timings,
serverIPAddress=serverIPAddress,
connection=connection,
comment=comment,
)

def _write_entry(
self,
*,
startedDateTime: datetime,
time: int | float,
request: Request,
response: Response,
timings: Timings,
cache: Cache | None = None,
serverIPAddress: str | None = None,
connection: str | None = None,
comment: str | None = None,
) -> None:
separator = "\n" if self._is_first_entry else ",\n"
self._is_first_entry = False
self._fd.write(f"{separator} {{")
self._fd.write(f'\n "startedDateTime": "{startedDateTime.isoformat()}",')
self._fd.write(f'\n "time": {time},')
self._fd.write(f'\n "request": {json.dumps(asdict(request, dict_factory=_dict_factory))},')
self._fd.write(f'\n "response": {json.dumps(asdict(response, dict_factory=_dict_factory))},')
self._fd.write(f'\n "timings": {json.dumps(asdict(timings, dict_factory=_dict_factory))}')
write = self._fd.write
dumps = json.dumps
write(f"{separator} {{")
write(f'\n "startedDateTime": "{startedDateTime.isoformat()}",')
write(f'\n "time": {time},')
write(f'\n "request": {dumps(request.asdict())},')
write(f'\n "response": {dumps(response.asdict())},')
write(f'\n "timings": {dumps(timings.asdict())}')
if cache:
self._fd.write(f',\n "cache": {json.dumps(asdict(cache, dict_factory=_dict_factory))}')
write(f',\n "cache": {dumps(cache.asdict())}')
if serverIPAddress:
self._fd.write(f',\n "serverIPAddress": {json.dumps(serverIPAddress)}')
write(f',\n "serverIPAddress": {dumps(serverIPAddress)}')
if connection:
self._fd.write(f',\n "connection": {json.dumps(connection)}')
write(f',\n "connection": {dumps(connection)}')
if comment:
self._fd.write(f',\n "comment": {json.dumps(comment)}')
self._fd.write("\n }")
write(f',\n "comment": {dumps(comment)}')
write("\n }")

def _write_preamble(self) -> None:
creator = f"""{{
@@ -186,7 +164,7 @@ def _write_preamble(self) -> None:
"creator": {creator},
"browser": {browser}""")
if self._comment:
self._fd.write(f' "comment": "{self._comment}"')
self._fd.write(f',\n "comment": "{self._comment}"')
self._fd.write(',\n "entries": [')

def _write_postscript(self) -> None:
@@ -196,8 +174,4 @@ def _write_postscript(self) -> None:
self._fd.write("\n ]\n }\n}")


def _dict_factory(value: list[tuple[str, Any]]) -> dict[str, Any]:
return {key: value for key, value in value if value is not None}


open = HarFile.open
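The other change in this file swaps `dataclasses.asdict(..., dict_factory=_dict_factory)` for an `asdict()` method on the models themselves (defined in `_models.py`, which is not part of the diff shown here), inlines `_write_entry` into `add_entry`, and binds `self._fd.write` and `json.dumps` to locals to trim attribute lookups in the hot path. A rough sketch of what such an `asdict()` method could look like for a flat model, assuming its only job is to drop `None` fields the way the removed `_dict_factory` did (nested models such as `Request` would need to recurse; this is not the library's actual implementation):

```python
from __future__ import annotations

import sys
from dataclasses import dataclass, fields
from typing import Any

DATACLASS_KWARGS = {"slots": True} if sys.version_info >= (3, 10) else {}


@dataclass(**DATACLASS_KWARGS)
class Timings:
    # Illustrative subset of HAR timing fields; not harfile's actual model.
    send: float = -1
    wait: float = -1
    receive: float = -1
    comment: str | None = None

    def asdict(self) -> dict[str, Any]:
        # Emit only fields that are set, mirroring the removed `_dict_factory`,
        # which filtered out `None` values before JSON serialization.
        return {
            f.name: getattr(self, f.name)
            for f in fields(self)
            if getattr(self, f.name) is not None
        }
```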
