diff --git a/.appveyor.yml b/.appveyor.yml
deleted file mode 100644
index 781ad4a4b1d..00000000000
--- a/.appveyor.yml
+++ /dev/null
@@ -1,99 +0,0 @@
-skip_commits:
- files:
- - ".github/**/*"
- - ".gitmodules"
- - "docs/**/*"
- - "wheels/**/*"
-
-version: '{build}'
-clone_folder: c:\pillow
-init:
-- ECHO %PYTHON%
-#- ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
-# Uncomment previous line to get RDP access during the build.
-
-environment:
- COVERAGE_CORE: sysmon
- EXECUTABLE: python.exe
- TEST_OPTIONS:
- DEPLOY: YES
- matrix:
- - PYTHON: C:/Python313
- ARCHITECTURE: x86
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2022
- - PYTHON: C:/Python39-x64
- ARCHITECTURE: AMD64
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
-
-
-install:
-- '%PYTHON%\%EXECUTABLE% --version'
-- '%PYTHON%\%EXECUTABLE% -m pip install --upgrade pip'
-- curl -fsSL -o pillow-test-images.zip https://github.com/python-pillow/test-images/archive/main.zip
-- 7z x pillow-test-images.zip -oc:\
-- xcopy /S /Y c:\test-images-main\* c:\pillow\tests\images
-- curl -fsSL -o nasm-win64.zip https://raw.githubusercontent.com/python-pillow/pillow-depends/main/nasm-2.16.03-win64.zip
-- 7z x nasm-win64.zip -oc:\
-- choco install ghostscript --version=10.4.0
-- path c:\nasm-2.16.03;C:\Program Files\gs\gs10.04.0\bin;%PATH%
-- cd c:\pillow\winbuild\
-- ps: |
- c:\python39\python.exe c:\pillow\winbuild\build_prepare.py -v --depends=C:\pillow-depends\
- c:\pillow\winbuild\build\build_dep_all.cmd
- $host.SetShouldExit(0)
-- path C:\pillow\winbuild\build\bin;%PATH%
-
-build_script:
-- cd c:\pillow
-- winbuild\build\build_env.cmd
-- '%PYTHON%\%EXECUTABLE% -m pip install -v -C raqm=vendor -C fribidi=vendor .'
-- '%PYTHON%\%EXECUTABLE% selftest.py --installed'
-
-test_script:
-- cd c:\pillow
-- '%PYTHON%\%EXECUTABLE% -m pip install pytest pytest-cov pytest-timeout defusedxml ipython numpy olefile pyroma'
-- c:\"Program Files (x86)"\"Windows Kits"\10\Debuggers\x86\gflags.exe /p /enable %PYTHON%\%EXECUTABLE%
-- path %PYTHON%;%PATH%
-- .ci\test.cmd
-
-after_test:
-- curl -Os https://uploader.codecov.io/latest/windows/codecov.exe
-- .\codecov.exe --file coverage.xml --name %PYTHON% --flags AppVeyor
-
-matrix:
- fast_finish: true
-
-cache:
-- '%LOCALAPPDATA%\pip\Cache'
-
-artifacts:
-- path: pillow\*.egg
- name: egg
-- path: pillow\*.whl
- name: wheel
-
-before_deploy:
- - cd c:\pillow
- - '%PYTHON%\%EXECUTABLE% -m pip wheel -v -C raqm=vendor -C fribidi=vendor .'
- - ps: Get-ChildItem .\*.whl | % { Push-AppveyorArtifact $_.FullName -FileName $_.Name }
-
-deploy:
- provider: S3
- region: us-west-2
- access_key_id: AKIAIRAXC62ZNTVQJMOQ
- secret_access_key:
- secure: Hwb6klTqtBeMgxAjRoDltiiqpuH8xbwD4UooDzBSiCWXjuFj1lyl4kHgHwTCCGqi
- bucket: pillow-nightly
- folder: win/$(APPVEYOR_BUILD_NUMBER)/
- artifact: /.*egg|wheel/
- on:
- APPVEYOR_REPO_NAME: python-pillow/Pillow
- branch: main
- deploy: YES
-
-
-# Uncomment the following lines to get RDP access after the build/test and block for
-# up to the timeout limit (~1hr)
-#
-#on_finish:
-#- ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
diff --git a/.ci/after_success.sh b/.ci/after_success.sh
index c71546f007b..6da27b975cc 100755
--- a/.ci/after_success.sh
+++ b/.ci/after_success.sh
@@ -2,8 +2,4 @@
# gather the coverage data
python3 -m pip install coverage
-if [[ $MATRIX_DOCKER ]]; then
- python3 -m coverage xml --ignore-errors
-else
- python3 -m coverage xml
-fi
+python3 -m coverage xml
diff --git a/.ci/build.sh b/.ci/build.sh
index e678f68ec85..ae10cb67155 100755
--- a/.ci/build.sh
+++ b/.ci/build.sh
@@ -3,8 +3,5 @@
set -e
python3 -m coverage erase
-if [ $(uname) == "Darwin" ]; then
- export CPPFLAGS="-I/usr/local/miniconda/include";
-fi
make clean
make install-coverage
diff --git a/.ci/requirements-mypy.txt b/.ci/requirements-mypy.txt
index cd1b1a1a1dc..10e59b885ef 100644
--- a/.ci/requirements-mypy.txt
+++ b/.ci/requirements-mypy.txt
@@ -1,4 +1,4 @@
-mypy==1.14.0
+mypy==1.14.1
IceSpringPySideStubs-PyQt6
IceSpringPySideStubs-PySide6
ipython
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index ba2b7d8ed26..c098e32ebc8 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -9,7 +9,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
- Fork the Pillow repository.
- Create a branch from `main`.
- Develop bug fixes, features, tests, etc.
-- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/projects/new) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
+- Run the test suite. You can enable GitHub Actions (https://github.com/MY-USERNAME/Pillow/actions) on your repo to catch test failures prior to the pull request, and [Codecov](https://codecov.io/gh) to see if the changed code is covered by tests.
- Create a pull request to pull the changes from your branch to the Pillow `main`.
### Guidelines
@@ -17,7 +17,7 @@ Please send a pull request to the `main` branch. Please include [documentation](
- Separate code commits from reformatting commits.
- Provide tests for any newly added code.
- Follow PEP 8.
-- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running tests on AppVeyor.
+- When committing only documentation changes please include `[ci skip]` in the commit message to avoid running extra tests.
- Include [release notes](https://github.com/python-pillow/Pillow/tree/main/docs/releasenotes) as needed or appropriate with your bug fixes, feature additions and tests.
## Reporting Issues
diff --git a/.github/mergify.yml b/.github/mergify.yml
index 3c20661376f..9bb089615be 100644
--- a/.github/mergify.yml
+++ b/.github/mergify.yml
@@ -9,7 +9,6 @@ pull_request_rules:
- status-success=Windows Test Successful
- status-success=MinGW
- status-success=Cygwin Test Successful
- - status-success=continuous-integration/appveyor/pr
actions:
merge:
method: merge
diff --git a/.github/workflows/test-cygwin.yml b/.github/workflows/test-cygwin.yml
index 5b0a0394688..abfeaa77f9c 100644
--- a/.github/workflows/test-cygwin.yml
+++ b/.github/workflows/test-cygwin.yml
@@ -52,7 +52,7 @@ jobs:
persist-credentials: false
- name: Install Cygwin
- uses: cygwin/cygwin-install-action@v4
+ uses: cygwin/cygwin-install-action@v5
with:
packages: >
gcc-g++
diff --git a/.github/workflows/test-docker.yml b/.github/workflows/test-docker.yml
index cc5f9d4a5a9..da5e191da27 100644
--- a/.github/workflows/test-docker.yml
+++ b/.github/workflows/test-docker.yml
@@ -29,21 +29,18 @@ concurrency:
jobs:
build:
- runs-on: ubuntu-latest
+ runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
+ os: ["ubuntu-latest"]
docker: [
- # Run slower jobs first to give them a headstart and reduce waiting time
- ubuntu-22.04-jammy-arm64v8,
- ubuntu-24.04-noble-ppc64le,
- ubuntu-24.04-noble-s390x,
- # Then run the remainder
alpine,
amazon-2-amd64,
amazon-2023-amd64,
arch,
centos-stream-9-amd64,
+ centos-stream-10-amd64,
debian-12-bookworm-x86,
debian-12-bookworm-amd64,
fedora-40-amd64,
@@ -54,12 +51,17 @@ jobs:
]
dockerTag: [main]
include:
- - docker: "ubuntu-22.04-jammy-arm64v8"
- qemu-arch: "aarch64"
- docker: "ubuntu-24.04-noble-ppc64le"
+ os: "ubuntu-22.04"
qemu-arch: "ppc64le"
+ dockerTag: main
- docker: "ubuntu-24.04-noble-s390x"
+ os: "ubuntu-22.04"
qemu-arch: "s390x"
+ dockerTag: main
+ - docker: "ubuntu-24.04-noble-arm64v8"
+ os: "ubuntu-24.04-arm"
+ dockerTag: main
name: ${{ matrix.docker }}
@@ -89,15 +91,15 @@ jobs:
- name: After success
run: |
- PATH="$PATH:~/.local/bin"
docker start pillow_container
+ sudo docker cp pillow_container:/Pillow /Pillow
+ sudo chown -R runner /Pillow
pil_path=`docker exec pillow_container /vpy3/bin/python -c 'import os, PIL;print(os.path.realpath(os.path.dirname(PIL.__file__)))'`
docker stop pillow_container
sudo mkdir -p $pil_path
sudo cp src/PIL/*.py $pil_path
+ cd /Pillow
.ci/after_success.sh
- env:
- MATRIX_DOCKER: ${{ matrix.docker }}
- name: Upload coverage
uses: codecov/codecov-action@v5
diff --git a/.github/workflows/test-mingw.yml b/.github/workflows/test-mingw.yml
index a1d6ba61c9b..bb6d7dc373e 100644
--- a/.github/workflows/test-mingw.yml
+++ b/.github/workflows/test-mingw.yml
@@ -66,9 +66,9 @@ jobs:
mingw-w64-x86_64-libtiff \
mingw-w64-x86_64-libwebp \
mingw-w64-x86_64-openjpeg2 \
- mingw-w64-x86_64-python3-numpy \
- mingw-w64-x86_64-python3-olefile \
- mingw-w64-x86_64-python3-pip \
+ mingw-w64-x86_64-python-numpy \
+ mingw-w64-x86_64-python-olefile \
+ mingw-w64-x86_64-python-pip \
mingw-w64-x86_64-python-pytest \
mingw-w64-x86_64-python-pytest-cov \
mingw-w64-x86_64-python-pytest-timeout \
diff --git a/.github/workflows/test-windows.yml b/.github/workflows/test-windows.yml
index d905a392585..8faab2ef477 100644
--- a/.github/workflows/test-windows.yml
+++ b/.github/workflows/test-windows.yml
@@ -31,15 +31,20 @@ env:
jobs:
build:
- runs-on: windows-latest
+ runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
- python-version: ["pypy3.10", "3.9", "3.10", "3.11", "3.12", "3.13"]
+ python-version: ["pypy3.10", "3.10", "3.11", "3.12", "3.13", "3.14"]
+ architecture: ["x64"]
+ os: ["windows-latest"]
+ include:
+ # Test the oldest Python on 32-bit
+ - { python-version: "3.9", architecture: "x86", os: "windows-2019" }
timeout-minutes: 30
- name: Python ${{ matrix.python-version }}
+ name: Python ${{ matrix.python-version }} (${{ matrix.architecture }})
steps:
- name: Checkout Pillow
@@ -67,6 +72,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
+ architecture: ${{ matrix.architecture }}
cache: pip
cache-dependency-path: ".github/workflows/test-windows.yml"
@@ -78,7 +84,7 @@ jobs:
python3 -m pip install --upgrade pip
- name: Install CPython dependencies
- if: "!contains(matrix.python-version, 'pypy')"
+ if: "!contains(matrix.python-version, 'pypy') && matrix.architecture != 'x86'"
run: |
python3 -m pip install PyQt6
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 83a696f5f8b..e3efe0b593a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -42,6 +42,7 @@ jobs:
]
python-version: [
"pypy3.10",
+ "3.14",
"3.13t",
"3.13",
"3.12",
diff --git a/.github/workflows/wheels-dependencies.sh b/.github/workflows/wheels-dependencies.sh
index 4e0fad79f4e..dffb360856a 100755
--- a/.github/workflows/wheels-dependencies.sh
+++ b/.github/workflows/wheels-dependencies.sh
@@ -37,20 +37,15 @@ fi
ARCHIVE_SDIR=pillow-depends-main
# Package versions for fresh source builds
-FREETYPE_VERSION=2.13.2
-HARFBUZZ_VERSION=10.1.0
-LIBPNG_VERSION=1.6.44
+FREETYPE_VERSION=2.13.3
+HARFBUZZ_VERSION=10.2.0
+LIBPNG_VERSION=1.6.46
JPEGTURBO_VERSION=3.1.0
OPENJPEG_VERSION=2.5.3
-XZ_VERSION=5.6.3
+XZ_VERSION=5.6.4
TIFF_VERSION=4.6.0
LCMS2_VERSION=2.16
-if [[ -n "$IS_MACOS" ]]; then
- GIFLIB_VERSION=5.2.2
-else
- GIFLIB_VERSION=5.2.1
-fi
-ZLIB_NG_VERSION=2.2.2
+ZLIB_NG_VERSION=2.2.3
LIBWEBP_VERSION=1.5.0
BZIP2_VERSION=1.0.8
LIBXCB_VERSION=1.17.0
@@ -103,7 +98,7 @@ function build_harfbuzz {
function build {
build_xz
- if [ -z "$IS_ALPINE" ] && [ -z "$IS_MACOS" ]; then
+ if [ -z "$IS_ALPINE" ] && [ -z "$SANITIZER" ] && [ -z "$IS_MACOS" ]; then
yum remove -y zlib-devel
fi
build_zlib_ng
@@ -140,7 +135,9 @@ function build {
if [[ -n "$IS_MACOS" ]]; then
CFLAGS="$CFLAGS -Wl,-headerpad_max_install_names"
fi
- build_libwebp
+ build_simple libwebp $LIBWEBP_VERSION \
+ https://storage.googleapis.com/downloads.webmproject.org/releases/webp tar.gz \
+ --enable-libwebpmux --enable-libwebpdemux
CFLAGS=$ORIGINAL_CFLAGS
build_brotli
diff --git a/.github/workflows/wheels-test.ps1 b/.github/workflows/wheels-test.ps1
index f593c722854..a1edc14ef25 100644
--- a/.github/workflows/wheels-test.ps1
+++ b/.github/workflows/wheels-test.ps1
@@ -11,6 +11,9 @@ if ("$venv" -like "*\cibw-run-*\pp*-win_amd64\*") {
$env:path += ";$pillow\winbuild\build\bin\"
& "$venv\Scripts\activate.ps1"
& reg add "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Image File Execution Options\python.exe" /v "GlobalFlag" /t REG_SZ /d "0x02000000" /f
+if ("$venv" -like "*\cibw-run-*-win_amd64\*") {
+ & python -m pip install numpy
+}
cd $pillow
& python -VV
if (!$?) { exit $LASTEXITCODE }
diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml
index c5e55aa621d..db8e4d58bab 100644
--- a/.github/workflows/wheels.yml
+++ b/.github/workflows/wheels.yml
@@ -13,6 +13,7 @@ on:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
+ - "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
@@ -23,6 +24,7 @@ on:
paths:
- ".ci/requirements-cibw.txt"
- ".github/workflows/wheel*"
+ - "pyproject.toml"
- "setup.py"
- "wheels/*"
- "winbuild/build_prepare.py"
@@ -40,62 +42,7 @@ env:
FORCE_COLOR: 1
jobs:
- build-1-QEMU-emulated-wheels:
- if: github.event_name != 'schedule'
- name: aarch64 ${{ matrix.python-version }} ${{ matrix.spec }}
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- python-version:
- - pp310
- - cp3{9,10,11}
- - cp3{12,13}
- spec:
- - manylinux2014
- - manylinux_2_28
- - musllinux
- exclude:
- - { python-version: pp310, spec: musllinux }
-
- steps:
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- submodules: true
-
- - uses: actions/setup-python@v5
- with:
- python-version: "3.x"
-
- # https://github.com/docker/setup-qemu-action
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
-
- - name: Install cibuildwheel
- run: |
- python3 -m pip install -r .ci/requirements-cibw.txt
-
- - name: Build wheels
- run: |
- python3 -m cibuildwheel --output-dir wheelhouse
- env:
- # Build only the currently selected Linux architecture (so we can
- # parallelise for speed).
- CIBW_ARCHS: "aarch64"
- # Likewise, select only one Python version per job to speed this up.
- CIBW_BUILD: "${{ matrix.python-version }}-${{ matrix.spec == 'musllinux' && 'musllinux' || 'manylinux' }}*"
- CIBW_ENABLE: cpython-prerelease
- # Extra options for manylinux.
- CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.spec }}
- CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.spec }}
-
- - uses: actions/upload-artifact@v4
- with:
- name: dist-qemu-${{ matrix.python-version }}-${{ matrix.spec }}
- path: ./wheelhouse/*.whl
-
- build-2-native-wheels:
+ build-native-wheels:
if: github.event_name != 'schedule' || github.repository_owner == 'python-pillow'
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
@@ -130,6 +77,14 @@ jobs:
cibw_arch: x86_64
build: "*manylinux*"
manylinux: "manylinux_2_28"
+ - name: "manylinux2014 and musllinux aarch64"
+ os: ubuntu-24.04-arm
+ cibw_arch: aarch64
+ - name: "manylinux_2_28 aarch64"
+ os: ubuntu-24.04-arm
+ cibw_arch: aarch64
+ build: "*manylinux*"
+ manylinux: "manylinux_2_28"
steps:
- uses: actions/checkout@v4
with:
@@ -150,7 +105,9 @@ jobs:
env:
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BUILD: ${{ matrix.build }}
- CIBW_ENABLE: cpython-prerelease cpython-freethreading
+ CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
+ CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.manylinux }}
+ CIBW_MANYLINUX_PYPY_AARCH64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_PYPY_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.manylinux }}
CIBW_SKIP: pp39-*
@@ -227,7 +184,7 @@ jobs:
CIBW_ARCHS: ${{ matrix.cibw_arch }}
CIBW_BEFORE_ALL: "{package}\\winbuild\\build\\build_dep_all.cmd"
CIBW_CACHE_PATH: "C:\\cibw"
- CIBW_ENABLE: cpython-prerelease cpython-freethreading
+ CIBW_ENABLE: cpython-prerelease cpython-freethreading pypy
CIBW_SKIP: pp39-*
CIBW_TEST_SKIP: "*-win_arm64"
CIBW_TEST_COMMAND: 'docker run --rm
@@ -263,8 +220,6 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: "3.x"
- cache: pip
- cache-dependency-path: "Makefile"
- run: make sdist
@@ -275,7 +230,7 @@ jobs:
scientific-python-nightly-wheels-publish:
if: github.repository_owner == 'python-pillow' && (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
- needs: [build-2-native-wheels, windows]
+ needs: [build-native-wheels, windows]
runs-on: ubuntu-latest
name: Upload wheels to scientific-python-nightly-wheels
steps:
@@ -292,7 +247,7 @@ jobs:
pypi-publish:
if: github.repository_owner == 'python-pillow' && github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
- needs: [build-1-QEMU-emulated-wheels, build-2-native-wheels, windows, sdist]
+ needs: [build-native-wheels, windows, sdist]
runs-on: ubuntu-latest
name: Upload release to PyPI
environment:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f91260c724f..20fa7d04f00 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.8.1
+ rev: v0.8.6
hooks:
- id: ruff
args: [--exit-non-zero-on-fix]
@@ -24,7 +24,7 @@ repos:
exclude: (Makefile$|\.bat$|\.cmake$|\.eps$|\.fits$|\.gd$|\.opt$)
- repo: https://github.com/pre-commit/mirrors-clang-format
- rev: v19.1.4
+ rev: v19.1.6
hooks:
- id: clang-format
types: [c]
@@ -56,6 +56,11 @@ repos:
- id: check-readthedocs
- id: check-renovate
+ - repo: https://github.com/woodruffw/zizmor-pre-commit
+ rev: v1.0.0
+ hooks:
+ - id: zizmor
+
- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v1.0.0
hooks:
diff --git a/.readthedocs.yml b/.readthedocs.yml
index def6282dd56..3e03c76ea9b 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -1,5 +1,8 @@
version: 2
+sphinx:
+ configuration: docs/conf.py
+
formats: [pdf]
build:
diff --git a/MANIFEST.in b/MANIFEST.in
index af25dfd2db5..48085b82ed0 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -20,7 +20,6 @@ graft docs
graft _custom_build
# build/src control detritus
-exclude .appveyor.yml
exclude .clang-format
exclude .coveragerc
exclude .editorconfig
diff --git a/README.md b/README.md
index 057d0acf0ce..1cae558ada3 100644
--- a/README.md
+++ b/README.md
@@ -42,9 +42,6 @@ As of 2019, Pillow development is
-
diff --git a/RELEASING.md b/RELEASING.md
index ebdbb6406e8..932beb2c26e 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -9,7 +9,7 @@ Released quarterly on January 2nd, April 1st, July 1st and October 15th.
* [ ] Open a release ticket e.g. https://github.com/python-pillow/Pillow/issues/3154
* [ ] Develop and prepare release in `main` branch.
-* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in `main` branch.
+* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in `main` branch.
* [ ] Check that all the wheel builds pass the tests in the [GitHub Actions "Wheels" workflow](https://github.com/python-pillow/Pillow/actions/workflows/wheels.yml) jobs by manually triggering them.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test` in a freshly cloned repo.
@@ -38,7 +38,7 @@ Released as needed for security, installation or critical bug fixes.
git checkout -t remotes/origin/5.2.x
```
* [ ] Cherry pick individual commits from `main` branch to release branch e.g. `5.2.x`, then `git push`.
-* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) and [AppVeyor](https://ci.appveyor.com/project/python-pillow/Pillow) to confirm passing tests in release branch e.g. `5.2.x`.
+* [ ] Check [GitHub Actions](https://github.com/python-pillow/Pillow/actions) to confirm passing tests in release branch e.g. `5.2.x`.
* [ ] In compliance with [PEP 440](https://peps.python.org/pep-0440/), update version identifier in `src/PIL/_version.py`
* [ ] Run pre-release check via `make release-test`.
* [ ] Create tag for release e.g.:
diff --git a/Tests/check_png_dos.py b/Tests/check_png_dos.py
index 63d6657bc45..1bfb94ab7f7 100644
--- a/Tests/check_png_dos.py
+++ b/Tests/check_png_dos.py
@@ -3,26 +3,25 @@
import zlib
from io import BytesIO
+import pytest
+
from PIL import Image, ImageFile, PngImagePlugin
TEST_FILE = "Tests/images/png_decompression_dos.png"
-def test_ignore_dos_text() -> None:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+def test_ignore_dos_text(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
- try:
- im = Image.open(TEST_FILE)
+ with Image.open(TEST_FILE) as im:
im.load()
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
- assert isinstance(im, PngImagePlugin.PngImageFile)
- for s in im.text.values():
- assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
+ assert isinstance(im, PngImagePlugin.PngImageFile)
+ for s in im.text.values():
+ assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
- for s in im.info.values():
- assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
+ for s in im.info.values():
+ assert len(s) < 1024 * 1024, "Text chunk larger than 1M"
def test_dos_text() -> None:
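The recurring change in the test hunks below swaps try/finally restoration of module-level flags for pytest's monkeypatch fixture, which records the old value and restores it automatically at teardown. A minimal sketch of the pattern (standalone illustration, not taken verbatim from the patch; assumes only pytest and Pillow are installed):

import pytest
from PIL import ImageFile


def test_truncated_load_sketch(monkeypatch: pytest.MonkeyPatch) -> None:
    # monkeypatch.setattr undoes the change when the test finishes,
    # so no try/finally block is needed around the test body.
    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
    assert ImageFile.LOAD_TRUNCATED_IMAGES is True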
diff --git a/Tests/helper.py b/Tests/helper.py
index d6a93a8030b..e7b0db1d6b1 100644
--- a/Tests/helper.py
+++ b/Tests/helper.py
@@ -140,18 +140,11 @@ def assert_image_similar_tofile(
filename: str,
epsilon: float,
msg: str | None = None,
- mode: str | None = None,
) -> None:
with Image.open(filename) as img:
- if mode:
- img = img.convert(mode)
assert_image_similar(a, img, epsilon, msg)
-def assert_all_same(items: Sequence[Any], msg: str | None = None) -> None:
- assert items.count(items[0]) == len(items), msg
-
-
def assert_not_all_same(items: Sequence[Any], msg: str | None = None) -> None:
assert items.count(items[0]) != len(items), msg
@@ -327,16 +320,7 @@ def magick_command() -> list[str] | None:
return None
-def on_appveyor() -> bool:
- return "APPVEYOR" in os.environ
-
-
-def on_github_actions() -> bool:
- return "GITHUB_ACTIONS" in os.environ
-
-
def on_ci() -> bool:
- # GitHub Actions and AppVeyor have "CI"
return "CI" in os.environ
diff --git a/Tests/images/jfif_unit_cm.jpg b/Tests/images/jfif_unit_cm.jpg
new file mode 100644
index 00000000000..78b50e60a23
Binary files /dev/null and b/Tests/images/jfif_unit_cm.jpg differ
diff --git a/Tests/oss-fuzz/test_fuzzers.py b/Tests/oss-fuzz/test_fuzzers.py
index 90eb8713a8b..e42ec90aa54 100644
--- a/Tests/oss-fuzz/test_fuzzers.py
+++ b/Tests/oss-fuzz/test_fuzzers.py
@@ -7,7 +7,7 @@
import packaging
import pytest
-from PIL import Image, UnidentifiedImageError, features
+from PIL import Image, features
from Tests.helper import skip_unless_feature
if sys.platform.startswith("win32"):
@@ -32,21 +32,17 @@ def test_fuzz_images(path: str) -> None:
fuzzers.fuzz_image(f.read())
assert True
except (
+ # Known exceptions from Pillow
OSError,
SyntaxError,
MemoryError,
ValueError,
NotImplementedError,
OverflowError,
- ):
- # Known exceptions that are through from Pillow
- assert True
- except (
+ # Known Image.* exceptions
Image.DecompressionBombError,
Image.DecompressionBombWarning,
- UnidentifiedImageError,
):
- # Known Image.* exceptions
assert True
finally:
fuzzers.disable_decompressionbomb_error()
diff --git a/Tests/test_decompression_bomb.py b/Tests/test_decompression_bomb.py
index c140156f9ea..98d833736fa 100644
--- a/Tests/test_decompression_bomb.py
+++ b/Tests/test_decompression_bomb.py
@@ -12,19 +12,16 @@
class TestDecompressionBomb:
- def teardown_method(self) -> None:
- Image.MAX_IMAGE_PIXELS = ORIGINAL_LIMIT
-
def test_no_warning_small_file(self) -> None:
# Implicit assert: no warning.
# A warning would cause a failure.
with Image.open(TEST_FILE):
pass
- def test_no_warning_no_limit(self) -> None:
+ def test_no_warning_no_limit(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Arrange
# Turn limit off
- Image.MAX_IMAGE_PIXELS = None
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)
assert Image.MAX_IMAGE_PIXELS is None
# Act / Assert
@@ -33,18 +30,18 @@ def test_no_warning_no_limit(self) -> None:
with Image.open(TEST_FILE):
pass
- def test_warning(self) -> None:
+ def test_warning(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger warning on the test file
- Image.MAX_IMAGE_PIXELS = 128 * 128 - 1
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 128 * 128 - 1)
assert Image.MAX_IMAGE_PIXELS == 128 * 128 - 1
with pytest.warns(Image.DecompressionBombWarning):
with Image.open(TEST_FILE):
pass
- def test_exception(self) -> None:
+ def test_exception(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger exception on the test file
- Image.MAX_IMAGE_PIXELS = 64 * 128 - 1
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 64 * 128 - 1)
assert Image.MAX_IMAGE_PIXELS == 64 * 128 - 1
with pytest.raises(Image.DecompressionBombError):
@@ -66,9 +63,9 @@ def test_exception_gif_extents(self) -> None:
with pytest.raises(Image.DecompressionBombError):
im.seek(1)
- def test_exception_gif_zero_width(self) -> None:
+ def test_exception_gif_zero_width(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Set limit to trigger exception on the test file
- Image.MAX_IMAGE_PIXELS = 4 * 64 * 128
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", 4 * 64 * 128)
assert Image.MAX_IMAGE_PIXELS == 4 * 64 * 128
with pytest.raises(Image.DecompressionBombError):
diff --git a/Tests/test_file_apng.py b/Tests/test_file_apng.py
index ee6c867c3ca..9d5154fca07 100644
--- a/Tests/test_file_apng.py
+++ b/Tests/test_file_apng.py
@@ -307,13 +307,8 @@ def test_apng_syntax_errors() -> None:
im.load()
# we can handle this case gracefully
- exception = None
with Image.open("Tests/images/apng/syntax_num_frames_low.png") as im:
- try:
- im.seek(im.n_frames - 1)
- except Exception as e:
- exception = e
- assert exception is None
+ im.seek(im.n_frames - 1)
with pytest.raises(OSError):
with Image.open("Tests/images/apng/syntax_num_frames_high.png") as im:
@@ -405,13 +400,8 @@ def test_apng_save_split_fdat(tmp_path: Path) -> None:
append_images=frames,
)
with Image.open(test_file) as im:
- exception = None
- try:
- im.seek(im.n_frames - 1)
- im.load()
- except Exception as e:
- exception = e
- assert exception is None
+ im.seek(im.n_frames - 1)
+ im.load()
def test_apng_save_duration_loop(tmp_path: Path) -> None:
diff --git a/Tests/test_file_blp.py b/Tests/test_file_blp.py
index 1e2f20c407b..9f2de8f982e 100644
--- a/Tests/test_file_blp.py
+++ b/Tests/test_file_blp.py
@@ -4,7 +4,7 @@
import pytest
-from PIL import Image
+from PIL import BlpImagePlugin, Image
from .helper import (
assert_image_equal,
@@ -19,6 +19,7 @@ def test_load_blp1() -> None:
assert_image_equal_tofile(im, "Tests/images/blp/blp1_jpeg.png")
with Image.open("Tests/images/blp/blp1_jpeg2.blp") as im:
+ assert im.mode == "RGBA"
im.load()
@@ -37,6 +38,13 @@ def test_load_blp2_dxt1a() -> None:
assert_image_equal_tofile(im, "Tests/images/blp/blp2_dxt1a.png")
+def test_invalid_file() -> None:
+ invalid_file = "Tests/images/flower.jpg"
+
+ with pytest.raises(BlpImagePlugin.BLPFormatError):
+ BlpImagePlugin.BlpImageFile(invalid_file)
+
+
def test_save(tmp_path: Path) -> None:
f = str(tmp_path / "temp.blp")
diff --git a/Tests/test_file_fli.py b/Tests/test_file_fli.py
index 0a7740cc87d..876561a88b8 100644
--- a/Tests/test_file_fli.py
+++ b/Tests/test_file_fli.py
@@ -35,22 +35,19 @@ def test_sanity() -> None:
assert im.is_animated
-def test_prefix_chunk() -> None:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- with Image.open(animated_test_file_with_prefix_chunk) as im:
- assert im.mode == "P"
- assert im.size == (320, 200)
- assert im.format == "FLI"
- assert im.info["duration"] == 171
- assert im.is_animated
-
- palette = im.getpalette()
- assert palette[3:6] == [255, 255, 255]
- assert palette[381:384] == [204, 204, 12]
- assert palette[765:] == [252, 0, 0]
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+def test_prefix_chunk(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ with Image.open(animated_test_file_with_prefix_chunk) as im:
+ assert im.mode == "P"
+ assert im.size == (320, 200)
+ assert im.format == "FLI"
+ assert im.info["duration"] == 171
+ assert im.is_animated
+
+ palette = im.getpalette()
+ assert palette[3:6] == [255, 255, 255]
+ assert palette[381:384] == [204, 204, 12]
+ assert palette[765:] == [252, 0, 0]
@pytest.mark.skipif(is_pypy(), reason="Requires CPython")
diff --git a/Tests/test_file_gif.py b/Tests/test_file_gif.py
index 5d46b157d55..61a9475c73d 100644
--- a/Tests/test_file_gif.py
+++ b/Tests/test_file_gif.py
@@ -109,7 +109,7 @@ def test_palette_not_needed_for_second_frame() -> None:
assert_image_similar(im, hopper("L").convert("RGB"), 8)
-def test_strategy() -> None:
+def test_strategy(monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/iss634.gif") as im:
expected_rgb_always = im.convert("RGB")
@@ -119,35 +119,36 @@ def test_strategy() -> None:
im.seek(1)
expected_different = im.convert("RGB")
- try:
- GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_ALWAYS
- with Image.open("Tests/images/iss634.gif") as im:
- assert im.mode == "RGB"
- assert_image_equal(im, expected_rgb_always)
+ monkeypatch.setattr(
+ GifImagePlugin, "LOADING_STRATEGY", GifImagePlugin.LoadingStrategy.RGB_ALWAYS
+ )
+ with Image.open("Tests/images/iss634.gif") as im:
+ assert im.mode == "RGB"
+ assert_image_equal(im, expected_rgb_always)
- with Image.open("Tests/images/chi.gif") as im:
- assert im.mode == "RGBA"
- assert_image_equal(im, expected_rgb_always_rgba)
+ with Image.open("Tests/images/chi.gif") as im:
+ assert im.mode == "RGBA"
+ assert_image_equal(im, expected_rgb_always_rgba)
- GifImagePlugin.LOADING_STRATEGY = (
- GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY
- )
- # Stay in P mode with only a global palette
- with Image.open("Tests/images/chi.gif") as im:
- assert im.mode == "P"
+ monkeypatch.setattr(
+ GifImagePlugin,
+ "LOADING_STRATEGY",
+ GifImagePlugin.LoadingStrategy.RGB_AFTER_DIFFERENT_PALETTE_ONLY,
+ )
+ # Stay in P mode with only a global palette
+ with Image.open("Tests/images/chi.gif") as im:
+ assert im.mode == "P"
- im.seek(1)
- assert im.mode == "P"
- assert_image_equal(im.convert("RGB"), expected_different)
+ im.seek(1)
+ assert im.mode == "P"
+ assert_image_equal(im.convert("RGB"), expected_different)
- # Change to RGB mode when a frame has an individual palette
- with Image.open("Tests/images/iss634.gif") as im:
- assert im.mode == "P"
+ # Change to RGB mode when a frame has an individual palette
+ with Image.open("Tests/images/iss634.gif") as im:
+ assert im.mode == "P"
- im.seek(1)
- assert im.mode == "RGB"
- finally:
- GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
+ im.seek(1)
+ assert im.mode == "RGB"
def test_optimize() -> None:
@@ -555,17 +556,15 @@ def test_dispose_background_transparency() -> None:
def test_transparent_dispose(
loading_strategy: GifImagePlugin.LoadingStrategy,
expected_colors: tuple[tuple[int | tuple[int, int, int, int], ...]],
+ monkeypatch: pytest.MonkeyPatch,
) -> None:
- GifImagePlugin.LOADING_STRATEGY = loading_strategy
- try:
- with Image.open("Tests/images/transparent_dispose.gif") as img:
- for frame in range(3):
- img.seek(frame)
- for x in range(3):
- color = img.getpixel((x, 0))
- assert color == expected_colors[frame][x]
- finally:
- GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
+ monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
+ with Image.open("Tests/images/transparent_dispose.gif") as img:
+ for frame in range(3):
+ img.seek(frame)
+ for x in range(3):
+ color = img.getpixel((x, 0))
+ assert color == expected_colors[frame][x]
def test_dispose_previous() -> None:
@@ -1398,24 +1397,23 @@ def test_lzw_bits() -> None:
),
)
def test_extents(
- test_file: str, loading_strategy: GifImagePlugin.LoadingStrategy
+ test_file: str,
+ loading_strategy: GifImagePlugin.LoadingStrategy,
+ monkeypatch: pytest.MonkeyPatch,
) -> None:
- GifImagePlugin.LOADING_STRATEGY = loading_strategy
- try:
- with Image.open("Tests/images/" + test_file) as im:
- assert im.size == (100, 100)
+ monkeypatch.setattr(GifImagePlugin, "LOADING_STRATEGY", loading_strategy)
+ with Image.open("Tests/images/" + test_file) as im:
+ assert im.size == (100, 100)
- # Check that n_frames does not change the size
- assert im.n_frames == 2
- assert im.size == (100, 100)
+ # Check that n_frames does not change the size
+ assert im.n_frames == 2
+ assert im.size == (100, 100)
- im.seek(1)
- assert im.size == (150, 150)
+ im.seek(1)
+ assert im.size == (150, 150)
- im.load()
- assert im.im.size == (150, 150)
- finally:
- GifImagePlugin.LOADING_STRATEGY = GifImagePlugin.LoadingStrategy.RGB_AFTER_FIRST
+ im.load()
+ assert im.im.size == (150, 150)
def test_missing_background() -> None:
diff --git a/Tests/test_file_ico.py b/Tests/test_file_ico.py
index 37770498a0a..e240faf1ec8 100644
--- a/Tests/test_file_ico.py
+++ b/Tests/test_file_ico.py
@@ -243,27 +243,23 @@ def test_draw_reloaded(tmp_path: Path) -> None:
assert_image_equal_tofile(im, "Tests/images/hopper_draw.ico")
-def test_truncated_mask() -> None:
+def test_truncated_mask(monkeypatch: pytest.MonkeyPatch) -> None:
# 1 bpp
with open("Tests/images/hopper_mask.ico", "rb") as fp:
data = fp.read()
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
data = data[:-3]
- try:
- with Image.open(io.BytesIO(data)) as im:
- with Image.open("Tests/images/hopper_mask.png") as expected:
- assert im.mode == "1"
+ with Image.open(io.BytesIO(data)) as im:
+ assert im.mode == "1"
- # 32 bpp
- output = io.BytesIO()
- expected = hopper("RGBA")
- expected.save(output, "ico", bitmap_format="bmp")
+ # 32 bpp
+ output = io.BytesIO()
+ expected = hopper("RGBA")
+ expected.save(output, "ico", bitmap_format="bmp")
- data = output.getvalue()[:-1]
+ data = output.getvalue()[:-1]
- with Image.open(io.BytesIO(data)) as im:
- assert im.mode == "RGB"
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ with Image.open(io.BytesIO(data)) as im:
+ assert im.mode == "RGB"
diff --git a/Tests/test_file_iptc.py b/Tests/test_file_iptc.py
index 8a7c59fb150..c6c0c1aab9d 100644
--- a/Tests/test_file_iptc.py
+++ b/Tests/test_file_iptc.py
@@ -58,10 +58,7 @@ def test_getiptcinfo_fotostation() -> None:
# Assert
assert iptc is not None
- for tag in iptc.keys():
- if tag[0] == 240:
- return
- pytest.fail("FotoStation tag not found")
+ assert 240 in (tag[0] for tag in iptc.keys()), "FotoStation tag not found"
def test_getiptcinfo_zero_padding() -> None:
diff --git a/Tests/test_file_jpeg.py b/Tests/test_file_jpeg.py
index 3931a50ac70..c578066af38 100644
--- a/Tests/test_file_jpeg.py
+++ b/Tests/test_file_jpeg.py
@@ -181,6 +181,10 @@ def test(xdpi: int, ydpi: int | None = None) -> tuple[int, int] | None:
assert test(100, 200) == (100, 200)
assert test(0) is None # square pixels
+ def test_dpi_jfif_cm(self) -> None:
+ with Image.open("Tests/images/jfif_unit_cm.jpg") as im:
+ assert im.info["dpi"] == (2.54, 5.08)
+
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
@@ -277,7 +281,10 @@ def test_progressive(self) -> None:
assert not im2.info.get("progressive")
assert im3.info.get("progressive")
- assert_image_equal(im1, im3)
+ if features.check_feature("mozjpeg"):
+ assert_image_similar(im1, im3, 9.39)
+ else:
+ assert_image_equal(im1, im3)
assert im1_bytes >= im3_bytes
def test_progressive_large_buffer(self, tmp_path: Path) -> None:
@@ -349,7 +356,6 @@ def test_empty_exif_gps(self) -> None:
assert exif.get_ifd(0x8825) == {}
transposed = ImageOps.exif_transpose(im)
- assert transposed is not None
exif = transposed.getexif()
assert exif.get_ifd(0x8825) == {}
@@ -420,8 +426,12 @@ def test_progressive_compat(self) -> None:
im2 = self.roundtrip(hopper(), progressive=1)
im3 = self.roundtrip(hopper(), progression=1) # compatibility
- assert_image_equal(im1, im2)
- assert_image_equal(im1, im3)
+ if features.check_feature("mozjpeg"):
+ assert_image_similar(im1, im2, 9.39)
+ assert_image_similar(im1, im3, 9.39)
+ else:
+ assert_image_equal(im1, im2)
+ assert_image_equal(im1, im3)
assert im2.info.get("progressive")
assert im2.info.get("progression")
assert im3.info.get("progressive")
@@ -520,12 +530,13 @@ def test_ff00_jpeg_header(self) -> None:
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
- def test_truncated_jpeg_should_read_all_the_data(self) -> None:
+ def test_truncated_jpeg_should_read_all_the_data(
+ self, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
filename = "Tests/images/truncated_jpeg.jpg"
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
with Image.open(filename) as im:
im.load()
- ImageFile.LOAD_TRUNCATED_IMAGES = False
assert im.getbbox() is not None
def test_truncated_jpeg_throws_oserror(self) -> None:
@@ -1014,7 +1025,7 @@ def test_save_xmp(self, tmp_path: Path) -> None:
im.save(f, xmp=b"1" * 65505)
@pytest.mark.timeout(timeout=1)
- def test_eof(self) -> None:
+ def test_eof(self, monkeypatch: pytest.MonkeyPatch) -> None:
# Even though this decoder never says that it is finished
# the image should still end when there is no new data
class InfiniteMockPyDecoder(ImageFile.PyDecoder):
@@ -1027,11 +1038,10 @@ def decode(
with Image.open(TEST_FILE) as im:
im.tile = [
- ("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
+ ImageFile._Tile("INFINITE", (0, 0, 128, 128), 0, ("RGB", 0, 1)),
]
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
im.load()
- ImageFile.LOAD_TRUNCATED_IMAGES = False
def test_separate_tables(self) -> None:
im = hopper()
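The new test_dpi_jfif_cm case above reads a JFIF file whose density unit is dots per centimetre and expects Pillow to report the value converted to DPI. A short worked check of that arithmetic, assuming the file declares a density of 1 x 2 dots/cm (inferred from the expected values, not verified against the binary):

# 1 inch = 2.54 cm, so dots/cm * 2.54 gives dots/inch.
density_cm = (1, 2)  # assumed declared density of jfif_unit_cm.jpg
dpi = tuple(round(d * 2.54, 2) for d in density_cm)
assert dpi == (2.54, 5.08)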
diff --git a/Tests/test_file_jpeg2k.py b/Tests/test_file_jpeg2k.py
index 34176d3cecb..589240191ef 100644
--- a/Tests/test_file_jpeg2k.py
+++ b/Tests/test_file_jpeg2k.py
@@ -181,14 +181,11 @@ def test_load_dpi() -> None:
assert "dpi" not in im.info
-def test_restricted_icc_profile() -> None:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- # JPEG2000 image with a restricted ICC profile and a known colorspace
- with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
- assert im.mode == "RGB"
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+def test_restricted_icc_profile(monkeypatch: pytest.MonkeyPatch) -> None:
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ # JPEG2000 image with a restricted ICC profile and a known colorspace
+ with Image.open("Tests/images/balloon_eciRGBv2_aware.jp2") as im:
+ assert im.mode == "RGB"
@pytest.mark.skipif(
@@ -325,6 +322,18 @@ def test_cmyk() -> None:
assert im.getpixel((0, 0)) == (185, 134, 0, 0)
+@pytest.mark.skipif(
+ not os.path.exists(EXTRA_DIR), reason="Extra image files not installed"
+)
+@skip_unless_feature_version("jpg_2000", "2.5.3")
+def test_cmyk_save() -> None:
+ with Image.open(f"{EXTRA_DIR}/issue205.jp2") as jp2:
+ assert jp2.mode == "CMYK"
+
+ im = roundtrip(jp2)
+ assert_image_equal(im, jp2)
+
+
@pytest.mark.parametrize("ext", (".j2k", ".jp2"))
def test_16bit_monochrome_has_correct_mode(ext: str) -> None:
with Image.open("Tests/images/16bit.cropped" + ext) as im:
@@ -480,8 +489,7 @@ def test_plt_marker(card: ImageFile.ImageFile) -> None:
out.seek(0)
while True:
marker = out.read(2)
- if not marker:
- pytest.fail("End of stream without PLT")
+ assert marker, "End of stream without PLT"
jp2_boxid = _binary.i16be(marker)
if jp2_boxid == 0xFF4F:
diff --git a/Tests/test_file_libtiff.py b/Tests/test_file_libtiff.py
index 2a87e60c847..f44fb7fec48 100644
--- a/Tests/test_file_libtiff.py
+++ b/Tests/test_file_libtiff.py
@@ -44,11 +44,7 @@ def _assert_noerr(self, tmp_path: Path, im: TiffImagePlugin.TiffImageFile) -> No
im.load()
im.getdata()
- try:
- assert im._compression == "group4"
- except AttributeError:
- print("No _compression")
- print(dir(im))
+ assert im._compression == "group4"
# can we write it back out, in a different form.
out = str(tmp_path / "temp.png")
@@ -1116,13 +1112,15 @@ def test_exif_transpose(self) -> None:
)
def test_buffering(self, test_file: str) -> None:
# load exif first
- with Image.open(open(test_file, "rb", buffering=1048576)) as im:
- exif = dict(im.getexif())
+ with open(test_file, "rb", buffering=1048576) as f:
+ with Image.open(f) as im:
+ exif = dict(im.getexif())
# load image before exif
- with Image.open(open(test_file, "rb", buffering=1048576)) as im2:
- im2.load()
- exif_after_load = dict(im2.getexif())
+ with open(test_file, "rb", buffering=1048576) as f:
+ with Image.open(f) as im2:
+ im2.load()
+ exif_after_load = dict(im2.getexif())
assert exif == exif_after_load
@@ -1155,7 +1153,7 @@ def test_realloc_overflow(self, monkeypatch: pytest.MonkeyPatch) -> None:
im.load()
# Assert that the error code is IMAGING_CODEC_MEMORY
- assert str(e.value) == "-9"
+ assert str(e.value) == "decoder error -9"
@pytest.mark.parametrize("compression", ("tiff_adobe_deflate", "jpeg"))
def test_save_multistrip(self, compression: str, tmp_path: Path) -> None:
@@ -1169,23 +1167,22 @@ def test_save_multistrip(self, compression: str, tmp_path: Path) -> None:
assert len(im.tag_v2[STRIPOFFSETS]) > 1
@pytest.mark.parametrize("argument", (True, False))
- def test_save_single_strip(self, argument: bool, tmp_path: Path) -> None:
+ def test_save_single_strip(
+ self, argument: bool, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
im = hopper("RGB").resize((256, 256))
out = str(tmp_path / "temp.tif")
if not argument:
- TiffImagePlugin.STRIP_SIZE = 2**18
- try:
- arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
- if argument:
- arguments["strip_size"] = 2**18
- im.save(out, "TIFF", **arguments)
-
- with Image.open(out) as im:
- assert isinstance(im, TiffImagePlugin.TiffImageFile)
- assert len(im.tag_v2[STRIPOFFSETS]) == 1
- finally:
- TiffImagePlugin.STRIP_SIZE = 65536
+ monkeypatch.setattr(TiffImagePlugin, "STRIP_SIZE", 2**18)
+ arguments: dict[str, str | int] = {"compression": "tiff_adobe_deflate"}
+ if argument:
+ arguments["strip_size"] = 2**18
+ im.save(out, "TIFF", **arguments)
+
+ with Image.open(out) as im:
+ assert isinstance(im, TiffImagePlugin.TiffImageFile)
+ assert len(im.tag_v2[STRIPOFFSETS]) == 1
@pytest.mark.parametrize("compression", ("tiff_adobe_deflate", None))
def test_save_zero(self, compression: str | None, tmp_path: Path) -> None:
diff --git a/Tests/test_file_png.py b/Tests/test_file_png.py
index 974e1e75faa..efd2e5cd970 100644
--- a/Tests/test_file_png.py
+++ b/Tests/test_file_png.py
@@ -363,7 +363,7 @@ def test_verify_struct_error(self) -> None:
with pytest.raises((OSError, SyntaxError)):
im.verify()
- def test_verify_ignores_crc_error(self) -> None:
+ def test_verify_ignores_crc_error(self, monkeypatch: pytest.MonkeyPatch) -> None:
# check ignores crc errors in ancillary chunks
chunk_data = chunk(b"tEXt", b"spam")
@@ -373,24 +373,20 @@ def test_verify_ignores_crc_error(self) -> None:
with pytest.raises(SyntaxError):
PngImagePlugin.PngImageFile(BytesIO(image_data))
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- im = load(image_data)
- assert im is not None
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ im = load(image_data)
+ assert im is not None
- def test_verify_not_ignores_crc_error_in_required_chunk(self) -> None:
+ def test_verify_not_ignores_crc_error_in_required_chunk(
+ self, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
# check does not ignore crc errors in required chunks
image_data = MAGIC + IHDR[:-1] + b"q" + TAIL
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- with pytest.raises(SyntaxError):
- PngImagePlugin.PngImageFile(BytesIO(image_data))
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ with pytest.raises(SyntaxError):
+ PngImagePlugin.PngImageFile(BytesIO(image_data))
def test_roundtrip_dpi(self) -> None:
# Check dpi roundtripping
@@ -600,7 +596,7 @@ def test_roundtrip_private_chunk(self) -> None:
(b"prIV", b"VALUE3", True),
]
- def test_textual_chunks_after_idat(self) -> None:
+ def test_textual_chunks_after_idat(self, monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/hopper.png") as im:
assert "comment" in im.text
for k, v in {
@@ -614,18 +610,17 @@ def test_textual_chunks_after_idat(self) -> None:
with pytest.raises(OSError):
assert isinstance(im.text, dict)
+ # Raises an EOFError in load_end
+ with Image.open("Tests/images/hopper_idat_after_image_end.png") as im:
+ assert im.text == {"TXT": "VALUE", "ZIP": "VALUE"}
+
# Raises a UnicodeDecodeError in load_end
with Image.open("Tests/images/truncated_image.png") as im:
# The file is truncated
with pytest.raises(OSError):
- im.text()
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ im.text
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
assert isinstance(im.text, dict)
- ImageFile.LOAD_TRUNCATED_IMAGES = False
-
- # Raises an EOFError in load_end
- with Image.open("Tests/images/hopper_idat_after_image_end.png") as im:
- assert im.text == {"TXT": "VALUE", "ZIP": "VALUE"}
def test_unknown_compression_method(self) -> None:
with pytest.raises(SyntaxError, match="Unknown compression method"):
@@ -651,15 +646,16 @@ def test_padded_idat(self) -> None:
@pytest.mark.parametrize(
"cid", (b"IHDR", b"sRGB", b"pHYs", b"acTL", b"fcTL", b"fdAT")
)
- def test_truncated_chunks(self, cid: bytes) -> None:
+ def test_truncated_chunks(
+ self, cid: bytes, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
fp = BytesIO()
with PngImagePlugin.PngStream(fp) as png:
with pytest.raises(ValueError):
png.call(cid, 0, 0)
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
png.call(cid, 0, 0)
- ImageFile.LOAD_TRUNCATED_IMAGES = False
@pytest.mark.parametrize("save_all", (True, False))
def test_specify_bits(self, save_all: bool, tmp_path: Path) -> None:
@@ -789,17 +785,14 @@ class MyStdOut:
with Image.open(mystdout) as reloaded:
assert_image_equal_tofile(reloaded, TEST_PNG_FILE)
- def test_truncated_end_chunk(self) -> None:
+ def test_truncated_end_chunk(self, monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/truncated_end_chunk.png") as im:
with pytest.raises(OSError):
im.load()
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- with Image.open("Tests/images/truncated_end_chunk.png") as im:
- assert_image_equal_tofile(im, "Tests/images/hopper.png")
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ with Image.open("Tests/images/truncated_end_chunk.png") as im:
+ assert_image_equal_tofile(im, "Tests/images/hopper.png")
@pytest.mark.skipif(is_win32(), reason="Requires Unix or macOS")
@@ -808,11 +801,11 @@ class TestTruncatedPngPLeaks(PillowLeakTestCase):
mem_limit = 2 * 1024 # max increase in K
iterations = 100 # Leak is 56k/iteration, this will leak 5.6megs
- def test_leak_load(self) -> None:
+ def test_leak_load(self, monkeypatch: pytest.MonkeyPatch) -> None:
with open("Tests/images/hopper.png", "rb") as f:
DATA = BytesIO(f.read(16 * 1024))
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
with Image.open(DATA) as im:
im.load()
@@ -820,7 +813,4 @@ def core() -> None:
with Image.open(DATA) as im:
im.load()
- try:
- self._test_leak(core)
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ self._test_leak(core)
diff --git a/Tests/test_file_spider.py b/Tests/test_file_spider.py
index 4cafda86536..713db848df8 100644
--- a/Tests/test_file_spider.py
+++ b/Tests/test_file_spider.py
@@ -7,7 +7,7 @@
import pytest
-from PIL import Image, ImageSequence, SpiderImagePlugin
+from PIL import Image, SpiderImagePlugin
from .helper import assert_image_equal, hopper, is_pypy
@@ -153,8 +153,8 @@ def test_nonstack_file() -> None:
def test_nonstack_dos() -> None:
with Image.open(TEST_FILE) as im:
- for i, frame in enumerate(ImageSequence.Iterator(im)):
- assert i <= 1, "Non-stack DOS file test failed"
+ with pytest.raises(EOFError):
+ im.seek(0)
# for issue #4093
diff --git a/Tests/test_file_tiff.py b/Tests/test_file_tiff.py
index cfeb55c7e8b..b0d500121d1 100644
--- a/Tests/test_file_tiff.py
+++ b/Tests/test_file_tiff.py
@@ -115,6 +115,19 @@ def test_bigtiff(self, tmp_path: Path) -> None:
outfile = str(tmp_path / "temp.tif")
im.save(outfile, save_all=True, append_images=[im], tiffinfo=im.tag_v2)
+ def test_bigtiff_save(self, tmp_path: Path) -> None:
+ outfile = str(tmp_path / "temp.tif")
+ im = hopper()
+ im.save(outfile, big_tiff=True)
+
+ with Image.open(outfile) as reloaded:
+ assert reloaded.tag_v2._bigtiff is True
+
+ im.save(outfile, save_all=True, append_images=[im], big_tiff=True)
+
+ with Image.open(outfile) as reloaded:
+ assert reloaded.tag_v2._bigtiff is True
+
def test_seek_too_large(self) -> None:
with pytest.raises(ValueError, match="Unable to seek to frame"):
Image.open("Tests/images/seek_too_large.tif")
@@ -733,7 +746,7 @@ def im_generator(ims: list[Image.Image]) -> Generator[Image.Image, None, None]:
assert reread.n_frames == 3
def test_fixoffsets(self) -> None:
- b = BytesIO(b"II\x2a\x00\x00\x00\x00\x00")
+ b = BytesIO(b"II\x2A\x00\x00\x00\x00\x00")
with TiffImagePlugin.AppendingTiffWriter(b) as a:
b.seek(0)
a.fixOffsets(1, isShort=True)
@@ -746,6 +759,37 @@ def test_fixoffsets(self) -> None:
with pytest.raises(RuntimeError):
a.fixOffsets(1)
+ b = BytesIO(b"II\x2A\x00\x00\x00\x00\x00")
+ with TiffImagePlugin.AppendingTiffWriter(b) as a:
+ a.offsetOfNewPage = 2**16
+
+ b.seek(0)
+ a.fixOffsets(1, isShort=True)
+
+ b = BytesIO(b"II\x2B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")
+ with TiffImagePlugin.AppendingTiffWriter(b) as a:
+ a.offsetOfNewPage = 2**32
+
+ b.seek(0)
+ a.fixOffsets(1, isShort=True)
+
+ b.seek(0)
+ a.fixOffsets(1, isLong=True)
+
+ def test_appending_tiff_writer_writelong(self) -> None:
+ data = b"II\x2A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+ b = BytesIO(data)
+ with TiffImagePlugin.AppendingTiffWriter(b) as a:
+ a.writeLong(2**32 - 1)
+ assert b.getvalue() == data + b"\xff\xff\xff\xff"
+
+ def test_appending_tiff_writer_rewritelastshorttolong(self) -> None:
+ data = b"II\x2A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+ b = BytesIO(data)
+ with TiffImagePlugin.AppendingTiffWriter(b) as a:
+ a.rewriteLastShortToLong(2**32 - 1)
+ assert b.getvalue() == data[:-2] + b"\xff\xff\xff\xff"
+
def test_saving_icc_profile(self, tmp_path: Path) -> None:
# Tests saving TIFF with icc_profile set.
# At the time of writing this will only work for non-compressed tiffs
@@ -897,11 +941,10 @@ def test_string_dimension(self) -> None:
@pytest.mark.timeout(6)
@pytest.mark.filterwarnings("ignore:Truncated File Read")
- def test_timeout(self) -> None:
+ def test_timeout(self, monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/timeout-6646305047838720") as im:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
im.load()
- ImageFile.LOAD_TRUNCATED_IMAGES = False
@pytest.mark.parametrize(
"test_file",
diff --git a/Tests/test_file_webp.py b/Tests/test_file_webp.py
index ad5aa9ed62c..abe888241b4 100644
--- a/Tests/test_file_webp.py
+++ b/Tests/test_file_webp.py
@@ -28,9 +28,9 @@
class TestUnsupportedWebp:
- def test_unsupported(self) -> None:
+ def test_unsupported(self, monkeypatch: pytest.MonkeyPatch) -> None:
if HAVE_WEBP:
- WebPImagePlugin.SUPPORTED = False
+ monkeypatch.setattr(WebPImagePlugin, "SUPPORTED", False)
file_path = "Tests/images/hopper.webp"
with pytest.warns(UserWarning):
@@ -38,9 +38,6 @@ def test_unsupported(self) -> None:
with Image.open(file_path):
pass
- if HAVE_WEBP:
- WebPImagePlugin.SUPPORTED = True
-
@skip_unless_feature("webp")
class TestFileWebp:
diff --git a/Tests/test_image.py b/Tests/test_image.py
index c8df474f493..9a2e3c46533 100644
--- a/Tests/test_image.py
+++ b/Tests/test_image.py
@@ -189,8 +189,6 @@ def test_pathlib(self, tmp_path: Path) -> None:
if ext == ".jp2" and not features.check_codec("jpg_2000"):
pytest.skip("jpg_2000 not available")
temp_file = str(tmp_path / ("temp." + ext))
- if os.path.exists(temp_file):
- os.remove(temp_file)
im.save(Path(temp_file))
def test_fp_name(self, tmp_path: Path) -> None:
@@ -667,7 +665,7 @@ def test_remap_palette(self) -> None:
# Test illegal image mode
with hopper() as im:
with pytest.raises(ValueError):
- im.remap_palette(None)
+ im.remap_palette([])
def test_remap_palette_transparency(self) -> None:
im = Image.new("P", (1, 2), (0, 0, 0))
@@ -770,7 +768,7 @@ def test_empty_exif(self) -> None:
assert dict(exif)
# Test that exif data is cleared after another load
- exif.load(None)
+ exif.load(b"")
assert not dict(exif)
# Test loading just the EXIF header
@@ -793,6 +791,10 @@ def test_empty_get_ifd(self) -> None:
ifd[36864] = b"0220"
assert exif.get_ifd(0x8769) == {36864: b"0220"}
+ reloaded_exif = Image.Exif()
+ reloaded_exif.load(exif.tobytes())
+ assert reloaded_exif.get_ifd(0x8769) == {36864: b"0220"}
+
@mark_if_feature_version(
pytest.mark.valgrind_known_error, "libjpeg_turbo", "2.0", reason="Known Failing"
)
@@ -987,6 +989,11 @@ def test_getxmp_padded(self) -> None:
else:
assert im.getxmp() == {"xmpmeta": None}
+ def test_get_child_images(self) -> None:
+ im = Image.new("RGB", (1, 1))
+ with pytest.warns(DeprecationWarning):
+ assert im.get_child_images() == []
+
@pytest.mark.parametrize("size", ((1, 0), (0, 1), (0, 0)))
def test_zero_tobytes(self, size: tuple[int, int]) -> None:
im = Image.new("RGB", size)
diff --git a/Tests/test_image_access.py b/Tests/test_image_access.py
index bb30b462d2f..14a5e2e7bfe 100644
--- a/Tests/test_image_access.py
+++ b/Tests/test_image_access.py
@@ -271,13 +271,25 @@ def test_putpixel_overflow_error(self, mode: str) -> None:
class TestEmbeddable:
- @pytest.mark.xfail(reason="failing test")
+ @pytest.mark.xfail(not (sys.version_info >= (3, 13)), reason="failing test")
@pytest.mark.skipif(not is_win32(), reason="requires Windows")
def test_embeddable(self) -> None:
import ctypes
from setuptools.command import build_ext
+ compiler = getattr(build_ext, "new_compiler")()
+ compiler.add_include_dir(sysconfig.get_config_var("INCLUDEPY"))
+
+ libdir = sysconfig.get_config_var("LIBDIR") or sysconfig.get_config_var(
+ "INCLUDEPY"
+ ).replace("include", "libs")
+ compiler.add_library_dir(libdir)
+ try:
+ compiler.initialize()
+ except Exception:
+ pytest.skip("Compiler could not be initialized")
+
with open("embed_pil.c", "w", encoding="utf-8") as fh:
home = sys.prefix.replace("\\", "\\\\")
fh.write(
@@ -305,13 +317,6 @@ def test_embeddable(self) -> None:
"""
)
- compiler = getattr(build_ext, "new_compiler")()
- compiler.add_include_dir(sysconfig.get_config_var("INCLUDEPY"))
-
- libdir = sysconfig.get_config_var("LIBDIR") or sysconfig.get_config_var(
- "INCLUDEPY"
- ).replace("include", "libs")
- compiler.add_library_dir(libdir)
objects = compiler.compile(["embed_pil.c"])
compiler.link_executable(objects, "embed_pil")
diff --git a/Tests/test_image_resize.py b/Tests/test_image_resize.py
index 57fcf9a3463..1166371b8f9 100644
--- a/Tests/test_image_resize.py
+++ b/Tests/test_image_resize.py
@@ -309,7 +309,7 @@ def resize(mode: str, size: tuple[int, int] | list[int]) -> None:
# Test unknown resampling filter
with hopper() as im:
with pytest.raises(ValueError):
- im.resize((10, 10), "unknown")
+ im.resize((10, 10), -1)
@skip_unless_feature("libtiff")
def test_transposed(self) -> None:
diff --git a/Tests/test_imagedraw.py b/Tests/test_imagedraw.py
index 5fc1c27661a..28d7ed7252d 100644
--- a/Tests/test_imagedraw.py
+++ b/Tests/test_imagedraw.py
@@ -1396,6 +1396,28 @@ def test_stroke_descender() -> None:
assert_image_similar_tofile(im, "Tests/images/imagedraw_stroke_descender.png", 6.76)
+@skip_unless_feature("freetype2")
+def test_stroke_inside_gap() -> None:
+ # Arrange
+ im = Image.new("RGB", (120, 130))
+ draw = ImageDraw.Draw(im)
+ font = ImageFont.truetype("Tests/fonts/FreeMono.ttf", 120)
+
+ # Act
+ draw.text((12, 12), "i", "#f00", font, stroke_width=20)
+
+ # Assert
+ for y in range(im.height):
+ glyph = ""
+ for x in range(im.width):
+ if im.getpixel((x, y)) == (0, 0, 0):
+ if glyph == "started":
+ glyph = "ended"
+ else:
+ assert glyph != "ended", "Gap inside stroked glyph"
+ glyph = "started"
+
+
@skip_unless_feature("freetype2")
def test_split_word() -> None:
# Arrange
diff --git a/Tests/test_imagefile.py b/Tests/test_imagefile.py
index 8bef90ce43c..b05d29dae96 100644
--- a/Tests/test_imagefile.py
+++ b/Tests/test_imagefile.py
@@ -191,13 +191,10 @@ def test_truncated_with_errors(self) -> None:
im.load()
@skip_unless_feature("zlib")
- def test_truncated_without_errors(self) -> None:
+ def test_truncated_without_errors(self, monkeypatch: pytest.MonkeyPatch) -> None:
with Image.open("Tests/images/truncated_image.png") as im:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- im.load()
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ im.load()
@skip_unless_feature("zlib")
def test_broken_datastream_with_errors(self) -> None:
@@ -206,13 +203,12 @@ def test_broken_datastream_with_errors(self) -> None:
im.load()
@skip_unless_feature("zlib")
- def test_broken_datastream_without_errors(self) -> None:
+ def test_broken_datastream_without_errors(
+ self, monkeypatch: pytest.MonkeyPatch
+ ) -> None:
with Image.open("Tests/images/broken_data_stream.png") as im:
- ImageFile.LOAD_TRUNCATED_IMAGES = True
- try:
- im.load()
- finally:
- ImageFile.LOAD_TRUNCATED_IMAGES = False
+ monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
+ im.load()
class MockPyDecoder(ImageFile.PyDecoder):
diff --git a/Tests/test_imagefont.py b/Tests/test_imagefont.py
index 6a0a940b987..f110cc1d05c 100644
--- a/Tests/test_imagefont.py
+++ b/Tests/test_imagefont.py
@@ -461,6 +461,20 @@ def test_free_type_font_get_mask(font: ImageFont.FreeTypeFont) -> None:
assert mask.size == (108, 13)
+def test_stroke_mask() -> None:
+ # Arrange
+ text = "i"
+
+ # Act
+ font = ImageFont.truetype(FONT_PATH, 128)
+ mask = font.getmask(text, stroke_width=2)
+
+ # Assert
+ assert mask.getpixel((34, 5)) == 255
+ assert mask.getpixel((38, 5)) == 0
+ assert mask.getpixel((42, 5)) == 255
+
+
def test_load_when_image_not_found() -> None:
with tempfile.NamedTemporaryFile(delete=False) as tmp:
pass
diff --git a/Tests/test_imageops.py b/Tests/test_imageops.py
index 2fb2a60b632..7262f29e64a 100644
--- a/Tests/test_imageops.py
+++ b/Tests/test_imageops.py
@@ -405,7 +405,6 @@ def check(orientation_im: Image.Image) -> None:
else:
original_exif = im.info["exif"]
transposed_im = ImageOps.exif_transpose(im)
- assert transposed_im is not None
assert_image_similar(base_im, transposed_im, 17)
if orientation_im is base_im:
assert "exif" not in im.info
@@ -417,7 +416,6 @@ def check(orientation_im: Image.Image) -> None:
# Repeat the operation to test that it does not keep transposing
transposed_im2 = ImageOps.exif_transpose(transposed_im)
- assert transposed_im2 is not None
assert_image_equal(transposed_im2, transposed_im)
check(base_im)
@@ -433,7 +431,6 @@ def check(orientation_im: Image.Image) -> None:
assert im.getexif()[0x0112] == 3
transposed_im = ImageOps.exif_transpose(im)
- assert transposed_im is not None
assert 0x0112 not in transposed_im.getexif()
transposed_im._reload_exif()
@@ -446,14 +443,12 @@ def check(orientation_im: Image.Image) -> None:
assert im.getexif()[0x0112] == 3
transposed_im = ImageOps.exif_transpose(im)
- assert transposed_im is not None
assert 0x0112 not in transposed_im.getexif()
# Orientation set directly on Image.Exif
im = hopper()
im.getexif()[0x0112] = 3
transposed_im = ImageOps.exif_transpose(im)
- assert transposed_im is not None
assert 0x0112 not in transposed_im.getexif()
@@ -464,7 +459,6 @@ def test_exif_transpose_xml_without_xmp() -> None:
del im.info["xmp"]
transposed_im = ImageOps.exif_transpose(im)
- assert transposed_im is not None
assert 0x0112 not in transposed_im.getexif()
diff --git a/Tests/test_map.py b/Tests/test_map.py
index 93140f6e5a5..1278ba3a6d6 100644
--- a/Tests/test_map.py
+++ b/Tests/test_map.py
@@ -7,36 +7,30 @@
from PIL import Image
-def test_overflow() -> None:
+def test_overflow(monkeypatch: pytest.MonkeyPatch) -> None:
# There is the potential to overflow comparisons in map.c
# if there are > SIZE_MAX bytes in the image or if
# the file encodes an offset that makes
# (offset + size(bytes)) > SIZE_MAX
# Note that this image triggers the decompression bomb warning:
- max_pixels = Image.MAX_IMAGE_PIXELS
- Image.MAX_IMAGE_PIXELS = None
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)
# This image hits the offset test.
with Image.open("Tests/images/l2rgb_read.bmp") as im:
with pytest.raises((ValueError, MemoryError, OSError)):
im.load()
- Image.MAX_IMAGE_PIXELS = max_pixels
-
-def test_tobytes() -> None:
+def test_tobytes(monkeypatch: pytest.MonkeyPatch) -> None:
# Note that this image triggers the decompression bomb warning:
- max_pixels = Image.MAX_IMAGE_PIXELS
- Image.MAX_IMAGE_PIXELS = None
+ monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)
# Previously raised an access violation on Windows
with Image.open("Tests/images/l2rgb_read.bmp") as im:
with pytest.raises((ValueError, MemoryError, OSError)):
im.tobytes()
- Image.MAX_IMAGE_PIXELS = max_pixels
-
@pytest.mark.skipif(sys.maxsize <= 2**32, reason="Requires 64-bit system")
def test_ysize() -> None:
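The test updates above replace manual save-and-restore of module-level globals (``ImageFile.LOAD_TRUNCATED_IMAGES``, ``Image.MAX_IMAGE_PIXELS``) with pytest's ``monkeypatch`` fixture, which reverts the attribute automatically even when the test fails. A minimal sketch of the pattern, not tied to any one test file; the image path is the one used in the suite:

```python
import pytest

from PIL import Image, ImageFile


def test_load_truncated(monkeypatch: pytest.MonkeyPatch) -> None:
    # The fixture records the old values and restores them at teardown,
    # so no try/finally block is needed around the assertions.
    monkeypatch.setattr(ImageFile, "LOAD_TRUNCATED_IMAGES", True)
    monkeypatch.setattr(Image, "MAX_IMAGE_PIXELS", None)

    with Image.open("Tests/images/truncated_image.png") as im:
        im.load()
```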
diff --git a/depends/install_imagequant.sh b/depends/install_imagequant.sh
index 8d62d5ac733..88756f8f9b9 100755
--- a/depends/install_imagequant.sh
+++ b/depends/install_imagequant.sh
@@ -2,7 +2,7 @@
# install libimagequant
archive_name=libimagequant
-archive_version=4.3.3
+archive_version=4.3.4
archive=$archive_name-$archive_version
diff --git a/docs/about.rst b/docs/about.rst
index c51ddebd081..7df895b8ffc 100644
--- a/docs/about.rst
+++ b/docs/about.rst
@@ -6,12 +6,11 @@ Goals
The fork author's goal is to foster and support active development of PIL through:
-- Continuous integration testing via `GitHub Actions`_ and `AppVeyor`_
+- Continuous integration testing via `GitHub Actions`_
- Publicized development activity on `GitHub`_
- Regular releases to the `Python Package Index`_
.. _GitHub Actions: https://github.com/python-pillow/Pillow/actions
-.. _AppVeyor: https://ci.appveyor.com/project/Python-pillow/pillow
.. _GitHub: https://github.com/python-pillow/Pillow
.. _Python Package Index: https://pypi.org/project/pillow/
diff --git a/docs/deprecations.rst b/docs/deprecations.rst
index 25607e27c3b..634cee6894c 100644
--- a/docs/deprecations.rst
+++ b/docs/deprecations.rst
@@ -175,6 +175,24 @@ deprecated and will be removed in Pillow 12 (2025-10-15). They were used for obt
raw pointers to ``ImagingCore`` internals. To interact with C code, you can use
``Image.Image.getim()``, which returns a ``Capsule`` object.
+ExifTags.IFD.Makernote
+^^^^^^^^^^^^^^^^^^^^^^
+
+.. deprecated:: 11.1.0
+
+``ExifTags.IFD.Makernote`` has been deprecated. Instead, use
+``ExifTags.IFD.MakerNote``.
+
+Image.Image.get_child_images()
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. deprecated:: 11.2.0
+
+``Image.Image.get_child_images()`` has been deprecated and will be removed in Pillow
+13 (2026-10-15). It will be moved to ``ImageFile.ImageFile.get_child_images()``. The
+method uses an image's file pointer, and so child images can only be retrieved from
+an :py:class:`PIL.ImageFile.ImageFile` instance.
+
Removed features
----------------
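A hedged sketch of migrating off the two deprecations documented above; the TIFF filename is only a placeholder:

```python
from PIL import ExifTags, Image

# Both spellings resolve to tag 0x927C, but only MakerNote avoids the
# DeprecationWarning added in 11.1.0.
assert ExifTags.IFD.MakerNote == 0x927C

# get_child_images() needs a file pointer, so it is moving to
# ImageFile.ImageFile; calling it on an opened file keeps working.
with Image.open("example.tif") as im:  # placeholder path
    children = im.get_child_images()
    print(len(children))
```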
diff --git a/docs/handbook/image-file-formats.rst b/docs/handbook/image-file-formats.rst
index bf3087f6f68..a915ee4e22e 100644
--- a/docs/handbook/image-file-formats.rst
+++ b/docs/handbook/image-file-formats.rst
@@ -572,10 +572,19 @@ JPEG 2000
Pillow reads and writes JPEG 2000 files containing ``L``, ``LA``, ``RGB``,
``RGBA``, or ``YCbCr`` data. When reading, ``YCbCr`` data is converted to
``RGB`` or ``RGBA`` depending on whether or not there is an alpha channel.
-Beginning with version 8.3.0, Pillow can read (but not write) ``RGB``,
-``RGBA``, and ``YCbCr`` images with subsampled components. Pillow supports
-JPEG 2000 raw codestreams (``.j2k`` files), as well as boxed JPEG 2000 files
-(``.jp2`` or ``.jpx`` files).
+
+.. versionadded:: 8.3.0
+ Pillow can read (but not write) ``RGB``, ``RGBA``, and ``YCbCr`` images with
+ subsampled components.
+
+.. versionadded:: 10.4.0
+ Pillow can read ``CMYK`` images with OpenJPEG 2.5.1 and later.
+
+.. versionadded:: 11.1.0
+ Pillow can write ``CMYK`` images with OpenJPEG 2.5.3 and later.
+
+Pillow supports JPEG 2000 raw codestreams (``.j2k`` files), as well as boxed
+JPEG 2000 files (``.jp2`` or ``.jpx`` files).
When loading, if you set the ``mode`` on the image prior to the
:py:meth:`~PIL.Image.Image.load` method being invoked, you can ask Pillow to
@@ -1199,6 +1208,11 @@ The :py:meth:`~PIL.Image.Image.save` method can take the following keyword argum
.. versionadded:: 8.4.0
+**big_tiff**
+ If true, the image will be saved as a BigTIFF.
+
+ .. versionadded:: 11.1.0
+
**compression**
A string containing the desired compression method for the
file. (valid only with libtiff installed) Valid compression
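A short sketch of the two new save paths documented above. It assumes a Pillow build against OpenJPEG 2.5.3 or later for the CMYK JPEG 2000 write; the output filenames are placeholders:

```python
from PIL import Image

im = Image.new("CMYK", (64, 64))

# CMYK JPEG 2000 writing (requires OpenJPEG 2.5.3+ at build time).
im.save("out.jp2")

# The new big_tiff keyword opts a TIFF save into the BigTIFF container,
# whose 64-bit offsets allow files larger than 4 GB.
im.save("out.tif", big_tiff=True)
```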
diff --git a/docs/index.rst b/docs/index.rst
index 18f5c3d13e7..689088d48ce 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -33,10 +33,6 @@ Pillow for enterprise is available via the Tidelift Subscription. `Learn more
diff --git a/src/PIL/BlpImagePlugin.py b/src/PIL/BlpImagePlugin.py
--- a/src/PIL/BlpImagePlugin.py
+++ b/src/PIL/BlpImagePlugin.py
@@ ... @@ def _open(self) -> None:
assert self.fp is not None
self.magic = self.fp.read(4)
+ if not _accept(self.magic):
+ msg = f"Bad BLP magic {repr(self.magic)}"
+ raise BLPFormatError(msg)
- self.fp.seek(5, os.SEEK_CUR)
-        (self._blp_alpha_depth,) = struct.unpack("<b", self.fp.read(1))
@@ ... @@
     def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int]:
try:
- self._read_blp_header()
+ self._read_header()
self._load()
except struct.error as e:
msg = "Truncated BLP file"
@@ -293,25 +308,9 @@ def decode(self, buffer: bytes | Image.SupportsArrayInterface) -> tuple[int, int
def _load(self) -> None:
pass
- def _read_blp_header(self) -> None:
- assert self.fd is not None
- self.fd.seek(4)
-        (self._blp_compression,) = struct.unpack("<i", self._safe_read(4))
+    def _read_header(self) -> None:
+ self._offsets = struct.unpack("<16I", self._safe_read(16 * 4))
+ self._lengths = struct.unpack("<16I", self._safe_read(16 * 4))
def _safe_read(self, length: int) -> bytes:
assert self.fd is not None
@@ -327,9 +326,11 @@ def _read_palette(self) -> list[tuple[int, int, int, int]]:
ret.append((b, g, r, a))
return ret
- def _read_bgra(self, palette: list[tuple[int, int, int, int]]) -> bytearray:
+ def _read_bgra(
+ self, palette: list[tuple[int, int, int, int]], alpha: bool
+ ) -> bytearray:
data = bytearray()
- _data = BytesIO(self._safe_read(self._blp_lengths[0]))
+ _data = BytesIO(self._safe_read(self._lengths[0]))
while True:
try:
                 (offset,) = struct.unpack("<B", _data.read(1))
             except struct.error:
break
b, g, r, a = palette[offset]
d: tuple[int, ...] = (r, g, b)
- if self._blp_alpha_depth:
+ if alpha:
d += (a,)
data.extend(d)
return data
@@ -345,19 +346,21 @@ def _read_bgra(self, palette: list[tuple[int, int, int, int]]) -> bytearray:
class BLP1Decoder(_BLPBaseDecoder):
def _load(self) -> None:
- if self._blp_compression == Format.JPEG:
+ self._compression, self._encoding, alpha = self.args
+
+ if self._compression == Format.JPEG:
self._decode_jpeg_stream()
- elif self._blp_compression == 1:
- if self._blp_encoding in (4, 5):
+ elif self._compression == 1:
+ if self._encoding in (4, 5):
palette = self._read_palette()
- data = self._read_bgra(palette)
+ data = self._read_bgra(palette, alpha)
self.set_as_raw(data)
else:
- msg = f"Unsupported BLP encoding {repr(self._blp_encoding)}"
+ msg = f"Unsupported BLP encoding {repr(self._encoding)}"
raise BLPFormatError(msg)
else:
- msg = f"Unsupported BLP compression {repr(self._blp_encoding)}"
+            msg = f"Unsupported BLP compression {repr(self._compression)}"
raise BLPFormatError(msg)
def _decode_jpeg_stream(self) -> None:
@@ -366,65 +369,61 @@ def _decode_jpeg_stream(self) -> None:
         (jpeg_header_size,) = struct.unpack("<I", self._safe_read(4))
     def _load(self) -> None:
+ self._compression, self._encoding, alpha, self._alpha_encoding = self.args
+
palette = self._read_palette()
assert self.fd is not None
- self.fd.seek(self._blp_offsets[0])
+ self.fd.seek(self._offsets[0])
- if self._blp_compression == 1:
+ if self._compression == 1:
# Uncompressed or DirectX compression
- if self._blp_encoding == Encoding.UNCOMPRESSED:
- data = self._read_bgra(palette)
+ if self._encoding == Encoding.UNCOMPRESSED:
+ data = self._read_bgra(palette, alpha)
- elif self._blp_encoding == Encoding.DXT:
+ elif self._encoding == Encoding.DXT:
data = bytearray()
- if self._blp_alpha_encoding == AlphaEncoding.DXT1:
- linesize = (self.size[0] + 3) // 4 * 8
- for yb in range((self.size[1] + 3) // 4):
- for d in decode_dxt1(
- self._safe_read(linesize), alpha=bool(self._blp_alpha_depth)
- ):
+ if self._alpha_encoding == AlphaEncoding.DXT1:
+ linesize = (self.state.xsize + 3) // 4 * 8
+ for yb in range((self.state.ysize + 3) // 4):
+ for d in decode_dxt1(self._safe_read(linesize), alpha):
data += d
- elif self._blp_alpha_encoding == AlphaEncoding.DXT3:
- linesize = (self.size[0] + 3) // 4 * 16
- for yb in range((self.size[1] + 3) // 4):
+ elif self._alpha_encoding == AlphaEncoding.DXT3:
+ linesize = (self.state.xsize + 3) // 4 * 16
+ for yb in range((self.state.ysize + 3) // 4):
for d in decode_dxt3(self._safe_read(linesize)):
data += d
- elif self._blp_alpha_encoding == AlphaEncoding.DXT5:
- linesize = (self.size[0] + 3) // 4 * 16
- for yb in range((self.size[1] + 3) // 4):
+ elif self._alpha_encoding == AlphaEncoding.DXT5:
+ linesize = (self.state.xsize + 3) // 4 * 16
+ for yb in range((self.state.ysize + 3) // 4):
for d in decode_dxt5(self._safe_read(linesize)):
data += d
else:
- msg = f"Unsupported alpha encoding {repr(self._blp_alpha_encoding)}"
+ msg = f"Unsupported alpha encoding {repr(self._alpha_encoding)}"
raise BLPFormatError(msg)
else:
- msg = f"Unknown BLP encoding {repr(self._blp_encoding)}"
+ msg = f"Unknown BLP encoding {repr(self._encoding)}"
raise BLPFormatError(msg)
else:
- msg = f"Unknown BLP compression {repr(self._blp_compression)}"
+ msg = f"Unknown BLP compression {repr(self._compression)}"
raise BLPFormatError(msg)
self.set_as_raw(data)
@@ -473,10 +472,15 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
assert im.palette is not None
         fp.write(struct.pack("
diff --git a/src/PIL/BufrStubImagePlugin.py b/src/PIL/BufrStubImagePlugin.py
--- a/src/PIL/BufrStubImagePlugin.py
+++ b/src/PIL/BufrStubImagePlugin.py
@@ ... @@ class BufrStubImageFile(ImageFile.StubImageFile):
     def _open(self) -> None:
assert self.fp is not None
- offset = self.fp.tell()
-
if not _accept(self.fp.read(4)):
msg = "Not a BUFR file"
raise SyntaxError(msg)
- self.fp.seek(offset)
+ self.fp.seek(-4, os.SEEK_CUR)
# make something up
self._mode = "F"
diff --git a/src/PIL/ExifTags.py b/src/PIL/ExifTags.py
index 207d4de4e24..2280d5ce84b 100644
--- a/src/PIL/ExifTags.py
+++ b/src/PIL/ExifTags.py
@@ -353,6 +353,7 @@ class IFD(IntEnum):
Exif = 0x8769
GPSInfo = 0x8825
MakerNote = 0x927C
+ Makernote = 0x927C # Deprecated
Interop = 0xA005
IFD1 = -1
diff --git a/src/PIL/GribStubImagePlugin.py b/src/PIL/GribStubImagePlugin.py
index 63ec390ae68..9d7e9e40cd1 100644
--- a/src/PIL/GribStubImagePlugin.py
+++ b/src/PIL/GribStubImagePlugin.py
@@ -10,6 +10,7 @@
#
from __future__ import annotations
+import os
from typing import IO
from . import Image, ImageFile
@@ -41,13 +42,11 @@ class GribStubImageFile(ImageFile.StubImageFile):
def _open(self) -> None:
assert self.fp is not None
- offset = self.fp.tell()
-
if not _accept(self.fp.read(8)):
msg = "Not a GRIB file"
raise SyntaxError(msg)
- self.fp.seek(offset)
+ self.fp.seek(-8, os.SEEK_CUR)
# make something up
self._mode = "F"
diff --git a/src/PIL/Hdf5StubImagePlugin.py b/src/PIL/Hdf5StubImagePlugin.py
index b3e73064f5f..d98b0236b94 100644
--- a/src/PIL/Hdf5StubImagePlugin.py
+++ b/src/PIL/Hdf5StubImagePlugin.py
@@ -10,6 +10,7 @@
#
from __future__ import annotations
+import os
from typing import IO
from . import Image, ImageFile
@@ -41,13 +42,11 @@ class HDF5StubImageFile(ImageFile.StubImageFile):
def _open(self) -> None:
assert self.fp is not None
- offset = self.fp.tell()
-
if not _accept(self.fp.read(8)):
msg = "Not an HDF file"
raise SyntaxError(msg)
- self.fp.seek(offset)
+ self.fp.seek(-8, os.SEEK_CUR)
# make something up
self._mode = "F"
diff --git a/src/PIL/Image.py b/src/PIL/Image.py
index 90374d80469..99b1b9ab303 100644
--- a/src/PIL/Image.py
+++ b/src/PIL/Image.py
@@ -603,24 +603,16 @@ def _new(self, im: core.ImagingCore) -> Image:
def __enter__(self):
return self
- def _close_fp(self):
- if getattr(self, "_fp", False):
- if self._fp != self.fp:
- self._fp.close()
- self._fp = DeferredError(ValueError("Operation on closed image"))
- if self.fp:
- self.fp.close()
-
def __exit__(self, *args):
- if hasattr(self, "fp"):
+ from . import ImageFile
+
+ if isinstance(self, ImageFile.ImageFile):
if getattr(self, "_exclusive_fp", False):
self._close_fp()
self.fp = None
def close(self) -> None:
"""
- Closes the file pointer, if possible.
-
This operation will destroy the image core and release its memory.
The image data will be unusable afterward.
@@ -629,13 +621,6 @@ def close(self) -> None:
:py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for
more information.
"""
- if hasattr(self, "fp"):
- try:
- self._close_fp()
- self.fp = None
- except Exception as msg:
- logger.debug("Error closing: %s", msg)
-
if getattr(self, "map", None):
self.map: mmap.mmap | None = None
@@ -1554,50 +1539,10 @@ def _reload_exif(self) -> None:
self.getexif()
def get_child_images(self) -> list[ImageFile.ImageFile]:
- child_images = []
- exif = self.getexif()
- ifds = []
- if ExifTags.Base.SubIFDs in exif:
- subifd_offsets = exif[ExifTags.Base.SubIFDs]
- if subifd_offsets:
- if not isinstance(subifd_offsets, tuple):
- subifd_offsets = (subifd_offsets,)
- for subifd_offset in subifd_offsets:
- ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset))
- ifd1 = exif.get_ifd(ExifTags.IFD.IFD1)
- if ifd1 and ifd1.get(ExifTags.Base.JpegIFOffset):
- assert exif._info is not None
- ifds.append((ifd1, exif._info.next))
-
- offset = None
- for ifd, ifd_offset in ifds:
- current_offset = self.fp.tell()
- if offset is None:
- offset = current_offset
-
- fp = self.fp
- if ifd is not None:
- thumbnail_offset = ifd.get(ExifTags.Base.JpegIFOffset)
- if thumbnail_offset is not None:
- thumbnail_offset += getattr(self, "_exif_offset", 0)
- self.fp.seek(thumbnail_offset)
- data = self.fp.read(ifd.get(ExifTags.Base.JpegIFByteCount))
- fp = io.BytesIO(data)
-
- with open(fp) as im:
- from . import TiffImagePlugin
-
- if thumbnail_offset is None and isinstance(
- im, TiffImagePlugin.TiffImageFile
- ):
- im._frame_pos = [ifd_offset]
- im._seek(0)
- im.load()
- child_images.append(im)
+ from . import ImageFile
- if offset is not None:
- self.fp.seek(offset)
- return child_images
+ deprecate("Image.Image.get_child_images", 13)
+ return ImageFile.ImageFile.get_child_images(self) # type: ignore[arg-type]
def getim(self) -> CapsuleType:
"""
@@ -4023,6 +3968,9 @@ def tobytes(self, offset: int = 8) -> bytes:
head = self._get_head()
ifd = TiffImagePlugin.ImageFileDirectory_v2(ifh=head)
+ for tag, ifd_dict in self._ifds.items():
+ if tag not in self:
+ ifd[tag] = ifd_dict
for tag, value in self.items():
if tag in [
ExifTags.IFD.Exif,
diff --git a/src/PIL/ImageDraw.py b/src/PIL/ImageDraw.py
index d8e4c0c60de..81f8fbce013 100644
--- a/src/PIL/ImageDraw.py
+++ b/src/PIL/ImageDraw.py
@@ -643,6 +643,7 @@ def draw_text(ink: int, stroke_width: float = 0) -> None:
features=features,
language=language,
stroke_width=stroke_width,
+ stroke_filled=True,
anchor=anchor,
ink=ink,
start=start,
@@ -692,7 +693,8 @@ def draw_text(ink: int, stroke_width: float = 0) -> None:
draw_text(stroke_ink, stroke_width)
# Draw normal text
- draw_text(ink, 0)
+ if ink != stroke_ink:
+ draw_text(ink)
else:
# Only draw normal text
draw_text(ink)
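With the change above, when the stroke ink matches the fill ink the glyph appears to be rendered once as a filled stroke (``stroke_filled=True`` is passed down to the FreeType renderer) instead of drawing a fill on top of a hollow stroke, which removes the visible gap inside thin glyphs. A user-level sketch; the font path is the one used in the test suite:

```python
from PIL import Image, ImageDraw, ImageFont

im = Image.new("RGB", (240, 150))
draw = ImageDraw.Draw(im)
font = ImageFont.truetype("Tests/fonts/FreeMono.ttf", 120)

# Fill and stroke share a colour: drawn in a single filled-stroke pass,
# so there is no dark gap between the letter body and its outline.
draw.text((10, 10), "i", fill="#f00", font=font, stroke_width=20)

# Different stroke colour: the stroke is drawn first, then the fill on top.
draw.text((120, 10), "i", fill="#fff", font=font, stroke_width=20, stroke_fill="#f00")
```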
diff --git a/src/PIL/ImageFile.py b/src/PIL/ImageFile.py
index 10add11f919..9c8f6bf7c60 100644
--- a/src/PIL/ImageFile.py
+++ b/src/PIL/ImageFile.py
@@ -31,18 +31,21 @@
import abc
import io
import itertools
+import logging
import os
import struct
import sys
from typing import IO, TYPE_CHECKING, Any, NamedTuple, cast
-from . import Image
+from . import ExifTags, Image
from ._deprecate import deprecate
-from ._util import is_path
+from ._util import DeferredError, is_path
if TYPE_CHECKING:
from ._typing import StrOrBytesPath
+logger = logging.getLogger(__name__)
+
MAXBLOCK = 65536
SAFEBLOCK = 1024 * 1024
@@ -163,6 +166,85 @@ def __init__(
def _open(self) -> None:
pass
+ def _close_fp(self):
+ if getattr(self, "_fp", False):
+ if self._fp != self.fp:
+ self._fp.close()
+ self._fp = DeferredError(ValueError("Operation on closed image"))
+ if self.fp:
+ self.fp.close()
+
+ def close(self) -> None:
+ """
+ Closes the file pointer, if possible.
+
+ This operation will destroy the image core and release its memory.
+ The image data will be unusable afterward.
+
+ This function is required to close images that have multiple frames or
+ have not had their file read and closed by the
+ :py:meth:`~PIL.Image.Image.load` method. See :ref:`file-handling` for
+ more information.
+ """
+ try:
+ self._close_fp()
+ self.fp = None
+ except Exception as msg:
+ logger.debug("Error closing: %s", msg)
+
+ super().close()
+
+ def get_child_images(self) -> list[ImageFile]:
+ child_images = []
+ exif = self.getexif()
+ ifds = []
+ if ExifTags.Base.SubIFDs in exif:
+ subifd_offsets = exif[ExifTags.Base.SubIFDs]
+ if subifd_offsets:
+ if not isinstance(subifd_offsets, tuple):
+ subifd_offsets = (subifd_offsets,)
+ for subifd_offset in subifd_offsets:
+ ifds.append((exif._get_ifd_dict(subifd_offset), subifd_offset))
+ ifd1 = exif.get_ifd(ExifTags.IFD.IFD1)
+ if ifd1 and ifd1.get(ExifTags.Base.JpegIFOffset):
+ assert exif._info is not None
+ ifds.append((ifd1, exif._info.next))
+
+ offset = None
+ for ifd, ifd_offset in ifds:
+ assert self.fp is not None
+ current_offset = self.fp.tell()
+ if offset is None:
+ offset = current_offset
+
+ fp = self.fp
+ if ifd is not None:
+ thumbnail_offset = ifd.get(ExifTags.Base.JpegIFOffset)
+ if thumbnail_offset is not None:
+ thumbnail_offset += getattr(self, "_exif_offset", 0)
+ self.fp.seek(thumbnail_offset)
+
+ length = ifd.get(ExifTags.Base.JpegIFByteCount)
+ assert isinstance(length, int)
+ data = self.fp.read(length)
+ fp = io.BytesIO(data)
+
+ with Image.open(fp) as im:
+ from . import TiffImagePlugin
+
+ if thumbnail_offset is None and isinstance(
+ im, TiffImagePlugin.TiffImageFile
+ ):
+ im._frame_pos = [ifd_offset]
+ im._seek(0)
+ im.load()
+ child_images.append(im)
+
+ if offset is not None:
+ assert self.fp is not None
+ self.fp.seek(offset)
+ return child_images
+
def get_format_mimetype(self) -> str | None:
if self.custom_mimetype:
return self.custom_mimetype
diff --git a/src/PIL/ImageFont.py b/src/PIL/ImageFont.py
index d8c2655609e..a4986aa8c40 100644
--- a/src/PIL/ImageFont.py
+++ b/src/PIL/ImageFont.py
@@ -644,6 +644,7 @@ def fill(width: int, height: int) -> Image.core.ImagingCore:
features,
language,
stroke_width,
+ kwargs.get("stroke_filled", False),
anchor,
ink,
start[0],
diff --git a/src/PIL/ImageOps.py b/src/PIL/ImageOps.py
index bb29cc0d3e8..fef1d7328c2 100644
--- a/src/PIL/ImageOps.py
+++ b/src/PIL/ImageOps.py
@@ -22,7 +22,7 @@
import operator
import re
from collections.abc import Sequence
-from typing import Protocol, cast
+from typing import Literal, Protocol, cast, overload
from . import ExifTags, Image, ImagePalette
@@ -673,6 +673,16 @@ def solarize(image: Image.Image, threshold: int = 128) -> Image.Image:
return _lut(image, lut)
+@overload
+def exif_transpose(image: Image.Image, *, in_place: Literal[True]) -> None: ...
+
+
+@overload
+def exif_transpose(
+ image: Image.Image, *, in_place: Literal[False] = False
+) -> Image.Image: ...
+
+
def exif_transpose(image: Image.Image, *, in_place: bool = False) -> Image.Image | None:
"""
If an image has an EXIF Orientation tag, other than 1, transpose the image
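With the ``@overload`` declarations above, a type checker knows that ``exif_transpose()`` returns an ``Image.Image`` unless ``in_place=True`` is passed (in which case it returns ``None``), so the ``assert transposed_im is not None`` lines removed from the tests are no longer needed. A small sketch; the image path is illustrative:

```python
from PIL import Image, ImageOps

with Image.open("Tests/images/hopper.jpg") as im:
    # Default form: typed as returning Image.Image, so the result can be
    # used directly without narrowing it away from None.
    transposed = ImageOps.exif_transpose(im)
    print(transposed.size)

    # In-place form: typed as returning None; the image itself is
    # transposed and its Orientation tag is cleared.
    ImageOps.exif_transpose(im, in_place=True)
```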
diff --git a/src/PIL/JpegImagePlugin.py b/src/PIL/JpegImagePlugin.py
index 6548505a7ab..af2b1355753 100644
--- a/src/PIL/JpegImagePlugin.py
+++ b/src/PIL/JpegImagePlugin.py
@@ -92,6 +92,9 @@ def APP(self: JpegImageFile, marker: int) -> None:
else:
if jfif_unit == 1:
self.info["dpi"] = jfif_density
+ elif jfif_unit == 2: # cm
+ # 1 dpcm = 2.54 dpi
+ self.info["dpi"] = tuple(d * 2.54 for d in jfif_density)
self.info["jfif_unit"] = jfif_unit
self.info["jfif_density"] = jfif_density
elif marker == 0xFFE1 and s[:6] == b"Exif\0\0":
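The new branch above handles JFIF unit 2 (dots per centimetre) by scaling the stored density by 2.54, so ``info["dpi"]`` is always expressed in inches. A sketch of the result for a hypothetical file declaring 39 dots/cm:

```python
from PIL import Image

with Image.open("photo_with_dpcm_density.jpg") as im:  # hypothetical file
    # jfif_density stays in the unit declared by the file (here dots/cm);
    # dpi is derived from it: 39 dots/cm * 2.54 cm/in = 99.06 dpi.
    print(im.info.get("jfif_unit"))     # 2
    print(im.info.get("jfif_density"))  # (39, 39)
    print(im.info.get("dpi"))           # (99.06, 99.06)
```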
diff --git a/src/PIL/PngImagePlugin.py b/src/PIL/PngImagePlugin.py
index b71b115d51a..c689749fd6d 100644
--- a/src/PIL/PngImagePlugin.py
+++ b/src/PIL/PngImagePlugin.py
@@ -1385,7 +1385,7 @@ def _save(
b"\0", # 12: interlace flag
)
- chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
+ chunks = [b"cHRM", b"cICP", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile"))
if icc:
diff --git a/src/PIL/SpiderImagePlugin.py b/src/PIL/SpiderImagePlugin.py
index ea04bd5caac..c612e5a6263 100644
--- a/src/PIL/SpiderImagePlugin.py
+++ b/src/PIL/SpiderImagePlugin.py
@@ -268,7 +268,7 @@ def makeSpiderHeader(im: Image.Image) -> list[bytes]:
def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
- if im.mode[0] != "F":
+ if im.mode != "F":
im = im.convert("F")
hdr = makeSpiderHeader(im)
diff --git a/src/PIL/TiffImagePlugin.py b/src/PIL/TiffImagePlugin.py
index 74b9b3a7fcc..8e44ca9f916 100644
--- a/src/PIL/TiffImagePlugin.py
+++ b/src/PIL/TiffImagePlugin.py
@@ -582,7 +582,7 @@ class ImageFileDirectory_v2(_IFDv2Base):
def __init__(
self,
- ifh: bytes = b"II\052\0\0\0\0\0",
+ ifh: bytes = b"II\x2A\x00\x00\x00\x00\x00",
prefix: bytes | None = None,
group: int | None = None,
) -> None:
@@ -949,12 +949,25 @@ def load(self, fp: IO[bytes]) -> None:
warnings.warn(str(msg))
return
+ def _get_ifh(self) -> bytes:
+ ifh = self._prefix + self._pack("H", 43 if self._bigtiff else 42)
+ if self._bigtiff:
+ ifh += self._pack("HH", 8, 0)
+ ifh += self._pack("Q", 16) if self._bigtiff else self._pack("L", 8)
+
+ return ifh
+
def tobytes(self, offset: int = 0) -> bytes:
# FIXME What about tagdata?
- result = self._pack("H", len(self._tags_v2))
+ result = self._pack("Q" if self._bigtiff else "H", len(self._tags_v2))
entries: list[tuple[int, int, int, bytes, bytes]] = []
- offset = offset + len(result) + len(self._tags_v2) * 12 + 4
+
+ fmt = "Q" if self._bigtiff else "L"
+ fmt_size = 8 if self._bigtiff else 4
+ offset += (
+ len(result) + len(self._tags_v2) * (20 if self._bigtiff else 12) + fmt_size
+ )
stripoffsets = None
# pass 1: convert tags to binary format
@@ -966,11 +979,7 @@ def tobytes(self, offset: int = 0) -> bytes:
logger.debug("Tag %s, Type: %s, Value: %s", tag, typ, repr(value))
is_ifd = typ == TiffTags.LONG and isinstance(value, dict)
if is_ifd:
- if self._endian == "<":
- ifh = b"II\x2A\x00\x08\x00\x00\x00"
- else:
- ifh = b"MM\x00\x2A\x00\x00\x00\x08"
- ifd = ImageFileDirectory_v2(ifh, group=tag)
+ ifd = ImageFileDirectory_v2(self._get_ifh(), group=tag)
values = self._tags_v2[tag]
for ifd_tag, ifd_value in values.items():
ifd[ifd_tag] = ifd_value
@@ -993,10 +1002,10 @@ def tobytes(self, offset: int = 0) -> bytes:
else:
count = len(values)
# figure out if data fits into the entry
- if len(data) <= 4:
- entries.append((tag, typ, count, data.ljust(4, b"\0"), b""))
+ if len(data) <= fmt_size:
+ entries.append((tag, typ, count, data.ljust(fmt_size, b"\0"), b""))
else:
- entries.append((tag, typ, count, self._pack("L", offset), data))
+ entries.append((tag, typ, count, self._pack(fmt, offset), data))
offset += (len(data) + 1) // 2 * 2 # pad to word
# update strip offset data to point beyond auxiliary data
@@ -1007,16 +1016,18 @@ def tobytes(self, offset: int = 0) -> bytes:
values = [val + offset for val in handler(self, data, self.legacy_api)]
data = self._write_dispatch[typ](self, *values)
else:
- value = self._pack("L", self._unpack("L", value)[0] + offset)
+ value = self._pack(fmt, self._unpack(fmt, value)[0] + offset)
entries[stripoffsets] = tag, typ, count, value, data
# pass 2: write entries to file
for tag, typ, count, value, data in entries:
logger.debug("%s %s %s %s %s", tag, typ, count, repr(value), repr(data))
- result += self._pack("HHL4s", tag, typ, count, value)
+ result += self._pack(
+ "HHQ8s" if self._bigtiff else "HHL4s", tag, typ, count, value
+ )
# -- overwrite here for multi-page --
- result += b"\0\0\0\0" # end of entries
+ result += self._pack(fmt, 0) # end of entries
# pass 3: write auxiliary data to file
for tag, typ, count, value, data in entries:
@@ -1028,8 +1039,7 @@ def tobytes(self, offset: int = 0) -> bytes:
def save(self, fp: IO[bytes]) -> int:
if fp.tell() == 0: # skip TIFF header on subsequent pages
- # tiff header -- PIL always starts the first IFD at offset 8
- fp.write(self._prefix + self._pack("HL", 42, 8))
+ fp.write(self._get_ifh())
offset = fp.tell()
result = self.tobytes(offset)
@@ -1401,7 +1411,8 @@ def _load_libtiff(self) -> Image.core.PixelAccess | None:
self.fp = None # might be shared
if err < 0:
- raise OSError(err)
+ msg = f"decoder error {err}"
+ raise OSError(msg)
return Image.Image.load(self)
@@ -1561,17 +1572,6 @@ def _setup(self) -> None:
# fillorder==2 modes have a corresponding
# fillorder=1 mode
self._mode, rawmode = OPEN_INFO[key]
- # libtiff always returns the bytes in native order.
- # we're expecting image byte order. So, if the rawmode
- # contains I;16, we need to convert from native to image
- # byte order.
- if rawmode == "I;16":
- rawmode = "I;16N"
- if ";16B" in rawmode:
- rawmode = rawmode.replace(";16B", ";16N")
- if ";16L" in rawmode:
- rawmode = rawmode.replace(";16L", ";16N")
-
# YCbCr images with new jpeg compression with pixels in one plane
# unpacked straight into RGB values
if (
@@ -1580,6 +1580,14 @@ def _setup(self) -> None:
and self._planar_configuration == 1
):
rawmode = "RGB"
+ # libtiff always returns the bytes in native order.
+ # we're expecting image byte order. So, if the rawmode
+ # contains I;16, we need to convert from native to image
+ # byte order.
+ elif rawmode == "I;16":
+ rawmode = "I;16N"
+ elif rawmode.endswith(";16B") or rawmode.endswith(";16L"):
+ rawmode = rawmode[:-1] + "N"
# Offset in the tile tuple is 0, we go from 0,0 to
# w,h, and we only do this once -- eds
@@ -1685,10 +1693,13 @@ def _save(im: Image.Image, fp: IO[bytes], filename: str | bytes) -> None:
msg = f"cannot write mode {im.mode} as TIFF"
raise OSError(msg) from e
- ifd = ImageFileDirectory_v2(prefix=prefix)
-
encoderinfo = im.encoderinfo
encoderconfig = im.encoderconfig
+
+ ifd = ImageFileDirectory_v2(prefix=prefix)
+ if encoderinfo.get("big_tiff"):
+ ifd._bigtiff = True
+
try:
compression = encoderinfo["compression"]
except KeyError:
@@ -2038,20 +2049,21 @@ def setup(self) -> None:
self.offsetOfNewPage = 0
self.IIMM = iimm = self.f.read(4)
+ self._bigtiff = b"\x2B" in iimm
if not iimm:
# empty file - first page
self.isFirst = True
return
self.isFirst = False
- if iimm == b"II\x2a\x00":
- self.setEndian("<")
- elif iimm == b"MM\x00\x2a":
- self.setEndian(">")
- else:
+ if iimm not in PREFIXES:
msg = "Invalid TIFF file header"
raise RuntimeError(msg)
+ self.setEndian("<" if iimm.startswith(II) else ">")
+
+ if self._bigtiff:
+ self.f.seek(4, os.SEEK_CUR)
self.skipIFDs()
self.goToEnd()
@@ -2071,11 +2083,13 @@ def finalize(self) -> None:
msg = "IIMM of new page doesn't match IIMM of first page"
raise RuntimeError(msg)
- ifd_offset = self.readLong()
+ if self._bigtiff:
+ self.f.seek(4, os.SEEK_CUR)
+ ifd_offset = self._read(8 if self._bigtiff else 4)
ifd_offset += self.offsetOfNewPage
assert self.whereToWriteNewIFDOffset is not None
self.f.seek(self.whereToWriteNewIFDOffset)
- self.writeLong(ifd_offset)
+ self._write(ifd_offset, 8 if self._bigtiff else 4)
self.f.seek(ifd_offset)
self.fixIFD()
@@ -2121,18 +2135,20 @@ def setEndian(self, endian: str) -> None:
self.endian = endian
self.longFmt = f"{self.endian}L"
self.shortFmt = f"{self.endian}H"
- self.tagFormat = f"{self.endian}HHL"
+ self.tagFormat = f"{self.endian}HH" + ("Q" if self._bigtiff else "L")
def skipIFDs(self) -> None:
while True:
- ifd_offset = self.readLong()
+ ifd_offset = self._read(8 if self._bigtiff else 4)
if ifd_offset == 0:
- self.whereToWriteNewIFDOffset = self.f.tell() - 4
+ self.whereToWriteNewIFDOffset = self.f.tell() - (
+ 8 if self._bigtiff else 4
+ )
break
self.f.seek(ifd_offset)
- num_tags = self.readShort()
- self.f.seek(num_tags * 12, os.SEEK_CUR)
+ num_tags = self._read(8 if self._bigtiff else 2)
+ self.f.seek(num_tags * (20 if self._bigtiff else 12), os.SEEK_CUR)
def write(self, data: Buffer, /) -> int:
return self.f.write(data)
@@ -2162,17 +2178,19 @@ def _verify_bytes_written(bytes_written: int | None, expected: int) -> None:
msg = f"wrote only {bytes_written} bytes but wanted {expected}"
raise RuntimeError(msg)
- def rewriteLastShortToLong(self, value: int) -> None:
- self.f.seek(-2, os.SEEK_CUR)
- bytes_written = self.f.write(struct.pack(self.longFmt, value))
- self._verify_bytes_written(bytes_written, 4)
-
- def _rewriteLast(self, value: int, field_size: int) -> None:
+ def _rewriteLast(
+ self, value: int, field_size: int, new_field_size: int = 0
+ ) -> None:
self.f.seek(-field_size, os.SEEK_CUR)
+ if not new_field_size:
+ new_field_size = field_size
bytes_written = self.f.write(
- struct.pack(self.endian + self._fmt(field_size), value)
+ struct.pack(self.endian + self._fmt(new_field_size), value)
)
- self._verify_bytes_written(bytes_written, field_size)
+ self._verify_bytes_written(bytes_written, new_field_size)
+
+ def rewriteLastShortToLong(self, value: int) -> None:
+ self._rewriteLast(value, 2, 4)
def rewriteLastShort(self, value: int) -> None:
return self._rewriteLast(value, 2)
@@ -2180,13 +2198,17 @@ def rewriteLastShort(self, value: int) -> None:
def rewriteLastLong(self, value: int) -> None:
return self._rewriteLast(value, 4)
+ def _write(self, value: int, field_size: int) -> None:
+ bytes_written = self.f.write(
+ struct.pack(self.endian + self._fmt(field_size), value)
+ )
+ self._verify_bytes_written(bytes_written, field_size)
+
def writeShort(self, value: int) -> None:
- bytes_written = self.f.write(struct.pack(self.shortFmt, value))
- self._verify_bytes_written(bytes_written, 2)
+ self._write(value, 2)
def writeLong(self, value: int) -> None:
- bytes_written = self.f.write(struct.pack(self.longFmt, value))
- self._verify_bytes_written(bytes_written, 4)
+ self._write(value, 4)
def close(self) -> None:
self.finalize()
@@ -2194,24 +2216,37 @@ def close(self) -> None:
self.f.close()
def fixIFD(self) -> None:
- num_tags = self.readShort()
+ num_tags = self._read(8 if self._bigtiff else 2)
for i in range(num_tags):
- tag, field_type, count = struct.unpack(self.tagFormat, self.f.read(8))
+ tag, field_type, count = struct.unpack(
+ self.tagFormat, self.f.read(12 if self._bigtiff else 8)
+ )
field_size = self.fieldSizes[field_type]
total_size = field_size * count
- is_local = total_size <= 4
+ fmt_size = 8 if self._bigtiff else 4
+ is_local = total_size <= fmt_size
if not is_local:
- offset = self.readLong() + self.offsetOfNewPage
- self.rewriteLastLong(offset)
+ offset = self._read(fmt_size) + self.offsetOfNewPage
+ self._rewriteLast(offset, fmt_size)
if tag in self.Tags:
cur_pos = self.f.tell()
+ logger.debug(
+ "fixIFD: %s (%d) - type: %s (%d) - type size: %d - count: %d",
+ TiffTags.lookup(tag).name,
+ tag,
+ TYPES.get(field_type, "unknown"),
+ field_type,
+ field_size,
+ count,
+ )
+
if is_local:
self._fixOffsets(count, field_size)
- self.f.seek(cur_pos + 4)
+ self.f.seek(cur_pos + fmt_size)
else:
self.f.seek(offset)
self._fixOffsets(count, field_size)
@@ -2219,24 +2254,33 @@ def fixIFD(self) -> None:
elif is_local:
# skip the locally stored value that is not an offset
- self.f.seek(4, os.SEEK_CUR)
+ self.f.seek(fmt_size, os.SEEK_CUR)
def _fixOffsets(self, count: int, field_size: int) -> None:
for i in range(count):
offset = self._read(field_size)
offset += self.offsetOfNewPage
- if field_size == 2 and offset >= 65536:
- # offset is now too large - we must convert shorts to longs
+
+ new_field_size = 0
+ if self._bigtiff and field_size in (2, 4) and offset >= 2**32:
+ # offset is now too large - we must convert long to long8
+ new_field_size = 8
+ elif field_size == 2 and offset >= 2**16:
+ # offset is now too large - we must convert short to long
+ new_field_size = 4
+ if new_field_size:
if count != 1:
msg = "not implemented"
raise RuntimeError(msg) # XXX TODO
# simple case - the offset is just one and therefore it is
# local (not referenced with another offset)
- self.rewriteLastShortToLong(offset)
- self.f.seek(-10, os.SEEK_CUR)
- self.writeShort(TiffTags.LONG) # rewrite the type to LONG
- self.f.seek(8, os.SEEK_CUR)
+ self._rewriteLast(offset, field_size, new_field_size)
+ # Move back past the new offset, past 'count', and before 'field_type'
+ rewind = -new_field_size - 4 - 2
+ self.f.seek(rewind, os.SEEK_CUR)
+ self.writeShort(new_field_size) # rewrite the type
+ self.f.seek(2 - rewind, os.SEEK_CUR)
else:
self._rewriteLast(offset, field_size)
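The ``AppendingTiffWriter`` changes above teach the multi-frame writer to read 8-byte BigTIFF offsets and 20-byte tag entries when fixing up appended IFDs. Assuming ``save_all`` honours the same ``big_tiff`` keyword as a single-frame save, a multipage sketch looks like this (output filename is a placeholder):

```python
from PIL import Image

frames = [Image.new("RGB", (64, 64), color) for color in ("red", "green", "blue")]

# The first frame starts the file; the rest are appended as extra IFDs.
frames[0].save(
    "multipage.tif",
    save_all=True,
    append_images=frames[1:],
    big_tiff=True,  # 64-bit offsets throughout, including appended pages
)
```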
diff --git a/src/PIL/_deprecate.py b/src/PIL/_deprecate.py
index 83952b397ff..9f9d8bbc9cc 100644
--- a/src/PIL/_deprecate.py
+++ b/src/PIL/_deprecate.py
@@ -47,6 +47,8 @@ def deprecate(
raise RuntimeError(msg)
elif when == 12:
removed = "Pillow 12 (2025-10-15)"
+ elif when == 13:
+ removed = "Pillow 13 (2026-10-15)"
else:
msg = f"Unknown removal version: {when}. Update {__name__}?"
raise ValueError(msg)
diff --git a/src/PIL/_imagingft.pyi b/src/PIL/_imagingft.pyi
index 9cc9822f5b4..81329474730 100644
--- a/src/PIL/_imagingft.pyi
+++ b/src/PIL/_imagingft.pyi
@@ -28,6 +28,7 @@ class Font:
features: list[str] | None,
lang: str | None,
stroke_width: float,
+ stroke_filled: bool,
anchor: str | None,
foreground_ink_long: int,
x_start: float,
diff --git a/src/PIL/_version.py b/src/PIL/_version.py
index 0807f949c31..e93c7887b80 100644
--- a/src/PIL/_version.py
+++ b/src/PIL/_version.py
@@ -1,4 +1,4 @@
# Master version for Pillow
from __future__ import annotations
-__version__ = "11.1.0.dev0"
+__version__ = "11.2.0.dev0"
diff --git a/src/PIL/features.py b/src/PIL/features.py
index 3645e3defc4..ae7ea4255ef 100644
--- a/src/PIL/features.py
+++ b/src/PIL/features.py
@@ -127,6 +127,7 @@ def get_supported_codecs() -> list[str]:
"fribidi": ("PIL._imagingft", "HAVE_FRIBIDI", "fribidi_version"),
"harfbuzz": ("PIL._imagingft", "HAVE_HARFBUZZ", "harfbuzz_version"),
"libjpeg_turbo": ("PIL._imaging", "HAVE_LIBJPEGTURBO", "libjpeg_turbo_version"),
+ "mozjpeg": ("PIL._imaging", "HAVE_MOZJPEG", "libjpeg_turbo_version"),
"zlib_ng": ("PIL._imaging", "HAVE_ZLIBNG", "zlib_ng_version"),
"libimagequant": ("PIL._imaging", "HAVE_LIBIMAGEQUANT", "imagequant_version"),
"xcb": ("PIL._imaging", "HAVE_XCB", None),
@@ -300,7 +301,8 @@ def pilinfo(out: IO[str] | None = None, supported_formats: bool = True) -> None:
if name == "jpg":
libjpeg_turbo_version = version_feature("libjpeg_turbo")
if libjpeg_turbo_version is not None:
- v = "libjpeg-turbo " + libjpeg_turbo_version
+ v = "mozjpeg" if check_feature("mozjpeg") else "libjpeg-turbo"
+ v += " " + libjpeg_turbo_version
if v is None:
v = version(name)
if v is not None:
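The new ``mozjpeg`` feature flag is set from ``JPEG_C_PARAM_SUPPORTED`` at compile time, and ``pilinfo()`` now prefixes the libjpeg-turbo version with ``mozjpeg`` when it is present. A quick check from Python:

```python
from PIL import features

# True only when _imaging was built against MozJPEG; the version string is
# still reported through libjpeg-turbo's version macro.
print(features.check_feature("mozjpeg"))
print(features.version_feature("libjpeg_turbo"))

# Prints the full feature summary, including the "mozjpeg <version>" line
# when applicable.
features.pilinfo(supported_formats=False)
```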
diff --git a/src/_imaging.c b/src/_imaging.c
index 5d6d97bedab..2fd2deffbe6 100644
--- a/src/_imaging.c
+++ b/src/_imaging.c
@@ -76,6 +76,13 @@
#ifdef HAVE_LIBJPEG
#include "jconfig.h"
+#ifdef LIBJPEG_TURBO_VERSION
+#define JCONFIG_INCLUDED
+#ifdef __CYGWIN__
+#define _BASETSD_H
+#endif
+#include "jpeglib.h"
+#endif
#endif
#ifdef HAVE_LIBZ
@@ -466,8 +473,7 @@ getpixel(Imaging im, ImagingAccess access, int x, int y) {
}
/* unknown type */
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static char *
@@ -958,8 +964,7 @@ _convert2(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1207,8 +1212,7 @@ _getpixel(ImagingObject *self, PyObject *args) {
}
if (self->access == NULL) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return getpixel(self->image, self->access, x, y);
@@ -1410,8 +1414,7 @@ _paste(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1684,8 +1687,7 @@ _putdata(ImagingObject *self, PyObject *args) {
Py_XDECREF(seq);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1745,8 +1747,7 @@ _putpalette(ImagingObject *self, PyObject *args) {
self->image->palette->size = palettesize * 8 / bits;
unpack(self->image->palette->palette, palette, self->image->palette->size);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1770,8 +1771,7 @@ _putpalettealpha(ImagingObject *self, PyObject *args) {
strcpy(self->image->palette->mode, "RGBA");
self->image->palette->palette[index * 4 + 3] = (UINT8)alpha;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1798,8 +1798,7 @@ _putpalettealphas(ImagingObject *self, PyObject *args) {
self->image->palette->palette[i * 4 + 3] = (UINT8)values[i];
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1835,8 +1834,7 @@ _putpixel(ImagingObject *self, PyObject *args) {
self->access->put_pixel(im, x, y, ink);
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2003,8 +2001,7 @@ im_setmode(ImagingObject *self, PyObject *args) {
}
self->access = ImagingAccessNew(im);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2067,8 +2064,7 @@ _transform(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2195,8 +2191,7 @@ _getbbox(ImagingObject *self, PyObject *args) {
}
if (!ImagingGetBBox(self->image, bbox, alpha_only)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return Py_BuildValue("iiii", bbox[0], bbox[1], bbox[2], bbox[3]);
@@ -2276,8 +2271,7 @@ _getextrema(ImagingObject *self) {
}
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2340,8 +2334,7 @@ _fillband(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2356,8 +2349,7 @@ _putband(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2943,8 +2935,7 @@ _draw_arc(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -2981,8 +2972,7 @@ _draw_bitmap(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3038,8 +3028,7 @@ _draw_chord(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3093,8 +3082,7 @@ _draw_ellipse(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3157,8 +3145,7 @@ _draw_lines(ImagingDrawObject *self, PyObject *args) {
free(xy);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3189,8 +3176,7 @@ _draw_points(ImagingDrawObject *self, PyObject *args) {
free(xy);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
/* from outline.c */
@@ -3218,8 +3204,7 @@ _draw_outline(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3275,8 +3260,7 @@ _draw_pieslice(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3327,8 +3311,7 @@ _draw_polygon(ImagingDrawObject *self, PyObject *args) {
free(ixy);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -3382,8 +3365,7 @@ _draw_rectangle(ImagingDrawObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static struct PyMethodDef _draw_methods[] = {
@@ -3588,8 +3570,7 @@ _save_ppm(ImagingObject *self, PyObject *args) {
return NULL;
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
/* -------------------------------------------------------------------- */
@@ -3977,8 +3958,7 @@ _reset_stats(PyObject *self, PyObject *args) {
arena->stats_freed_blocks = 0;
MUTEX_UNLOCK(&ImagingDefaultArena.mutex);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -4038,8 +4018,7 @@ _set_alignment(PyObject *self, PyObject *args) {
ImagingDefaultArena.alignment = alignment;
MUTEX_UNLOCK(&ImagingDefaultArena.mutex);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -4063,8 +4042,7 @@ _set_block_size(PyObject *self, PyObject *args) {
ImagingDefaultArena.block_size = block_size;
MUTEX_UNLOCK(&ImagingDefaultArena.mutex);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -4092,8 +4070,7 @@ _set_blocks_max(PyObject *self, PyObject *args) {
return ImagingError_MemoryError();
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -4108,8 +4085,7 @@ _clear_cache(PyObject *self, PyObject *args) {
ImagingMemoryClearCache(&ImagingDefaultArena, i);
MUTEX_UNLOCK(&ImagingDefaultArena.mutex);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
/* -------------------------------------------------------------------- */
@@ -4367,6 +4343,15 @@ setup_module(PyObject *m) {
Py_INCREF(have_libjpegturbo);
PyModule_AddObject(m, "HAVE_LIBJPEGTURBO", have_libjpegturbo);
+ PyObject *have_mozjpeg;
+#ifdef JPEG_C_PARAM_SUPPORTED
+ have_mozjpeg = Py_True;
+#else
+ have_mozjpeg = Py_False;
+#endif
+ Py_INCREF(have_mozjpeg);
+ PyModule_AddObject(m, "HAVE_MOZJPEG", have_mozjpeg);
+
PyObject *have_libimagequant;
#ifdef HAVE_LIBIMAGEQUANT
have_libimagequant = Py_True;
diff --git a/src/_imagingcms.c b/src/_imagingcms.c
index 1805ebde17f..14cf2acd22d 100644
--- a/src/_imagingcms.c
+++ b/src/_imagingcms.c
@@ -654,8 +654,7 @@ cms_get_display_profile_win32(PyObject *self, PyObject *args) {
return PyUnicode_FromStringAndSize(filename, filename_size - 1);
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
#endif
@@ -672,20 +671,17 @@ _profile_read_mlu(CmsProfileObject *self, cmsTagSignature info) {
wchar_t *buf;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
mlu = cmsReadTag(self->profile, info);
if (!mlu) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
len = cmsMLUgetWide(mlu, lc, cc, NULL, 0);
if (len == 0) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
buf = malloc(len);
@@ -723,14 +719,12 @@ _profile_read_signature(CmsProfileObject *self, cmsTagSignature info) {
unsigned int *sig;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
sig = (unsigned int *)cmsReadTag(self->profile, info);
if (!sig) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _profile_read_int_as_string(*sig);
@@ -780,14 +774,12 @@ _profile_read_ciexyz(CmsProfileObject *self, cmsTagSignature info, int multi) {
cmsCIEXYZ *XYZ;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
XYZ = (cmsCIEXYZ *)cmsReadTag(self->profile, info);
if (!XYZ) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
if (multi) {
return _xyz3_py(XYZ);
@@ -801,14 +793,12 @@ _profile_read_ciexyy_triple(CmsProfileObject *self, cmsTagSignature info) {
cmsCIExyYTRIPLE *triple;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
triple = (cmsCIExyYTRIPLE *)cmsReadTag(self->profile, info);
if (!triple) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
/* Note: lcms does all the heavy lifting and error checking (nr of
@@ -835,21 +825,18 @@ _profile_read_named_color_list(CmsProfileObject *self, cmsTagSignature info) {
PyObject *result;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
ncl = (cmsNAMEDCOLORLIST *)cmsReadTag(self->profile, info);
if (ncl == NULL) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
n = cmsNamedColorCount(ncl);
result = PyList_New(n);
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
for (i = 0; i < n; i++) {
@@ -858,8 +845,7 @@ _profile_read_named_color_list(CmsProfileObject *self, cmsTagSignature info) {
str = PyUnicode_FromString(name);
if (str == NULL) {
Py_DECREF(result);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
PyList_SET_ITEM(result, i, str);
}
@@ -926,8 +912,7 @@ _is_intent_supported(CmsProfileObject *self, int clut) {
result = PyDict_New();
if (result == NULL) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
n = cmsGetSupportedIntents(INTENTS, intent_ids, intent_descs);
@@ -957,8 +942,7 @@ _is_intent_supported(CmsProfileObject *self, int clut) {
Py_XDECREF(id);
Py_XDECREF(entry);
Py_XDECREF(result);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
PyDict_SetItem(result, id, entry);
Py_DECREF(id);
@@ -1042,8 +1026,7 @@ cms_profile_getattr_creation_date(CmsProfileObject *self, void *closure) {
result = cmsGetHeaderCreationDateTime(self->profile, &ct);
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return PyDateTime_FromDateAndTime(
@@ -1141,8 +1124,7 @@ cms_profile_getattr_saturation_rendering_intent_gamut(
static PyObject *
cms_profile_getattr_red_colorant(CmsProfileObject *self, void *closure) {
if (!cmsIsMatrixShaper(self->profile)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _profile_read_ciexyz(self, cmsSigRedColorantTag, 0);
}
@@ -1150,8 +1132,7 @@ cms_profile_getattr_red_colorant(CmsProfileObject *self, void *closure) {
static PyObject *
cms_profile_getattr_green_colorant(CmsProfileObject *self, void *closure) {
if (!cmsIsMatrixShaper(self->profile)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _profile_read_ciexyz(self, cmsSigGreenColorantTag, 0);
}
@@ -1159,8 +1140,7 @@ cms_profile_getattr_green_colorant(CmsProfileObject *self, void *closure) {
static PyObject *
cms_profile_getattr_blue_colorant(CmsProfileObject *self, void *closure) {
if (!cmsIsMatrixShaper(self->profile)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _profile_read_ciexyz(self, cmsSigBlueColorantTag, 0);
}
@@ -1176,21 +1156,18 @@ cms_profile_getattr_media_white_point_temperature(
cmsBool result;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
XYZ = (cmsCIEXYZ *)cmsReadTag(self->profile, info);
if (XYZ == NULL || XYZ->X == 0) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
cmsXYZ2xyY(&xyY, XYZ);
result = cmsTempFromWhitePoint(&tempK, &xyY);
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return PyFloat_FromDouble(tempK);
}
@@ -1229,8 +1206,7 @@ cms_profile_getattr_red_primary(CmsProfileObject *self, void *closure) {
result = _calculate_rgb_primaries(self, &primaries);
}
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _xyz_py(&primaries.Red);
@@ -1245,8 +1221,7 @@ cms_profile_getattr_green_primary(CmsProfileObject *self, void *closure) {
result = _calculate_rgb_primaries(self, &primaries);
}
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _xyz_py(&primaries.Green);
@@ -1261,8 +1236,7 @@ cms_profile_getattr_blue_primary(CmsProfileObject *self, void *closure) {
result = _calculate_rgb_primaries(self, &primaries);
}
if (!result) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return _xyz_py(&primaries.Blue);
@@ -1321,14 +1295,12 @@ cms_profile_getattr_icc_measurement_condition(CmsProfileObject *self, void *clos
const char *geo;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
mc = (cmsICCMeasurementConditions *)cmsReadTag(self->profile, info);
if (!mc) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
if (mc->Geometry == 1) {
@@ -1362,14 +1334,12 @@ cms_profile_getattr_icc_viewing_condition(CmsProfileObject *self, void *closure)
cmsTagSignature info = cmsSigViewingConditionsTag;
if (!cmsIsTag(self->profile, info)) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
vc = (cmsICCViewingConditions *)cmsReadTag(self->profile, info);
if (!vc) {
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
return Py_BuildValue(
diff --git a/src/_imagingft.c b/src/_imagingft.c
index d38279f3e4b..c202a805921 100644
--- a/src/_imagingft.c
+++ b/src/_imagingft.c
@@ -339,29 +339,23 @@ text_layout_raqm(
len = PySequence_Fast_GET_SIZE(seq);
for (j = 0; j < len; j++) {
PyObject *item = PySequence_Fast_GET_ITEM(seq, j);
- char *feature = NULL;
- Py_ssize_t size = 0;
- PyObject *bytes;
-
if (!PyUnicode_Check(item)) {
Py_DECREF(seq);
PyErr_SetString(PyExc_TypeError, "expected a string");
goto failed;
}
- bytes = PyUnicode_AsUTF8String(item);
- if (bytes == NULL) {
+
+ Py_ssize_t size;
+ const char *feature = PyUnicode_AsUTF8AndSize(item, &size);
+ if (feature == NULL) {
Py_DECREF(seq);
goto failed;
}
- feature = PyBytes_AS_STRING(bytes);
- size = PyBytes_GET_SIZE(bytes);
if (!raqm_add_font_feature(rq, feature, size)) {
Py_DECREF(seq);
- Py_DECREF(bytes);
PyErr_SetString(PyExc_ValueError, "raqm_add_font_feature() failed");
goto failed;
}
- Py_DECREF(bytes);
}
Py_DECREF(seq);
}
@@ -840,6 +834,7 @@ font_render(FontObject *self, PyObject *args) {
int mask = 0; /* is FT_LOAD_TARGET_MONO enabled? */
int color = 0; /* is FT_LOAD_COLOR enabled? */
float stroke_width = 0;
+ int stroke_filled = 0;
PY_LONG_LONG foreground_ink_long = 0;
unsigned int foreground_ink;
const char *mode = NULL;
@@ -859,7 +854,7 @@ font_render(FontObject *self, PyObject *args) {
if (!PyArg_ParseTuple(
args,
- "OO|zzOzfzLffO:render",
+ "OO|zzOzfpzLffO:render",
&string,
&fill,
&mode,
@@ -867,6 +862,7 @@ font_render(FontObject *self, PyObject *args) {
&features,
&lang,
&stroke_width,
+ &stroke_filled,
&anchor,
&foreground_ink_long,
&x_start,
@@ -1011,7 +1007,8 @@ font_render(FontObject *self, PyObject *args) {
if (stroker != NULL) {
error = FT_Get_Glyph(glyph_slot, &glyph);
if (!error) {
- error = FT_Glyph_Stroke(&glyph, stroker, 1);
+ error = stroke_filled ? FT_Glyph_StrokeBorder(&glyph, stroker, 0, 1)
+ : FT_Glyph_Stroke(&glyph, stroker, 1);
}
if (!error) {
FT_Vector origin = {0, 0};
@@ -1377,8 +1374,7 @@ font_setvarname(FontObject *self, PyObject *args) {
return geterror(error);
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -1432,8 +1428,7 @@ font_setvaraxes(FontObject *self, PyObject *args) {
return geterror(error);
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
#endif
diff --git a/src/_imagingmath.c b/src/_imagingmath.c
index dbe636707f4..75b3716b5c1 100644
--- a/src/_imagingmath.c
+++ b/src/_imagingmath.c
@@ -192,8 +192,7 @@ _unop(PyObject *self, PyObject *args) {
unop(out, im1);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -226,8 +225,7 @@ _binop(PyObject *self, PyObject *args) {
binop(out, im1, im2);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyMethodDef _functions[] = {
diff --git a/src/_imagingtk.c b/src/_imagingtk.c
index c70d044bb86..c4448265114 100644
--- a/src/_imagingtk.c
+++ b/src/_imagingtk.c
@@ -37,8 +37,7 @@ _tkinit(PyObject *self, PyObject *args) {
/* This will bomb if interp is invalid... */
TkImaging_Init(interp);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyMethodDef functions[] = {
diff --git a/src/decode.c b/src/decode.c
index 51d0aced2bd..1f2c22491f8 100644
--- a/src/decode.c
+++ b/src/decode.c
@@ -213,8 +213,7 @@ _setimage(ImagingDecoderObject *decoder, PyObject *args) {
Py_XDECREF(decoder->lock);
decoder->lock = op;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -231,8 +230,7 @@ _setfd(ImagingDecoderObject *decoder, PyObject *args) {
Py_XINCREF(fd);
state->fd = fd;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
diff --git a/src/display.c b/src/display.c
index eed75975d71..36ab3b237ef 100644
--- a/src/display.c
+++ b/src/display.c
@@ -85,8 +85,7 @@ _expose(ImagingDisplayObject *display, PyObject *args) {
ImagingExposeDIB(display->dib, hdc);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -112,8 +111,7 @@ _draw(ImagingDisplayObject *display, PyObject *args) {
ImagingDrawDIB(display->dib, hdc, dst, src);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
extern Imaging
@@ -143,8 +141,7 @@ _paste(ImagingDisplayObject *display, PyObject *args) {
ImagingPasteDIB(display->dib, im, xy);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -190,8 +187,7 @@ _releasedc(ImagingDisplayObject *display, PyObject *args) {
ReleaseDC(window, dc);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -211,8 +207,7 @@ _frombytes(ImagingDisplayObject *display, PyObject *args) {
memcpy(display->dib->bits, buffer.buf, buffer.len);
PyBuffer_Release(&buffer);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -709,8 +704,7 @@ PyImaging_EventLoopWin32(PyObject *self, PyObject *args) {
}
Py_END_ALLOW_THREADS;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
/* -------------------------------------------------------------------- */
diff --git a/src/encode.c b/src/encode.c
index d369a1b4598..0bf5e63c585 100644
--- a/src/encode.c
+++ b/src/encode.c
@@ -278,8 +278,7 @@ _setimage(ImagingEncoderObject *encoder, PyObject *args) {
Py_XDECREF(encoder->lock);
encoder->lock = op;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -296,8 +295,7 @@ _setfd(ImagingEncoderObject *encoder, PyObject *args) {
Py_XINCREF(fd);
state->fd = fd;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
diff --git a/src/libImaging/Except.c b/src/libImaging/Except.c
deleted file mode 100644
index f42ff9aec9e..00000000000
--- a/src/libImaging/Except.c
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * The Python Imaging Library
- * $Id$
- *
- * default exception handling
- *
- * This module is usually overridden by application code (e.g.
- * _imaging.c for PIL's standard Python bindings). If you get
- * linking errors, remove this file from your project/library.
- *
- * history:
- * 1995-06-15 fl Created
- * 1998-12-29 fl Minor tweaks
- * 2003-09-13 fl Added ImagingEnter/LeaveSection()
- *
- * Copyright (c) 1997-2003 by Secret Labs AB.
- * Copyright (c) 1995-2003 by Fredrik Lundh.
- *
- * See the README file for information on usage and redistribution.
- */
-
-#include "Imaging.h"
-
-/* exception state */
-
-void *
-ImagingError_OSError(void) {
- fprintf(stderr, "*** exception: file access error\n");
- return NULL;
-}
-
-void *
-ImagingError_MemoryError(void) {
- fprintf(stderr, "*** exception: out of memory\n");
- return NULL;
-}
-
-void *
-ImagingError_ModeError(void) {
- return ImagingError_ValueError("bad image mode");
-}
-
-void *
-ImagingError_Mismatch(void) {
- return ImagingError_ValueError("images don't match");
-}
-
-void *
-ImagingError_ValueError(const char *message) {
- if (!message) {
- message = "exception: bad argument to function";
- }
- fprintf(stderr, "*** %s\n", message);
- return NULL;
-}
-
-void
-ImagingError_Clear(void) {
- /* nop */;
-}
-
-/* thread state */
-
-void
-ImagingSectionEnter(ImagingSectionCookie *cookie) {
- /* pass */
-}
-
-void
-ImagingSectionLeave(ImagingSectionCookie *cookie) {
- /* pass */
-}
diff --git a/src/libImaging/ImPlatform.h b/src/libImaging/ImPlatform.h
index c9b7e43b425..2ce282241d5 100644
--- a/src/libImaging/ImPlatform.h
+++ b/src/libImaging/ImPlatform.h
@@ -44,8 +44,6 @@
defines their own types with the same names, so we need to be able to undef
ours before including the JPEG code. */
-#if __STDC_VERSION__ >= 199901L /* C99+ */
-
#include <stdint.h>
#define INT8 int8_t
@@ -55,34 +53,6 @@
#define INT32 int32_t
#define UINT32 uint32_t
-#else /* < C99 */
-
-#define INT8 signed char
-
-#if SIZEOF_SHORT == 2
-#define INT16 short
-#elif SIZEOF_INT == 2
-#define INT16 int
-#else
-#error Cannot find required 16-bit integer type
-#endif
-
-#if SIZEOF_SHORT == 4
-#define INT32 short
-#elif SIZEOF_INT == 4
-#define INT32 int
-#elif SIZEOF_LONG == 4
-#define INT32 long
-#else
-#error Cannot find required 32-bit integer type
-#endif
-
-#define UINT8 unsigned char
-#define UINT16 unsigned INT16
-#define UINT32 unsigned INT32
-
-#endif /* < C99 */
-
#endif /* not WIN */
/* assume IEEE; tweak if necessary (patches are welcome) */
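
With the pre-C99 branch removed, the header now relies solely on `<stdint.h>` for its exact-width aliases. A small standalone check (not Pillow code; assumes a C11-or-newer compiler for `static_assert`) that the C99 typedefs provide the widths the deleted `SIZEOF_*` probing used to guarantee:

```c
#include <assert.h>
#include <stdint.h>

/* Compile-time confirmation that the <stdint.h> typedefs behind INT8/INT16/
   INT32 and their unsigned counterparts have the expected exact widths. */
static_assert(sizeof(int8_t) == 1, "INT8 must be 1 byte");
static_assert(sizeof(int16_t) == 2, "INT16 must be 2 bytes");
static_assert(sizeof(int32_t) == 4, "INT32 must be 4 bytes");
static_assert(sizeof(uint32_t) == 4, "UINT32 must be 4 bytes");

int main(void) {
    return 0; /* nothing to do at run time; the checks fire at compile time */
}
```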
diff --git a/src/libImaging/Imaging.h b/src/libImaging/Imaging.h
index 31052c68a97..0c2d3fc2e34 100644
--- a/src/libImaging/Imaging.h
+++ b/src/libImaging/Imaging.h
@@ -609,10 +609,6 @@ ImagingLibTiffDecode(
extern int
ImagingLibTiffEncode(Imaging im, ImagingCodecState state, UINT8 *buffer, int bytes);
#endif
-#ifdef HAVE_LIBMPEG
-extern int
-ImagingMpegDecode(Imaging im, ImagingCodecState state, UINT8 *buffer, Py_ssize_t bytes);
-#endif
extern int
ImagingMspDecode(Imaging im, ImagingCodecState state, UINT8 *buffer, Py_ssize_t bytes);
extern int
diff --git a/src/libImaging/Jpeg2KEncode.c b/src/libImaging/Jpeg2KEncode.c
index d30ccde603e..34d1a22949c 100644
--- a/src/libImaging/Jpeg2KEncode.c
+++ b/src/libImaging/Jpeg2KEncode.c
@@ -330,6 +330,13 @@ j2k_encode_entry(Imaging im, ImagingCodecState state) {
components = 4;
color_space = OPJ_CLRSPC_SRGB;
pack = j2k_pack_rgba;
+#if ((OPJ_VERSION_MAJOR == 2 && OPJ_VERSION_MINOR == 5 && OPJ_VERSION_BUILD >= 3) || \
+ (OPJ_VERSION_MAJOR == 2 && OPJ_VERSION_MINOR > 5) || OPJ_VERSION_MAJOR > 2)
+ } else if (strcmp(im->mode, "CMYK") == 0) {
+ components = 4;
+ color_space = OPJ_CLRSPC_CMYK;
+ pack = j2k_pack_rgba;
+#endif
} else {
state->errcode = IMAGING_CODEC_BROKEN;
state->state = J2K_STATE_FAILED;
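
The new CMYK branch is compiled in only when the OpenJPEG headers report version 2.5.3 or later, which is what the three-clause `OPJ_VERSION_*` test spells out. A standalone sketch of the same version gate (the `OPJ_AT_LEAST` helper is hypothetical, and the includes assume `opj_config.h`/`openjpeg.h` are available from an OpenJPEG install):

```c
#include <opj_config.h> /* OPJ_VERSION_MAJOR / MINOR / BUILD */
#include <openjpeg.h>   /* opj_version() */
#include <stdio.h>

/* Hypothetical helper: true when the OpenJPEG headers are at least maj.min.build. */
#define OPJ_AT_LEAST(maj, min, build)                                      \
    (OPJ_VERSION_MAJOR > (maj) ||                                          \
     (OPJ_VERSION_MAJOR == (maj) &&                                        \
      (OPJ_VERSION_MINOR > (min) ||                                        \
       (OPJ_VERSION_MINOR == (min) && OPJ_VERSION_BUILD >= (build)))))

int main(void) {
#if OPJ_AT_LEAST(2, 5, 3)
    printf("OpenJPEG %s: CMYK encode branch would be compiled in\n", opj_version());
#else
    printf("OpenJPEG %s: CMYK encode branch would be compiled out\n", opj_version());
#endif
    return 0;
}
```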
diff --git a/src/libImaging/JpegEncode.c b/src/libImaging/JpegEncode.c
index 4372d51d5c3..3c11eac2206 100644
--- a/src/libImaging/JpegEncode.c
+++ b/src/libImaging/JpegEncode.c
@@ -134,7 +134,16 @@ ImagingJpegEncode(Imaging im, ImagingCodecState state, UINT8 *buf, int bytes) {
return -1;
}
- /* Compressor configuration */
+ /* Compressor configuration */
+#ifdef JPEG_C_PARAM_SUPPORTED
+ /* MozJPEG */
+ if (!context->progressive) {
+ /* Do not use MozJPEG progressive default */
+ jpeg_c_set_int_param(
+ &context->cinfo, JINT_COMPRESS_PROFILE, JCP_FASTEST
+ );
+ }
+#endif
jpeg_set_defaults(&context->cinfo);
/* Prevent RGB -> YCbCr conversion */
diff --git a/src/libImaging/Unpack.c b/src/libImaging/Unpack.c
index c23d5d889f6..9c3ee26655f 100644
--- a/src/libImaging/Unpack.c
+++ b/src/libImaging/Unpack.c
@@ -1664,6 +1664,7 @@ static struct {
{"RGBA", "RGBaXX", 48, unpackRGBaskip2},
{"RGBA", "RGBa;16L", 64, unpackRGBa16L},
{"RGBA", "RGBa;16B", 64, unpackRGBa16B},
+ {"RGBA", "BGR", 24, ImagingUnpackBGR},
{"RGBA", "BGRa", 32, unpackBGRa},
{"RGBA", "RGBA;I", 32, unpackRGBAI},
{"RGBA", "RGBA;L", 32, unpackRGBAL},
@@ -1695,6 +1696,7 @@ static struct {
#ifdef WORDS_BIGENDIAN
{"RGB", "RGB;16N", 48, unpackRGB16B},
+ {"RGB", "RGBX;16N", 64, unpackRGBA16B},
{"RGBA", "RGBa;16N", 64, unpackRGBa16B},
{"RGBA", "RGBA;16N", 64, unpackRGBA16B},
{"RGBX", "RGBX;16N", 64, unpackRGBA16B},
@@ -1708,6 +1710,7 @@ static struct {
{"RGBA", "A;16N", 16, band316B},
#else
{"RGB", "RGB;16N", 48, unpackRGB16L},
+ {"RGB", "RGBX;16N", 64, unpackRGBA16L},
{"RGBA", "RGBa;16N", 64, unpackRGBa16L},
{"RGBA", "RGBA;16N", 64, unpackRGBA16L},
{"RGBX", "RGBX;16N", 64, unpackRGBA16L},
diff --git a/src/outline.c b/src/outline.c
index 27cc255cf84..4aa6bd59e51 100644
--- a/src/outline.c
+++ b/src/outline.c
@@ -89,8 +89,7 @@ _outline_move(OutlineObject *self, PyObject *args) {
ImagingOutlineMove(self->outline, x0, y0);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -102,8 +101,7 @@ _outline_line(OutlineObject *self, PyObject *args) {
ImagingOutlineLine(self->outline, x1, y1);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -115,8 +113,7 @@ _outline_curve(OutlineObject *self, PyObject *args) {
ImagingOutlineCurve(self->outline, x1, y1, x2, y2, x3, y3);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -127,8 +124,7 @@ _outline_close(OutlineObject *self, PyObject *args) {
ImagingOutlineClose(self->outline);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static PyObject *
@@ -140,8 +136,7 @@ _outline_transform(OutlineObject *self, PyObject *args) {
ImagingOutlineTransform(self->outline, a);
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static struct PyMethodDef _outline_methods[] = {
diff --git a/src/path.c b/src/path.c
index 067f42f62de..b508df2ac30 100644
--- a/src/path.c
+++ b/src/path.c
@@ -415,8 +415,7 @@ path_map(PyPathObject *self, PyObject *args) {
}
self->mapping = 0;
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static int
@@ -528,8 +527,7 @@ path_transform(PyPathObject *self, PyObject *args) {
}
}
- Py_INCREF(Py_None);
- return Py_None;
+ Py_RETURN_NONE;
}
static struct PyMethodDef methods[] = {
diff --git a/winbuild/README.md b/winbuild/README.md
index f6111c79b0e..c474f12ceee 100644
--- a/winbuild/README.md
+++ b/winbuild/README.md
@@ -11,10 +11,11 @@ For more extensive info, see the [Windows build instructions](build.rst).
* Requires Microsoft Visual Studio 2017 or newer with C++ component.
* Requires NASM for libjpeg-turbo, a required dependency when using this script.
* Requires CMake 3.15 or newer (available as Visual Studio component).
-* Tested on Windows Server 2019 with Visual Studio 2019 Community and Visual Studio 2022 Community (AppVeyor).
-* Tested on Windows Server 2022 with Visual Studio 2022 Enterprise (GitHub Actions).
+* Tested on Windows Server 2022 with Visual Studio 2022 Enterprise and Windows Server
+ 2019 with Visual Studio 2019 Enterprise (GitHub Actions).
-The following is a simplified version of the script used on AppVeyor:
+
+Here's an example script to build on Windows:
```
set PYTHON=C:\Python39\bin
cd /D C:\Pillow\winbuild
diff --git a/winbuild/build.rst b/winbuild/build.rst
index 96b8803b477..aae78ce1237 100644
--- a/winbuild/build.rst
+++ b/winbuild/build.rst
@@ -6,7 +6,7 @@ Building Pillow on Windows
be sufficient.
This page describes the steps necessary to build Pillow using the same
-scripts used on GitHub Actions and AppVeyor CIs.
+scripts used on GitHub Actions CI.
Prerequisites
-------------
@@ -112,7 +112,7 @@ directory.
Example
-------
-The following is a simplified version of the script used on AppVeyor::
+Here's an example script to build on Windows::
set PYTHON=C:\Python39\bin
cd /D C:\Pillow\winbuild
diff --git a/winbuild/build_prepare.py b/winbuild/build_prepare.py
index 0674a9a1528..54b5d983f6e 100644
--- a/winbuild/build_prepare.py
+++ b/winbuild/build_prepare.py
@@ -113,17 +113,16 @@ def cmd_msbuild(
"BROTLI": "1.1.0",
"FREETYPE": "2.13.3",
"FRIBIDI": "1.0.16",
- "HARFBUZZ": "10.1.0",
+ "HARFBUZZ": "10.2.0",
"JPEGTURBO": "3.1.0",
"LCMS2": "2.16",
- "LIBPNG": "1.6.44",
+ "LIBPNG": "1.6.46",
"LIBWEBP": "1.5.0",
"OPENJPEG": "2.5.3",
"TIFF": "4.6.0",
- "XZ": "5.6.3",
- "ZLIBNG": "2.2.2",
+ "XZ": "5.6.4",
+ "ZLIBNG": "2.2.3",
}
-V["LIBPNG_DOTLESS"] = V["LIBPNG"].replace(".", "")
V["LIBPNG_XY"] = "".join(V["LIBPNG"].split(".")[:2])
@@ -241,8 +240,8 @@ def cmd_msbuild(
},
"libpng": {
"url": f"{SF_PROJECTS}/libpng/files/libpng{V['LIBPNG_XY']}/{V['LIBPNG']}/"
- f"lpng{V['LIBPNG_DOTLESS']}.zip/download",
- "filename": f"lpng{V['LIBPNG_DOTLESS']}.zip",
+ f"FILENAME/download",
+ "filename": f"libpng-{V['LIBPNG']}.tar.gz",
"license": "LICENSE",
"build": [
*cmds_cmake("png_static", "-DPNG_SHARED:BOOL=OFF", "-DPNG_TESTS:BOOL=OFF"),