diff --git a/.cursorrules b/.cursorrules index b7b941e..1242b3b 100644 --- a/.cursorrules +++ b/.cursorrules @@ -1,4 +1,44 @@ -- Use PyQt6 for GUI. -- Split code into files when possible. -- Make code clean and understandable. -- Optimize code for performance and memory usage. +# anylabeling — guidance for AI assistants + +This is a desktop image-annotation app on PyQt6, with an auto-labeling +backend that runs ONNX models (YOLOv5/v8, SAM1/MobileSAM, SAM2, SAM3) +and a CoreML path for SAM2 on macOS. PyPI ships two parallel packages +from the same source tree: `anylabeling` (CPU, default) and +`anylabeling-gpu` (Linux/Windows, swaps in `onnxruntime-gpu`). + +## Conventions +- Use **PyQt6** (not PyQt5). The migration happened in commit 9735fe8. +- The macOS install path **excludes PyQt6** from `pyproject.toml` + (`platform_system != 'Darwin'`); macOS users get it via conda. + Don't add a Darwin-side floor without changing the install story. +- Keep code split into focused files; avoid growing `label_widget.py` + (already ~3.2k LOC) further when a new widget would do. + +## Architecture cheat sheet +- Entry point: `anylabeling/app.py` → `MainWindow` → `LabelingWrapper` → + `LabelingWidget` (the "god widget" — owns canvas, file list, toolbars). +- Auto-labeling: `anylabeling/services/auto_labeling/` + - `registry.py` → `@ModelRegistry.register("type-name")` decorator + - `model_manager.py` → loads `models.yaml`, downloads weights to + `~/anylabeling_data/models/`, dispatches `predict_shapes_threading()` + - `models.yaml` (`anylabeling/configs/auto_labeling/`) is the model + catalog the UI reads. New model = new entry **and** registered class. + - `segment_anything.py` auto-detects SAM1/SAM2/SAM3 from ONNX inputs. +- CPU/GPU duality: `setup.py` reads `__preferred_device__` from + `anylabeling/app_info.py`; publish workflows `sed` it before `python -m build`. 
+ +## Pre-publish gate +- `.github/workflows/tests.yml` runs a 9-cell matrix (Ubuntu/Windows/macOS + × py3.11/3.12/3.13). All publish/release workflows declare `needs: test`. +- Why it exists: `anylabeling-gpu==0.4.30` shipped to PyPI broken because + no automated step ran `pip install .` against current dep floors before + publish (issue #227, `imgviz>=2.0` returned a read-only colormap). +- Always run `python -m unittest discover -s tests` in a fresh venv before + tagging a release. See `CLAUDE.md` for the full pre-publish playbook. + +## Resource regeneration +- PyQt6 dropped `pyrcc`. To rebuild `anylabeling/resources/resources.py` + use `python scripts/compile_languages.py` — it shells out to + `pyside6-rcc` and `pyside6-lrelease` and rewrites imports back to PyQt6. +- `PySide6-Essentials` is a `[dev]` extra for this reason only; runtime + has no PySide6 dependency. diff --git a/.github/workflows/python-publish-cpu.yml b/.github/workflows/python-publish-cpu.yml index 23f75cb..38da422 100644 --- a/.github/workflows/python-publish-cpu.yml +++ b/.github/workflows/python-publish-cpu.yml @@ -5,7 +5,12 @@ on: - 'v*' jobs: + test: + name: Test before publish + uses: ./.github/workflows/tests.yml + build-n-publish: + needs: test if: startsWith(github.ref, 'refs/tags/') name: Build and publish CPU 🐍📦 to PyPI runs-on: ubuntu-latest @@ -15,9 +20,9 @@ jobs: permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install pypa/build diff --git a/.github/workflows/python-publish-gpu.yml b/.github/workflows/python-publish-gpu.yml index 088241e..7c3c7ff 100644 --- a/.github/workflows/python-publish-gpu.yml +++ b/.github/workflows/python-publish-gpu.yml @@ -7,7 +7,12 @@ on: jobs: + test: + name: Test before publish + uses: ./.github/workflows/tests.yml + 
build-n-publish-gpu: + needs: test if: startsWith(github.ref, 'refs/tags/') name: Build and publish GPU 🐍📦 to PyPI runs-on: ubuntu-latest @@ -17,9 +22,9 @@ jobs: permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install pypa/build @@ -29,9 +34,26 @@ jobs: run: >- sed -i'' -e 's/\_\_preferred_device\_\_[ ]*=[ ]*\"[A-Za-z0-9]*\"/__preferred_device__ = "GPU"/g' anylabeling/app_info.py + # PEP 621 makes pyproject.toml `[project]` metadata authoritative — setup.py + # cannot override `name` or replace `dependencies`. Rewrite pyproject.toml + # in place so the GPU wheel ships as `anylabeling-gpu` with onnxruntime-gpu. + - name: Rewrite pyproject.toml for the GPU package + run: | + sed -i 's/^name = "anylabeling"$/name = "anylabeling-gpu"/' pyproject.toml + sed -i 's/"onnxruntime>=1.20.0"/"onnxruntime-gpu>=1.20.0"/' pyproject.toml + echo "--- after rewrite ---" + grep -E '^name|onnxruntime' pyproject.toml + - name: Build a binary wheel and a source tarball run: >- python -m build --wheel --outdir dist/ . 
+ + - name: Verify built wheel is anylabeling-gpu + run: | + ls dist/ + whl=$(ls dist/anylabeling_gpu-*.whl) + python -m zipfile -e "$whl" /tmp/whl_extract/ + grep -E '^Name:|^Requires-Dist: onnxruntime' /tmp/whl_extract/anylabeling_gpu-*.dist-info/METADATA - name: Publish distribution 📦 to PyPI if: startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ad4443f..7850560 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,7 +9,12 @@ permissions: contents: write jobs: + test: + name: Test before release + uses: ./.github/workflows/tests.yml + release: + needs: test if: startsWith(github.ref, 'refs/tags/') runs-on: ubuntu-latest @@ -62,11 +67,11 @@ jobs: contents: write steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - - uses: conda-incubator/setup-miniconda@v2 + - uses: conda-incubator/setup-miniconda@v3 with: python-version: "3.12" miniconda-version: "latest" @@ -123,11 +128,11 @@ jobs: device: [CPU, GPU] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true - - uses: conda-incubator/setup-miniconda@v2 + - uses: conda-incubator/setup-miniconda@v3 with: python-version: "3.12" miniconda-version: "latest" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..54d4129 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,52 @@ +name: Tests + +on: + push: + branches: [main, master] + tags: ['v*'] + pull_request: + branches: [main, master] + workflow_call: + +jobs: + test: + name: ${{ matrix.os }} / Python ${{ matrix.python-version }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.11", "3.12", "3.13"] + env: + # Headless Qt — otherwise PyQt6 tries to talk to a display + QT_QPA_PLATFORM: offscreen + + steps: 
+ - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: pip + + - name: Install Qt system libraries (Linux) + if: runner.os == 'Linux' + run: | + sudo apt-get update + sudo apt-get install -y --no-install-recommends \ + libegl1 libxkbcommon-x11-0 libdbus-1-3 libxcb-cursor0 \ + libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 \ + libxcb-render-util0 libxcb-shape0 libxcb-xinerama0 libxcb-xkb1 + + - name: Install PyQt6 (macOS — pyproject.toml excludes it on Darwin) + if: runner.os == 'macOS' + run: python -m pip install "PyQt6>=6.7.0" + + - name: Install package + run: | + python -m pip install --upgrade pip + pip install . + + - name: Run unit tests + run: python -m unittest discover -s tests -v diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..68f2c65 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,274 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +AnyLabeling is a desktop image-annotation app built on PyQt6, with an +auto-labeling backend that runs ONNX models (YOLOv5/v8, SAM1/MobileSAM, +SAM2, SAM3) and a CoreML path for SAM2 on macOS. PyPI ships two parallel +packages from the same source tree: `anylabeling` (CPU, default) and +`anylabeling-gpu` (Linux/Windows, swaps in `onnxruntime-gpu`). + +## Common commands + +```bash +# Run the app from source (no install needed for dev) +python anylabeling/app.py + +# Run the installed CLI +anylabeling + +# Editable install for development (CPU) +pip install -e ".[dev]" +# GPU dev: pip install -e ".[gpu,dev]" +# macOS dev: pip install -e ".[macos,dev]" # plus conda install -c conda-forge pyqt=6 + +# Lint + format (ruff config is in pyproject.toml) +ruff check . +ruff format . 
+ +# Run all tests +python -m unittest discover -s tests -v + +# Run one test file +python -m unittest tests.test_label_colormap -v + +# Run one test method +python -m unittest tests.test_label_colormap.TestLabelColormapMutability.test_copy_is_always_writable + +# Build a wheel + sdist (CPU). For GPU, sed __preferred_device__ to "GPU" first. +python -m build --sdist --wheel --outdir dist/ . + +# Build a standalone executable +bash build_executable.sh # delegates to PyInstaller via anylabeling.spec +``` + +App-level CLI flags: `--reset-config`, `--logger-level {debug,info,warning,error,fatal}`, +`--config `, `--output / -O / -o`, `--nodata`, `--autosave`, `--nosortlabels`, +`--flags`, plus a positional `filename` (image or label file). Default user +config lives at `~/.anylabelingrc`. + +## High-level architecture + +### Entry point and UI tree + +`anylabeling/app.py` sets `MKL/NUMEXPR/OMP_NUM_THREADS=1` (workaround for a +macOS-M1 bus error in `np.linalg.solve`) before any heavy imports, then +constructs a `QApplication` and a `MainWindow`. The UI tree is intentionally +shallow: + +``` +MainWindow (anylabeling/views/mainwindow.py) +└── LabelingWrapper (anylabeling/views/labeling/label_wrapper.py) + └── LabelingWidget (anylabeling/views/labeling/label_widget.py, ~3.2k LOC) + ├── Canvas (anylabeling/views/labeling/widgets/canvas.py) + ├── AutoLabelingWidget (drives ModelManager from the UI side) + ├── LabelDialog / Brightness / FileDialogPreview / ZoomWidget … + └── ExportDialog +``` + +`LabelingWidget` is the "god widget" — it owns the file list, the canvas, the +toolbars, the shape list, the label list, file I/O, undo/redo, and most +keybindings. When in doubt, that file is where things live. 
+ +### Auto-labeling pipeline + +``` +anylabeling/services/auto_labeling/ +├── registry.py # @ModelRegistry.register("yolov8") decorator → singleton dict +├── model.py # abstract Model(QObject); predict_shapes() returns AutoLabelingResult +├── model_manager.py # ModelManager(QObject): loads models.yaml, downloads weights, +│ # dispatches predict_shapes_threading() +├── types.py # AutoLabelingResult, AutoLabelingMode (point/rectangle, ADD/REMOVE) +├── lru_cache.py # image-embedding cache for SAM-family models +├── segment_anything.py # variant detector — picks SAM1/SAM2/SAM3 from ONNX inputs/config +├── sam_onnx.py # SAM1 / MobileSAM ONNX runner +├── sam2_onnx.py # SAM2 ONNX runner +├── sam3_onnx.py # SAM3 ONNX runner (text + geometric prompts) +├── sam2_coreml.py # macOS CoreML path for SAM2.1 +└── yolov5.py / yolov8.py +``` + +Two registry-relevant facts: + +- Concrete models register themselves via `@ModelRegistry.register("type-name")` + at import time. `anylabeling/services/auto_labeling/__init__.py` imports + every module so the side-effects fire — adding a new model means importing + it here too. +- `models.yaml` (`anylabeling/configs/auto_labeling/models.yaml`) is the + catalog the UI reads. Each entry has `name`, `display_name`, `type` + (matches a registry key), `download_url`, plus model-specific fields like + `encoder_model_path`, `decoder_model_path`, `input_size`. New model = add + an entry here *and* a registered class. + +Weights live under `~/anylabeling_data/models//` after first download. + +### CPU / GPU / macOS packaging + +Static metadata is in `pyproject.toml`. `setup.py` is a small shim that +reads `__preferred_device__` from `anylabeling/app_info.py` and, when set +to `"GPU"` on non-Darwin, overrides the package name to `anylabeling-gpu` +and swaps `onnxruntime` for `onnxruntime-gpu`. 
The publish workflows +(`.github/workflows/python-publish-{cpu,gpu}.yml`) `sed` that constant +just before building, so both wheels come out of the same source tree. +The GPU workflow additionally rewrites `pyproject.toml` in place (name → +`anylabeling-gpu`, `onnxruntime` → `onnxruntime-gpu`), because PEP 621 +makes the `[project]` table authoritative and `setup.py` cannot override +the package name or dependencies. + +`pyproject.toml` excludes `PyQt6` on Darwin +(`PyQt6>=...; platform_system != 'Darwin'`). macOS users install PyQt +through conda. The macOS extra is `[macos]` (currently `coremltools==8.3.0`). + +### Qt resources and translations + +- `anylabeling/resources/resources.qrc` (XML) compiles to `resources.py`. +- `anylabeling/resources/translations/{en_US,vi_VN,zh_CN}.{ts,qm}`. +- `scripts/generate_languages.py` extracts translatable strings into `.ts` + files (`pylupdate6`) and runs `pyuic6` on `.ui` files. +- `scripts/compile_languages.py` calls `pyside6-lrelease` to produce `.qm` + files, then `pyside6-rcc` to rebuild `resources.py`. + +Note: the project migrated from PyQt5 to PyQt6 (commit `9735fe8`), and +PyQt6 does not ship a `pyrcc6`. Both scripts therefore shell out to +PySide6's `pyside6-rcc` (and `pyside6-lrelease`) and rewrite the generated +`PySide6` imports back to `PyQt6`; `PySide6-Essentials` is a `[dev]` extra +for exactly this reason. + +### Tests + +`tests/` is plain `unittest`. Notable files: + +- `tests/test_label_colormap.py` — regression test for issue #227 + (`imgviz.label_colormap()` returns read-only on imgviz>=2.0; the call + site needs `.copy()`). +- `tests/test_real_inference.py` — end-to-end ONNX inference for + SAM1/SAM2/SAM3/YOLOv8. Each class skips itself if its model files are + not under `~/anylabeling_data/models/`. The SAM3 text-prompt tests look + for `../samexporter/images/truck.jpg` (sibling-repo path) and silently + fall back to `sample_images/evan-foley-...jpg` (no truck), which makes + three SAM3 tests fail — see step 3 of the playbook below. + +## Pre-publish local experiments + +Run these **before tagging a release** (`vX.Y.Z`). 
The CI matrix in +`.github/workflows/tests.yml` already gates publish on every tag push, but +running locally first is faster and catches obvious dep-resolution +failures before burning CI minutes. + +### 1. Fresh-venv install with latest deps + +The point of a *fresh* venv is to let pip resolve every dependency to the +newest version compatible with `pyproject.toml` — this is what end users +get on `pip install anylabeling[-gpu]`, and it is exactly the path that +produced the `imgviz>=2.0` read-only crash in #227. + +```bash +python -m venv /tmp/anylabeling-check +/tmp/anylabeling-check/bin/pip install --upgrade pip +/tmp/anylabeling-check/bin/pip install . +``` + +Watch for: any wheel that fails to build, any dep that pip cannot resolve. + +### 2. Run the full unittest suite + +```bash +/tmp/anylabeling-check/bin/python -m unittest discover -s tests -v +``` + +Expected: all tests pass; `test_real_inference` cases skip cleanly when +model files are not on disk — that is fine. Step 3 below covers running +those tests with real models. + +### 3. (Recommended) Real-model inference + +`tests/test_real_inference.py` exercises ONNX inference end-to-end for +SAM1 / SAM2 / SAM3 / YOLOv8. Each test class skips itself when its model +files are missing, so download whichever you can validate on the local +machine. Models live under `~/anylabeling_data/models/`. 
+ +```bash +mkdir -p ~/anylabeling_data/models && cd ~/anylabeling_data/models + +# YOLOv8n (~13 MB) +curl -sL -o /tmp/yolov8n.zip https://github.com/vietanhdev/anylabeling-assets/releases/download/v0.4.0/yolov8n-r20230415.zip +mkdir -p yolov8n-r20230415 && unzip -q -o /tmp/yolov8n.zip -d yolov8n-r20230415 + +# MobileSAM (~37 MB) +curl -sL -o /tmp/msam.zip https://huggingface.co/vietanhdev/segment-anything-onnx-models/resolve/main/mobile_sam_20230629.zip +mkdir -p mobile_sam_20230629 && unzip -q -o /tmp/msam.zip -d mobile_sam_20230629 + +# SAM2 hiera-tiny (~155 MB) +curl -sL -o /tmp/sam2.zip https://huggingface.co/vietanhdev/segment-anything-2-onnx-models/resolve/main/sam2_hiera_tiny.zip +mkdir -p sam2_hiera_tiny_20240803 && unzip -q -o /tmp/sam2.zip -d sam2_hiera_tiny_20240803 + +# SAM3 ViT-H (~3.4 GB — only needed when SAM3 code paths changed) +curl -sL -o /tmp/sam3.zip https://huggingface.co/vietanhdev/segment-anything-3-onnx-models/resolve/main/sam3_vit_h.zip +mkdir -p sam3_vit_h_20260220 && unzip -q -o /tmp/sam3.zip -d sam3_vit_h_20260220 +``` + +The SAM3 text-prompt tests need a truck image at the sibling-repo path: + +```bash +mkdir -p ../samexporter/images +curl -sL -o ../samexporter/images/truck.jpg \ + https://raw.githubusercontent.com/vietanhdev/samexporter/main/images/truck.jpg +``` + +Then re-run the inference tests: + +```bash +/tmp/anylabeling-check/bin/python -m unittest tests.test_real_inference -v +``` + +Source of truth for model URLs is +`anylabeling/configs/auto_labeling/models.yaml`. + +### 4. Smoke-test the import chain that users hit at startup + +This is the *exact* path that crashed in #227. If it imports clean against +freshly resolved deps, the package will at least start. + +```bash +QT_QPA_PLATFORM=offscreen /tmp/anylabeling-check/bin/python -c " +from anylabeling.views.labeling import label_widget +from anylabeling import app +print('startup imports OK') +" +``` + +### 5. 
Repeat against every supported Python (3.11, 3.12, 3.13) + +PyPI ships one wheel that has to work on every Python listed in +`pyproject.toml` classifiers. Use `uv` to spin them up quickly: + +```bash +uv python install 3.11 3.12 3.13 +for v in 3.11 3.12 3.13; do + PY=$(uv python find $v) + VENV=/tmp/al-py${v//./} + rm -rf $VENV && $PY -m venv $VENV + $VENV/bin/pip install --upgrade pip --quiet + $VENV/bin/pip install . --quiet + $VENV/bin/python -m unittest discover -s tests +done +``` + +### 6. Then push and let CI confirm cross-platform + +The matrix in `.github/workflows/tests.yml` runs steps 1, 2, 4 on +Ubuntu + Windows + macOS × Python 3.11/3.12/3.13. The publish workflows +(`python-publish-cpu.yml`, `python-publish-gpu.yml`, `release.yml`) all +declare `needs: test`, so a red matrix blocks the PyPI upload and the +GitHub release binary builds. Step 3 (real-model inference) is *not* +automated in CI because the SAM3 model alone is 3.4 GB — run it locally +when touching ONNX inference, model loading, or preprocessing code. + +## Why this gate exists + +`anylabeling-gpu==0.4.30` shipped to PyPI broken because no automated test +ran `pip install .` against current dep floors before publish. The fix in +`label_widget.py:45` (call `.copy()` on `imgviz.label_colormap()`) had a +regression test in `tests/test_label_colormap.py`, but nothing executed it +on the publish path. The workflows in `.github/workflows/` now do. + +When adding a new dependency or raising a floor, **assume it can break +import-time code paths** — read-only numpy arrays, removed deprecated +APIs, changed default dtypes — and rely on the steps above to catch it. 
diff --git a/pyproject.toml b/pyproject.toml index 279f88e..f13922a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,19 +23,19 @@ classifiers = [ ] dependencies = [ "imgviz>=2.0.0", - "natsort>=8.0.0", + "natsort>=8.4.0", "numpy>=2.0.0", - "Pillow>=10.0.0", + "Pillow>=11.0.0", "PyYAML>=6.0.2", - "termcolor>=2.0.0", + "termcolor>=2.4.0", "opencv-python-headless>=4.10.0", - "PyQt6>=6.5.0; platform_system != 'Darwin'", + "PyQt6>=6.7.0; platform_system != 'Darwin'", "onnx>=1.18.0", "onnxruntime>=1.20.0", "qimage2ndarray>=1.10.0", "darkdetect>=0.8.0", - "huggingface_hub", - "osam>=0.3.1", # CLIP tokenizer for SAM3 language encoder + "huggingface_hub>=0.24.0", + "osam>=0.4.0", # CLIP tokenizer for SAM3 language encoder ] [project.optional-dependencies] @@ -51,6 +51,9 @@ macos = [ dev = [ "build>=1.2", "twine>=6.0", + # pyside6-rcc is the supported way to rebuild anylabeling/resources/resources.py + # — PyQt6 dropped its own pyrcc in the Qt6 line. Used by scripts/*_languages.py. + "PySide6-Essentials>=6.7.0", ] [project.urls] diff --git a/scripts/compile_languages.py b/scripts/compile_languages.py index b00bb6c..15d5076 100644 --- a/scripts/compile_languages.py +++ b/scripts/compile_languages.py @@ -1,13 +1,63 @@ +"""Compile translations and rebuild Qt resources. + +PyQt6 dropped `pyrcc` entirely (the Qt Project removed the standalone +resource compiler in Qt 6). The well-known workaround is to invoke +PySide6's `pyside6-rcc` and rewrite the import line so the output +imports `PyQt6.QtCore` instead of `PySide6.QtCore`. PySide6-Essentials +is declared in the `[dev]` extras in pyproject.toml. + +Run from the repo root: + python scripts/compile_languages.py +""" import os +import shutil +import subprocess +import sys + +SUPPORTED_LANGUAGES = ["en_US", "vi_VN", "zh_CN"] +QRC_PATH = "anylabeling/resources/resources.qrc" +RC_PATH = "anylabeling/resources/resources.py" + +# Look up tools first next to the active interpreter (venv bin), then $PATH. 
+_VENV_BIN = os.path.dirname(sys.executable) + + +def _resolve(cmd): + candidate = os.path.join(_VENV_BIN, cmd) + if os.path.isfile(candidate) and os.access(candidate, os.X_OK): + return candidate + found = shutil.which(cmd) + if found: + return found + sys.exit( + f"error: '{cmd}' not found in {_VENV_BIN} or on PATH. " + "Install dev tools with `pip install -e \".[dev]\"`." + ) + + +def _run(cmd): + print("$", " ".join(cmd)) + subprocess.run(cmd, check=True) + + +def main(): + lrelease = _resolve("pyside6-lrelease") + rcc = _resolve("pyside6-rcc") + + for lang in SUPPORTED_LANGUAGES: + _run([lrelease, f"anylabeling/resources/translations/{lang}.ts"]) + + _run([rcc, QRC_PATH, "-o", RC_PATH]) -supported_languages = ["en_US", "vi_VN", "zh_CN"] + # Rewrite PySide6 imports to PyQt6 so the rest of the app can use it. + with open(RC_PATH, "r", encoding="utf-8") as f: + content = f.read() + content = content.replace("from PySide6 import", "from PyQt6 import") + content = content.replace("import PySide6", "import PyQt6") + with open(RC_PATH, "w", encoding="utf-8") as f: + f.write(content) + print(f"Rewrote PySide6 → PyQt6 imports in {RC_PATH}") -for language in supported_languages: - # Compile the .ts file into a .qm file - command = f"lrelease anylabeling/resources/translations/{language}.ts" - os.system(command) -# Generate resources -command = "pyrcc5 -o anylabeling/resources/resources.py \ - anylabeling/resources/resources.qrc" -os.system(command) +if __name__ == "__main__": + main() diff --git a/scripts/generate_languages.py b/scripts/generate_languages.py index 3f63961..904f872 100644 --- a/scripts/generate_languages.py +++ b/scripts/generate_languages.py @@ -1,34 +1,76 @@ +"""Regenerate translation .ts files from source and recompile resources. + +PyQt6 dropped `pyrcc` entirely (the Qt Project removed the standalone +resource compiler in Qt 6). 
This script uses PyQt6's `pyuic6` and +`pylupdate6` for UI / translation extraction, and PySide6's `pyside6-rcc` +for resource compilation, rewriting `PySide6` → `PyQt6` in the output so +the generated module imports `PyQt6.QtCore`. Both PyQt6 and PySide6-Essentials +are declared in the `[dev]` extras in pyproject.toml. + +Run from the repo root: + python scripts/generate_languages.py +""" import glob import os +import shutil +import subprocess +import sys -from PyQt6 import QtCore +SUPPORTED_LANGUAGES = ["en_US", "vi_VN", "zh_CN"] +TRANSLATIONS_DIR = "anylabeling/resources/translations" +QRC_PATH = "anylabeling/resources/resources.qrc" +RC_PATH = "anylabeling/resources/resources.py" -supported_languages = ["en_US", "vi_VN", "zh_CN"] +# Look up tools first next to the active interpreter (venv bin), then $PATH. +_VENV_BIN = os.path.dirname(sys.executable) + + +def _resolve(cmd): + candidate = os.path.join(_VENV_BIN, cmd) + if os.path.isfile(candidate) and os.access(candidate, os.X_OK): + return candidate + found = shutil.which(cmd) + if found: + return found + sys.exit( + f"error: '{cmd}' not found in {_VENV_BIN} or on PATH. " + "Install dev tools with `pip install -e \".[dev]\"`." 
+ ) + + +def _run(cmd): + print("$", " ".join(cmd)) + subprocess.run(cmd, check=True) -for language in supported_languages: - # Scan all .py files in the project directory and its subdirectories - py_files = glob.glob(os.path.join("**", "*.py"), recursive=True) - # Create a QTranslator object to generate the .ts file - translator = QtCore.QTranslator() +def main(): + pyuic = _resolve("pyuic6") + pylupdate = _resolve("pylupdate6") + lrelease = _resolve("pyside6-lrelease") + rcc = _resolve("pyside6-rcc") - # Translate all .ui files into .py files + py_files = glob.glob(os.path.join("**", "*.py"), recursive=True) ui_files = glob.glob(os.path.join("**", "*.ui"), recursive=True) + for ui_file in ui_files: py_file = os.path.splitext(ui_file)[0] + "_ui.py" - command = f"pyuic6 -x {ui_file} -o {py_file}" - os.system(command) - - # Extract translations from the .py file - translations_path = "anylabeling/resources/translations" - command = f"pylupdate6 {' '.join(py_files)} -ts {translations_path}/{language}.ts" - os.system(command) - - # Compile the .ts file into a .qm file - command = f"lrelease {translations_path}/{language}.ts" - os.system(command) - -# Generate resources -command = "pyrcc6 -o anylabeling/resources/resources.py \ - anylabeling/resources/resources.qrc" -os.system(command) + _run([pyuic, "-x", ui_file, "-o", py_file]) + + for lang in SUPPORTED_LANGUAGES: + ts_path = f"{TRANSLATIONS_DIR}/{lang}.ts" + _run([pylupdate, *py_files, "-ts", ts_path]) + _run([lrelease, ts_path]) + + _run([rcc, QRC_PATH, "-o", RC_PATH]) + + with open(RC_PATH, "r", encoding="utf-8") as f: + content = f.read() + content = content.replace("from PySide6 import", "from PyQt6 import") + content = content.replace("import PySide6", "import PyQt6") + with open(RC_PATH, "w", encoding="utf-8") as f: + f.write(content) + print(f"Rewrote PySide6 → PyQt6 imports in {RC_PATH}") + + +if __name__ == "__main__": + main()