diff --git a/.github/scripts/publish-bonsai-releases.py b/.github/scripts/publish-bonsai-releases.py new file mode 100755 index 00000000000..a393104e7c9 --- /dev/null +++ b/.github/scripts/publish-bonsai-releases.py @@ -0,0 +1,95 @@ +#!/usr/bin/env -S uv run +# /// script +# dependencies = [ +# "PyGithub", +# "requests", +# ] +# /// + +import os +from pathlib import Path + +import requests +from github import Github +from github.GitReleaseAsset import GitReleaseAsset + +EXTENSION_ID = "bonsai" +CURRENT_PYTHON_VERSION = "py313" +CURRENT_PLATFORMS = ["linux-x64", "macos-arm64", "windows-x64"] + + +def publish_asset(asset: GitReleaseAsset, token: str, repo_root: Path) -> None: + """ + Publish an asset to Blender Extensions. + Reference: https://extensions.blender.org/api/v1/swagger + """ + temp_path = repo_root / asset.name + + response = requests.get(asset.browser_download_url) + response.raise_for_status() + temp_path.write_bytes(response.content) + + url = f"https://extensions.blender.org/api/v1/extensions/{EXTENSION_ID}/versions/upload/" + headers = {"Authorization": f"Bearer {token}"} + + files = {"version_file": temp_path.read_bytes()} + response = requests.post(url, headers=headers, files=files) + response.raise_for_status() + + temp_path.unlink() + + print(f"āœ“ Published {asset.name}") + + +def main() -> None: + token = os.getenv("BLENDER_EXTENSIONS_TOKEN") + if not token: + raise Exception("BLENDER_EXTENSIONS_TOKEN environment variable not set") + + # Get the repository root + repo_root = Path(__file__).parent.parent.parent + + # Read VERSION file + version_file = repo_root / "VERSION" + version = version_file.read_text().strip() + + print(f"Current VERSION: {version}") + + tag_name = f"bonsai-{version}" + + # Get release from GitHub + gh = Github() + gh_repo = gh.get_repo("IfcOpenShell/IfcOpenShell") + release = gh_repo.get_release(tag_name) + + assets = release.get_assets() + + asset_platform_map: dict[str, tuple[GitReleaseAsset, str]] = {} + for asset in 
assets: + if CURRENT_PYTHON_VERSION not in asset.name: + continue + for platform in CURRENT_PLATFORMS: + if platform in asset.name: + asset_platform_map[asset.name] = (asset, platform) + break + + if len(asset_platform_map) != len(CURRENT_PLATFORMS): + found_platforms = {platform for _, (_, platform) in asset_platform_map.items()} + missing_platforms = set(CURRENT_PLATFORMS) - found_platforms + raise Exception( + f"Expected {len(CURRENT_PLATFORMS)} assets but found {len(asset_platform_map)}. " + f"Missing: {', '.join(sorted(missing_platforms))}" + ) + + print("\nRelease assets:") + for asset_name in sorted(asset_platform_map.keys()): + print(f"- {asset_name}") + + # https://extensions.blender.org/api/v1/swagger + print("\nPublishing assets to Blender Extensions:") + for asset_name, (asset, platform) in asset_platform_map.items(): + publish_asset(asset, token, repo_root) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/build_rocky.yml b/.github/workflows/build_rocky.yml index 9ac489378ba..a1aa6118263 100644 --- a/.github/workflows/build_rocky.yml +++ b/.github/workflows/build_rocky.yml @@ -9,6 +9,13 @@ jobs: container: rockylinux:9 steps: + - name: Set up uv + uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0 + + - name: Install Python + # Installs latest Python version so it's preferred by uv over Rocky's system Python. 
+ run: uv python install + - name: Install Dependencies run: | dnf update -y @@ -17,7 +24,6 @@ jobs: sqlite-devel bzip2-devel zlib-devel openssl-devel xz-devel \ readline-devel ncurses-devel libffi-devel libuuid-devel git-lfs \ findutils xz byacc - python3 -m pip install typing_extensions git config --global --add safe.directory '*' - name: Install aws cli @@ -45,7 +51,7 @@ jobs: - name: Unpack Dependencies run: | cd build - python3 ../nix/cache_dependencies.py unpack + uv run ../nix/cache_dependencies.py unpack - name: ccache uses: hendrikmuhs/ccache-action@v1.2.22 @@ -56,7 +62,7 @@ jobs: shell: bash run: | set -o pipefail - CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release python3 ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log + CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release uv run ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log - name: Upload Build Logs if: always() @@ -71,7 +77,7 @@ jobs: - name: Pack Dependencies run: | cd build - python3 ../nix/cache_dependencies.py pack + uv run ../nix/cache_dependencies.py pack - name: Commit and Push Changes to Build Repository run: | diff --git a/.github/workflows/build_rocky_arm.yml b/.github/workflows/build_rocky_arm.yml index 208e1da9da0..a5cbea640df 100644 --- a/.github/workflows/build_rocky_arm.yml +++ b/.github/workflows/build_rocky_arm.yml @@ -9,6 +9,13 @@ jobs: container: arm64v8/rockylinux:9 steps: + - name: Set up uv + uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0 + + - name: Install Python + # Installs latest Python version so it's preferred by uv over Rocky's system Python. 
+ run: uv python install + - name: Install Dependencies run: | dnf update -y @@ -17,7 +24,6 @@ jobs: sqlite-devel bzip2-devel zlib-devel openssl-devel xz-devel \ readline-devel ncurses-devel libffi-devel libuuid-devel git-lfs \ findutils xz byacc - python3 -m pip install typing_extensions git config --global --add safe.directory '*' - name: Install aws cli @@ -45,7 +51,7 @@ jobs: - name: Unpack Dependencies run: | cd build - python3 ../nix/cache_dependencies.py unpack + uv run ../nix/cache_dependencies.py unpack - name: ccache uses: hendrikmuhs/ccache-action@v1.2.22 @@ -56,7 +62,7 @@ jobs: shell: bash run: | set -o pipefail - CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release python3 ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log + CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release uv run ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log - name: Upload Build Logs if: always() @@ -71,7 +77,7 @@ jobs: - name: Pack Dependencies run: | cd build - python3 ../nix/cache_dependencies.py pack + uv run ../nix/cache_dependencies.py pack - name: Commit and Push Changes to Build Repository run: | diff --git a/.github/workflows/ci-bonsai-daily.yml b/.github/workflows/ci-bonsai-daily.yml index 68b6ac2dbbb..5e1e90e8d9a 100644 --- a/.github/workflows/ci-bonsai-daily.yml +++ b/.github/workflows/ci-bonsai-daily.yml @@ -109,7 +109,7 @@ jobs: # Ensure Bonsai and ifcsverchok enable/disable works before uploading to extensions repo. # Download Blender. - wget -q -O blender.tar.xz https://download.blender.org/release/Blender5.0/blender-5.1.0-linux-x64.tar.xz + wget -q -O blender.tar.xz https://download.blender.org/release/Blender5.1/blender-5.1.0-linux-x64.tar.xz tar -xf blender.tar.xz # Setup Blender. @@ -122,7 +122,7 @@ jobs: pip install -r requirements.txt python setup_extensions_repo.py --last-tag cd .. 
- bonsai_zip="$(pwd)/$(ls bonsai_unstable_repo/bonsai_py311*-linux-x64.zip)" + bonsai_zip="$(pwd)/$(ls bonsai_unstable_repo/bonsai_py313*-linux-x64.zip)" # Install Bonsai. blender --command extension install-file -r user_default -e $bonsai_zip diff --git a/.github/workflows/ci-bonsai.yml b/.github/workflows/ci-bonsai.yml index 4c8364a9584..6fcc61658f3 100644 --- a/.github/workflows/ci-bonsai.yml +++ b/.github/workflows/ci-bonsai.yml @@ -24,7 +24,7 @@ jobs: strategy: fail-fast: false matrix: - pyver: [py311, py312] + pyver: [py311, py312, py313] config: - { name: "Windows Build", @@ -42,6 +42,11 @@ jobs: name: "MacOS ARM Build", short_name: macosm1, } + exclude: + # Python 3.13 is needed for Blender 5.1+ and Blender dropped Intel Mac support in 5.0. + - pyver: py313 + config: + short_name: macos steps: - uses: actions/checkout@v6 - uses: actions/setup-python@v6 # https://github.com/actions/setup-python diff --git a/.github/workflows/ci-black-formatting.yaml b/.github/workflows/ci-lint.yaml similarity index 90% rename from .github/workflows/ci-black-formatting.yaml rename to .github/workflows/ci-lint.yaml index f081f02945f..4ef84f8cbba 100644 --- a/.github/workflows/ci-black-formatting.yaml +++ b/.github/workflows/ci-lint.yaml @@ -1,4 +1,4 @@ -name: ci-black-formatting +name: ci-lint on: push: @@ -30,6 +30,7 @@ jobs: uv tool install ruff uv tool install black uv tool install poethepoet + uv tool install ty # black doesn't catch all syntax errors, so we check them explicitly. - name: Check syntax errors @@ -57,6 +58,13 @@ jobs: black --diff --check . 
| black-codeclimate | python .github/workflows/black_to_github_annotations.py continue-on-error: true + - name: ty check + id: ty + run: | + poe ty-venv + poe ty + continue-on-error: true + - name: Ruff check id: ruff run: | @@ -87,8 +95,7 @@ jobs: echo "\`\`\`" >> $GITHUB_STEP_SUMMARY } - run_check poe ruff-main - run_check poe ruff-old + run_check poe ruff exit $ERROR continue-on-error: true @@ -105,4 +112,7 @@ jobs: if [ "${{ steps.ruff.outcome }}" != "success" ]; then echo "::error::Ruff check failed, see Summary or 'ruff' step for the details." && ERROR=1 fi + if [ "${{ steps.ty.outcome }}" != "success" ]; then + echo "::error::ty check failed, see 'ty check' step for the details." && ERROR=1 + fi exit $ERROR diff --git a/.github/workflows/ci-pyodide-wasm-release.yml b/.github/workflows/ci-pyodide-wasm-release.yml new file mode 100644 index 00000000000..0e3017f8192 --- /dev/null +++ b/.github/workflows/ci-pyodide-wasm-release.yml @@ -0,0 +1,46 @@ +name: Release Pyodide WASM Wheel + +on: + workflow_dispatch: + +jobs: + build-and-push: + runs-on: ubuntu-latest + steps: + - name: Checkout IfcOpenShell + uses: actions/checkout@v6 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Build wheel + working-directory: pyodide + run: uv run pack_wheel.py --build + + - name: Find wheel + id: wheel + run: | + WHEEL=$(ls pyodide/dist/ifcopenshell-*.whl) + echo "path=$WHEEL" >> $GITHUB_OUTPUT + echo "name=$(basename $WHEEL)" >> $GITHUB_OUTPUT + + - name: Checkout wasm-wheels + uses: actions/checkout@v6 + with: + repository: IfcOpenShell/wasm-wheels + path: wasm-wheels + token: ${{ secrets.BUILD_REPO_TOKEN }} + + - name: Commit and push wheel to wasm-wheels + run: | + WHEEL_NAME="${{ steps.wheel.outputs.name }}" + cp "${{ steps.wheel.outputs.path }}" "wasm-wheels/$WHEEL_NAME" + cd wasm-wheels + git config user.name "IfcOpenBot" + git config user.email "ifcopenbot@ifcopenshell.org" + git add "$WHEEL_NAME" + git commit -m "Add $WHEEL_NAME" + VERSION=$(cat 
../VERSION) + git tag "v${VERSION}" + git push origin main + git push origin "v${VERSION}" diff --git a/.github/workflows/docs-deployment.yml b/.github/workflows/docs-deployment.yml deleted file mode 100644 index 3ff50b575e8..00000000000 --- a/.github/workflows/docs-deployment.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: Build and Deploy Stable Documentation - -on: - workflow_dispatch: # Manual trigger - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout repository - uses: actions/checkout@v6 - - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: '3.x' - - - name: Install dependencies - run: | - cd src/bonsai/docs - pip install -r requirements.txt # Run pip install from the docs directory - - - name: Build documentation - run: | - cd src/bonsai/docs - make html - - - name: Deploy to GitHub Pages (Stable) - uses: peaceiris/actions-gh-pages@v4 - with: - deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }} - external_repository: IfcOpenShell/bonsaibim_org_docs - publish_branch: main - cname: docs.bonsaibim.org - publish_dir: src/bonsai/docs/_build/html \ No newline at end of file diff --git a/.github/workflows/publish-aichat-app.yaml b/.github/workflows/publish-aichat-app.yaml index 776781428e4..20369577f54 100644 --- a/.github/workflows/publish-aichat-app.yaml +++ b/.github/workflows/publish-aichat-app.yaml @@ -32,7 +32,7 @@ jobs: submodules: recursive fetch-depth: 0 - name: Checkout intermediate Pages repo - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: repository: IfcOpenShell/aichat_ifcopenshell_org_static_html ref: gh-pages @@ -42,7 +42,7 @@ jobs: run: | rsync -av --delete --exclude='.git/' src/ifcchat/ output/ - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" - name: Download wheels diff --git a/.github/workflows/publish-bonsai-releases.yml b/.github/workflows/publish-bonsai-releases.yml new file mode 100644 index 00000000000..25d9a67d412 --- 
/dev/null +++ b/.github/workflows/publish-bonsai-releases.yml @@ -0,0 +1,16 @@ +name: Publish Bonsai Releases + +on: + workflow_dispatch: + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: astral-sh/setup-uv@v3 + + - run: uv run .github/scripts/publish-bonsai-releases.py + env: + BLENDER_EXTENSIONS_TOKEN: ${{ secrets.BLENDER_EXTENSIONS_TOKEN }} diff --git a/.github/workflows/publish-pyodide-demo-app.yml b/.github/workflows/publish-pyodide-demo-app.yml index 9f4c3ddbf8d..6b0141fc293 100644 --- a/.github/workflows/publish-pyodide-demo-app.yml +++ b/.github/workflows/publish-pyodide-demo-app.yml @@ -32,7 +32,7 @@ jobs: submodules: recursive fetch-depth: 0 - name: Checkout intermediate Pages repo - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: repository: IfcOpenShell/wasm_ifcopenshell_org_static_html ref: gh-pages diff --git a/README.md b/README.md index 74c2ebe1152..a44a6ff78c5 100644 --- a/README.md +++ b/README.md @@ -53,11 +53,11 @@ Contents | [ifcedit](https://docs.ifcopenshell.org/ifcedit.html) | CLI wrapper for ifcopenshell.api IFC model mutation functions | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcedit?label=PyPI&color=006dad)](https://pypi.org/project/ifcedit/) | | [ifcfm](https://docs.ifcopenshell.org/ifcfm.html) | Extract IFC data for FM handover requirements | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcfm?label=PyPI&color=006dad)](https://pypi.org/project/ifcfm/) | | [ifcmax](https://docs.ifcopenshell.org/ifcmax.html) | Historic extension for IFC support in 3DS Max | LGPL-3.0-or-later\* | [![Official](https://img.shields.io/badge/IfcOpenShell.org-Download-70ba35)](https://docs.ifcopenshell.org/ifcmax.html) -| [ifcmcp](https://docs.ifcopenshell.org/ifcmcp.html) | MCP server for querying and editing IFC building models | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcmcp?label=PyPI&color=006dad)](https://pypi.org/project/ifcmcp/) | -| 
[ifcopenshell-python](https://docs.ifcopenshell.org/ifcopenshell-python.html) | Python library for IFC manipulation | LGPL-3.0-or-later\* | [![Official](https://img.shields.io/badge/IfcOpenShell.org-Download-70ba35)](https://docs.ifcopenshell.org/ifcopenshell-python/installation.html) [![GitHub](https://img.shields.io/github/v/release/ifcopenshell/ifcopenshell?filter=ifcopenshell-python-*&label=GitHub&color=f6f8fa)](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcopenshell-python&expanded=true) [![PyPI](https://img.shields.io/pypi/v/ifcopenshell?label=PyPI&color=006dad)](https://pypi.org/project/ifcopenshell/) [![Anaconda](https://img.shields.io/conda/vn/conda-forge/ifcopenshell?label=Anaconda&color=43b02a)](https://anaconda.org/conda-forge/ifcopenshell) [![Anaconda](https://img.shields.io/conda/vn/ifcopenshell/ifcopenshell?label=Anaconda-Unstable&color=43b02a)](https://anaconda.org/ifcopenshell/ifcopenshell) [![Docker](https://img.shields.io/docker/pulls/aecgeeks/ifcopenshell?label=Docker&color=1D63ED)](https://hub.docker.com/r/aecgeeks/ifcopenshell) [![AUR](https://img.shields.io/aur/version/ifcopenshell?label=AUR&color=1793d1)](https://aur.archlinux.org/packages/ifcopenshell) [![AUR Unstable](https://img.shields.io/aur/version/ifcopenshell-git?label=AUR-Unstable&color=1793d1)](https://aur.archlinux.org/packages/ifcopenshell-git) [Pyodide WASM Wheels](https://github.com/IfcOpenShell/wasm-wheels#pyodide-test-wheels) | +| [ifcmcp](https://docs.ifcopenshell.org/ifcmcp.html) | MCP server for querying and editing IFC building models | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcopenshell-mcp?label=PyPI&color=006dad)](https://pypi.org/project/ifcopenshell-mcp/) | +| [ifcopenshell-python](https://docs.ifcopenshell.org/ifcopenshell-python.html) | Python library for IFC manipulation | LGPL-3.0-or-later\* | 
[![Official](https://img.shields.io/badge/IfcOpenShell.org-Download-70ba35)](https://docs.ifcopenshell.org/ifcopenshell-python/installation.html) [![GitHub](https://img.shields.io/github/v/release/ifcopenshell/ifcopenshell?filter=ifcopenshell-python-*&label=GitHub&color=f6f8fa)](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcopenshell-python&expanded=true) [![PyPI](https://img.shields.io/pypi/v/ifcopenshell?label=PyPI&color=006dad)](https://pypi.org/project/ifcopenshell/) [![Anaconda](https://img.shields.io/conda/vn/conda-forge/ifcopenshell?label=Anaconda&color=43b02a)](https://anaconda.org/conda-forge/ifcopenshell) [![Anaconda](https://img.shields.io/conda/vn/ifcopenshell/ifcopenshell?label=Anaconda-Unstable&color=43b02a)](https://anaconda.org/ifcopenshell/ifcopenshell) [![Docker](https://img.shields.io/docker/pulls/aecgeeks/ifcopenshell?label=Docker&color=1D63ED)](https://hub.docker.com/r/aecgeeks/ifcopenshell) [![AUR](https://img.shields.io/aur/version/ifcopenshell?label=AUR&color=1793d1)](https://aur.archlinux.org/packages/ifcopenshell) [![AUR Unstable](https://img.shields.io/aur/version/ifcopenshell-git?label=AUR-Unstable&color=1793d1)](https://aur.archlinux.org/packages/ifcopenshell-git) [![Pyodide WASM Wheels tag](https://img.shields.io/github/v/tag/ifcopenshell/wasm-wheels?sort=semver&label=pyodide-wasm-wheels)](https://github.com/IfcOpenShell/wasm-wheels) | | [ifcpatch](https://docs.ifcopenshell.org/ifcpatch.html) | Utility to run pre-packaged scripts to manipulate IFCs | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcpatch?label=PyPI&color=006dad)](https://pypi.org/project/ifcpatch/) | | [ifcquery](https://docs.ifcopenshell.org/ifcquery.html) | CLI tool for querying and inspecting IFC building models | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifcquery?label=PyPI&color=006dad)](https://pypi.org/project/ifcquery/) | -| [ifcsverchok](https://docs.ifcopenshell.org/ifcsverchok.html) | Blender Add-on for visual node 
programming with IFC | GPL-3.0-or-later | [![GitHub Unstable](https://img.shields.io/github/v/release/ifcopenshell/ifcopenshell?filter=ifcsverchok-*.*.*.*&label=GitHub-Unstable&color=f6f8fa)](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcsverchok&expanded=true) +| [ifcsverchok](https://docs.ifcopenshell.org/ifcsverchok.html) | Blender Add-on for visual node programming with IFC | GPL-3.0-or-later | [![GitHub](https://img.shields.io/github/v/release/ifcopenshell/ifcopenshell?filter=ifcsverchok-*.*.*&label=GitHub&color=f6f8fa)](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcsverchok&expanded=true) | [ifctester](https://docs.ifcopenshell.org/ifctester.html) | Library, CLI and webapp for IDS model auditing | LGPL-3.0-or-later | [![PyPI](https://img.shields.io/pypi/v/ifctester?label=PyPI&color=006dad)](https://pypi.org/project/ifctester/) | The IfcOpenShell C++ codebase is split into multiple interal libraries: diff --git a/VERSION b/VERSION index 7ada0d303f3..7fc2521fd74 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.8.5 +0.8.6 diff --git a/nix/build-all.py b/nix/build-all.py index 46b7b5c30c4..4858907ce6d 100644 --- a/nix/build-all.py +++ b/nix/build-all.py @@ -1,4 +1,6 @@ #!/usr/bin/python +# /// script +# /// ############################################################################### # # # This file is part of IfcOpenShell. 
# @@ -126,13 +128,7 @@ from pathlib import Path from urllib.request import urlretrieve -try: - from typing import Literal, Union -except: - # python 3.6 compatibility for rocky 8 - from typing import Union - - from typing_extensions import Literal +from typing import Literal, Union logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) @@ -1094,10 +1090,19 @@ def build_dependency( f"http://www.python.org/ftp/python/{PYTHON_VERSION}/", f"Python-{PYTHON_VERSION}.tgz", ) - python_bin = INSTALL_DIR / f"python-{PYTHON_VERSION}" / "bin" / "python3" + python_install = INSTALL_DIR / f"python-{PYTHON_VERSION}" + python_bin = python_install / "bin" / "python3" # `_ssl` module is present -> we will be able to install `numpy` later # to verify IfcOpenShell installation - run([str(python_bin), "-c", "import _ssl"]) + try: + run([str(python_bin), "-c", "import _ssl"]) + except RuntimeError: + print( + "ERROR: Python was built without SSL support (_ssl module is missing). " + f"To fix this: remove the installed Python at {python_install}; " + "install OpenSSL development libraries and re-run." + ) + raise if MAC_CROSS_COMPILE_INTEL: assert original_path @@ -1515,7 +1520,7 @@ def compile_python_wrapper( ) # Copy setup.py where pyodide build system expects it. shutil.copy(REPO_PATH / "pyodide" / "setup.py", REPO_PATH) - # Empty pyproject so it's contents won't affect the resulting wheelthe the + # Empty pyproject so it's contents won't affect the resulting wheel # otherwise the wheel will use version and dependencies from toml, not setup.py. (REPO_PATH / "pyproject.toml").write_text("") diff --git a/nix/cache_dependencies.py b/nix/cache_dependencies.py index 3d115764b4a..7d231779ee0 100644 --- a/nix/cache_dependencies.py +++ b/nix/cache_dependencies.py @@ -1,3 +1,5 @@ +# /// script +# /// """ Cache built dependencies for builds. 
diff --git a/pyodide/build_pyodide.sh b/pyodide/build_pyodide.sh index 20ad946162c..db5c5f08b08 100755 --- a/pyodide/build_pyodide.sh +++ b/pyodide/build_pyodide.sh @@ -14,18 +14,11 @@ source .venv/bin/activate uv pip install pyodide-build # `uv run` is required, so xbuildenv would skip using `pip`. uv run pyodide xbuildenv install +uv run pyodide xbuildenv install-emscripten -# Emscripten doesn't come with xbuildenv. -if [ ! -d emsdk ]; then - git clone https://github.com/emscripten-core/emsdk -fi -pushd emsdk -PYODIDE_EMSCRIPTEN_VERSION=$(pyodide config get emscripten_version) -./emsdk install ${PYODIDE_EMSCRIPTEN_VERSION} -./emsdk activate ${PYODIDE_EMSCRIPTEN_VERSION} -source emsdk_env.sh +EMSDK_ROOT=$(pyodide config get emscripten_dir) +source ${EMSDK_ROOT}/emsdk_env.sh which emcc -popd mkdir -p packages/ifcopenshell VERSION=`cat IfcOpenShell/VERSION` diff --git a/pyodide/pack_wheel.py b/pyodide/pack_wheel.py new file mode 100644 index 00000000000..7b6c2a63d5c --- /dev/null +++ b/pyodide/pack_wheel.py @@ -0,0 +1,232 @@ +# +# /// script +# # Latest Pyodide build env versions are listed here: +# # https://pyodide.github.io/pyodide/api/pyodide-cross-build-environments.json +# # https://github.com/pyodide/pyodide-build/blob/main/pyodide_build/xbuildenv_releases.py +# requires-python = "==3.13.2" +# dependencies = [ +# "requests", +# "setuptools", +# ] +# /// +""" +Pack an IfcOpenShell WASM wheel using Pyodide build system. 
+ +Usage: + uv run pack_wheel.py # Show this help + uv run pack_wheel.py --build # Build wheel + uv run pack_wheel.py --clean # Clean build artifacts and exit +""" + +import argparse +import os +import re +import shutil +import subprocess +import time +import zipfile +from pathlib import Path +from urllib.parse import quote + +import requests + +# Get repo root (parent of this script's parent directory) +REPO_ROOT = Path(__file__).parent.parent +PYODIDE_DIR = REPO_ROOT / "pyodide" +BUILD_DIR = PYODIDE_DIR / "build" + +# Hardcoded path (Windows packing workaround with --dev flag) +PYODIDE_BUILD = Path(r"L:\Projects\Github\pyodide-build") + +# Wheel platform tag (from PYODIDE_EMSCRIPTEN_VERSION in pyodide-build/Makefile.envs) +WHEEL_PLATFORM_TAG = "emscripten_4_0_9_wasm32" + +# Location where ifcopenshell will be extracted +IFCOPENSHELL_DIR = PYODIDE_DIR / "ifcopenshell" + + +class WheelBuilder: + @staticmethod + def extract_ifcopenshell_from_git(dst: Path) -> None: + """Extract ifcopenshell directory from git repo into destination.""" + Tools.rmrf(dst) + + print(f"Extracting ifcopenshell from git to {dst}...") + # Use git ls-files piped to git checkout-index to avoid copying + # untracked or ignored files from the actual repo. + ls_proc = subprocess.Popen( + ["git", "ls-files", "-z", "src/ifcopenshell-python/ifcopenshell"], + cwd=REPO_ROOT, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + checkout_proc = subprocess.Popen( + ["git", "checkout-index", "-z", "--prefix", "pyodide/", "--stdin"], + cwd=REPO_ROOT, + stdin=ls_proc.stdout, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + assert ls_proc.stdout is not None + ls_proc.stdout.close() + checkout_proc.communicate() + + if checkout_proc.returncode != 0: + assert checkout_proc.stderr is not None + raise RuntimeError(f"Failed to extract: {checkout_proc.stderr.decode()}") + + # Move src/ifcopenshell-python/ifcopenshell to ifcopenshell. 
+ temp_src = PYODIDE_DIR / "src" / "ifcopenshell-python" / "ifcopenshell" + shutil.move(temp_src, dst) + + # Clean up temporary src directory. + Tools.rmrf(PYODIDE_DIR / "src") + + print("āœ“ Extracted ifcopenshell from git") + + @staticmethod + def get_wheel_url(makefile_path: Path) -> str: + """Get S3 wheel URL based on BINARY_VERSION and BUILD_COMMIT from Makefile.""" + + def parse_makefile_vars() -> dict[str, str]: + content = makefile_path.read_text() + vars: dict[str, str] = {} + for match in re.finditer(r"^(BINARY_VERSION|BUILD_COMMIT):=(.+)$", content, re.MULTILINE): + vars[match.group(1)] = match.group(2).strip() + return vars + + vars: dict[str, str] = parse_makefile_vars() + binary_version = vars["BINARY_VERSION"] + build_commit = vars["BUILD_COMMIT"] + filename = f"ifcopenshell-{binary_version}+{build_commit}-cp313-cp313-pyodide_2025_0_wasm32.whl" + encoded_filename = quote(filename, safe="") + return f"https://s3.amazonaws.com/ifcopenshell-builds/{encoded_filename}" + + @staticmethod + def download_and_extract_so(url: str, build_dir: Path) -> tuple[Path, Path]: + """Download wheel from URL and extract .so and .py files.""" + py_wrapper_filename = "ifcopenshell_wrapper.py" + build_dir.mkdir(parents=True, exist_ok=True) + + wheel_path = build_dir / url.rsplit("/", 1)[-1] + + if wheel_path.exists(): + print(f"Using cached wheel: {wheel_path}") + else: + print(f"Downloading {url}...") + response = requests.get(url) + response.raise_for_status() + wheel_path.write_bytes(response.content) + + print("Extracting _ifcopenshell_wrapper files...") + with zipfile.ZipFile(wheel_path) as zf: + so_files = [f for f in zf.namelist() if f.endswith(".so")] + py_files = [f for f in zf.namelist() if f.endswith(py_wrapper_filename)] + + assert so_files, "No .so file found in wheel" + assert py_files, f"No {py_wrapper_filename} file found in wheel" + + so_file = so_files[0] + so_dst = build_dir / Path(so_file).name + so_dst.write_bytes(zf.read(so_file)) + + py_file = 
py_files[0] + py_dst = build_dir / Path(py_file).name + py_dst.write_bytes(zf.read(py_file)) + + return so_dst, py_dst + + +class Tools: + @staticmethod + def run( + cmd: list[str], + cwd: Path | None = None, + ) -> None: + print(f"$ {' '.join(cmd)}") + subprocess.check_call(cmd, cwd=cwd) + + @staticmethod + def create_symlink(dst: Path, src: Path) -> None: + Tools.rmrf(dst) + dst.symlink_to(src) + + @staticmethod + def rmrf(path: Path) -> None: + if path.exists() or path.is_symlink(): + if path.is_dir() and not path.is_symlink(): + shutil.rmtree(path) + else: + path.unlink() + + +def clean() -> None: + """Remove build artifacts.""" + paths_to_remove = ( + BUILD_DIR, + PYODIDE_DIR / ".pyodide_build", + PYODIDE_DIR / "dist", + PYODIDE_DIR / "ifcopenshell.egg-info", + PYODIDE_DIR / "src", + IFCOPENSHELL_DIR, + ) + for path in paths_to_remove: + if path.exists() or path.is_symlink(): + print(f"Removing {path}...") + Tools.rmrf(path) + print("āœ“ Clean complete") + + +def main() -> None: + parser = argparse.ArgumentParser(description=__doc__, add_help=False) + parser.add_argument("--build", action="store_true", help="Build the wheel") + parser.add_argument("--clean", action="store_true", help="Clean build folder") + parser.add_argument( + "--dev", + action="store_true", + help="Use editable pyodide-build from hardcoded path (Windows packing workaround)", + ) + args = parser.parse_args() + + if not args.build and not args.clean: + print(__doc__) + return + + if args.clean: + clean() + return + + start_time = time.time() + + WheelBuilder.extract_ifcopenshell_from_git(IFCOPENSHELL_DIR) + + print("Downloading and extracting _ifcopenshell_wrapper files...") + makefile = REPO_ROOT / "src" / "ifcopenshell-python" / "Makefile" + wheel_url = WheelBuilder.get_wheel_url(makefile) + so_file, py_file = WheelBuilder.download_and_extract_so(wheel_url, BUILD_DIR) + + Tools.create_symlink(IFCOPENSHELL_DIR / Path(so_file).name, so_file) + Tools.create_symlink(IFCOPENSHELL_DIR / 
Path(py_file).name, py_file) + + print("Installing pyodide-build...") + if args.dev: + Tools.run(["uv", "pip", "install", "-e", str(PYODIDE_BUILD)]) + else: + Tools.run(["uv", "pip", "install", "pyodide-build"]) + + print("Building with pyodide...") + # Use --no-isolation due to pyodide-build Windows support issues: + # symlink_unisolated_packages fails with missing `_sysconfigdata_$(CPYTHON_ABI_FLAGS)_emscripten_wasm32-emscripten.py`. + # Hardcode platform name since pyodide doesn't yet support overriding wheel tags on Windows. + # + # Use `LEGACY_PLATFORM` since pyodide 0.34.1 introduced new tag for wheels `pyemscripten`, + # which doesn't work with pyodide itself yet - https://github.com/pyodide/pyodide/issues/6177. + os.environ["USE_LEGACY_PLATFORM"] = "1" + Tools.run(["pyodide", "build", f"-C--build-option=--plat-name={WHEEL_PLATFORM_TAG}"]) + + elapsed = time.time() - start_time + print(f"\nāœ“ Done! ({elapsed:.1f}s)") + + +if __name__ == "__main__": + main() diff --git a/pyodide/setup.py b/pyodide/setup.py index 9678de0ac3e..474a0b45a42 100644 --- a/pyodide/setup.py +++ b/pyodide/setup.py @@ -2,12 +2,16 @@ # because `tool.setuptools.ext-modules` is still experimental in pyproject.toml # and we need it to get the wheel suffix right. 
import os +import sys from pathlib import Path import tomllib from setuptools import Extension, find_packages, setup +from setuptools.command.build_ext import build_ext -REPO_FOLDER = Path(__file__).parent +# Detect repo folder: if setup.py is in pyodide folder, go to parent +SETUP_DIR = Path(__file__).parent +REPO_FOLDER = SETUP_DIR.parent if SETUP_DIR.name == "pyodide" else SETUP_DIR def get_version() -> str: @@ -25,6 +29,39 @@ def get_dependencies() -> list[str]: return dependencies +class UnixBuildExt(build_ext): + """Customize ``build_ext`` to support packing on Windows.""" + + def finalize_options(self): + from distutils import sysconfig + + super().finalize_options() + if sys.platform == "win32": + self.compiler = "unix" + + # Configure sysconfig for Windows builds + # CCSHARED is the only variable that's not customizable with env vars. + # Basically avoiding this: + # File ".venv\Lib\site-packages\setuptools\_distutils\sysconfig.py", line 366, in customize_compiler + # compiler_so=cc_cmd + ' ' + ccshared, + # ~~~~~~~~~~~~~^~~~~~~~~~ + # TypeError: can only concatenate str (not "NoneType") to str + sysconfig.get_config_vars() # Initialize config cache + if sysconfig._config_vars.get("CCSHARED") is None: + sysconfig._config_vars["CCSHARED"] = "-fPIC" + # Override compiler type before it's instantiated + + # Set Emscripten compiler environment variables + os.environ["CC"] = "emcc" + os.environ["CXX"] = "em++" + os.environ["CFLAGS"] = "" + os.environ["CXXFLAGS"] = "" + os.environ["LDSHARED"] = "emcc -shared" + os.environ["AR"] = "emar" + os.environ["ARFLAGS"] = "rcs" + os.environ["SETUPTOOLS_EXT_SUFFIX"] = ".cpython-313-wasm32-emscripten.so" + + setup( name="ifcopenshell", version=get_version(), @@ -44,4 +81,5 @@ def get_dependencies() -> list[str]: }, # Has to provide extension to get the correct wheel suffix. 
ext_modules=[Extension("ifcopenshell._ifcopenshell_wrapper", sources=[])], + cmdclass={"build_ext": UnixBuildExt}, ) diff --git a/pyproject.toml b/pyproject.toml index 330f6150d0e..45c0b357a8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,8 +3,9 @@ name = "IfcOpenShell" version = "0.0.0" dependencies = [ "black==26.3.1", - "ruff==0.15.8", + "ruff==0.15.10", "poethepoet", + "ty==0.0.29", "gersemi==0.26.1", ] @@ -28,6 +29,9 @@ extend-exclude = ''' reportInvalidTypeForm = false disableBytesTypePromotions = true reportUnnecessaryTypeIgnoreComment = true +reportRedeclaration = false +# Ignore warnings from bpy stubs missing actual source files. +reportMissingModuleSource = false # Pylance doesn't respect gitignore, so we have to exclude files manually here # to avoid VS Code slowing down. # https://github.com/microsoft/pylance-release/issues/5169 @@ -84,7 +88,6 @@ all = "ignore" # Structural rules (no deep type inference needed, easier to adapt). abstract-method-in-final-class = "error" ambiguous-protocol-member = "error" -byte-string-type-annotation = "error" conflicting-declarations = "error" conflicting-metaclass = "error" cyclic-class-definition = "error" @@ -96,7 +99,6 @@ empty-body = "error" escape-character-in-forward-annotation = "error" final-on-non-method = "error" final-without-value = "error" -fstring-type-annotation = "error" ignore-comment-unknown-rule = "error" implicit-concatenated-string-type-annotation = "error" inconsistent-mro = "error" @@ -213,10 +215,7 @@ exclude = [ [tool.poe.tasks] -ruff-main = "ruff check --extend-exclude nix/build-all.py" -# It's actually Python 3.6, but ruff only supports 3.7+, but it should do. -ruff-old = "ruff check nix/build-all.py --target-version py37" -ruff.sequence = ["ruff-main", "ruff-old"] +ruff = "ruff check" black = "black ." @@ -224,7 +223,7 @@ ty.sequence = ["ty-bonsai", "ty-ios"] ty.help = "Run ty type checker. Requires ty-venv to be set up first." 
ty-bonsai = "ty check src/bonsai --python=src/bonsai/.venv" -ty-venv.sequence = ["ty-venv-bonsai", "ty-venv-ios"] +ty-venv.sequence = ["bonsai-deps", "ty-venv-bonsai", "ty-venv-ios"] ty-venv-bonsai.sequence = [ {cmd = "uv venv src/bonsai/.venv --python=3.11 --allow-existing"}, @@ -236,7 +235,7 @@ ty-venv-ios.sequence = [ {cmd = "uv pip install -r src/ifcopenshell-python/type-check-requirements.txt --python=src/ifcopenshell-python/.venv"}, ] -format.sequence = ["black", "ruff-main", "ruff-old"] +format.sequence = ["black", "ruff"] cmake-format = "gersemi . --in-place" diff --git a/src/bonsai/Makefile b/src/bonsai/Makefile index b517bc572b0..e00d77da8e5 100644 --- a/src/bonsai/Makefile +++ b/src/bonsai/Makefile @@ -17,8 +17,8 @@ # along with Bonsai. If not, see . SHELL := sh -PYTHON:=python3.11 -PIP:=pip3.11 +PYTHON:=python3 +PIP:=pip3 PATCH:=patch SED:=sed -i VENV_ACTIVATE:=bin/activate @@ -64,7 +64,7 @@ PYNUMBER:=3$(PYMINOR) PYPI_VERSION:=3.$(PYMINOR) endif # def PYVERSION -IFCMERGE_VERSION:=2026-04-02 +IFCMERGE_VERSION:=2026-04-07 ifdef PLATFORM SUPPORTED_PLATFORMS := linux macos macosm1 win diff --git a/src/bonsai/bonsai/bim/module/aggregate/decorator.py b/src/bonsai/bonsai/bim/module/aggregate/decorator.py index 2cd1c1bca07..eb389a58bd2 100644 --- a/src/bonsai/bonsai/bim/module/aggregate/decorator.py +++ b/src/bonsai/bonsai/bim/module/aggregate/decorator.py @@ -101,7 +101,7 @@ def uninstall(cls): cls.is_installed = False def dotted_line_shader(self): - vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") # ty:ignore[too-many-positional-arguments] + vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") vert_out.smooth("FLOAT", "v_ArcLength") shader_info = gpu.types.GPUShaderCreateInfo() diff --git a/src/bonsai/bonsai/bim/module/aggregate/prop.py b/src/bonsai/bonsai/bim/module/aggregate/prop.py index 0595c200c51..424f2b829f5 100644 --- a/src/bonsai/bonsai/bim/module/aggregate/prop.py +++ b/src/bonsai/bonsai/bim/module/aggregate/prop.py @@ -73,6 +73,22 
@@ def poll_related_object(self: "BIMObjectAggregateProperties", related_obj: bpy.t return True +def update_relating_object(self, context): + if self.relating_object: + ifc_id = tool.Blender.get_object_bim_props(self.relating_object).ifc_definition_id + if ifc_id: + bpy.ops.bim.aggregate_assign_object(relating_object=ifc_id) + bpy.ops.bim.disable_editing_aggregate() + + +def update_related_object(self, context): + if self.related_object: + ifc_id = tool.Blender.get_object_bim_props(self.related_object).ifc_definition_id + if ifc_id: + bpy.ops.bim.aggregate_assign_object(related_object=ifc_id) + bpy.ops.bim.disable_editing_aggregate() + + def update_aggregate_decorator(self, context): if self.aggregate_decorator: AggregateDecorator.install(bpy.context) @@ -89,12 +105,15 @@ def update_aggregate_mode_decorator(self, context): class BIMObjectAggregateProperties(PropertyGroup): is_editing: BoolProperty(name="Is Editing") - relating_object: PointerProperty(name="Relating Whole", type=bpy.types.Object, poll=poll_relating_object) + relating_object: PointerProperty( + name="Relating Whole", type=bpy.types.Object, poll=poll_relating_object, update=update_relating_object + ) related_object: PointerProperty( name="Related Part", description="Related Part, will be used to derive the Relating Object", type=bpy.types.Object, poll=poll_related_object, + update=update_related_object, ) if TYPE_CHECKING: diff --git a/src/bonsai/bonsai/bim/module/attribute/operator.py b/src/bonsai/bonsai/bim/module/attribute/operator.py index 13901c0f815..8069aa29ceb 100644 --- a/src/bonsai/bonsai/bim/module/attribute/operator.py +++ b/src/bonsai/bonsai/bim/module/attribute/operator.py @@ -295,13 +295,13 @@ class ExplorerShowUIPopup(bpy.types.Operator): bl_description = "Show Explorer UI to select element as attribute value or edit it." 
bl_options = {"REGISTER", "UNDO"} - ifc_class: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + ifc_class: bpy.props.StringProperty() """Element IFC class.""" - attribute_name: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + attribute_name: bpy.props.StringProperty() """IFC class attribute name.""" - data_path: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + data_path: bpy.props.StringProperty() """Full data path""" - preselect_ifc_id: bpy.props.IntProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] + preselect_ifc_id: bpy.props.IntProperty(options={"SKIP_SAVE"}) """IFC id to preselect in the popup.""" if TYPE_CHECKING: diff --git a/src/bonsai/bonsai/bim/module/attribute/prop.py b/src/bonsai/bonsai/bim/module/attribute/prop.py index acff2424b71..425c875e37c 100644 --- a/src/bonsai/bonsai/bim/module/attribute/prop.py +++ b/src/bonsai/bonsai/bim/module/attribute/prop.py @@ -41,7 +41,7 @@ class BIMAttributeProperties(PropertyGroup): class ExplorerEntity(PropertyGroup): - ifc_definition_id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + ifc_definition_id: bpy.props.IntProperty() if TYPE_CHECKING: ifc_definition_id: int @@ -60,7 +60,7 @@ def update_is_loaded(self, context: object) -> None: self.property_unset("editing_entity_id") self.entity_attributes.clear() - is_loaded: BoolProperty( # pyright: ignore[reportRedeclaration] + is_loaded: BoolProperty( name="Toggle Explorer UI", update=update_is_loaded, ) @@ -76,15 +76,15 @@ def get_ifc_class(self, context: object) -> tool.Blender.BLENDER_ENUM_ITEMS: def update_ifc_class(self, context: object) -> None: tool.Attribute.refresh_uilist_entities() - ifc_class: EnumProperty( # pyright: ignore[reportRedeclaration] + ifc_class: EnumProperty( name="IFC Class To Search", items=get_ifc_class, update=update_ifc_class, ) - entities: CollectionProperty(type=ExplorerEntity) # pyright: ignore[reportRedeclaration] - active_entity_index: 
IntProperty() # pyright: ignore[reportRedeclaration] - editing_entity_id: IntProperty() # pyright: ignore[reportRedeclaration] - entity_attributes: CollectionProperty(type=Attribute) # pyright: ignore[reportRedeclaration] + entities: CollectionProperty(type=ExplorerEntity) + active_entity_index: IntProperty() + editing_entity_id: IntProperty() + entity_attributes: CollectionProperty(type=Attribute) if TYPE_CHECKING: is_loaded: bool diff --git a/src/bonsai/bonsai/bim/module/clash/operator.py b/src/bonsai/bonsai/bim/module/clash/operator.py index 3788130a125..ae5f622bbdc 100644 --- a/src/bonsai/bonsai/bim/module/clash/operator.py +++ b/src/bonsai/bonsai/bim/module/clash/operator.py @@ -201,16 +201,10 @@ class ExecuteIfcClash(bpy.types.Operator, ExportHelper): "ALT+click to run a quick clash without selecting a file to save." ) - filter_glob: bpy.props.StringProperty( # pyright: ignore[reportRedeclaration] - default="*.bcf;*.json", options={"HIDDEN"} - ) - format: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] - name="Format", items=[(i, i, "") for i in ("bcf", "json")] - ) - filepath: bpy.props.StringProperty( # pyright: ignore[reportRedeclaration] - subtype="FILE_PATH", options={"SKIP_SAVE"} - ) - quick_clash: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + filter_glob: bpy.props.StringProperty(default="*.bcf;*.json", options={"HIDDEN"}) + format: bpy.props.EnumProperty(name="Format", items=[(i, i, "") for i in ("bcf", "json")]) + filepath: bpy.props.StringProperty(subtype="FILE_PATH", options={"SKIP_SAVE"}) + quick_clash: bpy.props.BoolProperty( options={"SKIP_SAVE"}, ) diff --git a/src/bonsai/bonsai/bim/module/clash/prop.py b/src/bonsai/bonsai/bim/module/clash/prop.py index 8bcd71632b4..1ef3403e648 100644 --- a/src/bonsai/bonsai/bim/module/clash/prop.py +++ b/src/bonsai/bonsai/bim/module/clash/prop.py @@ -37,12 +37,12 @@ class ClashSource(PropertyGroup): - name: StringProperty( # pyright: ignore[reportRedeclaration] + name: 
StringProperty( name="File", description="Absolute filepath to existing .ifc file to use as a clash source.", ) - filter_groups: CollectionProperty(type=BIMFilterGroup, name="Filter Groups") # pyright: ignore[reportRedeclaration] - mode: EnumProperty( # pyright: ignore[reportRedeclaration] + filter_groups: CollectionProperty(type=BIMFilterGroup, name="Filter Groups") + mode: EnumProperty( items=[ ("a", "All Elements", "All elements will be used for clashing"), ("i", "Include", "Only the selected elements are included for clashing"), @@ -62,7 +62,7 @@ class Clash(PropertyGroup): b_global_id: StringProperty(name="B") a_name: StringProperty(name="A Name") b_name: StringProperty(name="B Name") - clash_type: EnumProperty( # pyright: ignore[reportRedeclaration] + clash_type: EnumProperty( name="Clash Type", items=tuple((i, i, "") for i in CLASH_TYPE_ITEMS), ) diff --git a/src/bonsai/bonsai/bim/module/cost/operator.py b/src/bonsai/bonsai/bim/module/cost/operator.py index b612100a1d7..ac9d2d4bf14 100644 --- a/src/bonsai/bonsai/bim/module/cost/operator.py +++ b/src/bonsai/bonsai/bim/module/cost/operator.py @@ -87,7 +87,7 @@ class CopyCostSchedule(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Copy Cost Schedule" bl_description = "Create a duplicate of the provided cost schedule." 
bl_options = {"REGISTER", "UNDO"} - cost_schedule: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + cost_schedule: bpy.props.IntProperty() if TYPE_CHECKING: cost_schedule: int diff --git a/src/bonsai/bonsai/bim/module/debug/operator.py b/src/bonsai/bonsai/bim/module/debug/operator.py index 93153860088..c88ea4a00ee 100644 --- a/src/bonsai/bonsai/bim/module/debug/operator.py +++ b/src/bonsai/bonsai/bim/module/debug/operator.py @@ -260,14 +260,14 @@ class CreateAllShapes(bpy.types.Operator): ) bl_options = {"REGISTER"} - geometry_library: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + geometry_library: bpy.props.EnumProperty( name="Geometry Library", description="Geometry library to use for testing shape creation.", items=[(i, i, "") for i in get_args(ifcopenshell.geom.GEOMETRY_LIBRARY)], # By default use the same library as used for importing ifc project. default="hybrid-cgal-simple-opencascade", ) - custom_geometry_library: bpy.props.StringProperty( # pyright: ignore[reportRedeclaration] + custom_geometry_library: bpy.props.StringProperty( name="Custom Geometry Library", description="Provide a custom geometry library name, will override the 'geometry library' property.", ) @@ -781,7 +781,7 @@ class PurgeUnusedObjects(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Purge Unused Objects" bl_options = {"REGISTER", "UNDO"} - object_type: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + object_type: bpy.props.EnumProperty( name="Object Type", items=((s, s.capitalize(), "") for s in get_args(tool.Debug.PurgeMergeObjectType)), ) @@ -827,7 +827,7 @@ class MergeIdenticalObjects(bpy.types.Operator, tool.Ifc.Operator): ) bl_options = {"REGISTER", "UNDO"} - object_type: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + object_type: bpy.props.EnumProperty( name="Object Type", items=((s, s.capitalize(), "") for s in get_args(tool.Debug.PurgeMergeObjectType)), ) @@ -1073,7 +1073,7 @@ class 
ChangeLogLevel(bpy.types.Operator): bl_options = {"REGISTER"} bl_description = "Change general log level across all Python code in Blender" - log_level: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + log_level: bpy.props.EnumProperty( name="Log Level", items=[(i, i, "") for i in get_args(LogLevelType)], default="WARNING", diff --git a/src/bonsai/bonsai/bim/module/drawing/operator.py b/src/bonsai/bonsai/bim/module/drawing/operator.py index 45f67b0769f..d6594364bfa 100644 --- a/src/bonsai/bonsai/bim/module/drawing/operator.py +++ b/src/bonsai/bonsai/bim/module/drawing/operator.py @@ -246,17 +246,17 @@ class CreateDrawing(bpy.types.Operator): + "Add the CTRL modifier to optionally open drawings to view them as\n" + "they are created" ) - print_all: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + print_all: bpy.props.BoolProperty( name="Print All", default=False, options={"SKIP_SAVE"}, ) - open_viewer: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + open_viewer: bpy.props.BoolProperty( name="Open in Viewer", default=False, options={"SKIP_SAVE"}, ) - sync: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + sync: bpy.props.BoolProperty( name="Sync Before Creating Drawing", description="Could save some time if you're sure IFC and current Blender session are already in sync", default=True, @@ -2322,14 +2322,14 @@ class ActivateDrawingBase(tool.Ifc.Operator): + "SHIFT+CLICK to load a quick preview of the drawing view" ) - drawing: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - should_view_from_camera: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + drawing: bpy.props.IntProperty() + should_view_from_camera: bpy.props.BoolProperty( name="Should View From Camera", description="Move view to the activated drawing's camera position.", default=True, options={"SKIP_SAVE"}, ) - use_quick_preview: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + use_quick_preview: 
bpy.props.BoolProperty( name="Use Quick Preview", description="Just move the camera to the drawing view, without loading anything else.", default=False, @@ -3635,14 +3635,12 @@ class ToggleTargetView(bpy.types.Operator): bl_label = "Toggle Target View" bl_options = {"REGISTER", "UNDO"} - target_view: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] - toggle_all: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + target_view: bpy.props.StringProperty() + toggle_all: bpy.props.BoolProperty( default=False, options={"SKIP_SAVE"}, ) - option: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] - items=[(i, i, "") for i in get_args(ToggleOption)] - ) + option: bpy.props.EnumProperty(items=[(i, i, "") for i in get_args(ToggleOption)]) if TYPE_CHECKING: target_view: str diff --git a/src/bonsai/bonsai/bim/module/drawing/prop.py b/src/bonsai/bonsai/bim/module/drawing/prop.py index 57c182c02ef..1647d44773d 100644 --- a/src/bonsai/bonsai/bim/module/drawing/prop.py +++ b/src/bonsai/bonsai/bim/module/drawing/prop.py @@ -860,13 +860,13 @@ class BIMTextProperties(PropertyGroup): is_editing: BoolProperty(name="Is Editing", default=False) literals: CollectionProperty(name="Literals", type=LiteralProps) newline_at: IntProperty(name="Newline At") - symbol: EnumProperty( # pyright: ignore[reportRedeclaration] + symbol: EnumProperty( name="Symbol", description="Symbol from symbols.svg to use for this text.", items=[(s, s, "") for s in ["NO SYMBOL", "CUSTOM SYMBOL"] + tool.Drawing.DEFAULT_SYMBOLS], default="NO SYMBOL", ) - custom_symbol: StringProperty( # pyright: ignore[reportRedeclaration] + custom_symbol: StringProperty( name="Custom Symbol", description="Non-default symbol to use for this text.", ) diff --git a/src/bonsai/bonsai/bim/module/geometry/operator.py b/src/bonsai/bonsai/bim/module/geometry/operator.py index 7382b093f4b..8d075a96058 100644 --- a/src/bonsai/bonsai/bim/module/geometry/operator.py +++ 
b/src/bonsai/bonsai/bim/module/geometry/operator.py @@ -85,7 +85,7 @@ def _execute(self, context): class OverrideMeshSeparate(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.override_mesh_separate" bl_label = "IFC Mesh Separate" - blender_op = bpy.ops.mesh.separate.get_rna_type() + blender_op = bpy.ops.mesh.separate.get_rna_type() # ty: ignore[missing-argument] bl_description = blender_op.description + ".\nAlso makes sure changes are in sync with IFC." bl_options = {"REGISTER", "UNDO"} blender_type_prop = blender_op.properties["type"] @@ -246,7 +246,7 @@ def separate_element( class OverrideOriginSet(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.override_origin_set" - blender_op = bpy.ops.object.origin_set.get_rna_type() + blender_op = bpy.ops.object.origin_set.get_rna_type() # ty: ignore[missing-argument] bl_label = "IFC Origin Set" bl_description = ( blender_op.description + ".\nAlso makes sure changes are in sync with IFC (operator works only on IFC objects)" @@ -801,7 +801,7 @@ def calc_delete_is_batch(ifc_file: ifcopenshell.file, context: bpy.types.Context class OverrideDelete(bpy.types.Operator): bl_idname = "bim.override_object_delete" bl_label = "IFC Delete" - blender_op = bpy.ops.object.delete.get_rna_type() + blender_op = bpy.ops.object.delete.get_rna_type() # ty: ignore[missing-argument] bl_description = ( blender_op.description + ".\nAlso makes sure changes in sync with IFC." @@ -821,7 +821,7 @@ class OverrideDelete(bpy.types.Operator): def poll(cls, context): # Match `object.delete` poll for consistency. # `object.delete` poll just checks for OBJECT mode. 
- poll = bpy.ops.object.delete.poll() + poll = bpy.ops.object.delete.poll() # ty: ignore[missing-argument] if poll: return True cls.poll_message_set("Only available in OBJECT mode") @@ -1045,7 +1045,7 @@ class SelectedIdsData(NamedTuple): class OverrideOutlinerDelete(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.override_outliner_delete" bl_label = "IFC Delete" - blender_op = bpy.ops.outliner.delete.get_rna_type() + blender_op = bpy.ops.outliner.delete.get_rna_type() # ty: ignore[missing-argument] bl_description = ( blender_op.description + ".\nAlso makes sure changes in sync with IFC." @@ -1060,7 +1060,7 @@ class OverrideOutlinerDelete(bpy.types.Operator, tool.Ifc.Operator): def poll(cls, context) -> bool: # Match `outliner.delete` poll for consistency. # `outliner.delete` just checks `area.type` == `OUTLINER`. - poll = bpy.ops.outliner.delete.poll() + poll = bpy.ops.outliner.delete.poll() # ty: ignore[missing-argument] if poll: return True cls.poll_message_set("Only available from Outliner.") @@ -1164,7 +1164,7 @@ class OverrideDuplicateMove(bpy.types.Operator): def poll(cls, context) -> bool: # Match `object.duplicate_move` poll for consistency. # `object.duplicate_move` poll checks for OBJECT mode. - poll = bpy.ops.object.duplicate_move.poll() + poll = bpy.ops.object.duplicate_move.poll() # ty: ignore[missing-argument] if poll: return True cls.poll_message_set("Only available in OBJECT mode") @@ -1908,7 +1908,7 @@ def handle_selection( class OverrideJoin(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.override_object_join" bl_label = "IFC Join" - blender_op = bpy.ops.mesh.separate.get_rna_type() + blender_op = bpy.ops.mesh.separate.get_rna_type() # ty: ignore[missing-argument] bl_description = ( blender_op.description + ".\nAlso makes sure changes are in sync with IFC." 
@@ -1926,7 +1926,7 @@ class OverrideJoin(bpy.types.Operator, tool.Ifc.Operator): @classmethod def poll(cls, context): - if not bpy.ops.object.join.poll(): + if not bpy.ops.object.join.poll(): # ty: ignore[missing-argument] cls.poll_message_set("Active object is not EDITable.") return False if not context.selected_editable_objects: diff --git a/src/bonsai/bonsai/bim/module/group/operator.py b/src/bonsai/bonsai/bim/module/group/operator.py index adea9ee48c7..f58ba727637 100644 --- a/src/bonsai/bonsai/bim/module/group/operator.py +++ b/src/bonsai/bonsai/bim/module/group/operator.py @@ -43,11 +43,11 @@ class ToggleGroup(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Toggle Group" bl_options = {"REGISTER", "UNDO"} - ifc_definition_id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - group_type: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + ifc_definition_id: bpy.props.IntProperty() + group_type: bpy.props.EnumProperty( items=[(i, i, "") for i in get_args(tool.Group.GroupType)], ) - option: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + option: bpy.props.EnumProperty( items=[(i, i, "") for i in get_args(tool.Group.ToggleOption)], ) diff --git a/src/bonsai/bonsai/bim/module/ifcgit/operator.py b/src/bonsai/bonsai/bim/module/ifcgit/operator.py index cc0f75577ae..65bec6b2523 100644 --- a/src/bonsai/bonsai/bim/module/ifcgit/operator.py +++ b/src/bonsai/bonsai/bim/module/ifcgit/operator.py @@ -314,7 +314,7 @@ class SelectConflictEntity(bpy.types.Operator): bl_idname = "ifcgit.select_conflict_entity" bl_options = {"REGISTER"} - step_id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + step_id: bpy.props.IntProperty() if TYPE_CHECKING: step_id: int @@ -515,7 +515,7 @@ class RunGitDiff(bpy.types.Operator): ) bl_options = set() - save_to_temp: bpy.props.BoolProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] + save_to_temp: bpy.props.BoolProperty(options={"SKIP_SAVE"}) if TYPE_CHECKING: 
save_to_temp: bool @@ -547,7 +547,7 @@ class RenameBranch(bpy.types.Operator): bl_idname = "ifcgit.rename_branch" bl_options = {"REGISTER"} - new_name: bpy.props.StringProperty(name="New name") # pyright: ignore[reportRedeclaration] + new_name: bpy.props.StringProperty(name="New name") if TYPE_CHECKING: new_name: str diff --git a/src/bonsai/bonsai/bim/module/light/operator.py b/src/bonsai/bonsai/bim/module/light/operator.py index 6ee37f2285c..c3f377e9b96 100644 --- a/src/bonsai/bonsai/bim/module/light/operator.py +++ b/src/bonsai/bonsai/bim/module/light/operator.py @@ -272,21 +272,21 @@ def execute(self, context): + '''" map_u map_v 0 1 0.5 - + # This is a multiplier to colour balance the env map # In this case, it provides a rough ground luminance from 3k-5k env_map colorfunc env_colour 4 100 100 100 . 0 0 - + # .37 .57 1.5 is measured from a HDRI image # It is multiplied by a factor such that grey(r,g,b) = 1 skyfunc colorfunc sky_colour 4 .64 .99 2.6 . 0 0 - + void mixpict composite 7 env_colour sky_colour grey "''' + hdr_mask_path @@ -295,22 +295,22 @@ def execute(self, context): + """" map_u map_v 0 2 0.5 1 - + composite glow env_map_glow 0 0 4 1 1 1 0 - + env_map_glow source sky 0 0 4 0 0 1 180 - + env_colour glow ground_glow 0 0 4 1 1 1 0 - + ground_glow source ground 0 0 @@ -566,7 +566,7 @@ class LightPickCoordinates(bpy.types.Operator): ) bl_options = {"REGISTER", "UNDO"} - use_current_location: bpy.props.BoolProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] + use_current_location: bpy.props.BoolProperty(options={"SKIP_SAVE"}) if TYPE_CHECKING: use_current_location: bool diff --git a/src/bonsai/bonsai/bim/module/misc/operator.py b/src/bonsai/bonsai/bim/module/misc/operator.py index 33da9a05dc2..203bc9dd6c0 100644 --- a/src/bonsai/bonsai/bim/module/misc/operator.py +++ b/src/bonsai/bonsai/bim/module/misc/operator.py @@ -136,7 +136,7 @@ class SplitAlongEdge(bpy.types.Operator, tool.Ifc.Operator): "Will unassign element from a type if type 
has a representation." ) bl_options = {"REGISTER", "UNDO"} - mode: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + mode: bpy.props.EnumProperty( default="BOOLEAN", items=tuple((i, i, "") for i in get_args(SplitAlongEdgeMode)), ) @@ -359,7 +359,7 @@ class ConfirmQuickFavoriteOperator(bpy.types.Operator): bl_idname = "bim.confirm_quick_favorite_operator" bl_label = "Confirm Operator" bl_options = {"REGISTER", "UNDO"} - index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + index: bpy.props.IntProperty() if TYPE_CHECKING: index: int @@ -452,10 +452,8 @@ class MoveQuickFavoritesItem(bpy.types.Operator): bl_idname = "bim.move_quick_favorites_item" bl_label = "Move Quick Favorites Item" bl_options = {"REGISTER", "UNDO"} - index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - direction: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] - items=[("UP", "Up", ""), ("DOWN", "Down", "")] - ) + index: bpy.props.IntProperty() + direction: bpy.props.EnumProperty(items=[("UP", "Up", ""), ("DOWN", "Down", "")]) if TYPE_CHECKING: index: int @@ -474,7 +472,7 @@ class RemoveQuickFavoritesItem(bpy.types.Operator): bl_idname = "bim.remove_quick_favorites_item" bl_label = "Remove Quick Favorites Item" bl_options = {"REGISTER", "UNDO"} - index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + index: bpy.props.IntProperty() if TYPE_CHECKING: index: int diff --git a/src/bonsai/bonsai/bim/module/misc/prop.py b/src/bonsai/bonsai/bim/module/misc/prop.py index ddeda73b887..74a82f06cd7 100644 --- a/src/bonsai/bonsai/bim/module/misc/prop.py +++ b/src/bonsai/bonsai/bim/module/misc/prop.py @@ -36,9 +36,9 @@ class QuickFavoriteEnumItem(PropertyGroup): - name: StringProperty(name="Name", default="") # pyright: ignore[reportRedeclaration] - display_name: StringProperty(name="Display Name", default="") # pyright: ignore[reportRedeclaration] - description: StringProperty(name="Description", default="") # pyright: 
ignore[reportRedeclaration] + name: StringProperty(name="Name", default="") + display_name: StringProperty(name="Display Name", default="") + description: StringProperty(name="Description", default="") if TYPE_CHECKING: name: str @@ -51,19 +51,19 @@ def get_enum_items(self: "QuickFavoriteProperty", context: bpy.types.Context | N class QuickFavoriteProperty(PropertyGroup): - name: StringProperty(name="Name", default="") # pyright: ignore[reportRedeclaration] - display_name: StringProperty(name="Display Name", default="") # pyright: ignore[reportRedeclaration] - value_prop: EnumProperty( # pyright: ignore[reportRedeclaration] + name: StringProperty(name="Name", default="") + display_name: StringProperty(name="Display Name", default="") + value_prop: EnumProperty( name="Value Prop", items=tuple((v, v, "") for v in get_args(QuickFavoriteValueType)), ) - string_value: StringProperty(name="String Value", default="") # pyright: ignore[reportRedeclaration] - float_value: FloatProperty(name="Float Value", default=0.0) # pyright: ignore[reportRedeclaration] - int_value: IntProperty(name="Int Value", default=0) # pyright: ignore[reportRedeclaration] - bool_value: BoolProperty(name="Bool Value", default=False) # pyright: ignore[reportRedeclaration] - enum_value: EnumProperty(name="Enum Value", items=get_enum_items) # pyright: ignore[reportRedeclaration] - enum_items: CollectionProperty(type=QuickFavoriteEnumItem) # pyright: ignore[reportRedeclaration] - is_active: BoolProperty( # pyright: ignore[reportRedeclaration] + string_value: StringProperty(name="String Value", default="") + float_value: FloatProperty(name="Float Value", default=0.0) + int_value: IntProperty(name="Int Value", default=0) + bool_value: BoolProperty(name="Bool Value", default=False) + enum_value: EnumProperty(name="Enum Value", items=get_enum_items) + enum_items: CollectionProperty(type=QuickFavoriteEnumItem) + is_active: BoolProperty( name="Is Active", description="Only active properties will be added to 
the operator when invoked from Quick Favorites", default=False, @@ -100,20 +100,20 @@ def get_operator_suggestions(self: "QuickFavoritesItem", context: bpy.types.Cont class QuickFavoritesItem(PropertyGroup): - is_expanded: BoolProperty(name="Is Expanded", default=False) # pyright: ignore[reportRedeclaration] - search: StringProperty( # pyright: ignore[reportRedeclaration] + is_expanded: BoolProperty(name="Is Expanded", default=False) + search: StringProperty( name="Search", default="", search=get_operator_suggestions, # Resetting `search_options`, allowing users only to use suggestions. search_options=set(), ) - properties: CollectionProperty(type=QuickFavoriteProperty) # pyright: ignore[reportRedeclaration] - operator_id: StringProperty( # pyright: ignore[reportRedeclaration] + properties: CollectionProperty(type=QuickFavoriteProperty) + operator_id: StringProperty( name="Operator ID", default="", ) - label: StringProperty( # pyright: ignore[reportRedeclaration] + label: StringProperty( name="Label", description="Label that will be used in Quick Favorites for this operator", default="", @@ -139,15 +139,15 @@ def get_searched_operator(self) -> bpy.types.Struct | None: class BIMMiscProperties(PropertyGroup): - total_storeys: IntProperty( # pyright: ignore[reportRedeclaration] + total_storeys: IntProperty( name="Total Storeys", description="Number of storeys above object's storey to take into account for resizing", default=1, ) - override_colour: FloatVectorProperty( # pyright: ignore[reportRedeclaration] + override_colour: FloatVectorProperty( name="Override Colour", subtype="COLOR", default=(1, 0, 0, 1), min=0.0, max=1.0, size=4 ) - quick_favorites: CollectionProperty(type=QuickFavoritesItem) # pyright: ignore[reportRedeclaration] + quick_favorites: CollectionProperty(type=QuickFavoritesItem) if TYPE_CHECKING: total_storeys: int diff --git a/src/bonsai/bonsai/bim/module/model/product.py b/src/bonsai/bonsai/bim/module/model/product.py index 4cf4e001727..75f5441827a 
100644 --- a/src/bonsai/bonsai/bim/module/model/product.py +++ b/src/bonsai/bonsai/bim/module/model/product.py @@ -545,7 +545,7 @@ class ChangeTypePage(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.change_type_page" bl_label = "Change Type Page" bl_options = {"REGISTER"} - page: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + page: bpy.props.IntProperty() if TYPE_CHECKING: page: int diff --git a/src/bonsai/bonsai/bim/module/model/profile.py b/src/bonsai/bonsai/bim/module/model/profile.py index 1369ad3cb68..e5c0991e8ea 100644 --- a/src/bonsai/bonsai/bim/module/model/profile.py +++ b/src/bonsai/bonsai/bim/module/model/profile.py @@ -271,7 +271,7 @@ class ExtendProfile(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.extend_profile" bl_label = "Extend Profile" bl_options = {"REGISTER", "UNDO"} - join_type: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + join_type: bpy.props.EnumProperty( items=[("-", "Unjoin", ""), ("L", "L", ""), ("V", "V", ""), ("T", "T", "")], default="-", ) diff --git a/src/bonsai/bonsai/bim/module/model/prop.py b/src/bonsai/bonsai/bim/module/model/prop.py index c4956056aad..ff6ea961309 100644 --- a/src/bonsai/bonsai/bim/module/model/prop.py +++ b/src/bonsai/bonsai/bim/module/model/prop.py @@ -1729,20 +1729,20 @@ def poll_sverchok_nodes(self: "BIMExternalParametricGeometryProperties", node_tr class BIMExternalParametricGeometryProperties(bpy.types.PropertyGroup): - is_editing: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + is_editing: bpy.props.BoolProperty( name="Is Editing Paramteric Geometry", description="Toggle editing parametric geometry.", default=False, update=update_is_editing, ) - geometry_source: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + geometry_source: bpy.props.EnumProperty( name="Geometry Source", items=[ ("GEONODES", "Geometry Nodes", ""), ("IFCSVERCHOK", "IFC Sverchok", ""), ], ) - geo_nodes: bpy.props.PointerProperty( # pyright: 
ignore[reportRedeclaration] + geo_nodes: bpy.props.PointerProperty( name="Geometry Nodes", description="Geometry nodes tree to use as a source for representation.", type=bpy.types.GeometryNodeTree, @@ -1750,7 +1750,7 @@ class BIMExternalParametricGeometryProperties(bpy.types.PropertyGroup): poll=lambda self, node_tree: not node_tree.name.startswith("BBIM_EPG"), ) - sverchok_nodes: bpy.props.PointerProperty( # pyright: ignore[reportRedeclaration] + sverchok_nodes: bpy.props.PointerProperty( name="Sverchok Nodes", description="Sverchok node tree to use as a source for representation.", type=bpy.types.NodeTree, diff --git a/src/bonsai/bonsai/bim/module/nest/decorator.py b/src/bonsai/bonsai/bim/module/nest/decorator.py index 4a3637caa6b..28c3835ba74 100644 --- a/src/bonsai/bonsai/bim/module/nest/decorator.py +++ b/src/bonsai/bonsai/bim/module/nest/decorator.py @@ -101,7 +101,7 @@ def uninstall(cls): cls.is_installed = False def dotted_line_shader(self): - vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") # ty:ignore[too-many-positional-arguments] + vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") vert_out.smooth("FLOAT", "v_ArcLength") shader_info = gpu.types.GPUShaderCreateInfo() diff --git a/src/bonsai/bonsai/bim/module/owner/operator.py b/src/bonsai/bonsai/bim/module/owner/operator.py index 2470fd82271..b934337b64a 100644 --- a/src/bonsai/bonsai/bim/module/owner/operator.py +++ b/src/bonsai/bonsai/bim/module/owner/operator.py @@ -33,7 +33,7 @@ class EnableEditingPerson(bpy.types.Operator): bl_idname = "bim.enable_editing_person" bl_label = "Enable Editing Person" bl_options = {"REGISTER", "UNDO"} - person: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + person: bpy.props.IntProperty() if TYPE_CHECKING: person: int @@ -75,7 +75,7 @@ class RemovePerson(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_person" bl_label = "Remove Person" bl_options = {"REGISTER", "UNDO"} - person: bpy.props.IntProperty() # pyright: 
ignore[reportRedeclaration] + person: bpy.props.IntProperty() if TYPE_CHECKING: person: int @@ -88,7 +88,7 @@ class AddPersonAttribute(bpy.types.Operator): bl_idname = "bim.add_person_attribute" bl_label = "Add Person Attribute" bl_options = {"REGISTER", "UNDO"} - name: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + name: bpy.props.EnumProperty( items=tuple((i, i, "") for i in get_args(tool.Owner.PersonAttributeType)), ) @@ -104,10 +104,10 @@ class RemovePersonAttribute(bpy.types.Operator): bl_idname = "bim.remove_person_attribute" bl_label = "Remove Person Attribute" bl_options = {"REGISTER", "UNDO"} - name: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + name: bpy.props.EnumProperty( items=tuple((i, i, "") for i in get_args(tool.Owner.PersonAttributeType)), ) - id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + id: bpy.props.IntProperty() if TYPE_CHECKING: name: tool.Owner.PersonAttributeType # pyright: ignore[reportIncompatibleVariableOverride] @@ -122,7 +122,7 @@ class EnableEditingRole(bpy.types.Operator): bl_idname = "bim.enable_editing_role" bl_label = "Enable Editing Role" bl_options = {"REGISTER", "UNDO"} - role: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + role: bpy.props.IntProperty() if TYPE_CHECKING: role: int @@ -146,7 +146,7 @@ class AddRole(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.add_role" bl_label = "Add Role" bl_options = {"REGISTER", "UNDO"} - parent: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + parent: bpy.props.IntProperty() if TYPE_CHECKING: parent: int @@ -168,7 +168,7 @@ class RemoveRole(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_role" bl_label = "Remove Role" bl_options = {"REGISTER", "UNDO"} - role: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + role: bpy.props.IntProperty() if TYPE_CHECKING: role: int @@ -181,8 +181,8 @@ class AddAddress(bpy.types.Operator, tool.Ifc.Operator): bl_idname = 
"bim.add_address" bl_label = "Add Address" bl_options = {"REGISTER", "UNDO"} - parent: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - ifc_class: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + parent: bpy.props.IntProperty() + ifc_class: bpy.props.EnumProperty( items=tuple((i, i, "") for i in get_args(ADDRESS_TYPE)), ) @@ -198,7 +198,7 @@ class AddAddressAttribute(bpy.types.Operator): bl_idname = "bim.add_address_attribute" bl_label = "Add Address Attribute" bl_options = {"REGISTER", "UNDO"} - name: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + name: bpy.props.EnumProperty( items=tuple((i, i, "") for i in get_args(tool.Owner.AddressAttributeType)), ) @@ -214,10 +214,10 @@ class RemoveAddressAttribute(bpy.types.Operator): bl_idname = "bim.remove_address_attribute" bl_label = "Remove Address Attribute" bl_options = {"REGISTER", "UNDO"} - name: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + name: bpy.props.EnumProperty( items=tuple((i, i, "") for i in get_args(tool.Owner.AddressAttributeType)), ) - id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + id: bpy.props.IntProperty() if TYPE_CHECKING: name: tool.Owner.AddressAttributeType # pyright: ignore[reportIncompatibleVariableOverride] @@ -232,7 +232,7 @@ class EnableEditingAddress(bpy.types.Operator): bl_idname = "bim.enable_editing_address" bl_label = "Enable Editing Address" bl_options = {"REGISTER", "UNDO"} - address: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + address: bpy.props.IntProperty() if TYPE_CHECKING: address: int @@ -265,7 +265,7 @@ class RemoveAddress(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_address" bl_label = "Remove Address" bl_options = {"REGISTER", "UNDO"} - address: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + address: bpy.props.IntProperty() if TYPE_CHECKING: address: int @@ -278,7 +278,7 @@ class EnableEditingOrganisation(bpy.types.Operator): 
bl_idname = "bim.enable_editing_organisation" bl_label = "Enable Editing Organisation" bl_options = {"REGISTER", "UNDO"} - organisation: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + organisation: bpy.props.IntProperty() if TYPE_CHECKING: organisation: int @@ -320,7 +320,7 @@ class RemoveOrganisation(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_organisation" bl_label = "Remove Organisation" bl_options = {"REGISTER", "UNDO"} - organisation: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + organisation: bpy.props.IntProperty() if TYPE_CHECKING: organisation: int @@ -333,8 +333,8 @@ class AddPersonAndOrganisation(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.add_person_and_organisation" bl_label = "Add Person And Organisation" bl_options = {"REGISTER", "UNDO"} - person: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - organisation: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + person: bpy.props.IntProperty() + organisation: bpy.props.IntProperty() if TYPE_CHECKING: person: int @@ -350,7 +350,7 @@ class RemovePersonAndOrganisation(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_person_and_organisation" bl_label = "Remove Person And Organisation" bl_options = {"REGISTER", "UNDO"} - person_and_organisation: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + person_and_organisation: bpy.props.IntProperty() if TYPE_CHECKING: person_and_organisation: int @@ -365,7 +365,7 @@ class SetUser(bpy.types.Operator): bl_idname = "bim.set_user" bl_label = "Set User" bl_options = {"REGISTER", "UNDO"} - user: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + user: bpy.props.IntProperty() if TYPE_CHECKING: user: int @@ -401,7 +401,7 @@ class EnableEditingActor(bpy.types.Operator): bl_idname = "bim.enable_editing_actor" bl_label = "Enable Editing Actor" bl_options = {"REGISTER", "UNDO"} - actor: bpy.props.IntProperty() # pyright: 
ignore[reportRedeclaration] + actor: bpy.props.IntProperty() if TYPE_CHECKING: actor: int @@ -434,7 +434,7 @@ class RemoveActor(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.remove_actor" bl_label = "Remove Actor" bl_options = {"REGISTER", "UNDO"} - actor: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + actor: bpy.props.IntProperty() if TYPE_CHECKING: actor: int @@ -447,7 +447,7 @@ class AssignActor(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.assign_actor" bl_label = "Assign Actor" bl_options = {"REGISTER", "UNDO"} - actor: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + actor: bpy.props.IntProperty() if TYPE_CHECKING: actor: int @@ -462,7 +462,7 @@ class UnassignActor(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.unassign_actor" bl_label = "Unassign Actor" bl_options = {"REGISTER", "UNDO"} - actor: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + actor: bpy.props.IntProperty() if TYPE_CHECKING: actor: int @@ -481,7 +481,7 @@ class RemoveApplication(bpy.types.Operator, tool.Ifc.Operator): "Remove provided IfcApplication." "\n\nFor safety will only work on applications without inverses (they are typically marked as '(unused)'." 
) - application_id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + application_id: bpy.props.IntProperty() if TYPE_CHECKING: application_id: int @@ -525,7 +525,7 @@ class EnableEditingApplication(bpy.types.Operator): bl_idname = "bim.enable_editing_application" bl_label = "Enable Editing Application" bl_options = {"REGISTER", "UNDO"} - application_id: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + application_id: bpy.props.IntProperty() if TYPE_CHECKING: application_id: int diff --git a/src/bonsai/bonsai/bim/module/project/operator.py b/src/bonsai/bonsai/bim/module/project/operator.py index e24f6f41d85..d38c4f3b688 100644 --- a/src/bonsai/bonsai/bim/module/project/operator.py +++ b/src/bonsai/bonsai/bim/module/project/operator.py @@ -86,9 +86,7 @@ class NewProject(bpy.types.Operator): bl_label = "New Project" bl_options = {"REGISTER", "UNDO"} bl_description = "Start a new IFC project in a fresh session" - preset: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] - items=[(i, i, "") for i in get_args(PresetType)] - ) + preset: bpy.props.EnumProperty(items=[(i, i, "") for i in get_args(PresetType)]) if TYPE_CHECKING: preset: PresetType @@ -178,13 +176,9 @@ class SelectLibraryFile(bpy.types.Operator, IFCFileSelector, ImportHelper): bl_description = ( "Select an IFC file that can be used as a library.\n\nALT+click to reload the current loaded library file." 
) - filter_glob: bpy.props.StringProperty( - default="*.ifc;*.ifczip;*.ifcxml", options={"HIDDEN"} - ) # pyright: ignore[reportRedeclaration] - append_all: bpy.props.BoolProperty(default=False) # pyright: ignore[reportRedeclaration] - use_relative_path: bpy.props.BoolProperty( - name="Use Relative Path", default=False - ) # pyright: ignore[reportRedeclaration] + filter_glob: bpy.props.StringProperty(default="*.ifc;*.ifczip;*.ifcxml", options={"HIDDEN"}) + append_all: bpy.props.BoolProperty(default=False) + use_relative_path: bpy.props.BoolProperty(name="Use Relative Path", default=False) if TYPE_CHECKING: filter_glob: str @@ -568,7 +562,7 @@ class AppendLibraryElementByQuery(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.append_library_element_by_query" bl_label = "Append Library Element By Query" - query: bpy.props.StringProperty(name="Query") # pyright: ignore[reportRedeclaration] + query: bpy.props.StringProperty(name="Query") if TYPE_CHECKING: query: str @@ -600,11 +594,9 @@ class AppendLibraryElement(bpy.types.Operator, tool.Ifc.Operator): "Append element to the current project.\n\n" "ALT+CLICK to skip reusing materials, profiles, styles based on their name (may result in duplicates)" ) - definition: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - prop_index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - assume_unique_by_name: bpy.props.BoolProperty( - name="Assume Unique By Name", default=True, options={"SKIP_SAVE"} - ) # pyright: ignore[reportRedeclaration] + definition: bpy.props.IntProperty() + prop_index: bpy.props.IntProperty() + assume_unique_by_name: bpy.props.BoolProperty(name="Assume Unique By Name", default=True, options={"SKIP_SAVE"}) if TYPE_CHECKING: definition: int @@ -959,28 +951,24 @@ class LoadProject(bpy.types.Operator, IFCFileSelector, ImportHelper): bl_label = "Load Project" bl_options = {"REGISTER", "UNDO"} bl_description = "Load an existing IFC project" - filepath: bpy.props.StringProperty( - 
subtype="FILE_PATH", options={"SKIP_SAVE"} - ) # pyright: ignore[reportRedeclaration] - filter_glob: bpy.props.StringProperty( - default="*.ifc;*.ifczip;*.ifcxml;*.ifcsqlite", options={"HIDDEN"} - ) # pyright: ignore[reportRedeclaration] - is_advanced: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + filepath: bpy.props.StringProperty(subtype="FILE_PATH", options={"SKIP_SAVE"}) + filter_glob: bpy.props.StringProperty(default="*.ifc;*.ifczip;*.ifcxml;*.ifcsqlite", options={"HIDDEN"}) + is_advanced: bpy.props.BoolProperty( name="Enable Advanced Mode", description="Load IFC file with advanced settings. Checking this option will skip loading IFC file and will open advanced load settings", default=False, ) - use_relative_path: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + use_relative_path: bpy.props.BoolProperty( name="Use Relative Path", description="Store the IFC project path relative to the .blend file. Requires .blend file to be saved", default=False, ) - should_start_fresh_session: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + should_start_fresh_session: bpy.props.BoolProperty( name="Should Start Fresh Session", description="Clear current Blender session before loading IFC. Not supported with 'Use Relative Path' option", default=True, ) - import_without_ifc_data: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + import_without_ifc_data: bpy.props.BoolProperty( name="Import Without IFC Data", description=( "Import IFC objects as Blender objects without any IFC metadata and authoring capabilities." 
@@ -988,9 +976,7 @@ class LoadProject(bpy.types.Operator, IFCFileSelector, ImportHelper): ), default=False, ) - use_detailed_tooltip: bpy.props.BoolProperty( - default=False, options={"HIDDEN"} - ) # pyright: ignore[reportRedeclaration] + use_detailed_tooltip: bpy.props.BoolProperty(default=False, options={"HIDDEN"}) filename_ext = ".ifc" if TYPE_CHECKING: @@ -1300,7 +1286,7 @@ class ToggleFilterCategories(bpy.types.Operator): bl_idname = "bim.toggle_filter_categories" bl_label = "Toggle Filter Categories" bl_options = {"REGISTER", "UNDO"} - should_select: bpy.props.BoolProperty(name="Should Select", default=True) # pyright: ignore[reportRedeclaration] + should_select: bpy.props.BoolProperty(name="Should Select", default=True) if TYPE_CHECKING: should_select: bool @@ -1327,7 +1313,7 @@ class LinkIfc(bpy.types.Operator, ImportHelper, tool.Ifc.Operator): default=False, ) use_cache: bpy.props.BoolProperty(name="Use Cache", default=True) - query: bpy.props.StringProperty( # pyright: ignore[reportRedeclaration] + query: bpy.props.StringProperty( name="Query", description=( "Custom selector query to use to load element from a linked model. E.g. 
'IfcElement'.\n\n" @@ -1404,7 +1390,7 @@ class UnlinkIfc(bpy.types.Operator, tool.Ifc.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Remove the selected file from the link list" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") if TYPE_CHECKING: link_index: int @@ -1428,7 +1414,7 @@ class UnloadLink(bpy.types.Operator, tool.Ifc.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Unload the selected linked file" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") if TYPE_CHECKING: link_index: int @@ -1454,9 +1440,9 @@ class LoadLink(bpy.types.Operator, tool.Ifc.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Load the selected file" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] - use_cache: bpy.props.BoolProperty(name="Use Cache", default=True) # pyright: ignore[reportRedeclaration] - query: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") + use_cache: bpy.props.BoolProperty(name="Use Cache", default=True) + query: bpy.props.StringProperty() if TYPE_CHECKING: link_index: int @@ -1631,7 +1617,7 @@ class ReloadLink(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Reload the selected file" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") if TYPE_CHECKING: link_index: int @@ -1647,7 +1633,7 @@ class ToggleLinkSelectability(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Toggle selectability" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") if TYPE_CHECKING: link_index: int @@ 
-1679,8 +1665,8 @@ class ToggleLinkVisibility(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Toggle visibility between SOLID and WIREFRAME" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] - mode: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") + mode: bpy.props.EnumProperty( name="Visibility Mode", items=((i, i, "") for i in ("WIREFRAME", "VISIBLE")), ) @@ -1821,7 +1807,7 @@ class SelectLinkHandle(bpy.types.Operator): bl_options = {"REGISTER", "UNDO"} bl_description = "Select link empty object handle" - link_index: bpy.props.IntProperty(name="Link Index") # pyright: ignore[reportRedeclaration] + link_index: bpy.props.IntProperty(name="Link Index") if TYPE_CHECKING: link_index: int @@ -1843,7 +1829,7 @@ class SelectLinkedModelElement(bpy.types.Operator): bl_options = {"REGISTER"} bl_description = "Select an element in the currently selected linked model by providing GlobalId." 
- guid: bpy.props.StringProperty(name="GlobalId") # pyright: ignore[reportRedeclaration] + guid: bpy.props.StringProperty(name="GlobalId") if TYPE_CHECKING: guid: str @@ -1882,21 +1868,11 @@ class ExportIFC(bpy.types.Operator, ExportHelper): bl_options = {"REGISTER", "UNDO"} filename_ext = ".ifc" supported_filexts = (".ifc", ".ifczip", ".ifcjson") - filter_glob: bpy.props.StringProperty( - default=";".join(f"*{ext}" for ext in supported_filexts), options={"HIDDEN"} - ) # pyright: ignore[reportRedeclaration] - json_version: bpy.props.EnumProperty( - items=[("4", "4", ""), ("5a", "5a", "")], name="IFC JSON Version" - ) # pyright: ignore[reportRedeclaration] - json_compact: bpy.props.BoolProperty( - name="Export Compact IFCJSON", default=False - ) # pyright: ignore[reportRedeclaration] - should_save_as: bpy.props.BoolProperty( - name="Should Save As", default=False, options={"HIDDEN"} - ) # pyright: ignore[reportRedeclaration] - use_relative_path: bpy.props.BoolProperty( - name="Use Relative Path", default=False - ) # pyright: ignore[reportRedeclaration] + filter_glob: bpy.props.StringProperty(default=";".join(f"*{ext}" for ext in supported_filexts), options={"HIDDEN"}) + json_version: bpy.props.EnumProperty(items=[("4", "4", ""), ("5a", "5a", "")], name="IFC JSON Version") + json_compact: bpy.props.BoolProperty(name="Export Compact IFCJSON", default=False) + should_save_as: bpy.props.BoolProperty(name="Should Save As", default=False, options={"HIDDEN"}) + use_relative_path: bpy.props.BoolProperty(name="Use Relative Path", default=False) if TYPE_CHECKING: filter_glob: str @@ -2053,7 +2029,7 @@ class LoadLinkedProject(bpy.types.Operator, ImportHelper): bl_description = "Operator is used to load a project .cache.blend to then link it to the IFC file." 
bl_options = {"REGISTER", "UNDO"} - query: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + query: bpy.props.StringProperty() """See ``bim.link_ifc``.""" if TYPE_CHECKING: @@ -2443,8 +2419,8 @@ class HideQueriedLinkedElement(bpy.types.Operator): ) bl_options = {"REGISTER", "UNDO"} - unhide_all: bpy.props.BoolProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] - hide_all_except: bpy.props.BoolProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] + unhide_all: bpy.props.BoolProperty(options={"SKIP_SAVE"}) + hide_all_except: bpy.props.BoolProperty(options={"SKIP_SAVE"}) if TYPE_CHECKING: unhide_all: bool @@ -2918,12 +2894,8 @@ class IFCFileHandlerOperator(bpy.types.Operator): bl_label = "Import .ifc file" bl_options = {"REGISTER", "UNDO", "INTERNAL"} - directory: bpy.props.StringProperty( - subtype="FILE_PATH", options={"SKIP_SAVE", "HIDDEN"} - ) # pyright: ignore[reportRedeclaration] - files: bpy.props.CollectionProperty( - type=bpy.types.OperatorFileListElement, options={"SKIP_SAVE", "HIDDEN"} - ) # pyright: ignore[reportRedeclaration] + directory: bpy.props.StringProperty(subtype="FILE_PATH", options={"SKIP_SAVE", "HIDDEN"}) + files: bpy.props.CollectionProperty(type=bpy.types.OperatorFileListElement, options={"SKIP_SAVE", "HIDDEN"}) if TYPE_CHECKING: directory: str @@ -2978,7 +2950,7 @@ class MeasureTool(bpy.types.Operator, PolylineOperator): bl_label = "Measure Tool" bl_options = {"REGISTER", "UNDO"} - measure_type: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + measure_type: bpy.props.StringProperty() if TYPE_CHECKING: measure_type: str @@ -3077,7 +3049,7 @@ class MeasureFaceAreaTool(bpy.types.Operator, PolylineOperator): bl_label = "Measure Face Area Tool" bl_options = {"REGISTER", "UNDO"} - measure_type: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + measure_type: bpy.props.StringProperty() if TYPE_CHECKING: measure_type: str @@ -3379,7 +3351,7 @@ class 
LoadBlendMetadataAndIFC(bpy.types.Operator): bl_idname = "bim.load_blend_metadata_and_ifc" bl_label = "Load Blend Metadata and IFC" bl_options = {"REGISTER", "UNDO"} - filepath: bpy.props.StringProperty(name="IFC File Path", default="") # pyright: ignore[reportRedeclaration] + filepath: bpy.props.StringProperty(name="IFC File Path", default="") if TYPE_CHECKING: filepath: str diff --git a/src/bonsai/bonsai/bim/module/project/prop.py b/src/bonsai/bonsai/bim/module/project/prop.py index 5ba0cf6f282..1408d2f7861 100644 --- a/src/bonsai/bonsai/bim/module/project/prop.py +++ b/src/bonsai/bonsai/bim/module/project/prop.py @@ -345,7 +345,7 @@ class BIMProjectProperties(PropertyGroup): ), default=False, ) - should_cache: BoolProperty( # pyright: ignore[reportRedeclaration] + should_cache: BoolProperty( name="Cache", description=( "Cache loaded geometry to .h5 file in your cache directory (see in preferences) " diff --git a/src/bonsai/bonsai/bim/module/pset/operator.py b/src/bonsai/bonsai/bim/module/pset/operator.py index d7755cf80a1..ea1d39fb96d 100644 --- a/src/bonsai/bonsai/bim/module/pset/operator.py +++ b/src/bonsai/bonsai/bim/module/pset/operator.py @@ -240,7 +240,7 @@ class CopyPropertyToSelection(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Copy Property To Selection" bl_options = {"REGISTER", "UNDO"} - name: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + name: bpy.props.StringProperty() if TYPE_CHECKING: name: str @@ -280,10 +280,10 @@ class BIM_OT_add_property_to_edit(bpy.types.Operator): bl_label = "Add Property to Edit" bl_idname = "bim.add_property_to_edit" bl_options = {"REGISTER", "UNDO"} - option: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + option: bpy.props.EnumProperty( items=[(t, t, "") for t in tool.Pset.BULK_OPERATION_TYPES], ) - index: bpy.props.IntProperty(default=-1) # pyright: ignore[reportRedeclaration] + index: bpy.props.IntProperty(default=-1) if TYPE_CHECKING: option: tool.Pset.BulkOperationType 
@@ -307,9 +307,9 @@ class BIM_OT_remove_property_to_edit(bpy.types.Operator): bl_label = "Remove Property from Editing" bl_idname = "bim.remove_property_to_edit" bl_options = {"REGISTER", "UNDO"} - index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] - index2: bpy.props.IntProperty(default=-1) # pyright: ignore[reportRedeclaration] - option: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + index: bpy.props.IntProperty() + index2: bpy.props.IntProperty(default=-1) + option: bpy.props.EnumProperty( items=[(t, t, "") for t in tool.Pset.BULK_OPERATION_TYPES], ) @@ -336,7 +336,7 @@ class BIM_OT_bulk_edit_clear_list(bpy.types.Operator): bl_label = "Clear List of Properties" bl_idname = "bim.pset_bulk_edit_clear_list" bl_options = {"REGISTER", "UNDO"} - option: bpy.props.EnumProperty( # pyright: ignore[reportRedeclaration] + option: bpy.props.EnumProperty( items=[(t, t, "") for t in tool.Pset.BULK_OPERATION_TYPES], ) diff --git a/src/bonsai/bonsai/bim/module/pset/prop.py b/src/bonsai/bonsai/bim/module/pset/prop.py index 1777fa0f940..786e6a62a28 100644 --- a/src/bonsai/bonsai/bim/module/pset/prop.py +++ b/src/bonsai/bonsai/bim/module/pset/prop.py @@ -368,9 +368,9 @@ class GlobalPsetProperties(PropertyGroup): qto_filter: StringProperty(name="Qto Filter", options={"TEXTEDIT_UPDATE"}) # Bulk operations. 
- psets_to_delete: CollectionProperty(type=DeletePsetEntry) # pyright: ignore[reportRedeclaration] - psets_to_rename: CollectionProperty(type=RenamePropertyEntry) # pyright: ignore[reportRedeclaration] - psets_to_add_edit: CollectionProperty(type=AddEditPropertyEntry) # pyright: ignore[reportRedeclaration] + psets_to_delete: CollectionProperty(type=DeletePsetEntry) + psets_to_rename: CollectionProperty(type=RenamePropertyEntry) + psets_to_add_edit: CollectionProperty(type=AddEditPropertyEntry) if TYPE_CHECKING: pset_filter: str diff --git a/src/bonsai/bonsai/bim/module/search/operator.py b/src/bonsai/bonsai/bim/module/search/operator.py index d5a6b9b1e66..f55dcf31b72 100644 --- a/src/bonsai/bonsai/bim/module/search/operator.py +++ b/src/bonsai/bonsai/bim/module/search/operator.py @@ -799,7 +799,7 @@ class SelectQueryElements(Operator): bl_description = "Select elements matching an provided selector query" bl_options = {"REGISTER", "UNDO"} - query: StringProperty(name="Query") # pyright: ignore[reportRedeclaration] + query: StringProperty(name="Query") if TYPE_CHECKING: query: str @@ -829,12 +829,12 @@ def get_name_search_items(self, context: object, text: str) -> list[str]: # Extra item so it will be easy to select current text. 
return [text] + SaveSearch.name_search_items - name: StringProperty( # pyright: ignore[reportRedeclaration] + name: StringProperty( name="Name", search=get_name_search_items, search_options={"SORT"}, ) - module: StringProperty() # pyright: ignore[reportRedeclaration] + module: StringProperty() def update_use_all_ifcgroups(self, context: object = None) -> None: ifc_file = tool.Ifc.get() @@ -845,7 +845,7 @@ def update_use_all_ifcgroups(self, context: object = None) -> None: } self.name_search_items[:] = natsorted(groups) - use_all_ifcgroups: BoolProperty( # pyright: ignore[reportRedeclaration] + use_all_ifcgroups: BoolProperty( name="Use Any IfcGroup", description=( "By default we're targeting only IfcGroups with SEARCH ObjectType " diff --git a/src/bonsai/bonsai/bim/module/sequence/operator.py b/src/bonsai/bonsai/bim/module/sequence/operator.py index 2b4c317cd8e..fb97d7152a8 100644 --- a/src/bonsai/bonsai/bim/module/sequence/operator.py +++ b/src/bonsai/bonsai/bim/module/sequence/operator.py @@ -106,7 +106,7 @@ class ActivateStatusFilters(bpy.types.Operator): bl_description = "Filter and display objects based on currently selected IFC statuses" bl_options = {"REGISTER", "UNDO"} - only_if_enabled: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + only_if_enabled: bpy.props.BoolProperty( name="Only If Filters are Enabled", description="Activate status filters only in case if they were enabled from the UI before.", default=False, @@ -137,7 +137,7 @@ class SelectStatusFilter(bpy.types.Operator): bl_description = "Select elements with currently selected status" bl_options = {"REGISTER", "UNDO"} - status: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + status: bpy.props.StringProperty() if TYPE_CHECKING: status: tool.Sequence.ElementStatusUI @@ -156,7 +156,7 @@ class AssignStatus(bpy.types.Operator, tool.Ifc.Operator): bl_description = "Assign status to the selected elements.\n\nAlt+CLICK to unassign the status." 
bl_options = {"REGISTER", "UNDO"} - should_override_previous_status: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + should_override_previous_status: bpy.props.BoolProperty( name="Override Previous Status", description=( "Whether assigning new status should override previous one.\n\n" @@ -165,8 +165,8 @@ class AssignStatus(bpy.types.Operator, tool.Ifc.Operator): ), default=True, ) - status: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] - should_unassign_status: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + status: bpy.props.StringProperty() + should_unassign_status: bpy.props.BoolProperty( options={"SKIP_SAVE"}, ) @@ -415,7 +415,7 @@ class CopyWorkSchedule(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Copy Work Schedule" bl_description = "Create a duplicate of the provided work schedule." bl_options = {"REGISTER", "UNDO"} - work_schedule: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + work_schedule: bpy.props.IntProperty() if TYPE_CHECKING: work_schedule: int diff --git a/src/bonsai/bonsai/bim/module/sequence/prop.py b/src/bonsai/bonsai/bim/module/sequence/prop.py index abb1aa9f0cb..97f274c421d 100644 --- a/src/bonsai/bonsai/bim/module/sequence/prop.py +++ b/src/bonsai/bonsai/bim/module/sequence/prop.py @@ -412,7 +412,7 @@ class TaskProduct(PropertyGroup): class BIMWorkPlanProperties(PropertyGroup): work_plan_attributes: CollectionProperty(name="Work Plan Attributes", type=Attribute) - editing_type: EnumProperty( # pyright: ignore[reportRedeclaration] + editing_type: EnumProperty( items=[(i, i, "") for i in get_args(WorkPlanEditingType)], ) work_plans: CollectionProperty(name="Work Plans", type=WorkPlan) @@ -430,8 +430,8 @@ class BIMWorkPlanProperties(PropertyGroup): class IFCStatus(PropertyGroup): - name: StringProperty() # pyright: ignore[reportRedeclaration] - is_visible: BoolProperty( # pyright: ignore[reportRedeclaration] + name: StringProperty() + is_visible: BoolProperty( 
name="Is Visible", default=True, update=lambda x, y: (None, bpy.ops.bim.activate_status_filters())[0] ) diff --git a/src/bonsai/bonsai/bim/module/spatial/operator.py b/src/bonsai/bonsai/bim/module/spatial/operator.py index f1eb4ec4ee9..cd6bc6cab23 100644 --- a/src/bonsai/bonsai/bim/module/spatial/operator.py +++ b/src/bonsai/bonsai/bim/module/spatial/operator.py @@ -220,7 +220,7 @@ class CopyToContainer(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Copy to Container" bl_options = {"REGISTER", "UNDO"} - container: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + container: bpy.props.IntProperty() if TYPE_CHECKING: container: int diff --git a/src/bonsai/bonsai/bim/module/structural/operator.py b/src/bonsai/bonsai/bim/module/structural/operator.py index 20b32c7a0c3..825f52e9540 100644 --- a/src/bonsai/bonsai/bim/module/structural/operator.py +++ b/src/bonsai/bonsai/bim/module/structural/operator.py @@ -167,7 +167,7 @@ class EnableEditingStructuralBoundaryCondition(bpy.types.Operator): bl_idname = "bim.enable_editing_structural_boundary_condition" bl_label = "Enable Editing Structural Boundary Condition" bl_options = {"REGISTER", "UNDO"} - boundary_condition: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + boundary_condition: bpy.props.IntProperty() if TYPE_CHECKING: boundary_condition: int @@ -186,7 +186,7 @@ class EditStructuralBoundaryCondition(bpy.types.Operator, tool.Ifc.Operator): bl_idname = "bim.edit_structural_boundary_condition" bl_label = "Edit Structural Boundary Condition" bl_options = {"REGISTER", "UNDO"} - connection: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + connection: bpy.props.IntProperty() if TYPE_CHECKING: connection: int @@ -917,7 +917,7 @@ class EnableEditingBoundaryCondition(bpy.types.Operator): bl_idname = "bim.enable_editing_boundary_condition" bl_label = "Enable Editing Boundary Condition" bl_options = {"REGISTER", "UNDO"} - boundary_condition: bpy.props.IntProperty() # pyright: 
ignore[reportRedeclaration] + boundary_condition: bpy.props.IntProperty() if TYPE_CHECKING: boundary_condition: int diff --git a/src/bonsai/bonsai/bim/module/structural/shader.py b/src/bonsai/bonsai/bim/module/structural/shader.py index 9688ce9f0e3..b9b5a5c7bc6 100644 --- a/src/bonsai/bonsai/bim/module/structural/shader.py +++ b/src/bonsai/bonsai/bim/module/structural/shader.py @@ -83,7 +83,7 @@ def get_linear_shader( PARALLEL DISTRIBUTED FORCE, DISTRIBUTED MOMENT, """ - vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") # ty:ignore[too-many-positional-arguments] + vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") vert_out.smooth("VEC3", "forces") vert_out.smooth("VEC3", "co") @@ -203,7 +203,7 @@ def get_point_shader(self, pattern: Literal["SINGLE FORCE", "SINGLE MOMENT"]) -> """param: pattern: type of pattern SINGLE FORCE, SINGLE MOMENT""" - vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") # ty: ignore[too-many-positional-arguments] + vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") vert_out.smooth("VEC3", "co") shader_info = gpu.types.GPUShaderCreateInfo() @@ -253,7 +253,7 @@ def get_point_shader(self, pattern: Literal["SINGLE FORCE", "SINGLE MOMENT"]) -> def get_planar_shader(self) -> gpu.types.GPUShader: """shader for planar loads""" - vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") # ty: ignore[too-many-positional-arguments] + vert_out = gpu.types.GPUStageInterfaceInfo("my_interface") vert_out.smooth("VEC3", "co") shader_info = gpu.types.GPUShaderCreateInfo() diff --git a/src/bonsai/bonsai/bim/module/system/operator.py b/src/bonsai/bonsai/bim/module/system/operator.py index e1f6f5e9e4d..5fe47564c0d 100644 --- a/src/bonsai/bonsai/bim/module/system/operator.py +++ b/src/bonsai/bonsai/bim/module/system/operator.py @@ -54,7 +54,7 @@ class AddSystem(bpy.types.Operator, tool.Ifc.Operator): bl_label = "Add System" bl_options = {"REGISTER", "UNDO"} - parent_system_id: bpy.props.IntProperty() # pyright: 
ignore[reportRedeclaration] + parent_system_id: bpy.props.IntProperty() if TYPE_CHECKING: parent_system_id: int diff --git a/src/bonsai/bonsai/bim/module/web/prop.py b/src/bonsai/bonsai/bim/module/web/prop.py index 42bd272578a..e9e5a331850 100644 --- a/src/bonsai/bonsai/bim/module/web/prop.py +++ b/src/bonsai/bonsai/bim/module/web/prop.py @@ -26,16 +26,16 @@ class WebProperties(PropertyGroup): - webserver_port: IntProperty( # pyright: ignore[reportRedeclaration] + webserver_port: IntProperty( name="Webserver Port", min=0, max=65535, ) - is_running: BoolProperty( # pyright: ignore[reportRedeclaration] + is_running: BoolProperty( name="Webserver Running Status", default=False, ) - is_connected: BoolProperty( # pyright: ignore[reportRedeclaration] + is_connected: BoolProperty( name="Connection Status", default=False, ) diff --git a/src/bonsai/bonsai/bim/operator.py b/src/bonsai/bonsai/bim/operator.py index 32164781320..f1ab24c7b0e 100644 --- a/src/bonsai/bonsai/bim/operator.py +++ b/src/bonsai/bonsai/bim/operator.py @@ -159,9 +159,9 @@ class SelectURIAttribute(bpy.types.Operator, ImportHelper): bl_label = "Select URI Attribute" bl_options = {"REGISTER", "UNDO"} bl_description = "Select a local file" - attribute_data_path: bpy.props.StringProperty(name="Data Path") # pyright: ignore[reportRedeclaration] + attribute_data_path: bpy.props.StringProperty(name="Data Path") """Full data path to `Attribute`/string property.""" - use_relative_path: bpy.props.BoolProperty( # pyright: ignore[reportRedeclaration] + use_relative_path: bpy.props.BoolProperty( name="Use Relative Path", default=False, ) @@ -601,7 +601,7 @@ class CreateMacBonsaiApp(bpy.types.Operator): "ALT+click to uninstall Bonsai app if it was installed previously." 
) - uninstall: bpy.props.BoolProperty(options={"SKIP_SAVE"}) # pyright: ignore[reportRedeclaration] + uninstall: bpy.props.BoolProperty(options={"SKIP_SAVE"}) if TYPE_CHECKING: uninstall: bool @@ -1667,7 +1667,7 @@ class BIM_OT_attribute_add_subitem(bpy.types.Operator): bl_description = "Add subitem to the current attribute" bl_options = {"REGISTER", "UNDO"} - data_path: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + data_path: bpy.props.StringProperty() """Full data path.""" if TYPE_CHECKING: @@ -1691,9 +1691,9 @@ class BIM_OT_attribute_remove_subitem(bpy.types.Operator): bl_description = "Add subitem to the current attribute" bl_options = {"REGISTER", "UNDO"} - data_path: bpy.props.StringProperty() # pyright: ignore[reportRedeclaration] + data_path: bpy.props.StringProperty() """Full data path.""" - index: bpy.props.IntProperty() # pyright: ignore[reportRedeclaration] + index: bpy.props.IntProperty() if TYPE_CHECKING: data_path: str diff --git a/src/bonsai/bonsai/bim/prop.py b/src/bonsai/bonsai/bim/prop.py index e5d495c62a7..e10243d266e 100644 --- a/src/bonsai/bonsai/bim/prop.py +++ b/src/bonsai/bonsai/bim/prop.py @@ -333,7 +333,7 @@ class Attribute(PropertyGroup): filter_glob: StringProperty() is_null: BoolProperty(name="Is Null", update=update_is_null) is_selected: BoolProperty(name="Is Selected", default=False) - subitems_values: CollectionProperty(type=StrProperty) # pyright: ignore[reportRedeclaration] + subitems_values: CollectionProperty(type=StrProperty) # Attribute parameters. 
is_optional: BoolProperty(name="Is Optional") @@ -342,7 +342,7 @@ class Attribute(PropertyGroup): value_max: FloatProperty(description="This is used to validate int_value and float_value") value_max_constraint: BoolProperty(default=False, description="True if the numerical value has an upper bound") special_type: StringProperty(name="Special Value Type", default="") - use_explorer_ui: BoolProperty() # pyright: ignore[reportRedeclaration] + use_explorer_ui: BoolProperty() metadata: StringProperty(name="Metadata", description="For storing some additional information about the attribute") update: StringProperty(name="Update", description="Custom update function to be executed") diff --git a/src/bonsai/bonsai/bim/ui.py b/src/bonsai/bonsai/bim/ui.py index 09eee514017..97980d92dda 100644 --- a/src/bonsai/bonsai/bim/ui.py +++ b/src/bonsai/bonsai/bim/ui.py @@ -665,7 +665,7 @@ class BIM_ADDON_preferences(bpy.types.AddonPreferences): name="Disable Undo When Saving (Faster saves, no undo for you!)", default=False ) should_stream: BoolProperty(name="Stream Data From IFC-SPF (Only for advanced users)", default=False) - should_always_cache: BoolProperty( # pyright: ignore[reportRedeclaration] + should_always_cache: BoolProperty( name="Always Cache Geometry", description="Whether to always cache geometry regardless of 'Cache' setting during Advanced Project Load.", ) diff --git a/src/bonsai/bonsai/core/ifcgit.py b/src/bonsai/bonsai/core/ifcgit.py index 289337908a4..875a37f8c04 100644 --- a/src/bonsai/bonsai/core/ifcgit.py +++ b/src/bonsai/bonsai/core/ifcgit.py @@ -151,8 +151,11 @@ def merge_branch(ifcgit: type[tool.IfcGit], ifc: type[tool.Ifc], operator: bpy.t conflicts = ifcgit.git_mergetool(mergetool, path_ifc) if conflicts is not None: ifcgit.git_merge_abort() - ifcgit.store_merge_conflicts(conflicts) - operator.report({"WARNING"}, "Merge failed — see the conflict report in the panel below") + if conflicts: + ifcgit.store_merge_conflicts(conflicts) + 
operator.report({"WARNING"}, "Merge failed — see the conflict report in the panel below") + else: + operator.report({"ERROR"}, "Merge tool failed — check that ifcmerge is installed correctly") return False ifcgit.commit_merge(path_ifc) diff --git a/src/bonsai/bonsai/tool/ifcgit.py b/src/bonsai/bonsai/tool/ifcgit.py index ae7176eb6f3..49e6440baeb 100644 --- a/src/bonsai/bonsai/tool/ifcgit.py +++ b/src/bonsai/bonsai/tool/ifcgit.py @@ -217,7 +217,7 @@ def get_commits_list(cls, path_ifc: str, lookup: dict[str, Any]) -> None: rev=[props.display_branch], ) ) - commits_relevant = list( + commits_relevant = set( git.objects.commit.Commit.iter_items( repo=repo, rev=[props.display_branch], @@ -225,11 +225,17 @@ def get_commits_list(cls, path_ifc: str, lookup: dict[str, Any]) -> None: ) ) + def is_relevant(commit): + if commit in commits_relevant: + return True + # Merge commits are relevant too + return len(commit.parents) > 1 and any(p in commits_relevant for p in commit.parents) + for commit in commits: if props.ifcgit_filter == "tagged" and commit.hexsha not in lookup: continue - elif props.ifcgit_filter == "relevant" and commit not in commits_relevant: + elif props.ifcgit_filter == "relevant" and not is_relevant(commit): continue props.ifcgit_commits.add() @@ -239,7 +245,7 @@ def get_commits_list(cls, path_ifc: str, lookup: dict[str, Any]) -> None: list_item.author_name = commit.author.name list_item.author_email = commit.author.email list_item.committed_date = int(commit.committed_date) - if commit in commits_relevant: + if is_relevant(commit): list_item.relevant = True if commit.hexsha in lookup: for tag in lookup[commit.hexsha]: @@ -280,11 +286,11 @@ def load_project(cls, path_ifc: str = "") -> None: if re.match("^Ifc", obj.name): bpy.data.objects.remove(obj, do_unlink=True) - bpy.data.orphans_purge(do_recursive=True) # ty:ignore[unknown-argument] + bpy.data.orphans_purge(do_recursive=True) - from bonsai.bim.module.root.data import IfcClassData - from 
bonsai.bim.module.model.data import AuthoringData import bonsai.bim.handler + from bonsai.bim.module.model.data import AuthoringData + from bonsai.bim.module.root.data import IfcClassData AuthoringData.type_thumbnails = {} @@ -589,7 +595,7 @@ def git_merge(cls, branch_name: str) -> Union[str, None]: """Attempt a git merge. Returns None on clean merge, 'conflict' on expected GitCommandError, or 'error' on an unknown GitError.""" repo = IfcGitRepo.repo - branch = repo.branches[branch_name] + branch = repo.refs[branch_name] try: repo.git.merge(branch) return None @@ -603,7 +609,7 @@ def git_merge_no_commit(cls, branch_name: str) -> Union[str, None]: """Attempt a git merge without committing (always leaves a merge state to abort). Returns None on clean merge, 'conflict' on conflict, or 'error' on unknown failure.""" repo = IfcGitRepo.repo - branch = repo.branches[branch_name] + branch = repo.refs[branch_name] try: repo.git.merge(branch, no_commit=True, no_ff=True) return None @@ -619,8 +625,8 @@ def git_mergetool(cls, mergetool: str, path_ifc: str) -> Union[list, None]: report_path = path_ifc + ".ifcmerge" try: repo.git.mergetool(tool=mergetool) - except git.exc.GitCommandError: - pass + except git.exc.GitCommandError as e: + print(f"ifcgit: mergetool failed: {e}") conflicts = None if os.path.exists(report_path): @@ -636,6 +642,10 @@ def git_mergetool(cls, mergetool: str, path_ifc: str) -> Union[list, None]: os.remove(report_path) except OSError: pass + + if conflicts is None and repo.index.unmerged_blobs(): + conflicts = [] + return conflicts @classmethod diff --git a/src/bonsai/bonsai/tool/raycast.py b/src/bonsai/bonsai/tool/raycast.py index c16bc55564a..dc45b4e9bb8 100644 --- a/src/bonsai/bonsai/tool/raycast.py +++ b/src/bonsai/bonsai/tool/raycast.py @@ -25,13 +25,12 @@ import bpy import mathutils import numpy as np -from mathutils import Vector - from bpy_extras import view3d_utils +from mathutils import Vector import bonsai.core.tool import bonsai.tool as tool 
-from bpy_extras import view3d_utils + class Raycast(bonsai.core.tool.Raycast): offset = 10 @@ -434,11 +433,8 @@ def divide_vector(start, end, n): seg_len_sq = sx * sx + sy * sy if seg_len_sq == 0.0: - # degenerate segment: return distance to p0 - dx = px - p0x - dy = py - p0y - dist = math.hypot(dx, dy) - return dist, (p0x, p0y), 0.0 + # degenerate segment: skip it + continue # project (p - p0) onto seg: t = dot(p-p0, seg) / |seg|^2 apx = px - p0x @@ -892,8 +888,19 @@ def calculate_snap_threshold(cls, view_distance): def create_snap_obj(cls, obj): if obj.data is None or not isinstance(obj.data, bpy.types.Mesh): return None - for snap_obj in cls.snap_objs: + for i, snap_obj in enumerate(cls.snap_objs): if obj.name == snap_obj.obj.name: + # Handle objects modified while a modal operator is active. + # Example: adding a door or window alters the wall geometry. + if len(obj.data.vertices) != len(snap_obj.verts_3d): + cls.snap_objs.pop(i) + snap_obj = SnapObj(obj) + cls.snap_objs.append(snap_obj) + for v1, v2 in zip(obj.data.vertices, snap_obj.verts_3d): + if (obj.matrix_world @ v1.co) != v2: + cls.snap_objs.pop(i) + snap_obj = SnapObj(obj) + cls.snap_objs.append(snap_obj) return snap_obj snap_obj = SnapObj(obj) cls.snap_objs.append(snap_obj) diff --git a/src/bonsai/docs/guides/development/code_style.rst b/src/bonsai/docs/guides/development/code_style.rst index cdc506d5aba..96f3d2b9cc8 100644 --- a/src/bonsai/docs/guides/development/code_style.rst +++ b/src/bonsai/docs/guides/development/code_style.rst @@ -7,7 +7,7 @@ Python code formatters For Python code formatting, we use `Black code formatter `__, black settings are stored in the repository's pyproject.toml. -We have GitHub workflow `ci-black-formatting` to maintain black formatting across the repository. +We have GitHub workflow `ci-lint` to maintain black formatting across the repository. 
``black`` can be installed using ``pip install black`` and files can be formatted with the following example command: diff --git a/src/bonsai/docs/guides/development/maintenance.rst b/src/bonsai/docs/guides/development/maintenance.rst index e1728fd7a51..f08a003fd59 100644 --- a/src/bonsai/docs/guides/development/maintenance.rst +++ b/src/bonsai/docs/guides/development/maintenance.rst @@ -13,7 +13,7 @@ When adding or removing a supported Python version, update the following: * - File - What to update - * - ``.github/workflows/ci-black-formatting.yaml`` + * - ``.github/workflows/ci-lint.yaml`` - ``MIN_IOS_PY_VERSION`` * - ``.github/workflows/ci-ifcopenshell-python-pypi.yml`` - ``pyver`` matrix @@ -44,6 +44,8 @@ When a new Blender version is released and supported: * - File - What to update + * - ``.github/workflows/ci-bonsai.yml`` + - ``pyver`` matrix * - ``.github/workflows/ci-bonsai-daily.yml`` - Blender download URL @@ -57,9 +59,64 @@ When Blender ships with a new Python version: * - File - What to update - * - ``.github/workflows/ci-black-formatting.yaml`` + * - ``.github/workflows/ci-lint.yaml`` - ``MIN_BLENDER_PY_VERSION`` + * - ``.github/scripts/publish-bonsai-releases.py`` + - ``CURRENT_PYTHON_VERSION`` * - ``src/bonsai/Makefile`` - ``SUPPORTED_PYVERSIONS`` * - ``src/bonsai/scripts/dev_environment.py`` - ``PYTHON_VERSION`` mapping (Blender version, bundled Python version) + +Release +------- + +Notes: + +- Typically all packages are released at once using the same version schema +- The ``README.md`` badges can serve as a visual reference for what versions have been released +- Corrective Release (if needed after a standard release): + + - Create a new branch from the release tag (e.g., from the ``ifcopenshell-0.8.5`` tag) + - Update ``VERSION`` with the ``-post1`` suffix (e.g., ``0.8.5-post1``, **not** ``.post1``) + - The hyphen is required for semantic versioning compliance; Blender will not process ``.post1`` suffixes correctly + - Follow the standard 
release process for the corrective version + +- Multiple Blender Python Versions: + + - Blender does not allow multiple builds for the same platform with different Python versions (e.g., cannot have both ``bonsai_py311-0.8.5-windows-x64.zip`` and ``bonsai_py313-0.8.5-windows-x64.zip``) + - Workaround: publish different Python versions as different extension versions (e.g., py313 as ``0.8.5`` and py311 as ``0.8.5-post1``) + - Set the maximum Blender version on the Blender extensions platform UI to prevent conflicts (e.g., set max version ``5.1.0`` for ``0.8.5-post1``, which restricts it to versions below 5.1.0) + +Things to update: + +- ``.github/workflows/ci-bcf-pypi.yml`` - release `bcf-client `_ to PyPI +- ``.github/workflows/ci-bonsai.yml`` - release bonsai in GitHub releases +- ``.github/workflows/ci-bsdd-pypi.yaml`` - release `bsdd `_ to PyPI +- ``.github/workflows/ci-ifc4d-pypi.yaml`` - release `ifc4d `_ to PyPI +- ``.github/workflows/ci-ifc5d-pypi.yaml`` - release `ifc5d `_ to PyPI +- ``.github/workflows/ci-ifcclash-pypi.yaml`` - release `ifcclash `_ to PyPI +- ``.github/workflows/ci-ifcconvert.yml`` - release ifcconvert binaries in GitHub releases +- ``.github/workflows/ci-ifccsv-pypi.yaml`` - release `ifccsv `_ to PyPI +- ``.github/workflows/ci-ifcdiff-pypi.yaml`` - release `ifcdiff `_ to PyPI +- ``.github/workflows/ci-ifcedit-pypi.yaml`` - release `ifcedit `_ to PyPI +- ``.github/workflows/ci-ifcfm-pypi.yaml`` - release `ifcfm `_ to PyPI +- ``.github/workflows/ci-ifccityjson-pypi.yaml`` - release `ifccityjson `_ to PyPI +- ``.github/workflows/ci-ifcmcp-pypi.yaml`` - release `ifcopenshell-mcp `_ to PyPI +- ``.github/workflows/ci-ifcopenshell-python.yml`` - release ifcopenshell-python binaries in GitHub releases +- ``.github/workflows/ci-ifcopenshell-python-pypi.yml`` - release `ifcopenshell `_ wheels to PyPI +- ``.github/workflows/ci-ifcpatch-pypi.yaml`` - release `ifcpatch `_ to PyPI +- ``.github/workflows/ci-ifcquery-pypi.yaml`` - release `ifcquery `_ to 
PyPI +- ``.github/workflows/ci-ifcsverchok.yml`` - release ifcsverchok Blender add-on in GitHub releases +- ``.github/workflows/ci-ifctester-pypi.yml`` - release `ifctester `_ to PyPI +- ``.github/workflows/ci-pyodide-wasm-release.yml`` - release pyodide wasm wheel to `wasm-wheels `_ +- ``.github/workflows/publish-bonsai-releases.yml`` - publish Bonsai Blender extension to `Blender extensions platform `_ + + - ā— Requires ``BLENDER_EXTENSIONS_TOKEN`` secret to be set - ā— not yet configured + +- Publishing documentation and websites (see `website `_ repository): + + - `ifcopenshell-docs.yml` - builds and publishes IfcOpenShell documentation to `docs.ifcopenshell.org `_ (`ifcopenshell_org_docs `_ repo) + - `bonsai-docs.yml` - builds and publishes Bonsai documentation to `docs.bonsaibim.org `_ (`bonsaibim_org_docs `_ repo) + - `publish-websites.yml` - publishes `bonsaibim.org `_ (`bonsaibim_org_static_html `_ repo) and `ifcopenshell.org `_ (`ifcopenshell_org_static_html `_ repo) +- ``VERSION`` to the release version - **UPDATE THIS LAST** as all workflows above typically depend on it to set the version correctly diff --git a/src/bonsai/test/core/test_ifcgit.py b/src/bonsai/test/core/test_ifcgit.py index 539e4a082ca..4884497c8b0 100644 --- a/src/bonsai/test/core/test_ifcgit.py +++ b/src/bonsai/test/core/test_ifcgit.py @@ -20,7 +20,7 @@ import pytest import bonsai.core.ifcgit as subject -from test.core.bootstrap import ifcgit, ifc +from test.core.bootstrap import ifc, ifcgit class MockOperator: diff --git a/src/common.mk b/src/common.mk index 765655339b5..cbc251fbe2d 100644 --- a/src/common.mk +++ b/src/common.mk @@ -1,7 +1,7 @@ SHELL := sh IS_STABLE:=FALSE -PYTHON:=python3.11 -PIP:=pip3.11 +PYTHON:=python3 +PIP:=pip3 VERSION:=$(shell cat ../../VERSION) VERSION_DATE:=$(shell date '+%y%m%d') SED:=sed -i diff --git a/src/ifcchat/CNAME b/src/ifcchat/CNAME new file mode 100644 index 00000000000..6fbcc546618 --- /dev/null +++ b/src/ifcchat/CNAME @@ -0,0 +1 @@ 
+ai-chat.ifcopenshell.org \ No newline at end of file diff --git a/src/ifcchat/README.md b/src/ifcchat/README.md index f20aa8788d5..5dc434143ae 100644 --- a/src/ifcchat/README.md +++ b/src/ifcchat/README.md @@ -1,7 +1,7 @@ IfcOpenShell AI Assistant ========================= -A web-based client-side (pyodide + OpenAI API) model interrogation and generation API based on: ifcedit, ifcquery and ifcmcp (ifcopenshell-mcp) packaged in a HTML+JS application. +A web-based client-side (pyodide + OpenAI, Anthropic, Gemini, or OpenRouter API) model interrogation and generation API based on: ifcedit, ifcquery and ifcmcp (ifcopenshell-mcp) packaged in a HTML+JS application. ### Setup instructions diff --git a/src/ifcchat/api_anthropic.js b/src/ifcchat/api_anthropic.js new file mode 100644 index 00000000000..64033171542 --- /dev/null +++ b/src/ifcchat/api_anthropic.js @@ -0,0 +1,184 @@ +// This file was generated with the assistance of an AI coding tool. + +function parseArguments(argumentsText) { + if (!argumentsText) return {}; + try { + return JSON.parse(argumentsText); + } catch { + return {}; + } +} + +function toAnthropicTools(tools = []) { + return tools.map((tool) => ({ + name: tool.function.name, + description: tool.function.description, + input_schema: tool.function.parameters, + })); +} + +function toAnthropicAssistantContent(message) { + const content = []; + + if (message.content) { + content.push({ type: "text", text: message.content }); + } + + for (const toolCall of message.tool_calls ?? []) { + content.push({ + type: "tool_use", + id: toolCall.id, + name: toolCall.function.name, + input: parseArguments(toolCall.function.arguments), + }); + } + + if (content.length === 0) { + return ""; + } + + return content.length === 1 && content[0].type === "text" ? content[0].text : content; +} + +function toAnthropicUserContent(message) { + return typeof message.content === "string" ? message.content : JSON.stringify(message.content ?? 
""); +} + +function toAnthropicToolResult(message) { + return { + type: "tool_result", + tool_use_id: message.tool_call_id, + content: typeof message.content === "string" ? message.content : JSON.stringify(message.content ?? ""), + }; +} + +function splitSystemAndMessages(messages = []) { + const system = []; + const anthropicMessages = []; + let pendingToolResults = []; + + const flushToolResults = () => { + if (pendingToolResults.length === 0) return; + anthropicMessages.push({ role: "user", content: pendingToolResults }); + pendingToolResults = []; + }; + + for (const message of messages) { + if (message.role === "system") { + if (message.content) { + system.push(message.content); + } + continue; + } + + if (message.role === "tool") { + pendingToolResults.push(toAnthropicToolResult(message)); + continue; + } + + flushToolResults(); + + if (message.role === "user") { + anthropicMessages.push({ + role: "user", + content: toAnthropicUserContent(message), + }); + continue; + } + + if (message.role === "assistant") { + anthropicMessages.push({ + role: "assistant", + content: toAnthropicAssistantContent(message), + }); + } + } + + flushToolResults(); + + return { + system: system.join("\n\n"), + messages: anthropicMessages, + }; +} + +function toChatCompletionResponse(response) { + const text = []; + const toolCalls = []; + + for (const block of response.content ?? []) { + if (block.type === "text") { + text.push(block.text); + continue; + } + + if (block.type === "tool_use") { + toolCalls.push({ + id: block.id, + type: "function", + function: { + name: block.name, + arguments: JSON.stringify(block.input ?? 
{}), + }, + }); + } + } + + const message = { role: "assistant" }; + const content = text.join("\n").trim(); + + if (content) { + message.content = content; + } + + if (toolCalls.length) { + message.tool_calls = toolCalls; + } + + return { + choices: [ + { + message, + }, + ], + }; +} + +export async function chat({ apiKey, model, messages, tools }) { + const request = splitSystemAndMessages(messages); + const anthropicTools = toAnthropicTools(tools); + + // Mark the last tool with cache_control so the entire tool list is cached + if (anthropicTools.length > 0) { + anthropicTools[anthropicTools.length - 1].cache_control = { type: "ephemeral" }; + } + + const body = { + model, + max_tokens: 4096, + messages: request.messages, + tools: anthropicTools, + }; + + if (request.system) { + body.system = [{ type: "text", text: request.system, cache_control: { type: "ephemeral" } }]; + } + + const res = await fetch("https://api.anthropic.com/v1/messages", { + method: "POST", + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + "anthropic-version": "2023-06-01", + "anthropic-dangerous-direct-browser-access": "true", + }, + body: JSON.stringify(body), + }); + + if (!res.ok) { + const text = await res.text(); + throw new Error(`Anthropic error ${res.status}: ${text}`); + } + + return toChatCompletionResponse(await res.json()); +} diff --git a/src/ifcchat/api_openai.js b/src/ifcchat/api_openai.js new file mode 100644 index 00000000000..8903f897e47 --- /dev/null +++ b/src/ifcchat/api_openai.js @@ -0,0 +1,20 @@ +function getChatCompletionsUrl(baseURL) { + const root = (baseURL || "https://api.openai.com/v1").replace(/\/+$/, ""); + return `${root}/chat/completions`; +} + +export async function chat({ apiKey, baseURL, model, messages, tools }) { + const res = await fetch(getChatCompletionsUrl(baseURL), { + method: "POST", + headers: { + "Content-Type": "application/json", + "Authorization": `Bearer ${apiKey}`, + }, + body: JSON.stringify({ model, messages, 
tools }), + }); + if (!res.ok) { + const text = await res.text(); + throw new Error(`OpenAI error ${res.status}: ${text}`); + } + return await res.json(); +} diff --git a/src/ifcchat/api_openrouter.js b/src/ifcchat/api_openrouter.js new file mode 100644 index 00000000000..38c11e36e85 --- /dev/null +++ b/src/ifcchat/api_openrouter.js @@ -0,0 +1,20 @@ +function getChatCompletionsUrl(baseURL) { + const root = (baseURL || "https://openrouter.ai/api/v1").replace(/\/+$/, ""); + return `${root}/chat/completions`; +} + +export async function chat({ apiKey, baseURL, model, messages, tools }) { + const res = await fetch(getChatCompletionsUrl(baseURL), { + method: "POST", + headers: { + "Content-Type": "application/json", + "Authorization": `Bearer ${apiKey}`, + }, + body: JSON.stringify({ model, messages, tools }), + }); + if (!res.ok) { + const text = await res.text(); + throw new Error(`OpenRouter error ${res.status}: ${text}`); + } + return await res.json(); +} diff --git a/src/ifcchat/app.js b/src/ifcchat/app.js index 516ec8f846b..cf8723f46f5 100644 --- a/src/ifcchat/app.js +++ b/src/ifcchat/app.js @@ -1,4 +1,132 @@ // app.js +import * as openaiApi from "./api_openai.js"; +import * as anthropicApi from "./api_anthropic.js"; +import * as openrouterApi from "./api_openrouter.js"; + +const PROVIDERS = { + openai: { + api: openaiApi, + apiKeyLabel: "OpenAI API key", + apiKeyPlaceholder: "sk-...", + baseUrlLabel: "Base URL", + baseUrlPlaceholder: "https://api.openai.com/v1", + baseUrlDefault: "https://api.openai.com/v1", + models: [ + { + value: "gpt-5.2", + label: "gpt-5.2" + }, + { + value: "gpt-5.2-chat-latest", + label: "gpt-5.2-chat-latest" + }, + { + value: "gpt-5", + label: "gpt-5" + }, + { + value: "gpt-5-chat-latest", + label: "gpt-5-chat-latest" + }, + { + value: "gpt-5-mini", + label: "gpt-5-mini" + }, + { + value: "gpt-5-nano", + label: "gpt-5-nano" + }, + { + value: "gpt-4.1", + label: "gpt-4.1" + }, + { + value: "gpt-4.1-mini", + label: "gpt-4.1-mini" + }, + { + 
value: "gpt-4.1-nano", + label: "gpt-4.1-nano" + }, + ], + }, + anthropic: { + api: anthropicApi, + apiKeyLabel: "Anthropic API key", + apiKeyPlaceholder: "sk-ant-...", + models: [ + { + value: "claude-sonnet-4-6", + label: "claude-sonnet-4-6" + }, + { + value: "claude-opus-4-6", + label: "claude-opus-4-6" + }, + { + value: "claude-haiku-4-5-20251001", + label: "claude-haiku-4-5" + }, + ], + }, + gemini: { + api: openaiApi, + apiKeyLabel: "Gemini API key", + apiKeyPlaceholder: "AIza...", + baseUrlLabel: "Base URL", + baseUrlPlaceholder: "https://generativelanguage.googleapis.com/v1beta/openai/", + baseUrlDefault: "https://generativelanguage.googleapis.com/v1beta/openai/", + models: [ + { + value: "gemini-3-flash-preview", + label: "gemini-3-flash-preview" + }, + { + value: "gemini-2.5-flash", + label: "gemini-2.5-flash" + }, + { + value: "gemini-2.5-pro", + label: "gemini-2.5-pro" + }, + ], + }, + openrouter: { + api: openrouterApi, + apiKeyLabel: "OpenRouter API key", + apiKeyPlaceholder: "sk-or-v1-...", + baseUrlLabel: "Base URL", + baseUrlPlaceholder: "https://openrouter.ai/api/v1", + baseUrlDefault: "https://openrouter.ai/api/v1", + models: [ + { + value: "openai/gpt-oss-20b", + label: "gpt-oss-20b" + }, + { + value: "openai/gpt-oss-120b", + label: "gpt-oss-120b" + }, + { + value: "mistralai/mistral-small-3.2-24b-instruct", + label: "mistral-small-3.2" + }, + { + value: "openai/gpt-4.1", + label: "gpt-4.1" + }, + { + value: "anthropic/claude-sonnet-4-5", + label: "claude-sonnet-4-5" + }, + { + value: "google/gemini-2.5-pro-preview", + label: "gemini-2.5-pro" + }, + ], + }, +}; + const $ = (id) => document.getElementById(id); const statusEl = $("status"); @@ -6,11 +134,43 @@ const msgsEl = $("msgs"); const sendBtn = $("send"); const inputEl = $("input"); const apiKeyEl = $("apiKey"); +const apiKeyLabelEl = $("apiKeyLabel"); +const baseUrlRowEl = $("baseUrlRow"); +const baseUrlLabelEl = $("baseUrlLabel"); +const baseUrlEl = $("baseUrl"); +const 
thinkingIndicatorEl = $("thinkingIndicator"); +const compactingIndicatorEl = $("compactingIndicator"); const modelEl = $("model"); +const providerEls = document.querySelectorAll('input[name="provider"]'); const ifcFileEl = $("ifcFile"); const newBtn = $("newModel"); const downloadBtn = $("downloadIfc"); +function getProviderValue() { + return document.querySelector('input[name="provider"]:checked')?.value || "openai"; +} + +function onProviderChange() { + const provider = PROVIDERS[getProviderValue()]; + apiKeyLabelEl.innerHTML = `${provider.apiKeyLabel}stored in browser memory; only sent to provider servers`; + apiKeyEl.placeholder = provider.apiKeyPlaceholder; + baseUrlRowEl.hidden = !provider.baseUrlDefault; + if (provider.baseUrlDefault) { + baseUrlLabelEl.innerHTML = `${provider.baseUrlLabel}override the API endpoint for OpenAI-compatible providers`; + baseUrlEl.placeholder = provider.baseUrlPlaceholder; + baseUrlEl.value = provider.baseUrlDefault; + } else { + baseUrlEl.value = ""; + baseUrlEl.placeholder = ""; + } + modelEl.innerHTML = provider.models.map(m => ``).join(""); +} + +for (const providerEl of providerEls) { + providerEl.addEventListener("change", onProviderChange); +} +onProviderChange(); + function setBusy(isBusy, reason = "") { const controls = [ $("send"), @@ -30,32 +190,214 @@ function setBusy(isBusy, reason = "") { browseBtn.tabIndex = isBusy ? -1 : 0; } + sendBtn.innerHTML = isBusy + ? `` + : `Send send`; + setStatus(isBusy ? 
(reason || "Working…") : "Ready"); } +function escapeHtml(text) { + return text + .replaceAll("&", "&") + .replaceAll("<", "<") + .replaceAll(">", ">") + .replaceAll('"', """) + .replaceAll("'", "'"); +} + +function sanitizeUrl(url) { + try { + const parsed = new URL(url, window.location.href); + if (["http:", "https:", "mailto:"].includes(parsed.protocol)) { + return parsed.href; + } + } catch { + } + return null; +} + +function renderInlineMarkdown(text) { + const placeholders = []; + const addPlaceholder = (html) => { + const token = `@@MD${placeholders.length}@@`; + placeholders.push({ token, html }); + return token; + }; + + let rendered = text; + + rendered = rendered.replace(/`([^`]+)`/g, (_, code) => addPlaceholder(`${escapeHtml(code)}`)); + rendered = rendered.replace(/\[([^\]]+)\]\(([^)\s]+)\)/g, (_, label, url) => { + const href = sanitizeUrl(url); + if (!href) { + return `${label} (${url})`; + } + return addPlaceholder( + `${escapeHtml(label)}` + ); + }); + + rendered = escapeHtml(rendered); + rendered = rendered.replace(/\*\*([^*]+)\*\*/g, "$1"); + rendered = rendered.replace(/\*([^*]+)\*/g, "$1"); + rendered = rendered.replace(/_([^_]+)_/g, "$1"); + + for (const placeholder of placeholders) { + rendered = rendered.replaceAll(placeholder.token, placeholder.html); + } + + return rendered; +} + +function renderMarkdown(text) { + const lines = String(text).replace(/\r\n?/g, "\n").split("\n"); + const html = []; + let paragraphLines = []; + let quoteLines = []; + let listType = null; + let listItems = []; + + const flushParagraph = () => { + if (!paragraphLines.length) return; + html.push(`

${renderInlineMarkdown(paragraphLines.join(" "))}

`); + paragraphLines = []; + }; + + const flushQuote = () => { + if (!quoteLines.length) return; + const quoteBody = quoteLines.map((line) => renderInlineMarkdown(line)).join("
"); + html.push(`

${quoteBody}

`); + quoteLines = []; + }; + + const flushList = () => { + if (!listItems.length || !listType) return; + const items = listItems.map((item) => `
  • ${renderInlineMarkdown(item)}
  • `).join(""); + html.push(`<${listType}>${items}`); + listType = null; + listItems = []; + }; + + const flushAll = () => { + flushParagraph(); + flushQuote(); + flushList(); + }; + + for (let index = 0; index < lines.length; index++) { + const line = lines[index]; + const trimmed = line.trim(); + + if (trimmed.startsWith("```")) { + flushAll(); + const language = trimmed.slice(3).trim(); + const codeLines = []; + index += 1; + while (index < lines.length && !lines[index].trim().startsWith("```")) { + codeLines.push(lines[index]); + index += 1; + } + const languageClass = language ? ` class="language-${escapeHtml(language)}"` : ""; + html.push(`
    ${escapeHtml(codeLines.join("\n"))}
    `); + continue; + } + + if (!trimmed) { + flushAll(); + continue; + } + + const headingMatch = trimmed.match(/^(#{1,6})\s+(.+)$/); + if (headingMatch) { + flushAll(); + const level = headingMatch[1].length; + html.push(`${renderInlineMarkdown(headingMatch[2])}`); + continue; + } + + const quoteMatch = trimmed.match(/^>\s?(.*)$/); + if (quoteMatch) { + flushParagraph(); + flushList(); + quoteLines.push(quoteMatch[1]); + continue; + } + + if (quoteLines.length) { + flushQuote(); + } + + const unorderedListMatch = trimmed.match(/^[-*]\s+(.+)$/); + if (unorderedListMatch) { + flushParagraph(); + if (listType && listType !== "ul") { + flushList(); + } + listType = "ul"; + listItems.push(unorderedListMatch[1]); + continue; + } + + const orderedListMatch = trimmed.match(/^\d+\.\s+(.+)$/); + if (orderedListMatch) { + flushParagraph(); + if (listType && listType !== "ol") { + flushList(); + } + listType = "ol"; + listItems.push(orderedListMatch[1]); + continue; + } + + if (listItems.length) { + flushList(); + } + + paragraphLines.push(trimmed); + } + + flushAll(); + + return html.join(""); +} + function addMessage(role, text) { if (text.ok) { text = text.data; } + if (typeof text !== "string") { + text = JSON.stringify(text, null, 2); + } const wrap = document.createElement("div"); wrap.className = `msg ${role}`; wrap.innerHTML = ` -
    ${role}
    +
    ${role}${role === "tool" ? 'ā–¶' : ''}
    `; const bubble = wrap.querySelector(".bubble"); - bubble.textContent = text; + if (role === "assistant") { + bubble.classList.add("markdown-content"); + bubble.innerHTML = renderMarkdown(text); + } else { + bubble.textContent = text; + } bubble.onclick = function () { if (bubble.scrollHeight > 100 && role === "tool") { - bubble.style.maxHeight = bubble.style.maxHeight == 'none' ? '' : 'none'; - bubble.style.borderBottom = bubble.style.borderBottom == '' ? 'dotted 2px gray' : ''; + const expanded = bubble.style.maxHeight === 'none'; + bubble.style.maxHeight = expanded ? '' : 'none'; + bubble.style.borderBottom = expanded ? '' : 'dotted 2px gray'; + wrap.querySelector(".chevron").style.transform = expanded ? '' : 'rotate(90deg)'; } } - msgsEl.appendChild(wrap); + msgsEl.insertBefore(wrap, thinkingIndicatorEl); msgsEl.scrollTop = msgsEl.scrollHeight; } function setStatus(text) { statusEl.textContent = text; + thinkingIndicatorEl.hidden = text !== "Thinking…"; + compactingIndicatorEl.hidden = text !== "Compacting…"; + msgsEl.scrollTop = msgsEl.scrollHeight; } const worker = new Worker("./ifc_worker.js", { type: "module" }); @@ -75,74 +417,73 @@ function callWorker(type, payload = {}) { }); } -// ---- OpenAI Responses API tool schemas (should match ifcmcp.core openai_tools()) ---- -// Docs show Responses API function_call items + function_call_output loop. :contentReference[oaicite:4]{index=4} +// ---- Tool schemas (should match ifcmcp.core openai_tools()) ---- const tools = [ { - type: "function", name: "ifc_new", description: "Create a new empty IFC model in memory.", - parameters: { type: "object", properties: { schema: { type: "string" } }, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_new", description: "Create a new empty IFC model in memory. 
Valid schemas: IFC4, IFC2X3, IFC4X3 (for IFC 4.3).", + parameters: { type: "object", properties: { schema: { type: "string", enum: ["IFC4", "IFC2X3", "IFC4X3"] } }, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_summary", description: "Get a concise overview of the loaded IFC model.", - parameters: { type: "object", properties: {}, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_summary", description: "Get a concise overview of the loaded IFC model.", + parameters: { type: "object", properties: {}, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_tree", description: "Get the full spatial hierarchy tree.", - parameters: { type: "object", properties: {}, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_tree", description: "Get the full spatial hierarchy tree.", + parameters: { type: "object", properties: {}, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_select", description: "Select elements using ifcopenshell selector syntax (e.g. 'IfcWall').", - parameters: { type: "object", properties: { query: { type: "string" } }, required: ["query"], additionalProperties: false } + type: "function", function: { name: "ifc_select", description: "Select elements using ifcopenshell selector syntax (e.g. 
'IfcWall').", + parameters: { type: "object", properties: { query: { type: "string" } }, required: ["query"], additionalProperties: false } } }, { - type: "function", name: "ifc_info", description: "Inspect an entity by STEP id.", - parameters: { type: "object", properties: { element_id: { type: "integer" } }, required: ["element_id"], additionalProperties: false } + type: "function", function: { name: "ifc_info", description: "Inspect an entity by STEP id.", + parameters: { type: "object", properties: { element_id: { type: "integer" } }, required: ["element_id"], additionalProperties: false } } }, { - type: "function", name: "ifc_relations", description: "Get relationships for an element. traverse='up' walks to IfcProject.", + type: "function", function: { name: "ifc_relations", description: "Get relationships for an element. traverse='up' walks to IfcProject.", parameters: { type: "object", properties: { element_id: { type: "integer" }, traverse: { type: "string" } }, required: ["element_id"], additionalProperties: false - } + } } }, { - type: "function", name: "ifc_clash", description: "Run clash/clearance checks for an element.", + type: "function", function: { name: "ifc_clash", description: "Run clash/clearance checks for an element.", parameters: { type: "object", properties: { element_id: { type: "integer" }, clearance: { type: "number" }, tolerance: { type: "number" }, scope: { type: "string" } }, required: ["element_id"], additionalProperties: false - } + } } }, { - type: "function", name: "ifc_list", description: "List ifcopenshell.api modules or functions within a module.", - parameters: { type: "object", properties: { module: { type: "string" } }, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_list", description: "List ifcopenshell.api modules or functions within a module.", + parameters: { type: "object", properties: { module: { type: "string" } }, required: [], additionalProperties: false } } }, { - type: 
"function", name: "ifc_docs", description: "Get documentation for an ifcopenshell.api function, 'module.function'.", - parameters: { type: "object", properties: { function_path: { type: "string" } }, required: ["function_path"], additionalProperties: false } + type: "function", function: { name: "ifc_docs", description: "Get documentation for an ifcopenshell.api function, 'module.function'.", + parameters: { type: "object", properties: { function_path: { type: "string" } }, required: ["function_path"], additionalProperties: false } } }, { - type: "function", name: "ifc_edit", description: "Execute an ifcopenshell.api mutation; params is a JSON string of stringly-typed kwargs.", - parameters: { type: "object", properties: { function_path: { type: "string" }, params: { type: "string" } }, required: ["function_path"], additionalProperties: false } + type: "function", function: { name: "ifc_edit", description: "Execute an ifcopenshell.api mutation; params is a JSON string of stringly-typed kwargs.", + parameters: { type: "object", properties: { function_path: { type: "string" }, params: { type: "string" } }, required: ["function_path"], additionalProperties: false } } }, { - type: "function", name: "ifc_validate", description: "Validate the loaded model. Returns valid bool and list of issues.", - parameters: { type: "object", properties: { express_rules: { type: "boolean" } }, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_validate", description: "Validate the loaded model. Returns valid bool and list of issues.", + parameters: { type: "object", properties: { express_rules: { type: "boolean" } }, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_schedule", description: "List work schedules and nested tasks. 
Use max_depth=1 for top-level phases only on large projects.", - parameters: { type: "object", properties: { max_depth: { type: "integer" } }, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_schedule", description: "List work schedules and nested tasks. Use max_depth=1 for top-level phases only on large projects.", + parameters: { type: "object", properties: { max_depth: { type: "integer" } }, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_cost", description: "List cost schedules and nested cost items. Use max_depth=1 for top-level sections only on large BoQs.", - parameters: { type: "object", properties: { max_depth: { type: "integer" } }, required: [], additionalProperties: false } + type: "function", function: { name: "ifc_cost", description: "List cost schedules and nested cost items. Use max_depth=1 for top-level sections only on large BoQs.", + parameters: { type: "object", properties: { max_depth: { type: "integer" } }, required: [], additionalProperties: false } } }, { - type: "function", name: "ifc_schema", description: "Return IFC class documentation for an entity type.", - parameters: { type: "object", properties: { entity_type: { type: "string" } }, required: ["entity_type"], additionalProperties: false } + type: "function", function: { name: "ifc_schema", description: "Return IFC class documentation for an entity type.", + parameters: { type: "object", properties: { entity_type: { type: "string" } }, required: ["entity_type"], additionalProperties: false } } }, { - type: "function", name: "ifc_quantify", description: "Run quantity take-off (QTO) on the model. Modifies model in-place; call ifc_save() after.", - parameters: { type: "object", properties: { rule: { type: "string" }, selector: { type: "string" } }, required: ["rule"], additionalProperties: false } + type: "function", function: { name: "ifc_quantify", description: "Run quantity take-off (QTO) on the model. 
Modifies model in-place; call ifc_save() after.", + parameters: { type: "object", properties: { rule: { type: "string" }, selector: { type: "string" } }, required: ["rule"], additionalProperties: false } } }, ]; @@ -152,89 +493,199 @@ Rules: - If the user asks about model contents (counts, lists, properties, hierarchy), use tools like ifc_summary/ifc_select/ifc_info/ifc_tree. - If the user asks to change the model, prefer: (1) ifc_list to find candidate API modules, (2) ifc_docs for the exact function signature, then (3) ifc_edit. - If there is no model and the user wants to create one, call ifc_new. +- In case of type errors on api functions, retry providing values as strings (for example in the case of the matrix in geometry.edit_object_placement). - After edits, explain what changed and suggest downloading the IFC. Be concise. Avoid dumping huge trees unless asked. `; -let inputItems = []; // running conversation state (Responses API style) +let messages = []; // running conversation state (Chat Completions style) -async function openAIResponsesCreate({ apiKey, model, input, tools }) { - const res = await fetch("https://api.openai.com/v1/responses", { - method: "POST", - headers: { - "Content-Type": "application/json", - "Authorization": `Bearer ${apiKey}`, - }, - body: JSON.stringify({ - model, - instructions: SYSTEM_INSTRUCTIONS, - tools, - input, - }), - }); +const MAX_TOOL_RESULT_CHARS = 0; +const MAX_HISTORY_MESSAGES = 40; +const ESTIMATED_CHARS_PER_TOKEN = 4; +const MAX_ESTIMATED_TOKENS_PER_MINUTE = 24000; +const COMPACT_WHEN_ESTIMATED_TOKENS = 18000; +const KEEP_RAW_TURN_GROUPS = 1; +const minuteTokenMap = new Map(); - if (!res.ok) { - const text = await res.text(); - throw new Error(`OpenAI error ${res.status}: ${text}`); +function truncateToolResult(text) { + if (MAX_TOOL_RESULT_CHARS == 0 || text.length <= MAX_TOOL_RESULT_CHARS) return text; + return text.slice(0, MAX_TOOL_RESULT_CHARS) + "\n... 
(truncated)"; +} + +function trimHistory() { + if (messages.length <= MAX_HISTORY_MESSAGES) return; + // Find a safe cut point — don't break mid-tool-call sequence. + // Walk forward from the trim target to find a user message boundary. + let cut = messages.length - MAX_HISTORY_MESSAGES; + while (cut < messages.length && messages[cut].role !== "user") { + cut++; + } + if (cut > 0 && cut < messages.length) { + messages.splice(0, cut); } - return await res.json(); } -function extractAssistantText(response) { - const out = []; - for (const item of response.output ?? []) { - if (item.type === "message" && item.role === "assistant") { - for (const c of item.content ?? []) { - if (c.type === "output_text") out.push(c.text); - } +function getEstimatedTokenMinuteLog(firstIterationMinuteBucket) { + return Array.from(minuteTokenMap.entries()) + .filter(([minuteBucket]) => minuteBucket >= firstIterationMinuteBucket) + .sort(([leftMinuteBucket], [rightMinuteBucket]) => leftMinuteBucket - rightMinuteBucket) + .map(([minuteBucket, estimatedTokens]) => ({ + timestamp: new Date(minuteBucket * 60000).toISOString(), + estimated_tokens: estimatedTokens, + })); +} + +async function chatWithMinuteDelay({ chat, apiKey, baseURL, model, messages, tools }) { + const estimatedTokens = Math.max( + 1, + Math.ceil(JSON.stringify({ model, messages, ...(tools ? { tools } : {}) }).length / ESTIMATED_CHARS_PER_TOKEN) + ); + let currentMinuteBucket = Math.floor(Date.now() / 60000); + const estimateTokenUsage = (minuteTokenMap.get(currentMinuteBucket) ?? 0) + estimatedTokens; + + if (estimateTokenUsage > MAX_ESTIMATED_TOKENS_PER_MINUTE) { + currentMinuteBucket += 1; + await new Promise((resolve) => setTimeout(() => resolve(), 60000)); + } + + minuteTokenMap.set(currentMinuteBucket, (minuteTokenMap.get(currentMinuteBucket) ?? 
0) + estimatedTokens); + + return { + minuteBucket: currentMinuteBucket, + response: await chat({ apiKey, baseURL, model, messages, tools }), + }; +} + +async function compactHistoryWithLLM(chat, apiKey, baseURL, model) { + const estimatedTokens = Math.max( + 1, + Math.ceil(JSON.stringify([{ role: "system", content: SYSTEM_INSTRUCTIONS }, ...messages]).length / ESTIMATED_CHARS_PER_TOKEN) + ); + if (messages.length <= MAX_HISTORY_MESSAGES && estimatedTokens <= COMPACT_WHEN_ESTIMATED_TOKENS) return null; + + const { prefix, groups } = messages.reduce((acc, message) => { + if (message.role === "user") { + acc.groups.push([message]); + } else if (acc.groups.length) { + acc.groups[acc.groups.length - 1].push(message); + } else { + acc.prefix.push(message); } + return acc; + }, { prefix: [], groups: [] }); + + if (groups.length <= KEEP_RAW_TURN_GROUPS) return null; + + const compacted = [...prefix, ...groups.slice(0, -KEEP_RAW_TURN_GROUPS).flat()]; + if (!compacted.length) return null; + + setStatus("Compacting…"); + try { + const before = { + message_count: messages.length, + turn_group_count: groups.length, + estimated_tokens: estimatedTokens, + }; + const { minuteBucket, response } = await chatWithMinuteDelay({ + chat, + apiKey, + baseURL, + model, + messages: [ + { + role: "system", + content: "Summarize older IFC chat context for continuation. Preserve user goals, model state and schema, edits already applied, important ids, names, selectors, and unresolved questions. Be concise, factual, and use short markdown bullets. Do not mention that this is a summary." 
+ }, + { role: "user", content: JSON.stringify(compacted) }, + ], + }); + const summary = response.choices?.[0]?.message?.content?.trim(); + + if (!summary) return minuteBucket; + + messages = [ + { role: "assistant", content: `[Context summary]\n${summary}` }, + ...groups.slice(-KEEP_RAW_TURN_GROUPS).flat(), + ]; + console.log("History compaction before", before); + console.log("History compaction after", { + message_count: messages.length, + turn_group_count: messages.filter((message) => message.role === "user").length, + estimated_tokens: Math.max( + 1, + Math.ceil(JSON.stringify([{ role: "system", content: SYSTEM_INSTRUCTIONS }, ...messages]).length / ESTIMATED_CHARS_PER_TOKEN) + ), + }); + return minuteBucket; + } finally { + setStatus("Thinking…"); } - return out.join("\n").trim(); } async function runAgentTurn(userText) { const apiKey = apiKeyEl.value.trim(); if (!apiKey) throw new Error("Missing API key"); - // Add user message - inputItems.push({ role: "user", content: userText }); + const provider = PROVIDERS[getProviderValue()]; + const { chat } = provider.api; + const baseURL = provider.baseUrlDefault ? baseUrlEl.value.trim() : undefined; + let firstIterationMinuteBucket = null; + + messages.push({ role: "user", content: userText }); - // Tool-calling loop (Responses API): append response.output, execute function_call items, append function_call_output. 
for (let i = 0; i < 64; i++) { - const response = await openAIResponsesCreate({ + const compactedMinuteBucket = await compactHistoryWithLLM(chat, apiKey, baseURL, modelEl.value); + if (firstIterationMinuteBucket === null && compactedMinuteBucket !== null) { + firstIterationMinuteBucket = compactedMinuteBucket; + } + if (messages.length > MAX_HISTORY_MESSAGES * 2) trimHistory(); + + const messages_with_system = [{ role: "system", content: SYSTEM_INSTRUCTIONS }, ...messages]; + const { minuteBucket, response } = await chatWithMinuteDelay({ + chat, apiKey, + baseURL, model: modelEl.value, - input: inputItems, + messages: messages_with_system, tools, }); + if (firstIterationMinuteBucket === null) { + firstIterationMinuteBucket = minuteBucket; + } + + const message = response.choices?.[0]?.message; + if (!message) throw new Error("No message in response"); - // Keep ALL output items (incl reasoning/tool calls) in the running state. - inputItems.push(...(response.output ?? [])); + messages.push(message); - // Show any assistant text immediately - const text = extractAssistantText(response); - if (text) addMessage("assistant", text); + if (message.content) addMessage("assistant", message.content); - const calls = (response.output ?? []).filter((x) => x.type === "function_call"); - if (calls.length === 0) return; + const calls = message.tool_calls ?? []; + if (calls.length === 0) { + console.log("Estimated token usage by minute", getEstimatedTokenMinuteLog(firstIterationMinuteBucket)); + return; + } for (const call of calls) { let args = {}; - try { args = call.arguments ? JSON.parse(call.arguments) : {}; } + try { args = call.function.arguments ? 
JSON.parse(call.function.arguments) : {}; } catch { args = {}; } - addMessage("tool", `→ ${call.name}(${JSON.stringify(args)})`); + addMessage("tool", `→ ${call.function.name}(${JSON.stringify(args)})`); - const toolRes = await callWorker("toolCall", { name: call.name, args }); + const toolRes = await callWorker("toolCall", { name: call.function.name, args }); - // Feed tool result back to the model - inputItems.push({ - type: "function_call_output", - call_id: call.call_id, - output: JSON.stringify(toolRes.result), + const fullResult = JSON.stringify(toolRes.result); + + messages.push({ + role: "tool", + tool_call_id: call.id, + content: truncateToolResult(fullResult), }); - addMessage("tool", `← ${call.name}: ${JSON.stringify(toolRes.result, null, 2)}`); + // Show full result in UI, but only truncated version goes to the LLM + addMessage("tool", `← ${call.function.name}: ${JSON.stringify(toolRes.result, null, 2)}`); } } @@ -251,7 +702,7 @@ sendBtn.onclick = async () => { await runAgentTurn(text); setBusy(false, "Ready"); } catch (e) { - setBusy(true, "Error"); + setBusy(false, "Error"); addMessage("assistant", `Error: ${e.message}`); } }; @@ -281,7 +732,7 @@ ifcFileEl.onchange = async () => { newBtn.onclick = async () => { try { setBusy(true, "Creating new model…"); - const r = await callWorker("toolCall", { name: "ifc_new", args: { schema: "IFC4" } }); + const r = await callWorker("toolCall", { name: "ifc_new", args: { schema: "IFC4X3" } }); addMessage("assistant", `New model: ${JSON.stringify(r.result)}`); setBusy(false, "Ready"); } catch (e) { @@ -317,4 +768,4 @@ downloadBtn.onclick = async () => { setBusy(true, "Error"); addMessage("assistant", `Worker init failed: ${e.message}`); } -})(); \ No newline at end of file +})(); diff --git a/src/ifcchat/index.html b/src/ifcchat/index.html index ac878ae615f..7ccb6785912 100644 --- a/src/ifcchat/index.html +++ b/src/ifcchat/index.html @@ -5,233 +5,7 @@ IfcOpenShell AI Assistant - + @@ -239,14 +13,46 @@
    - + +
    + + + + +
    +
    + +
    + + +
    + +
    +
    +
    + + +
    +
    - +