diff --git a/.github/scripts/publish-bonsai-releases.py b/.github/scripts/publish-bonsai-releases.py
new file mode 100755
index 00000000000..a393104e7c9
--- /dev/null
+++ b/.github/scripts/publish-bonsai-releases.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env -S uv run
+# /// script
+# dependencies = [
+# "PyGithub",
+# "requests",
+# ]
+# ///
+
+import os
+from pathlib import Path
+
+import requests
+from github import Github
+from github.GitReleaseAsset import GitReleaseAsset
+
+EXTENSION_ID = "bonsai"
+CURRENT_PYTHON_VERSION = "py313"
+CURRENT_PLATFORMS = ["linux-x64", "macos-arm64", "windows-x64"]
+
+
+def publish_asset(asset: GitReleaseAsset, token: str, repo_root: Path) -> None:
+ """
+ Publish an asset to Blender Extensions.
+ Reference: https://extensions.blender.org/api/v1/swagger
+ """
+ temp_path = repo_root / asset.name
+
+    response = requests.get(asset.browser_download_url, timeout=300)
+ response.raise_for_status()
+ temp_path.write_bytes(response.content)
+
+ url = f"https://extensions.blender.org/api/v1/extensions/{EXTENSION_ID}/versions/upload/"
+ headers = {"Authorization": f"Bearer {token}"}
+
+    files = {"version_file": (asset.name, temp_path.read_bytes())}
+    response = requests.post(url, headers=headers, files=files, timeout=300)
+ response.raise_for_status()
+
+ temp_path.unlink()
+
+ print(f"✓ Published {asset.name}")
+
+
+def main() -> None:
+ token = os.getenv("BLENDER_EXTENSIONS_TOKEN")
+ if not token:
+        raise RuntimeError("BLENDER_EXTENSIONS_TOKEN environment variable not set")
+
+ # Get the repository root
+ repo_root = Path(__file__).parent.parent.parent
+
+ # Read VERSION file
+ version_file = repo_root / "VERSION"
+ version = version_file.read_text().strip()
+
+ print(f"Current VERSION: {version}")
+
+ tag_name = f"bonsai-{version}"
+
+ # Get release from GitHub
+ gh = Github()
+ gh_repo = gh.get_repo("IfcOpenShell/IfcOpenShell")
+ release = gh_repo.get_release(tag_name)
+
+ assets = release.get_assets()
+
+ asset_platform_map: dict[str, tuple[GitReleaseAsset, str]] = {}
+ for asset in assets:
+ if CURRENT_PYTHON_VERSION not in asset.name:
+ continue
+ for platform in CURRENT_PLATFORMS:
+ if platform in asset.name:
+ asset_platform_map[asset.name] = (asset, platform)
+ break
+
+    if len(asset_platform_map) != len(CURRENT_PLATFORMS):
+        found_platforms = {platform for _, platform in asset_platform_map.values()}
+        missing_platforms = set(CURRENT_PLATFORMS) - found_platforms
+        raise RuntimeError(
+            f"Expected {len(CURRENT_PLATFORMS)} assets but found {len(asset_platform_map)}. "
+            f"Missing: {', '.join(sorted(missing_platforms))}"
+        )
+
+ print("\nRelease assets:")
+ for asset_name in sorted(asset_platform_map.keys()):
+ print(f"- {asset_name}")
+
+ # https://extensions.blender.org/api/v1/swagger
+ print("\nPublishing assets to Blender Extensions:")
+    for asset, _platform in asset_platform_map.values():
+        publish_asset(asset, token, repo_root)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/.github/workflows/build_rocky.yml b/.github/workflows/build_rocky.yml
index 9ac489378ba..a1aa6118263 100644
--- a/.github/workflows/build_rocky.yml
+++ b/.github/workflows/build_rocky.yml
@@ -9,6 +9,13 @@ jobs:
container: rockylinux:9
steps:
+ - name: Set up uv
+ uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
+
+ - name: Install Python
+ # Installs latest Python version so it's preferred by uv over Rocky's system Python.
+ run: uv python install
+
- name: Install Dependencies
run: |
dnf update -y
@@ -17,7 +24,6 @@ jobs:
sqlite-devel bzip2-devel zlib-devel openssl-devel xz-devel \
readline-devel ncurses-devel libffi-devel libuuid-devel git-lfs \
findutils xz byacc
- python3 -m pip install typing_extensions
git config --global --add safe.directory '*'
- name: Install aws cli
@@ -45,7 +51,7 @@ jobs:
- name: Unpack Dependencies
run: |
cd build
- python3 ../nix/cache_dependencies.py unpack
+ uv run ../nix/cache_dependencies.py unpack
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2.22
@@ -56,7 +62,7 @@ jobs:
shell: bash
run: |
set -o pipefail
- CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release python3 ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log
+ CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release uv run ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log
- name: Upload Build Logs
if: always()
@@ -71,7 +77,7 @@ jobs:
- name: Pack Dependencies
run: |
cd build
- python3 ../nix/cache_dependencies.py pack
+ uv run ../nix/cache_dependencies.py pack
- name: Commit and Push Changes to Build Repository
run: |
diff --git a/.github/workflows/build_rocky_arm.yml b/.github/workflows/build_rocky_arm.yml
index 208e1da9da0..a5cbea640df 100644
--- a/.github/workflows/build_rocky_arm.yml
+++ b/.github/workflows/build_rocky_arm.yml
@@ -9,6 +9,13 @@ jobs:
container: arm64v8/rockylinux:9
steps:
+ - name: Set up uv
+ uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
+
+ - name: Install Python
+ # Installs latest Python version so it's preferred by uv over Rocky's system Python.
+ run: uv python install
+
- name: Install Dependencies
run: |
dnf update -y
@@ -17,7 +24,6 @@ jobs:
sqlite-devel bzip2-devel zlib-devel openssl-devel xz-devel \
readline-devel ncurses-devel libffi-devel libuuid-devel git-lfs \
findutils xz byacc
- python3 -m pip install typing_extensions
git config --global --add safe.directory '*'
- name: Install aws cli
@@ -45,7 +51,7 @@ jobs:
- name: Unpack Dependencies
run: |
cd build
- python3 ../nix/cache_dependencies.py unpack
+ uv run ../nix/cache_dependencies.py unpack
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2.22
@@ -56,7 +62,7 @@ jobs:
shell: bash
run: |
set -o pipefail
- CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release python3 ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log
+ CXXFLAGS="-O3" CFLAGS="-O3 ${DARWIN_C_SOURCE}" ADD_COMMIT_SHA=1 BUILD_CFG=Release uv run ./nix/build-all.py -v --diskcleanup 2>&1 | tee build.log
- name: Upload Build Logs
if: always()
@@ -71,7 +77,7 @@ jobs:
- name: Pack Dependencies
run: |
cd build
- python3 ../nix/cache_dependencies.py pack
+ uv run ../nix/cache_dependencies.py pack
- name: Commit and Push Changes to Build Repository
run: |
diff --git a/.github/workflows/ci-lint.yaml b/.github/workflows/ci-lint.yaml
index 6dc18453ad6..4ef84f8cbba 100644
--- a/.github/workflows/ci-lint.yaml
+++ b/.github/workflows/ci-lint.yaml
@@ -95,8 +95,7 @@ jobs:
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
}
- run_check poe ruff-main
- run_check poe ruff-old
+ run_check poe ruff
exit $ERROR
continue-on-error: true
diff --git a/.github/workflows/docs-deployment.yml b/.github/workflows/docs-deployment.yml
deleted file mode 100644
index 3ff50b575e8..00000000000
--- a/.github/workflows/docs-deployment.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-name: Build and Deploy Stable Documentation
-
-on:
- workflow_dispatch: # Manual trigger
-
-jobs:
- build:
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v6
-
- - name: Set up Python
- uses: actions/setup-python@v6
- with:
- python-version: '3.x'
-
- - name: Install dependencies
- run: |
- cd src/bonsai/docs
- pip install -r requirements.txt # Run pip install from the docs directory
-
- - name: Build documentation
- run: |
- cd src/bonsai/docs
- make html
-
- - name: Deploy to GitHub Pages (Stable)
- uses: peaceiris/actions-gh-pages@v4
- with:
- deploy_key: ${{ secrets.ACTIONS_DEPLOY_KEY }}
- external_repository: IfcOpenShell/bonsaibim_org_docs
- publish_branch: main
- cname: docs.bonsaibim.org
- publish_dir: src/bonsai/docs/_build/html
\ No newline at end of file
diff --git a/.github/workflows/publish-bonsai-releases.yml b/.github/workflows/publish-bonsai-releases.yml
new file mode 100644
index 00000000000..25d9a67d412
--- /dev/null
+++ b/.github/workflows/publish-bonsai-releases.yml
@@ -0,0 +1,16 @@
+name: Publish Bonsai Releases
+
+on:
+ workflow_dispatch:
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+ steps:
+      - uses: actions/checkout@v6
+
+      - uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v8.0.0
+
+ - run: uv run .github/scripts/publish-bonsai-releases.py
+ env:
+ BLENDER_EXTENSIONS_TOKEN: ${{ secrets.BLENDER_EXTENSIONS_TOKEN }}
diff --git a/README.md b/README.md
index a87e023d435..a44a6ff78c5 100644
--- a/README.md
+++ b/README.md
@@ -53,11 +53,11 @@ Contents
| [ifcedit](https://docs.ifcopenshell.org/ifcedit.html) | CLI wrapper for ifcopenshell.api IFC model mutation functions | LGPL-3.0-or-later | [](https://pypi.org/project/ifcedit/) |
| [ifcfm](https://docs.ifcopenshell.org/ifcfm.html) | Extract IFC data for FM handover requirements | LGPL-3.0-or-later | [](https://pypi.org/project/ifcfm/) |
| [ifcmax](https://docs.ifcopenshell.org/ifcmax.html) | Historic extension for IFC support in 3DS Max | LGPL-3.0-or-later\* | [](https://docs.ifcopenshell.org/ifcmax.html)
-| [ifcmcp](https://docs.ifcopenshell.org/ifcmcp.html) | MCP server for querying and editing IFC building models | LGPL-3.0-or-later | [](https://pypi.org/project/ifcmcp/) |
+| [ifcmcp](https://docs.ifcopenshell.org/ifcmcp.html) | MCP server for querying and editing IFC building models | LGPL-3.0-or-later | [](https://pypi.org/project/ifcopenshell-mcp/) |
| [ifcopenshell-python](https://docs.ifcopenshell.org/ifcopenshell-python.html) | Python library for IFC manipulation | LGPL-3.0-or-later\* | [](https://docs.ifcopenshell.org/ifcopenshell-python/installation.html) [](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcopenshell-python&expanded=true) [](https://pypi.org/project/ifcopenshell/) [](https://anaconda.org/conda-forge/ifcopenshell) [](https://anaconda.org/ifcopenshell/ifcopenshell) [](https://hub.docker.com/r/aecgeeks/ifcopenshell) [](https://aur.archlinux.org/packages/ifcopenshell) [](https://aur.archlinux.org/packages/ifcopenshell-git) [](https://github.com/IfcOpenShell/wasm-wheels) |
| [ifcpatch](https://docs.ifcopenshell.org/ifcpatch.html) | Utility to run pre-packaged scripts to manipulate IFCs | LGPL-3.0-or-later | [](https://pypi.org/project/ifcpatch/) |
| [ifcquery](https://docs.ifcopenshell.org/ifcquery.html) | CLI tool for querying and inspecting IFC building models | LGPL-3.0-or-later | [](https://pypi.org/project/ifcquery/) |
-| [ifcsverchok](https://docs.ifcopenshell.org/ifcsverchok.html) | Blender Add-on for visual node programming with IFC | GPL-3.0-or-later | [](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcsverchok&expanded=true)
+| [ifcsverchok](https://docs.ifcopenshell.org/ifcsverchok.html) | Blender Add-on for visual node programming with IFC | GPL-3.0-or-later | [](https://github.com/IfcOpenShell/IfcOpenShell/releases?q=ifcsverchok&expanded=true)
| [ifctester](https://docs.ifcopenshell.org/ifctester.html) | Library, CLI and webapp for IDS model auditing | LGPL-3.0-or-later | [](https://pypi.org/project/ifctester/) |
The IfcOpenShell C++ codebase is split into multiple interal libraries:
diff --git a/VERSION b/VERSION
index 7ada0d303f3..7fc2521fd74 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-0.8.5
+0.8.6
diff --git a/nix/build-all.py b/nix/build-all.py
index a4e261fc70a..4858907ce6d 100644
--- a/nix/build-all.py
+++ b/nix/build-all.py
@@ -1,4 +1,6 @@
#!/usr/bin/python
+# /// script
+# ///
###############################################################################
# #
# This file is part of IfcOpenShell. #
@@ -126,13 +128,7 @@
from pathlib import Path
from urllib.request import urlretrieve
-try:
- from typing import Literal, Union
-except:
- # python 3.6 compatibility for rocky 8
- from typing import Union
-
- from typing_extensions import Literal
+from typing import Literal, Union
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
diff --git a/nix/cache_dependencies.py b/nix/cache_dependencies.py
index 3d115764b4a..7d231779ee0 100644
--- a/nix/cache_dependencies.py
+++ b/nix/cache_dependencies.py
@@ -1,3 +1,5 @@
+# /// script
+# ///
"""
Cache built dependencies for builds.
diff --git a/pyodide/build_pyodide.sh b/pyodide/build_pyodide.sh
index db5c5f08b08..e7f399adfd9 100755
--- a/pyodide/build_pyodide.sh
+++ b/pyodide/build_pyodide.sh
@@ -17,7 +17,8 @@ uv run pyodide xbuildenv install
uv run pyodide xbuildenv install-emscripten
EMSDK_ROOT=$(pyodide config get emscripten_dir)
-source ${EMSDK_ROOT}/emsdk_env.sh
+[ -f "${EMSDK_ROOT}/emsdk_env.sh" ] && source "${EMSDK_ROOT}/emsdk_env.sh"
+[ -f "${EMSDK_ROOT}/../../emsdk_env.sh" ] && source "${EMSDK_ROOT}/../../emsdk_env.sh"
which emcc
mkdir -p packages/ifcopenshell
diff --git a/pyproject.toml b/pyproject.toml
index eb6b4620d7b..45c0b357a8b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ name = "IfcOpenShell"
version = "0.0.0"
dependencies = [
"black==26.3.1",
- "ruff==0.15.9",
+ "ruff==0.15.10",
"poethepoet",
"ty==0.0.29",
"gersemi==0.26.1",
@@ -215,10 +215,7 @@ exclude = [
[tool.poe.tasks]
-ruff-main = "ruff check --extend-exclude nix/build-all.py"
-# It's actually Python 3.6, but ruff only supports 3.7+, but it should do.
-ruff-old = "ruff check nix/build-all.py --target-version py37"
-ruff.sequence = ["ruff-main", "ruff-old"]
+ruff = "ruff check"
black = "black ."
@@ -238,7 +235,7 @@ ty-venv-ios.sequence = [
{cmd = "uv pip install -r src/ifcopenshell-python/type-check-requirements.txt --python=src/ifcopenshell-python/.venv"},
]
-format.sequence = ["black", "ruff-main", "ruff-old"]
+format.sequence = ["black", "ruff"]
cmake-format = "gersemi . --in-place"
diff --git a/src/bonsai/Makefile b/src/bonsai/Makefile
index bd7bbbf5978..e00d77da8e5 100644
--- a/src/bonsai/Makefile
+++ b/src/bonsai/Makefile
@@ -17,8 +17,8 @@
# along with Bonsai. If not, see .
SHELL := sh
-PYTHON:=python3.11
-PIP:=pip3.11
+PYTHON:=python3
+PIP:=pip3
PATCH:=patch
SED:=sed -i
VENV_ACTIVATE:=bin/activate
diff --git a/src/bonsai/bonsai/bim/data/fonts/LICENSE b/src/bonsai/bonsai/bim/data/fonts/LICENSE
new file mode 100644
index 00000000000..4dc4bdd0937
--- /dev/null
+++ b/src/bonsai/bonsai/bim/data/fonts/LICENSE
@@ -0,0 +1,96 @@
+Copyright (c) 2011-2012, Nikita Volchenkov (),
+with Reserved Font Name OpenGost Type B.
+
+Copyright (c) 2012, Valek Filippov ().
+
+This Font Software is licensed under the SIL Open Font License, Version 1.1.
+This license is copied below, and is also available with a FAQ at:
+http://scripts.sil.org/OFL
+
+
+-----------------------------------------------------------
+SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+-----------------------------------------------------------
+
+PREAMBLE
+The goals of the Open Font License (OFL) are to stimulate worldwide
+development of collaborative font projects, to support the font creation
+efforts of academic and linguistic communities, and to provide a free and
+open framework in which fonts may be shared and improved in partnership
+with others.
+
+The OFL allows the licensed fonts to be used, studied, modified and
+redistributed freely as long as they are not sold by themselves. The
+fonts, including any derivative works, can be bundled, embedded,
+redistributed and/or sold with any software provided that any reserved
+names are not used by derivative works. The fonts and derivatives,
+however, cannot be released under any other type of license. The
+requirement for fonts to remain under this license does not apply
+to any document created using the fonts or their derivatives.
+
+DEFINITIONS
+"Font Software" refers to the set of files released by the Copyright
+Holder(s) under this license and clearly marked as such. This may
+include source files, build scripts and documentation.
+
+"Reserved Font Name" refers to any names specified as such after the
+copyright statement(s).
+
+"Original Version" refers to the collection of Font Software components as
+distributed by the Copyright Holder(s).
+
+"Modified Version" refers to any derivative made by adding to, deleting,
+or substituting -- in part or in whole -- any of the components of the
+Original Version, by changing formats or by porting the Font Software to a
+new environment.
+
+"Author" refers to any designer, engineer, programmer, technical
+writer or other person who contributed to the Font Software.
+
+PERMISSION & CONDITIONS
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of the Font Software, to use, study, copy, merge, embed, modify,
+redistribute, and sell modified and unmodified copies of the Font
+Software, subject to the following conditions:
+
+1) Neither the Font Software nor any of its individual components,
+in Original or Modified Versions, may be sold by itself.
+
+2) Original or Modified Versions of the Font Software may be bundled,
+redistributed and/or sold with any software, provided that each copy
+contains the above copyright notice and this license. These can be
+included either as stand-alone text files, human-readable headers or
+in the appropriate machine-readable metadata fields within text or
+binary files as long as those fields can be easily viewed by the user.
+
+3) No Modified Version of the Font Software may use the Reserved Font
+Name(s) unless explicit written permission is granted by the corresponding
+Copyright Holder. This restriction only applies to the primary font name as
+presented to the users.
+
+4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
+Software shall not be used to promote, endorse or advertise any
+Modified Version, except to acknowledge the contribution(s) of the
+Copyright Holder(s) and the Author(s) or with their explicit written
+permission.
+
+5) The Font Software, modified or unmodified, in part or in whole,
+must be distributed entirely under this license, and must not be
+distributed under any other license. The requirement for fonts to
+remain under this license does not apply to any document created
+using the Font Software.
+
+TERMINATION
+This license becomes null and void if any of the above conditions are
+not met.
+
+DISCLAIMER
+THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
+OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
+DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
+OTHER DEALINGS IN THE FONT SOFTWARE.
diff --git a/src/bonsai/bonsai/bim/module/style/prop.py b/src/bonsai/bonsai/bim/module/style/prop.py
index bd62021d653..a4cfb2cb9ea 100644
--- a/src/bonsai/bonsai/bim/module/style/prop.py
+++ b/src/bonsai/bonsai/bim/module/style/prop.py
@@ -118,6 +118,19 @@ def update_shader_graph(self: Union["Texture", "BIMStylesProperties"], context:
tool.Loader.create_surface_style_with_textures(material, shading_data, textures_data)
+def _make_clear_null_updater(null_prop: str):
+ def _update(self: "BIMStylesProperties", context: bpy.types.Context) -> None:
+ self[null_prop] = False
+ update_shader_graph(self, context)
+
+ return _update
+
+
+update_diffuse_colour = _make_clear_null_updater("is_diffuse_colour_null")
+update_specular_colour = _make_clear_null_updater("is_specular_colour_null")
+update_specular_highlight_value = _make_clear_null_updater("is_specular_highlight_null")
+
+
UV_MODES = [
("UV", "UV", _("Actual UV data presented on the geometry")),
("Generated", "Generated", _("Automatically-generated UV from the vertex positions of the mesh")),
@@ -221,24 +234,29 @@ class BIMStylesProperties(PropertyGroup):
transparency: bpy.props.FloatProperty(
name="Transparency", default=0.0, min=0.0, max=1.0, update=update_shader_graph
)
- # TODO: do something on null?
- is_diffuse_colour_null: BoolProperty(name="Is Null")
+ is_diffuse_colour_null: BoolProperty(name="Is Null", update=update_shader_graph)
diffuse_colour_class: EnumProperty(
items=[(x, x, "") for x in get_args(ColourClass)],
name="Diffuse Colour Class",
- update=update_shader_graph,
+ update=update_diffuse_colour,
)
diffuse_colour: bpy.props.FloatVectorProperty(
- name="Diffuse Colour", subtype="COLOR", default=(1, 1, 1), min=0.0, max=1.0, size=3, update=update_shader_graph
+ name="Diffuse Colour",
+ subtype="COLOR",
+ default=(1, 1, 1),
+ min=0.0,
+ max=1.0,
+ size=3,
+ update=update_diffuse_colour,
)
diffuse_colour_ratio: bpy.props.FloatProperty(
- name="Diffuse Ratio", default=0.0, min=0.0, max=1.0, update=update_shader_graph
+ name="Diffuse Ratio", default=0.0, min=0.0, max=1.0, update=update_diffuse_colour
)
- is_specular_colour_null: BoolProperty(name="Is Null")
+ is_specular_colour_null: BoolProperty(name="Is Null", update=update_shader_graph)
specular_colour_class: EnumProperty(
items=[(x, x, "") for x in get_args(ColourClass)],
name="Specular Colour Class",
- update=update_shader_graph,
+ update=update_specular_colour,
default="IfcNormalisedRatioMeasure",
)
specular_colour: bpy.props.FloatVectorProperty(
@@ -248,7 +266,7 @@ class BIMStylesProperties(PropertyGroup):
min=0.0,
max=1.0,
size=3,
- update=update_shader_graph,
+ update=update_specular_colour,
)
specular_colour_ratio: bpy.props.FloatProperty(
name="Specular Ratio",
@@ -256,16 +274,16 @@ class BIMStylesProperties(PropertyGroup):
default=0.0,
min=0.0,
max=1.0,
- update=update_shader_graph,
+ update=update_specular_colour,
)
- is_specular_highlight_null: BoolProperty(name="Is Null")
+ is_specular_highlight_null: BoolProperty(name="Is Null", update=update_shader_graph)
specular_highlight: bpy.props.FloatProperty(
name="Specular Highlight",
description="Used as Roughness value in PHYSICAL Reflectance Method",
default=0.0,
min=0.0,
max=1.0,
- update=update_shader_graph,
+ update=update_specular_highlight_value,
)
reflectance_method: EnumProperty(
name="Reflectance Method",
diff --git a/src/bonsai/bonsai/tool/cost.py b/src/bonsai/bonsai/tool/cost.py
index bbec525ee92..fc07a629a6b 100644
--- a/src/bonsai/bonsai/tool/cost.py
+++ b/src/bonsai/bonsai/tool/cost.py
@@ -987,7 +987,8 @@ def change_parent_cost_item(
def disable_editing_cost_item_parent(cls) -> None:
props = cls.get_cost_props()
props.active_cost_item_id = 0
- props.change_cost_item_parent = False
+        if props.change_cost_item_parent:  # avoid re-triggering the property's update callback
+            props.change_cost_item_parent = False
@classmethod
def load_cost_item_quantities(cls, cost_item: Optional[ifcopenshell.entity_instance] = None) -> None:
diff --git a/src/bonsai/bonsai/tool/style.py b/src/bonsai/bonsai/tool/style.py
index 8db3ed30fee..83f1751e960 100644
--- a/src/bonsai/bonsai/tool/style.py
+++ b/src/bonsai/bonsai/tool/style.py
@@ -203,6 +203,11 @@ def get_shading_style_data_from_props(cls) -> dict[str, Any]:
available_props = props.bl_rna.properties.keys()
for prop_blender, prop_ifc in STYLE_PROPS_MAP.items():
+ null_prop_name = f"is_{prop_blender}_null"
+ if null_prop_name in available_props and getattr(props, null_prop_name):
+ surface_style_data[prop_ifc] = None
+ continue
+
class_prop_name = f"{prop_blender}_class"
# get detailed color properties if available
diff --git a/src/bonsai/docs/guides/development/maintenance.rst b/src/bonsai/docs/guides/development/maintenance.rst
index d0de81c757b..f08a003fd59 100644
--- a/src/bonsai/docs/guides/development/maintenance.rst
+++ b/src/bonsai/docs/guides/development/maintenance.rst
@@ -61,7 +61,62 @@ When Blender ships with a new Python version:
- What to update
* - ``.github/workflows/ci-lint.yaml``
- ``MIN_BLENDER_PY_VERSION``
+ * - ``.github/scripts/publish-bonsai-releases.py``
+ - ``CURRENT_PYTHON_VERSION``
* - ``src/bonsai/Makefile``
- ``SUPPORTED_PYVERSIONS``
* - ``src/bonsai/scripts/dev_environment.py``
- ``PYTHON_VERSION`` mapping (Blender version, bundled Python version)
+
+Release
+-------
+
+Notes:
+
+- Typically all packages are released at once using the same version schema
+- The ``README.md`` badges can serve as a visual reference for what versions have been released
+- Corrective Release (if needed after a standard release):
+
+ - Create a new branch from the release tag (e.g., from the ``ifcopenshell-0.8.5`` tag)
+ - Update ``VERSION`` with the ``-post1`` suffix (e.g., ``0.8.5-post1``, **not** ``.post1``)
+ - The hyphen is required for semantic versioning compliance; Blender will not process ``.post1`` suffixes correctly
+ - Follow the standard release process for the corrective version
+
+- Multiple Blender Python Versions:
+
+ - Blender does not allow multiple builds for the same platform with different Python versions (e.g., cannot have both ``bonsai_py311-0.8.5-windows-x64.zip`` and ``bonsai_py313-0.8.5-windows-x64.zip``)
+ - Workaround: publish different Python versions as different extension versions (e.g., py313 as ``0.8.5`` and py311 as ``0.8.5-post1``)
+ - Set the maximum Blender version on the Blender extensions platform UI to prevent conflicts (e.g., set max version ``5.1.0`` for ``0.8.5-post1``, which restricts it to versions below 5.1.0)
+
+Things to update:
+
+- ``.github/workflows/ci-bcf-pypi.yml`` - release `bcf-client `_ to PyPI
+- ``.github/workflows/ci-bonsai.yml`` - release bonsai in GitHub releases
+- ``.github/workflows/ci-bsdd-pypi.yaml`` - release `bsdd `_ to PyPI
+- ``.github/workflows/ci-ifc4d-pypi.yaml`` - release `ifc4d `_ to PyPI
+- ``.github/workflows/ci-ifc5d-pypi.yaml`` - release `ifc5d `_ to PyPI
+- ``.github/workflows/ci-ifcclash-pypi.yaml`` - release `ifcclash `_ to PyPI
+- ``.github/workflows/ci-ifcconvert.yml`` - release ifcconvert binaries in GitHub releases
+- ``.github/workflows/ci-ifccsv-pypi.yaml`` - release `ifccsv `_ to PyPI
+- ``.github/workflows/ci-ifcdiff-pypi.yaml`` - release `ifcdiff `_ to PyPI
+- ``.github/workflows/ci-ifcedit-pypi.yaml`` - release `ifcedit `_ to PyPI
+- ``.github/workflows/ci-ifcfm-pypi.yaml`` - release `ifcfm `_ to PyPI
+- ``.github/workflows/ci-ifccityjson-pypi.yaml`` - release `ifccityjson `_ to PyPI
+- ``.github/workflows/ci-ifcmcp-pypi.yaml`` - release `ifcopenshell-mcp `_ to PyPI
+- ``.github/workflows/ci-ifcopenshell-python.yml`` - release ifcopenshell-python binaries in GitHub releases
+- ``.github/workflows/ci-ifcopenshell-python-pypi.yml`` - release `ifcopenshell `_ wheels to PyPI
+- ``.github/workflows/ci-ifcpatch-pypi.yaml`` - release `ifcpatch `_ to PyPI
+- ``.github/workflows/ci-ifcquery-pypi.yaml`` - release `ifcquery `_ to PyPI
+- ``.github/workflows/ci-ifcsverchok.yml`` - release ifcsverchok Blender add-on in GitHub releases
+- ``.github/workflows/ci-ifctester-pypi.yml`` - release `ifctester `_ to PyPI
+- ``.github/workflows/ci-pyodide-wasm-release.yml`` - release pyodide wasm wheel to `wasm-wheels `_
+- ``.github/workflows/publish-bonsai-releases.yml`` - publish Bonsai Blender extension to `Blender extensions platform `_
+
+  - ❗ Requires ``BLENDER_EXTENSIONS_TOKEN`` secret to be set (not yet configured)
+
+- Publishing documentation and websites (see `website `_ repository):
+
+ - `ifcopenshell-docs.yml` - builds and publishes IfcOpenShell documentation to `docs.ifcopenshell.org `_ (`ifcopenshell_org_docs `_ repo)
+ - `bonsai-docs.yml` - builds and publishes Bonsai documentation to `docs.bonsaibim.org `_ (`bonsaibim_org_docs `_ repo)
+ - `publish-websites.yml` - publishes `bonsaibim.org `_ (`bonsaibim_org_static_html `_ repo) and `ifcopenshell.org `_ (`ifcopenshell_org_static_html `_ repo)
+- ``VERSION`` to the release version - **UPDATE THIS LAST** as all workflows above typically depend on it to set the version correctly
diff --git a/src/bonsai/test/tool/test_cost.py b/src/bonsai/test/tool/test_cost.py
new file mode 100644
index 00000000000..3cfbe03c91c
--- /dev/null
+++ b/src/bonsai/test/tool/test_cost.py
@@ -0,0 +1,49 @@
+# Bonsai - OpenBIM Blender Add-on
+# Copyright (C) 2021 Dion Moult
+#
+# This file is part of Bonsai.
+#
+# Bonsai is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Bonsai is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Bonsai. If not, see .
+
+
+import test.bim.bootstrap
+import ifcopenshell.api.cost
+
+import bonsai.core.tool
+import bonsai.tool as tool
+
+from test.bim.bootstrap import NewFile
+
+from bonsai.tool.cost import Cost as subject
+
+class TestImplementsTool(NewFile):
+ def test_run(self):
+ assert isinstance(subject(), bonsai.core.tool.Cost)
+
+class TestDisableEditingCostItemParent(NewFile):
+    def test_avoid_recursion_error(self, monkeypatch):
+ class DummyProps:
+ def __init__(self):
+ self.change_cost_item_parent = None
+ self.active_cost_item_id = 5
+
+ props = DummyProps()
+ monkeypatch.setattr(
+ "bonsai.tool.Cost.get_cost_props",
+ lambda: props
+ )
+ subject.disable_editing_cost_item_parent()
+ assert props.active_cost_item_id == 0
+ assert props.change_cost_item_parent is not False
+
diff --git a/src/common.mk b/src/common.mk
index 765655339b5..cbc251fbe2d 100644
--- a/src/common.mk
+++ b/src/common.mk
@@ -1,7 +1,7 @@
SHELL := sh
IS_STABLE:=FALSE
-PYTHON:=python3.11
-PIP:=pip3.11
+PYTHON:=python3
+PIP:=pip3
VERSION:=$(shell cat ../../VERSION)
VERSION_DATE:=$(shell date '+%y%m%d')
SED:=sed -i
diff --git a/src/ifc5d/ifc5d/IFC4X3QtoBaseQuantitiesBlender.json b/src/ifc5d/ifc5d/IFC4X3QtoBaseQuantitiesBlender.json
index 596f39a2b5d..02f67170281 100644
--- a/src/ifc5d/ifc5d/IFC4X3QtoBaseQuantitiesBlender.json
+++ b/src/ifc5d/ifc5d/IFC4X3QtoBaseQuantitiesBlender.json
@@ -237,7 +237,7 @@
"Area": "get_net_side_area",
"Height": "get_height",
"Perimeter": "get_rectangular_perimeter",
- "Width": "get_length"
+ "Width": "get_x"
}
},
"IfcDuctFitting + IfcDuctFittingType": {
diff --git a/src/ifcchat/ifc_worker.js b/src/ifcchat/ifc_worker.js
index 517bc4b24ca..06d6ecdf70c 100644
--- a/src/ifcchat/ifc_worker.js
+++ b/src/ifcchat/ifc_worker.js
@@ -29,16 +29,7 @@ async function ensurePyodide() {
const micropip = pyodide.pyimport("micropip");
micropip.install("python-dateutil")
- // Detect python minor version (3.12 vs 3.13) and pick a matching wheel.
- const pyVer = pyodide.runPython(`
-import sys
-f"{sys.version_info.major}.{sys.version_info.minor}"
- `);
-
- const wheelUrl =
- pyVer === "3.13"
- ? "https://ifcopenshell.github.io/wasm-wheels/ifcopenshell-0.8.3+34a1bc6-cp313-cp313-emscripten_4_0_9_wasm32.whl"
- : "https://ifcopenshell.github.io/wasm-wheels/ifcopenshell-0.8.2+d50e806-cp312-cp312-emscripten_3_1_58_wasm32.whl";
+ const wheelUrl = "https://ifcopenshell.github.io/wasm-wheels/ifcopenshell-0.8.5-cp313-cp313-pyodide_2025_0_wasm32.whl";
await micropip.install(wheelUrl);
diff --git a/src/ifcgeom/mapping/IfcPointByDistanceExpression.cpp b/src/ifcgeom/mapping/IfcPointByDistanceExpression.cpp
index 180226fb820..f07234a8f16 100644
--- a/src/ifcgeom/mapping/IfcPointByDistanceExpression.cpp
+++ b/src/ifcgeom/mapping/IfcPointByDistanceExpression.cpp
@@ -52,6 +52,15 @@ taxonomy::ptr mapping::map_impl(const IfcSchema::IfcPointByDistanceExpression* i
if (inst->OffsetVertical().has_value()) {
auto offset_vertical = inst->OffsetVertical().get() * length_unit_;
o += offset_vertical * z;
+
+			// NOTE(review): leftover debug output — remove before merge.
+			// auto tmp1 = (z * offset_vertical).eval();
+			// auto tmp2 = (Eigen::Vector3d(0, 0, 1) * offset_vertical).eval();
+			// auto tmp3 = (tmp1 - tmp2).eval();
+			// std::ostringstream oss;
+			// oss << "local z: " << z.x() << "," << z.y() << "," << z.z() << "; delta: " << tmp3.x() << "," << tmp3.y() << "," << tmp3.z();
+			// auto osss = oss.str();
+			// std::wcout << osss.c_str() << std::endl;
}
if (inst->OffsetLongitudinal().has_value()) {
diff --git a/src/ifcmcp/README.md b/src/ifcmcp/README.md
index a0fe01c8eaf..6d513bfd07c 100644
--- a/src/ifcmcp/README.md
+++ b/src/ifcmcp/README.md
@@ -8,10 +8,10 @@ sessions.
## Installation
```bash
-pip install ifcmcp
+pip install ifcopenshell-mcp
```
-Requires `ifcopenshell`, `ifcquery`, and `ifcedit`. The `mcp` package is an optional dependency needed to run the server; install it with `pip install ifcmcp[mcp]` or add `mcp` separately.
+Requires `ifcopenshell`, `ifcquery`, and `ifcedit`. The `mcp` package is an optional dependency needed to run the server; install it with `pip install ifcopenshell-mcp[mcp]` or add `mcp` separately.
## Running the server
diff --git a/src/ifcopenshell-python/Makefile b/src/ifcopenshell-python/Makefile
index 643735ba375..7d6592635d9 100644
--- a/src/ifcopenshell-python/Makefile
+++ b/src/ifcopenshell-python/Makefile
@@ -5,8 +5,8 @@ VERSION_DATE:=$(shell date '+%y%m%d')
PYVERSION:=py311
PLATFORM:=linux64
-PYTHON:=python3.11
-PIP:=pip3.11
+PYTHON:=python3
+PIP:=pip3
SED:=sed -i
VENV_ACTIVATE:=bin/activate
diff --git a/src/ifcopenshell-python/ifcopenshell/api/cost/remove_cost_item.py b/src/ifcopenshell-python/ifcopenshell/api/cost/remove_cost_item.py
index ce1aa5545ef..9b90cce2e88 100644
--- a/src/ifcopenshell-python/ifcopenshell/api/cost/remove_cost_item.py
+++ b/src/ifcopenshell-python/ifcopenshell/api/cost/remove_cost_item.py
@@ -51,7 +51,7 @@ def remove_cost_item(file: ifcopenshell.file, cost_item: ifcopenshell.entity_ins
if history:
ifcopenshell.util.element.remove_deep2(file, history)
elif inverse.is_a("IfcRelAssignsToControl"):
- if len(inverse.RelatedObjects) >= 2 or inverse.RelatingControl == cost_item:
+ if len(inverse.RelatedObjects) >= 2:
continue
history = inverse.OwnerHistory
file.remove(inverse)
diff --git a/src/ifcopenshell-python/ifcopenshell/draw.py b/src/ifcopenshell-python/ifcopenshell/draw.py
index 962dbbb34f5..ba78f0d48da 100644
--- a/src/ifcopenshell-python/ifcopenshell/draw.py
+++ b/src/ifcopenshell-python/ifcopenshell/draw.py
@@ -42,7 +42,8 @@
DO_NOTHING = lambda *args: None
-ARRANGE_POLYGON_SETTINGS = W.arrange_polygon_settings() if hasattr(W, 'arrange_polygon_settings') else None
+ARRANGE_POLYGON_SETTINGS = W.arrange_polygon_settings() if hasattr(W, "arrange_polygon_settings") else None
+
@dataclass
class draw_settings:
diff --git a/src/ifcopenshell-python/ifcopenshell/util/cost.py b/src/ifcopenshell-python/ifcopenshell/util/cost.py
index 875594f1a5e..4354e49e90c 100644
--- a/src/ifcopenshell-python/ifcopenshell/util/cost.py
+++ b/src/ifcopenshell-python/ifcopenshell/util/cost.py
@@ -196,9 +196,12 @@ def get_cost_items_for_product(product: ifcopenshell.entity_instance) -> list[if
:return: A list of IfcCostItem objects representing the cost items related to the product.
"""
cost_items = []
- for assignment in product.HasAssignments:
- if assignment.is_a("IfcRelAssignsToControl") and assignment.RelatingControl.is_a("IfcCostItem"):
- cost_items.append(assignment.RelatingControl)
+ for assignment in product.HasAssignments or []:
+ if assignment.is_a("IfcRelAssignsToControl"):
+ control = assignment.RelatingControl
+ if control and control.is_a("IfcCostItem"):
+ cost_items.append(control)
+
return cost_items
diff --git a/src/ifcopenshell-python/test/util/test_cost.py b/src/ifcopenshell-python/test/util/test_cost.py
new file mode 100644
index 00000000000..516a69edd0d
--- /dev/null
+++ b/src/ifcopenshell-python/test/util/test_cost.py
@@ -0,0 +1,52 @@
+# IfcOpenShell - IFC toolkit and geometry engine
+# Copyright (C) 2021 Dion Moult
+#
+# This file is part of IfcOpenShell.
+#
+# IfcOpenShell is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcOpenShell is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcOpenShell. If not, see <https://www.gnu.org/licenses/>.
+
+import pytest
+
+import ifcopenshell.api.control
+import ifcopenshell.api.cost
+import test.bootstrap
+import ifcopenshell.api.root
+
+import ifcopenshell.util.cost as subject
+
+class TestGetCostItemForProduct(test.bootstrap.IFC4):
+ def test_run(self):
+ model = self.file
+ element = ifcopenshell.api.root.create_entity(self.file, ifc_class="IfcWall")
+ cost_schedule = ifcopenshell.api.cost.add_cost_schedule(model)
+ item1 = ifcopenshell.api.cost.add_cost_item(model, cost_schedule=cost_schedule)
+ ifcopenshell.api.control.assign_control(model, related_objects=[element], relating_control=item1)
+ assert list(subject.get_cost_items_for_product(element)) == [item1]
+
+ def test_remove_cost_item(self):
+ model = self.file
+ element = ifcopenshell.api.root.create_entity(self.file, ifc_class="IfcWall")
+ cost_schedule = ifcopenshell.api.cost.add_cost_schedule(model)
+ item1 = ifcopenshell.api.cost.add_cost_item(model, cost_schedule=cost_schedule)
+ ifcopenshell.api.control.assign_control(model, related_objects=[element], relating_control=item1)
+ ifcopenshell.api.cost.remove_cost_item(model, cost_item = item1)
+ assert list(subject.get_cost_items_for_product(element)) == []
+
+ def test_no_assigned_cost_items(self):
+ model = self.file
+ element = ifcopenshell.api.root.create_entity(self.file, ifc_class="IfcWall")
+ cost_schedule = ifcopenshell.api.cost.add_cost_schedule(model)
+ item1 = ifcopenshell.api.cost.add_cost_item(model, cost_schedule=cost_schedule)
+ assert list(subject.get_cost_items_for_product(element)) == []
+
diff --git a/src/ifcparse/IfcHierarchyHelper.h b/src/ifcparse/IfcHierarchyHelper.h
index 019cbf69c1a..cfe37b37179 100644
--- a/src/ifcparse/IfcHierarchyHelper.h
+++ b/src/ifcparse/IfcHierarchyHelper.h
@@ -475,7 +475,7 @@ class IFC_PARSE_API IfcHierarchyHelper : public IfcParse::IfcFile {
t->set_attribute_value(1, owner_hist);
int relating_index = 4;
int related_index = 5;
- if (T::Class().name() == "IfcRelContainedInSpatialStructure" || std::is_base_of::value) {
+ if (T::Class().name() == "IfcRelContainedInSpatialStructure" || T::Class().name() == "IfcRelReferencedInSpatialStructure" || std::is_base_of::value) {
// some classes have attributes reversed.
std::swap(relating_index, related_index);
}
diff --git a/src/svgfill/src/arrange_polygons.cpp b/src/svgfill/src/arrange_polygons.cpp
index ba8785a2677..f405f707c14 100644
--- a/src/svgfill/src/arrange_polygons.cpp
+++ b/src/svgfill/src/arrange_polygons.cpp
@@ -1461,8 +1461,8 @@ double point_to_oriented_box_distance(const DPoint& p, const MergedBoxRecord& bo
std::map> snap_points_to_box_axes(
const CenterLineGraphData& graph,
- const std::vector& boxes)
-{
+ const std::vector& boxes,
+ const K::FT& max_projection_distance) {
std::vector snapped_points(graph.points.size());
for (size_t i = 0; i < graph.points.size(); ++i) {
@@ -1521,7 +1521,10 @@ std::map> snap_points_to_box_axes(
}
return a.line_distance < b.line_distance;
});
- snapped_points[i] = best.projection;
+
+ if ((snapped_points[i] - best.projection).squared_length() < (max_projection_distance * max_projection_distance)) {
+ snapped_points[i] = best.projection;
+ }
}
std::map> adjacency;
@@ -1545,8 +1548,8 @@ std::map> snap_points_to_box_axes(
Graph2D join_segment_runs(
DebugWriter& debug,
const std::map>& line_graph,
- const std::map& midpoint_to_edge_length)
-{
+ const std::map& midpoint_to_edge_length,
+ const K::FT& max_projection_distance) {
auto graph = make_center_line_graph_data(line_graph, midpoint_to_edge_length);
auto runs = runs_from_graph(graph);
runs.erase(std::remove_if(runs.begin(), runs.end(), [](const LineRun& run) {
@@ -1577,7 +1580,7 @@ Graph2D join_segment_runs(
}
debug.write_polygons(run_polygons, "merged_boxes");
- auto snapped_graph = snap_points_to_box_axes(graph, boxes);
+ auto snapped_graph = snap_points_to_box_axes(graph, boxes, max_projection_distance);
return Graph2D(snapped_graph);
}
@@ -2069,6 +2072,83 @@ std::list> extend_end_vertices_based_on_input(
return constructed_segments;
}
+std::list>
+extend_end_vertices_based_on_input_simple(
+ const Graph2D& G,
+ const Polygon_list& outer_perimiter,
+ const K::FT& max_projection_distance)
+{
+ std::list> constructed_segments;
+
+ for (auto it = G.vertices_begin(); it != G.vertices_end(); ++it) {
+ if (it->second.size() == 1) {
+ auto& M = it->first;
+
+ for (auto& bnd : outer_perimiter) {
+ // if point M is contained in bnd interior:
+ // if (!bnd.has_on_unbounded_side(M)) {
+ if (bnd.has_on_bounded_side(M)) {
+ auto& incoming = *it->second.begin();
+ // create ray incoming -> M
+ CGAL::Ray_2 ray(incoming, M - incoming);
+
+ // intersect ray with boundary
+ boost::optional> closest_segment;
+ boost::optional> closest_intersection_point;
+ K::FT sq_distance_along_ray = std::numeric_limits::infinity();
+ for (auto jt = bnd.edges_begin(); jt != bnd.edges_end(); ++jt) {
+ const auto& seg = *jt;
+ auto x = CGAL::intersection(ray, seg);
+ if (x) {
+ if (auto* xp = variant_get>(&*x)) {
+ auto dist = ((*xp) - M).squared_length();
+ if (dist < sq_distance_along_ray) {
+ if (dist < (max_projection_distance * max_projection_distance)) {
+ closest_segment = seg;
+ closest_intersection_point = *xp;
+ sq_distance_along_ray = dist;
+ } else {
+ }
+ }
+ }
+ }
+ }
+
+ if (closest_intersection_point) {
+ constructed_segments.push_front({M, *closest_intersection_point});
+ } else {
+
+ // Loop over boundary segments, and project point onto it, take the closest
+ K::FT closest_distance = std::numeric_limits::infinity();
+ boost::optional> closest_point;
+ for (auto& poly : outer_perimiter) {
+ for (auto jt = poly.edges_begin(); jt != poly.edges_end(); ++jt) {
+ auto seg = *jt;
+ auto Pp = seg.supporting_line().projection(M);
+ if (seg.has_on(Pp)) {
+ auto d = CGAL::squared_distance(Pp, M);
+ if (d < (max_projection_distance * max_projection_distance)) {
+ if (d < closest_distance) {
+ closest_distance = d;
+ closest_point = Pp;
+ }
+ }
+ }
+ }
+ }
+
+ if (closest_point) {
+ constructed_segments.push_front({M, *closest_point});
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return constructed_segments;
+}
+
void fuse_corridor_halves_with_input(Arrangement_2& arr, Graph2D& G, SegmentLookup& segment_lookup, const Polygon_list& input_polygons, DebugWriter& debug_output) {
std::set edges_to_remove;
@@ -2161,7 +2241,7 @@ class Segment_2_less {
}
};
-std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& right) {
+std::vector arrangement_cell_iou(DebugWriter& debug_output, Arrangement_2& left, Arrangement_2& right) {
using Walk_pl = CGAL::Arr_walk_along_line_point_location;
Walk_pl walk_pl(right);
@@ -2170,6 +2250,9 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
std::vector return_values;
+ K::FT max_iou_deviation = 1;
+ std::array max_deviation_poly_pair;
+
for (auto it = left.faces_begin(); it != left.faces_end(); ++it) {
if (!it->is_unbounded()) {
// convert arr facet to polygon with holes
@@ -2178,6 +2261,9 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
for (auto hit = it->inner_ccbs_begin(); hit != it->inner_ccbs_end(); ++hit) {
pwh.add_hole(circ_to_poly(*hit));
}
+ if (!pwh.outer_boundary().is_simple()) {
+ throw std::runtime_error("Polygon with holes has a non-simple outer boundary");
+ }
CGAL::Polygon_triangulation_decomposition_2 decompositor;
std::vector temp;
@@ -2218,9 +2304,22 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
}
}
+ if (max_score == -std::numeric_limits::infinity()) {
+ // no more points to try
+ return_values.push_back(0);
+ break;
+ }
+
+ visited_points.insert(best_point);
+
auto res = walk_pl.locate(best_point);
if (auto* v = variant_get(&res)) {
+ if ((*v)->is_unbounded()) {
+ // try next point
+ continue;
+ }
if (visited_faces_on_right.count(*v) > 0) {
+ // Maybe we should be more permissive, try some other points etc.
return_values.push_back(0);
} else {
// convert arr facet to polygon with holes
@@ -2229,6 +2328,9 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
for (auto hit = (*v)->inner_ccbs_begin(); hit != (*v)->inner_ccbs_end(); ++hit) {
pwh_right.add_hole(circ_to_poly(*hit));
}
+ if (!pwh_right.outer_boundary().is_simple()) {
+ throw std::runtime_error("Polygon with holes has a non-simple outer boundary");
+ }
// compute intersection over union of pwh and the original polygon
if (CGAL::do_intersect(pwh, pwh_right)) {
@@ -2238,7 +2340,7 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
for (auto& r : result) {
auto poly_area = r.outer_boundary().area();
for (auto& h : r.holes()) {
- poly_area -= h.area();
+ poly_area -= CGAL::abs(h.area());
}
intersection_area += poly_area;
}
@@ -2246,9 +2348,15 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
CGAL::join(pwh, pwh_right, poly12);
typename K::FT union_area = poly12.outer_boundary().area();
for (auto& h : poly12.holes()) {
- union_area -= h.area();
+ union_area -= CGAL::abs(h.area());
}
return_values.push_back(intersection_area / union_area);
+
+ auto& v = return_values.back();
+ if (v < max_iou_deviation) {
+ max_iou_deviation = v;
+ max_deviation_poly_pair = {pwh.outer_boundary(), pwh_right.outer_boundary()};
+ }
} else {
return_values.push_back(0);
}
@@ -2263,6 +2371,11 @@ std::vector arrangement_cell_iou(Arrangement_2& left, Arrangement_2& righ
}
}
+ if (max_iou_deviation != 1) {
+ debug_output.write_polygon(max_deviation_poly_pair[0], "max_iou_deviation_left");
+ debug_output.write_polygon(max_deviation_poly_pair[1], "max_iou_deviation_right");
+ }
+
return return_values;
}
@@ -2935,6 +3048,18 @@ class timer {
bool enabled_;
};
+size_t delete_same_facet_edge_pairs(Arrangement_2& arr) {
+ size_t n_deleted = 0;
+ for (auto it = arr.edges_begin(); it != arr.edges_end();) {
+ decltype(it) current = it++;
+ if (current->face() == current->twin()->face()) {
+ arr.remove_edge(current);
+ n_deleted++;
+ }
+ }
+ return n_deleted;
+}
+
void arrange_cgal_polygons(svgfill::arrange_polygon_settings settings, const std::vector& input_polygons_, std::vector& output_polygons, double polygon_offset_distance = -1.) {
static const double OVERLAP_RESOLUTION_DISTANCE = 1.e-1;
// even larger amount of inset so that outer perimeter is safely within all input polygons even when overlap resolution is applied
@@ -3142,8 +3267,10 @@ void arrange_cgal_polygons(svgfill::arrange_polygon_settings settings, const std
t0 = timer.start("center line cleaning");
Graph2D G;
+ Graph2D G_orig(line_graph);
+
if (settings.line_cleaning_algo == 0) {
- G = join_segment_runs(debug_output, line_graph, midpoint_to_edge_length);
+ G = join_segment_runs(debug_output, line_graph, midpoint_to_edge_length, subdivision_length * 4);
Arrangement_2 arr;
G.to_arrangement(arr);
Graph2D G2;
@@ -3181,7 +3308,65 @@ void arrange_cgal_polygons(svgfill::arrange_polygon_settings settings, const std
t0 = timer.start("topology");
- auto segments = extend_end_vertices_based_on_input(G, midpoint_to_segment, segment_to_input_facet, outer_perimiter, segment_lookup, subdivision_length * 4);
+ std::list> segments, segments1, segments2;
+
+ if (settings.line_cleaning_algo == 0) {
+ segments1 = extend_end_vertices_based_on_input_simple(G, outer_perimiter, subdivision_length * 4);
+ segments2 = extend_end_vertices_based_on_input_simple(G_orig, outer_perimiter, subdivision_length * 4);
+
+ Arrangement_2 arr_clean;
+ G.to_arrangement(arr_clean);
+ for (auto& pq : segments1) {
+ if (pq.first == pq.second) {
+ continue;
+ }
+ CGAL::insert(arr_clean, Segment_2(pq.first, pq.second));
+ }
+
+ Arrangement_2 arr_orig;
+ G_orig.to_arrangement(arr_orig);
+ for (auto& pq : segments2) {
+ if (pq.first == pq.second) {
+ continue;
+ }
+ CGAL::insert(arr_orig, Segment_2(pq.first, pq.second));
+ }
+
+ delete_same_facet_edge_pairs(arr_clean);
+ delete_same_facet_edge_pairs(arr_orig);
+
+ for (auto& p : outer_perimiter) {
+ for (auto it = p.edges_begin(); it != p.edges_end(); ++it) {
+ auto source = it->source();
+ auto target = it->target();
+ if (source == target) {
+ continue;
+ }
+ CGAL::insert(arr_orig, Segment_2(source, target));
+ CGAL::insert(arr_clean, Segment_2(source, target));
+ }
+ }
+
+ auto ious = arrangement_cell_iou(debug_output, arr_clean, arr_orig);
+ /*
+ for (auto& iou : ious) {
+ std::cout << " " << CGAL::to_double(iou - 1);
+ }
+ std::cout << std::endl;
+ */
+
+ auto it = std::min_element(ious.begin(), ious.end());
+
+ if (it != ious.end() && (*it < 0.5)) {
+ std::cerr << "Significant difference between cleaned and original arrangement, using original for topology reconstruction: " << *it << std::endl;
+ segments = segments2;
+ G = G_orig;
+ } else {
+ segments = segments1;
+ }
+ } else {
+ segments = extend_end_vertices_based_on_input(G, midpoint_to_segment, segment_to_input_facet, outer_perimiter, segment_lookup, subdivision_length * 4);
+ }
// Now plot the edges on an arrangement in order to find planar cycles
// and merge the corridor-halves with their neighbouring input polygon
@@ -3256,7 +3441,7 @@ void arrange_cgal_polygons(svgfill::arrange_polygon_settings settings, const std
double threshold;
clean_noisy_paths(debug_output, arr, segment_lookup, threshold);
remove_colinear_vertices(arr);
- clean_noisy_bounds(debug_output, arr, segment_lookup, threshold);
+ // clean_noisy_bounds(debug_output, arr, segment_lookup, threshold);
}
t0.stop();