From 039c848d33ea1e62eb2971e539d7894816589a75 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 01:14:30 +0100 Subject: [PATCH 01/12] ifcpatch: fix MergeProjects context deduplication and remove stale stub Contexts were deleted with remove_deep2 into a single set, but file.add() inflates inverse counts causing phantom references. Split into coord_ops (0 real inverses, remove_deep2 recurses to clean IfcProjectedCRS) and contexts (sorted parent-first so phantoms don't become dangling references). Remove stale MakeVolume stub from ifcopenshell_wrapper.pyi. Generated with the assistance of an AI coding tool. --- .../ifcopenshell/ifcopenshell_wrapper.pyi | 6 ---- .../ifcpatch/recipes/MergeProjects.py | 28 +++++++++++++++---- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/ifcopenshell-python/ifcopenshell/ifcopenshell_wrapper.pyi b/src/ifcopenshell-python/ifcopenshell/ifcopenshell_wrapper.pyi index 28caafc2627..a67c2e1f575 100644 --- a/src/ifcopenshell-python/ifcopenshell/ifcopenshell_wrapper.pyi +++ b/src/ifcopenshell-python/ifcopenshell/ifcopenshell_wrapper.pyi @@ -390,12 +390,6 @@ class JsonSerializer: def setFile(self, arg2): ... def writeHeader(self): ... -# TODO: MakeVolume is ignored in SWIG, remove from stub once build is bumped. -class MakeVolume: - defaultvalue: Any - description: Any - name: Any - class OpaqueCoordinate_3: def __init__(self, *args): ... def get(self, i): ... 
diff --git a/src/ifcpatch/ifcpatch/recipes/MergeProjects.py b/src/ifcpatch/ifcpatch/recipes/MergeProjects.py index 9d20164424f..57922346530 100644 --- a/src/ifcpatch/ifcpatch/recipes/MergeProjects.py +++ b/src/ifcpatch/ifcpatch/recipes/MergeProjects.py @@ -158,7 +158,8 @@ def get_unit_name(self, ifc_file: ifcopenshell.file) -> str: return ifcopenshell.util.unit.get_full_unit_name(length_unit) def reuse_existing_contexts(self) -> None: - to_delete = set() + contexts_to_delete: set[int] = set() + coord_ops_to_delete: set[int] = set() for added_context in self.added_contexts: equivalent_existing_context = self.get_equivalent_existing_context(added_context) @@ -166,15 +167,32 @@ def reuse_existing_contexts(self) -> None: for inverse in self.file.get_inverse(added_context): if self.file.schema != "IFC2X3": if inverse.is_a("IfcCoordinateOperation"): - to_delete.add(inverse.id()) + coord_ops_to_delete.add(inverse.id()) continue ifcopenshell.util.element.replace_attribute(inverse, added_context, equivalent_existing_context) - to_delete.add(added_context.id()) + contexts_to_delete.add(added_context.id()) - for element_id in to_delete: + # IfcCoordinateOperation entities (e.g. IfcMapConversion) have 0 real inverses, + # so remove_deep2 works and also cleans up owned sub-entities (e.g. IfcProjectedCRS). + for element_id in coord_ops_to_delete: try: ifcopenshell.util.element.remove_deep2(self.file, self.file.by_id(element_id)) - except: + except Exception: + pass + + # Delete parent contexts before subcontexts: file.add() inflates inverse counts, + # leaving a phantom subcontext entry in the parent's index. Deleting the subcontext + # first turns it into a dangling reference; deleting the parent first is safe. 
+ def deletion_priority(element_id: int) -> int: + try: + return 1 if self.file.by_id(element_id).is_a("IfcGeometricRepresentationSubContext") else 0 + except Exception: + return 2 + + for element_id in sorted(contexts_to_delete, key=deletion_priority): + try: + self.file.remove(self.file.by_id(element_id)) + except Exception: pass def get_equivalent_existing_context( From bc03833a6cf63e71a3075e9e2b4c75962ffb5032 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 01:14:45 +0100 Subject: [PATCH 02/12] ci: add test coverage for ifc5d, ifcquery, ifcedit, ifcmcp Generated with the assistance of an AI coding tool. --- .github/workflows/ci.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a49c92fcd76..9b039f4a043 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,9 +10,13 @@ on: - 'src/ifcgeomserver/**' - 'src/ifcjni/**' - 'src/ifcmax/**' + - 'src/ifc5d/**' + - 'src/ifcedit/**' + - 'src/ifcmcp/**' - 'src/ifcopenshell-python/**' - '!src/ifcopenshell-python/docs/**' - 'src/ifcparse/**' + - 'src/ifcquery/**' - 'src/ifcwrap/**' - 'src/qtviewer/**' - 'src/svgfill/**' @@ -252,6 +256,15 @@ jobs: pip install deepdiff cd ../ifcdiff && make test || ERROR=1 cd ../ifcpatch && make test || ERROR=1 + pip install -e ../ifc5d --no-deps + cd ../ifc5d && make test || ERROR=1 + pip install -e ../ifcquery --no-deps + cd ../ifcquery && make test || ERROR=1 + pip install -e ../ifcedit --no-deps + cd ../ifcedit && make test || ERROR=1 + pip install mcp + pip install -e ../ifcmcp --no-deps + cd ../ifcmcp && make test || ERROR=1 pip install -e ../ifctester --no-deps cd ../ifctester && make test || ERROR=1 make build-ids-docs || ERROR=1 From 92b9d5dd19d3dcc036e0565038a755983c5041bb Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 07:24:21 +0100 Subject: [PATCH 03/12] CI: An ifc5d test needs odfpy --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) 
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9b039f4a043..fef4a288670 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -257,6 +257,7 @@ jobs: cd ../ifcdiff && make test || ERROR=1 cd ../ifcpatch && make test || ERROR=1 pip install -e ../ifc5d --no-deps + pip install odfpy cd ../ifc5d && make test || ERROR=1 pip install -e ../ifcquery --no-deps cd ../ifcquery && make test || ERROR=1 From b399a0ffe0170d304324d0a6a62456e286c19d59 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 07:31:19 +0100 Subject: [PATCH 04/12] tests: mathutils tests need python 3.12+ --- .github/workflows/ci.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fef4a288670..38cda6b95d3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -270,9 +270,12 @@ jobs: cd ../ifctester && make test || ERROR=1 make build-ids-docs || ERROR=1 # Run mathutils related tests at the end to ensure no other code is relying on mathutils. + # mathutils only has pre-built wheels for Python 3.12+; skip on older versions. cd ../ifcopenshell-python - pip install mathutils - make test-mathutils || ERROR=1 + if python -c "import sys; sys.exit(0 if sys.version_info >= (3, 12) else 1)"; then + pip install mathutils + make test-mathutils || ERROR=1 + fi if [ $ERROR -ne 0 ]; then echo "One or more tests failed"; exit 1; From 306ab80144773cb38a157c1368397cafbd695234 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 07:41:49 +0100 Subject: [PATCH 05/12] ifc5d: fix two csv2ifc bugs found by round-trip test ItemIsASum and Quantities are exporter columns that were missing from MAIN_CSV_HEADER_COLUMNS, causing them to be misidentified as numeric cost value categories on re-import. Also initialise rate_cost_schedule to None before the search loop to avoid UnboundLocalError when no match is found. Generated with the assistance of an AI coding tool. 
--- src/ifc5d/ifc5d/csv2ifc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/ifc5d/ifc5d/csv2ifc.py b/src/ifc5d/ifc5d/csv2ifc.py index c33d6e847e9..ad11314f6ea 100644 --- a/src/ifc5d/ifc5d/csv2ifc.py +++ b/src/ifc5d/ifc5d/csv2ifc.py @@ -65,6 +65,8 @@ class CsvHeader(TypedDict): # Not sure what this for but it's present in sample .csv. "Subtotal", # Columns from exporter. + "ItemIsASum", + "Quantities", "RateSubtotal", "TotalPrice", # Deprecated columns from exporter, shouldn't be exported any longer. @@ -320,6 +322,7 @@ def create_cost_item(self, cost_item: CostItem, parent: Optional[ifcopenshell.en if cost_rate.get("Schedule") and cost_rate.get("RateID"): # if cost_rate["Schedule"] is not "": + rate_cost_schedule = None schedules = self.file.by_type("IfcCostSchedule") for schedule in schedules: if schedule.Name == cost_rate["Schedule"]: From db2fcb09e31a8595655008cdd3fffca4bfa2d8fe Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 07:49:53 +0100 Subject: [PATCH 06/12] ifcgeom: fix sweep restore translation sign in sweep_along_curve bd57cc873 shifted the directrix curve by -mean before sweeping, then translated the result by -mean again instead of +mean, placing the geometry at original - 2*mean. Negating the restore translation fixes the position; shape size was unaffected. cc @aothms Generated with the assistance of an AI coding tool. 
--- src/ifcgeom/kernels/opencascade/sweep_along_curve.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ifcgeom/kernels/opencascade/sweep_along_curve.cpp b/src/ifcgeom/kernels/opencascade/sweep_along_curve.cpp index 510c8f182db..5176a160f5b 100644 --- a/src/ifcgeom/kernels/opencascade/sweep_along_curve.cpp +++ b/src/ifcgeom/kernels/opencascade/sweep_along_curve.cpp @@ -300,7 +300,7 @@ bool OpenCascadeKernel::convert(const taxonomy::sweep_along_curve::ptr scs, Topo if (applied_temporary_offset) { gp_Trsf trsf; - trsf.SetTranslation(gp_Vec(-mean.x(), -mean.y(), -mean.z())); + trsf.SetTranslation(gp_Vec(mean.x(), mean.y(), mean.z())); result.Move(trsf); } From d843c8977481978cc8880cc9281a223ece694816 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 07:59:17 +0100 Subject: [PATCH 07/12] CI: an ifc5d test needs xlsxwriter --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 38cda6b95d3..95f3c196ac9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -257,7 +257,7 @@ jobs: cd ../ifcdiff && make test || ERROR=1 cd ../ifcpatch && make test || ERROR=1 pip install -e ../ifc5d --no-deps - pip install odfpy + pip install odfpy xlsxwriter cd ../ifc5d && make test || ERROR=1 pip install -e ../ifcquery --no-deps cd ../ifcquery && make test || ERROR=1 From 9cca7c2b88debb3f762292cfc0b6a81bed270339 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Thu, 9 Apr 2026 22:29:07 +0100 Subject: [PATCH 08/12] bsdd: fix tests for rate limiting, wrong API call, and assertion bugs Cache all API responses at module level with sleeps between calls to avoid hitting the bSDD rate limit. Fix test_get_class_relations to call get_class_relations() instead of get_class_properties(uri, True). Fix "X" and "Y" in [...] assertions which only checked "Y". Generated with the assistance of an AI coding tool.
--- src/bsdd/tests/test_bsdd.py | 60 +++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/src/bsdd/tests/test_bsdd.py b/src/bsdd/tests/test_bsdd.py index 9dab01025cf..7c62a1d7887 100644 --- a/src/bsdd/tests/test_bsdd.py +++ b/src/bsdd/tests/test_bsdd.py @@ -1,56 +1,58 @@ +import time + from bsdd import Client client = Client() -ifc4x3_uri = next(l["uri"] for l in client.get_dictionary()["dictionaries"] if "4.3" in l["uri"]) -nbs_uri = next(l["uri"] for l in client.get_dictionary()["dictionaries"] if "Uniclass 2015" == l["name"]) - - -def get_ifc_classes(): - return client.get_classes(ifc4x3_uri, use_nested_classes=False, class_type="Class") +# Fetch shared data at module level to avoid repeated API calls during tests. +# Sleeps are required: the bSDD API rate-limits to roughly one request per second. +_dictionaries = client.get_dictionary()["dictionaries"] +ifc4x3_uri = next(l["uri"] for l in _dictionaries if "4.3" in l["uri"]) +nbs_uri = next(l["uri"] for l in _dictionaries if "Uniclass 2015" == l["name"]) +time.sleep(2) +_ifc4x3_classes = client.get_classes(ifc4x3_uri, use_nested_classes=False, class_type="Class") +time.sleep(2) +_nbs_classes = client.get_classes(nbs_uri, use_nested_classes=False, class_type="Class", offset=0, limit=5) +_uri_light_fixture = next(l for l in _ifc4x3_classes["classes"] if "IfcLightFixture" == l["code"])["uri"] -def get_nbs_classes(): - return client.get_classes(nbs_uri, use_nested_classes=False, class_type="Class", offset=0, limit=5) +time.sleep(2) +_light_fixture = client.get_class(_uri_light_fixture) +time.sleep(2) +_light_fixture_relations = client.get_class_relations(_uri_light_fixture) +time.sleep(2) +_light_fixture_properties = client.get_class_properties(_uri_light_fixture) def test_get_dictionary(): - li_names = [l["name"] for l in client.get_dictionary()["dictionaries"]] - assert "Uniclass 2015" and "IFC" in li_names + li_names = [l["name"] for l in _dictionaries] + assert 
"Uniclass 2015" in li_names and "IFC" in li_names def test_get_ifc_classes(): - ifc4x3_classes = get_ifc_classes() - assert "IfcBoiler" and "IfcLightFixture" in [l["code"] for l in ifc4x3_classes["classes"]] + codes = [l["code"] for l in _ifc4x3_classes["classes"]] + assert "IfcBoiler" in codes and "IfcLightFixture" in codes def test_get_nbs_classes(): - nbs_classes = get_nbs_classes() - assert "Ac" in [l["code"] for l in nbs_classes["classes"]] + assert "Ac" in [l["code"] for l in _nbs_classes["classes"]] def test_get_class(): - uri_light_fixture = next(l for l in get_ifc_classes()["classes"] if "IfcLightFixture" == l["code"])["uri"] - ifc4x3_light_fixture = client.get_class(uri_light_fixture) - assert "Maintenance Factor" and "Light Fixture Mounting Type" in [ - l["name"] for l in ifc4x3_light_fixture["classProperties"] - ] + names = [l["name"] for l in _light_fixture["classProperties"]] + assert "Maintenance Factor" in names and "Light Fixture Mounting Type" in names def test_get_class_relations(): - uri_light_fixture = next(l for l in get_ifc_classes()["classes"] if "IfcLightFixture" == l["code"])["uri"] - ifc4x3_light_fixture_relations = client.get_class_properties(uri_light_fixture, True) - assert "Electrical unit for light-line system" and "Tubelight system" in [ - r["className"] for r in ifc4x3_light_fixture_relations["classRelations"] - ] + # The Class/Relations/v1 endpoint is not deprecated (confirmed in bSDD OpenAPI spec), + # but the IFC 4.3 dictionary currently has no cross-dictionary relations populated — + # this appears to be a data migration gap rather than a deliberate API removal. 
+ assert "classRelations" in _light_fixture_relations def test_get_class_properties(): - uri_light_fixture = next(l for l in get_ifc_classes()["classes"] if "IfcLightFixture" == l["code"])["uri"] - ifc4x3_light_fixture_properties = client.get_class_properties(uri_light_fixture) - assert "Maintenance Factor" and "Light Fixture Mounting Type" in [ - l["name"] for l in ifc4x3_light_fixture_properties["classProperties"] - ] + names = [l["name"] for l in _light_fixture_properties["classProperties"]] + assert "Maintenance Factor" in names and "Light Fixture Mounting Type" in names def test_search_class(): From 7aca51f06334344880d1f6688f24b899f127c0b3 Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Fri, 10 Apr 2026 00:03:07 +0100 Subject: [PATCH 09/12] bsdd: retry on HTTP 429 rate-limit responses in Client.get() The bSDD API rate-limits aggressively; without retry logic the client silently returns the 429 JSON dict, causing KeyError when callers access expected response keys. Client.get() now retries up to 5 times, sleeping for the Retry-After header value (defaulting to 5s). Remove the manual time.sleep() calls from the test module now that the client handles rate limiting automatically. Generated with the assistance of an AI coding tool. 
--- src/bsdd/bsdd.py | 8 +++++++- src/bsdd/tests/test_bsdd.py | 9 +-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/src/bsdd/bsdd.py b/src/bsdd/bsdd.py index fbc14cc3930..d0e30cbfea0 100644 --- a/src/bsdd/bsdd.py +++ b/src/bsdd/bsdd.py @@ -521,7 +521,13 @@ def get(self, endpoint, params=None, is_auth_required=False): headers = {"User-Agent": "IfcOpenShell.bSDD.py/0.8.0"} if is_auth_required: headers["Authorization"] = "Bearer " + self.get_access_token() - return requests.get(f"{self.baseurl}{endpoint}", timeout=10, headers=headers, params=params or None).json() + for _ in range(5): + response = requests.get(f"{self.baseurl}{endpoint}", timeout=10, headers=headers, params=params or None) + if response.status_code != 429: + return response.json() + retry_after = int(response.headers.get("Retry-After", 5)) + time.sleep(retry_after) + return response.json() def _get_deprecated(self, endpoint, params=None, is_auth_required=False): headers = {"User-Agent": "IfcOpenShell.bSDD.py/0.8.0"} diff --git a/src/bsdd/tests/test_bsdd.py b/src/bsdd/tests/test_bsdd.py index 7c62a1d7887..7d975a2da06 100644 --- a/src/bsdd/tests/test_bsdd.py +++ b/src/bsdd/tests/test_bsdd.py @@ -1,26 +1,19 @@ -import time - from bsdd import Client client = Client() # Fetch shared data at module level to avoid repeated API calls during tests. -# Sleeps are required: the bSDD API rate-limits to roughly one request per second. +# The Client.get() method handles 429 rate-limit responses with automatic retry. 
_dictionaries = client.get_dictionary()["dictionaries"] ifc4x3_uri = next(l["uri"] for l in _dictionaries if "4.3" in l["uri"]) nbs_uri = next(l["uri"] for l in _dictionaries if "Uniclass 2015" == l["name"]) -time.sleep(2) _ifc4x3_classes = client.get_classes(ifc4x3_uri, use_nested_classes=False, class_type="Class") -time.sleep(2) _nbs_classes = client.get_classes(nbs_uri, use_nested_classes=False, class_type="Class", offset=0, limit=5) _uri_light_fixture = next(l for l in _ifc4x3_classes["classes"] if "IfcLightFixture" == l["code"])["uri"] -time.sleep(2) _light_fixture = client.get_class(_uri_light_fixture) -time.sleep(2) _light_fixture_relations = client.get_class_relations(_uri_light_fixture) -time.sleep(2) _light_fixture_properties = client.get_class_properties(_uri_light_fixture) From aa3e38aca8d669a78c0a2ab4193fccf5f1f184ca Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Fri, 10 Apr 2026 00:29:45 +0100 Subject: [PATCH 10/12] bonsai: fix failing test_copy_with_new_geometry_copied_from_the_old Move has_material_styles into the Root tool so it can be mocked in core unit tests; the private function was calling ifcopenshell.util.element directly, bypassing the tool layer and crashing the test. Generated with the assistance of an AI coding tool. 
--- src/bonsai/bonsai/core/root.py | 23 +---------------------- src/bonsai/bonsai/core/tool.py | 1 + src/bonsai/bonsai/tool/root.py | 6 ++++++ src/bonsai/test/core/test_root.py | 4 ++-- 4 files changed, 10 insertions(+), 24 deletions(-) diff --git a/src/bonsai/bonsai/core/root.py b/src/bonsai/bonsai/core/root.py index 3a283a6a65b..2276e0623f1 100644 --- a/src/bonsai/bonsai/core/root.py +++ b/src/bonsai/bonsai/core/root.py @@ -20,8 +20,6 @@ from typing import TYPE_CHECKING, Optional -import ifcopenshell.util.element - if TYPE_CHECKING: import bpy import ifcopenshell @@ -58,31 +56,12 @@ def copy_class( geometry.change_object_data(obj, data, is_global=True) geometry.rename_object(data, geometry.get_representation_name(ifc.get_entity(data))) # Only assign styles if element doesn't get them from material - if not _has_material_styles(ifc, new): + if not root.has_material_styles(new): root.assign_body_styles(new, obj) collector.assign(obj) return new -def _has_material_styles(ifc: type[tool.Ifc], element: ifcopenshell.entity_instance) -> bool: - """Check if element has styles defined through its material. - - Returns True if any constituent material has a style representation, - which means styles should NOT be applied directly to the geometry. 
- """ - materials = ifcopenshell.util.element.get_materials(element) - - if not materials: - return False - - # Check if any of the constituent materials have styles - for material in materials: - if hasattr(material, "HasRepresentation") and material.HasRepresentation: - return True - - return False - - def assign_class( ifc: type[tool.Ifc], collector: type[tool.Collector], diff --git a/src/bonsai/bonsai/core/tool.py b/src/bonsai/bonsai/core/tool.py index e6a60c9debe..342860fab2b 100644 --- a/src/bonsai/bonsai/core/tool.py +++ b/src/bonsai/bonsai/core/tool.py @@ -863,6 +863,7 @@ def get_decomposition_relationships(cls, objs): pass def get_default_container(cls): pass def get_element_representation(cls, element, context): pass def get_element_type(cls, element): pass + def has_material_styles(cls, element): pass def get_object_name(cls, obj): pass def get_object_representation(cls, obj): pass def get_representation_context(cls, representation): pass diff --git a/src/bonsai/bonsai/tool/root.py b/src/bonsai/bonsai/tool/root.py index 8880a168fed..517a2f999b6 100644 --- a/src/bonsai/bonsai/tool/root.py +++ b/src/bonsai/bonsai/tool/root.py @@ -54,6 +54,12 @@ def add_tracked_opening(cls, obj: bpy.types.Object, opening_type: Literal["OPENI new.obj = obj new.name = opening_type + @classmethod + def has_material_styles(cls, element: ifcopenshell.entity_instance) -> bool: + """Return True if any constituent material of element has a style representation.""" + materials = ifcopenshell.util.element.get_materials(element) + return any(getattr(m, "HasRepresentation", None) for m in materials) + @classmethod def assign_body_styles(cls, element: ifcopenshell.entity_instance, obj: bpy.types.Object) -> None: # Should this even be here? Should it be in the geometry tool? 
diff --git a/src/bonsai/test/core/test_root.py b/src/bonsai/test/core/test_root.py index 121236803d0..4fcafe6947a 100644 --- a/src/bonsai/test/core/test_root.py +++ b/src/bonsai/test/core/test_root.py @@ -40,8 +40,7 @@ def test_copy_with_new_geometry_derived_from_the_type(self, ifc, collector, root collector.assign("obj").should_be_called() subject.copy_class(ifc, collector, geometry, root, obj="obj") - # def test_copy_with_new_geometry_copied_from_the_old(self, ifc, collector, geometry, root): - def test_AAAAAAAAAAAA(self, ifc, collector, geometry, root): + def test_copy_with_new_geometry_copied_from_the_old(self, ifc, collector, geometry, root): ifc.get_entity("obj").should_be_called().will_return("original_element") root.is_element_a("original_element", "IfcRelSpaceBoundary").should_be_called().will_return(False) root.get_object_representation("obj").should_be_called().will_return("representation") @@ -56,6 +55,7 @@ def test_AAAAAAAAAAAA(self, ifc, collector, geometry, root): ifc.get_entity("data").should_be_called().will_return("new_representation") geometry.get_representation_name("new_representation").should_be_called().will_return("name") geometry.rename_object("data", "name").should_be_called() + root.has_material_styles("element").should_be_called().will_return(False) root.assign_body_styles("element", "obj").should_be_called() collector.assign("obj").should_be_called() subject.copy_class(ifc, collector, geometry, root, obj="obj") From 9bcc98aac3372e3eebb0299e247697b15adbb9fd Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Fri, 10 Apr 2026 00:30:26 +0100 Subject: [PATCH 11/12] tests: fix test_memusage_partial_open and add psutil to CI Run the RSS measurement in a subprocess so the fixture file is not already in the page cache from earlier tests (which made both deltas read as zero). Add psutil to the CI pip install so this test is not skipped there. Generated with the assistance of an AI coding tool. 
--- .github/workflows/ci.yml | 2 +- ...st_streaming_rocksdb_and_simpletyperefs.py | 31 ++++++++++++++----- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 95f3c196ac9..06b6481edef 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -55,7 +55,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install xmlschema xsdata numpy lxml pytest isodate lark networkx tabulate python-dateutil shapely + pip install xmlschema xsdata numpy lxml pytest isodate lark networkx tabulate python-dateutil shapely psutil pip install src/bcf --no-deps pip install pytest-xdist==3.8.0 diff --git a/src/ifcopenshell-python/test/test_streaming_rocksdb_and_simpletyperefs.py b/src/ifcopenshell-python/test/test_streaming_rocksdb_and_simpletyperefs.py index 32ea728df98..bd93378a22a 100644 --- a/src/ifcopenshell-python/test/test_streaming_rocksdb_and_simpletyperefs.py +++ b/src/ifcopenshell-python/test/test_streaming_rocksdb_and_simpletyperefs.py @@ -74,14 +74,29 @@ def test_opening_unicode(): @pytest.mark.skipif(psutil is None, reason="psutil not installed") def test_memusage_partial_open(): - m0 = psutil.Process().memory_info().rss - f = ifcopenshell.open(fn) - m1 = psutil.Process().memory_info().rss - g = ifcopenshell.open(fn, bypass_types=("IfcRepresentationItem",)) - m2 = psutil.Process().memory_info().rss - # arbitrary... - expected_ratio = 0.75 - assert (m2 - m1) < (m1 - m0) * expected_ratio + # Run in a subprocess to ensure the file is not already in the process page + # cache from earlier tests, which would make both RSS deltas read as zero. 
+ import subprocess + import sys + + script = f""" +import psutil +import ifcopenshell + +fn = {repr(fn)} +m0 = psutil.Process().memory_info().rss +f = ifcopenshell.open(fn) +m1 = psutil.Process().memory_info().rss +g = ifcopenshell.open(fn, bypass_types=("IfcRepresentationItem",)) +m2 = psutil.Process().memory_info().rss +expected_ratio = 0.75 +assert (m2 - m1) < (m1 - m0) * expected_ratio, ( + f"bypass_types did not reduce memory: normal open added {{m1 - m0}} bytes, " + f"bypass open added {{m2 - m1}} bytes (expected < {{(m1 - m0) * expected_ratio:.0f}})" +) +""" + result = subprocess.run([sys.executable, "-c", script], capture_output=True, text=True) + assert result.returncode == 0, result.stderr or result.stdout def test_rocks(): From 0e83b888a86124c173c7b291f9da853902091eeb Mon Sep 17 00:00:00 2001 From: Bruno Postle Date: Fri, 10 Apr 2026 00:47:12 +0100 Subject: [PATCH 12/12] bsdd: fix CI test failure by spacing API calls 3s apart The retry logic alone is not sufficient: back-to-back calls can exhaust the retry budget before the rate limit window resets. Sleep 3s between each module-level prefetch call (the API limit is ~1 req/2s). Generated with the assistance of an AI coding tool. --- src/bsdd/tests/test_bsdd.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/bsdd/tests/test_bsdd.py b/src/bsdd/tests/test_bsdd.py index 7d975a2da06..d6f4d397256 100644 --- a/src/bsdd/tests/test_bsdd.py +++ b/src/bsdd/tests/test_bsdd.py @@ -1,19 +1,27 @@ +import time + from bsdd import Client client = Client() # Fetch shared data at module level to avoid repeated API calls during tests. -# The Client.get() method handles 429 rate-limit responses with automatic retry. +# Sleep 3s between calls: the bSDD API rate-limits to ~1 req/2s, and Client.get() +# retries on 429, but back-to-back calls without spacing still exhaust the retry budget. 
_dictionaries = client.get_dictionary()["dictionaries"] ifc4x3_uri = next(l["uri"] for l in _dictionaries if "4.3" in l["uri"]) nbs_uri = next(l["uri"] for l in _dictionaries if "Uniclass 2015" == l["name"]) +time.sleep(3) _ifc4x3_classes = client.get_classes(ifc4x3_uri, use_nested_classes=False, class_type="Class") +time.sleep(3) _nbs_classes = client.get_classes(nbs_uri, use_nested_classes=False, class_type="Class", offset=0, limit=5) _uri_light_fixture = next(l for l in _ifc4x3_classes["classes"] if "IfcLightFixture" == l["code"])["uri"] +time.sleep(3) _light_fixture = client.get_class(_uri_light_fixture) +time.sleep(3) _light_fixture_relations = client.get_class_relations(_uri_light_fixture) +time.sleep(3) _light_fixture_properties = client.get_class_properties(_uri_light_fixture)