diff --git a/.github/workflows/ci-docker-base.yml b/.github/workflows/ci-docker-base.yml
index 76072b54a..f338db023 100644
--- a/.github/workflows/ci-docker-base.yml
+++ b/.github/workflows/ci-docker-base.yml
@@ -1,13 +1,8 @@
name: ci-ada-docker-base
on:
- push:
- paths:
- - 'images/base.Dockerfile'
- - 'images/environment.yml'
- - '.github/workflows/ci-docker-base.yml'
- branches:
- - main
+ workflow_dispatch:
+
jobs:
docker:
diff --git a/.github/workflows/ci-docker-dev.yml b/.github/workflows/ci-docker-dev.yml
index a9811d364..b945dd3aa 100644
--- a/.github/workflows/ci-docker-dev.yml
+++ b/.github/workflows/ci-docker-dev.yml
@@ -1,6 +1,8 @@
name: ci-ada-docker-dev
-on: push
+on:
+ workflow_dispatch:
+
jobs:
docker:
diff --git a/.github/workflows/ci-pages.yml b/.github/workflows/ci-pages.yml
index 2824b246c..9c2f00d60 100644
--- a/.github/workflows/ci-pages.yml
+++ b/.github/workflows/ci-pages.yml
@@ -1,9 +1,8 @@
name: ci-docs
+
on:
- push:
- branches:
- - dev
- - fem-static
+ workflow_dispatch:
+
jobs:
build:
runs-on: ubuntu-latest
diff --git a/.github/workflows/pr-test.yml b/.github/workflows/pr-test.yml
deleted file mode 100644
index ea904ec2c..000000000
--- a/.github/workflows/pr-test.yml
+++ /dev/null
@@ -1,133 +0,0 @@
-name: ci-pr-tests
-
-# bump 2
-on:
- pull_request:
- branches:
- - main
-
-concurrency:
- group: ci-ada-main-${{ github.ref }}
- cancel-in-progress: true
-
-jobs:
- activate:
- if: |
- github.repository == 'krande/adapy' &&
- !contains(github.event.head_commit.message, '[skip ci]')
- runs-on: ubuntu-latest
- steps:
- - run: |
- echo ok go!
- echo head commit message: "${{ github.event.head_commit.message }}"
- echo event name: "${{ github.event_name }}"
- echo base_ref: "${{ github.base_ref }}"
- echo ref: "${{ github.ref }}"
- echo sha "${{ github.sha }}"
- echo PR head sha: "${{ github.event.pull_request.head.sha }}"
- echo PR head ref "${{ github.event.pull_request.head.ref }}"
- lint:
- needs: activate
- if: github.event_name == 'pull_request'
- runs-on: ubuntu-latest
- steps:
- - uses: actions/setup-python@v4
- with:
- python-version: "3.x"
- - uses: actions/checkout@v3
- - name: Install lint packages
- run: pip install isort ruff black
- - name: Lint with black
- run: black --config pyproject.toml --check .
- - name: Lint with isort
- run: isort --check .
- - name: Lint with ruff
- run: ruff .
-
- version:
- name: Check version
- runs-on: ubuntu-latest
- defaults:
- run:
- shell: bash -l {0}
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: '3.11'
-
- - name: Install bumpyproject
- run: pip install git+https://github.com/Krande/bumpyproject.git
-
- - name: Check if current pyproject version is safe for pushing to conda & pypi
- id: bump
- run: bumpy pyproject --check-current --ga-version-output
- env:
- CONDA_URL: "https://api.anaconda.org/package/krande/ada-py"
- PYPI_URL: "https://pypi.org/pypi/ada-py/json"
-
- test:
- needs: activate
- name: ${{ matrix.platform.name }}-${{ matrix.pyver.name }}-${{ matrix.build.name }}
- runs-on: ${{ matrix.platform.distver }}
- defaults:
- run:
- shell: bash -l {0}
- strategy:
- fail-fast: false
- matrix:
- build: [ { name: 'core' },{ name: 'full' } ]
- pyver: [
- { name: py310, distver: '3.10' },
- { name: py311, distver: '3.11' }
- ]
- platform: [
- { name: Windows, distver: windows-latest, short: 'win-64' },
- { name: Linux, distver: ubuntu-latest, short: 'linux-64' },
- { name: macOS, distver: macos-latest, short: 'osx-64' }
- ]
- steps:
- - uses: actions/checkout@v3
- - uses: mamba-org/setup-micromamba@v1 # https://github.com/mamba-org/setup-micromamba
- if: ${{ matrix.build.name == 'core' }}
- with:
- cache-environment: false
- condarc: |
- channel_priority: strict
- channels:
- - krande
- - conda-forge
- environment-file: conda/environment.core.yml
- create-args: >-
- python=${{ matrix.pyver.distver }}
- pytest
-
- - uses: mamba-org/setup-micromamba@v1
- if: ${{ matrix.build.name == 'full' }}
- with:
- cache-environment: false
- condarc: |
- channel_priority: strict
- channels:
- - krande
- - conda-forge
- environment-file: conda/environment.core.yml
- create-args: >-
- python=${{ matrix.pyver.distver }}
- pytest
- jupyterlab
- pythreejs
- pyparsing
- pygfx
- pyglfw
-
- - name: pip install
- run: |
- pip install -e .
-
- - name: Test Core
- if: ${{ matrix.build.name == 'core' }}
- run: pytest ./tests --ignore=./tests/fem/ --ignore=./tests/full/
- - name: Test Full
- if: ${{ matrix.build.name == 'full' }}
- run: pytest ./tests --ignore=./tests/fem/
\ No newline at end of file
diff --git a/.github/workflows/pre-release-dispatch.yaml b/.github/workflows/pre-release-dispatch.yaml
index 316c5276f..e7e9326c0 100644
--- a/.github/workflows/pre-release-dispatch.yaml
+++ b/.github/workflows/pre-release-dispatch.yaml
@@ -17,12 +17,22 @@ on:
description: 'Issue Pre-Release to docker'
required: false
type: boolean
- default: true
+ default: false
release_gitops:
description: 'Issue Pre-Release to gitops'
required: false
type: boolean
- default: true
+ default: false
+ release_level:
+ description: 'Pre-release level (rebuild=latest tag)'
+ required: false
+ type: choice
+ default: "patch"
+ options:
+ - "patch"
+ - "minor"
+ - "major"
+ - "rebuild"
permissions:
@@ -42,6 +52,7 @@ jobs:
release_conda: ${{ github.event.inputs.release_conda }}
release_docker: ${{ github.event.inputs.release_docker }}
release_gitops: ${{ github.event.inputs.release_gitops }}
+ pre_release_level: ${{ github.event.inputs.release_level }}
secrets:
SOURCE_KEY: ${{ secrets.SOURCE_KEY }}
CONDA_API_TOKEN: ${{ secrets.CONDA_API_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index 1c7d9d7ec..000000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-name: ci-release
-
-on:
- pull_request:
- branches:
- - main
- types: [closed]
-
-jobs:
- pypi:
- if: ${{ github.event.pull_request.merged }}
- name: Publish to PYPI
- defaults:
- run:
- shell: bash -l {0}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: '3.x'
- - name: Build and upload package for pypi
- shell: bash -l {0}
- run: |
- pip install build
- python -m build
- - name: Publish a Python distribution to PyPI
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- user: __token__
- password: ${{ secrets.PYPI_API_TOKEN }}
-
- conda:
- if: ${{ github.event.pull_request.merged }}
- name: Publish to CONDA
- defaults:
- run:
- shell: bash -l {0}
- strategy:
- fail-fast: false
- matrix:
- build: [ { name: 'core' },{ name: 'full' } ]
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - uses: mamba-org/setup-micromamba@v1 # https://github.com/mamba-org/setup-micromamba
- with:
- cache-env: true
- channels: conda-forge
- channel-priority: strict
- environment-file: conda/environment.build.yml
-
- - name: Build & Upload to CONDA Main
- run: |
- conda mambabuild -c conda-forge -c krande . --variants "{'variant': '${{matrix.build.name}}'}" --user krande --token=${{ secrets.ANACONDA_TOKEN }} --no-copy-test-source-files --no-test --no-build-id
- working-directory: conda
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 9641b0575..50d723c9a 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -1,6 +1,6 @@
name: ci-branch-tests
-# Bump 4
+
on: push
concurrency:
@@ -8,39 +8,6 @@ concurrency:
cancel-in-progress: true
jobs:
- bump:
- name: Bump version
- runs-on: ubuntu-latest
- defaults:
- run:
- shell: bash -l {0}
- outputs:
- version: ${{ steps.bump.outputs.version }}
- conda_bump: ${{ steps.bump.outputs.condabump }}
- pypi_bump: ${{ steps.bump.outputs.pypibump }}
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: '3.11'
-
- - name: Install bumpyproject
- run: pip install git+https://github.com/Krande/bumpyproject.git
-
- - name: Check if current pyproject version is safe for pushing to conda & pypi
- id: bump
- run: bumpy pyproject --check-current --ga-version-output
- env:
- CONDA_URL: "https://api.anaconda.org/package/krande/ada-py"
- PYPI_URL: "https://pypi.org/pypi/ada-py/json"
-
- - name: echo bump
- run: |
- echo GITHUB_REF ${{ github.ref }}
- echo ${{ steps.bump.outputs.version }}
- echo CONDA_BUMP ${{ steps.bump.outputs.condabump }}
- echo PYPI_BUMP ${{ steps.bump.outputs.pypibump }}
-
test-core:
name: Test ada-py-core
runs-on: ubuntu-latest
@@ -51,7 +18,11 @@ jobs:
- uses: actions/checkout@v3
- uses: mamba-org/setup-micromamba@v1 # https://github.com/mamba-org/setup-micromamba
with:
- cache-environment: true
+ cache-environment: false
+ condarc: |
+ channels:
+ - conda-forge
+ - krande
environment-file: conda/environment.core.yml
create-args: >-
python=3.11
@@ -74,7 +45,11 @@ jobs:
- uses: actions/checkout@v3
- uses: mamba-org/setup-micromamba@v1 # https://github.com/mamba-org/setup-micromamba
with:
- cache-environment: true
+ cache-environment: false
+ condarc: |
+ channels:
+ - conda-forge
+ - krande
environment-file: conda/environment.core.yml
create-args: >-
python=3.11
@@ -91,60 +66,4 @@ jobs:
pip install -e .
- name: Run Tests
- run: pytest ./tests --ignore=./tests/fem/
-
-# Only if triggered by a tag and passes tests, then publish to pypi and conda
- pypi:
- needs: [ test-core, test-full, bump ]
-# if: ${{ startsWith(github.ref, 'refs/tags/') && needs.bump.outputs.pypi_bump == 'TRUE' }}
- # Will rely only on the bump output.
- if: ${{ needs.bump.outputs.pypi_bump == 'TRUE' }}
- name: Publish to PYPI
- defaults:
- run:
- shell: bash -l {0}
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-python@v4
- with:
- python-version: '3.x'
-
- - name: Install deps
- run: pip install build
-
- - name: Build and upload package for pypi
- run: |
- python -m build
- - name: Publish a Python distribution to PyPI
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- user: __token__
- password: ${{ secrets.PYPI_API_TOKEN }}
-
- conda_upload:
- needs: [ test-core, test-full, bump ]
-# if: ${{ startsWith(github.ref, 'refs/tags/') && needs.bump.outputs.conda_bump == 'TRUE' }}
- if: ${{ needs.bump.outputs.conda_bump == 'TRUE' }}
- name: Publish to CONDA
- defaults:
- run:
- shell: bash -l {0}
- runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- build: [ { name: 'core' },{ name: 'full' } ]
- steps:
- - uses: actions/checkout@v3
- - uses: mamba-org/setup-micromamba@v1 # https://github.com/mamba-org/setup-micromamba
- with:
- cache-environment: true
- environment-file: conda/environment.build.yml
-
- - name: Build & Upload to CONDA Dev
- run: |
- conda mambabuild -c conda-forge -c krande/label/dev . --variants "{'variant': '${{matrix.build.name}}'}" --user krande --token=${{ secrets.ANACONDA_TOKEN }} --label dev --no-copy-test-source-files --no-test --no-build-id
- working-directory: conda
- env:
- VERSION: ${{ needs.bump.outputs.version }}
\ No newline at end of file
+ run: pytest ./tests --ignore=./tests/fem/
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
index 2bada6d7e..00e3ed197 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,4 @@
include src/ada/sections/resources/ProfileDB.json
include src/ada/materials/metals/resources/NLMatParams.json
-include src/ada/cadit/gxml/write/resources/xml_blank.xml
\ No newline at end of file
+include src/ada/cadit/gxml/write/resources/xml_blank.xml
+include src/ada/fem/results/resources/results.sql
\ No newline at end of file
diff --git a/Makefile b/Makefile
deleted file mode 100644
index f0c35010a..000000000
--- a/Makefile
+++ /dev/null
@@ -1,66 +0,0 @@
-mount=--mount type=bind,source="$(CURDIR)/temp/report",target=/aster/work/tests/fem/temp \
- --mount type=bind,source="$(CURDIR)/temp/scratch",target=/aster/work/scratch
-
-define check_and_create_dir
-if [ ! -d temp/scratch ]; then \
- mkdir -p temp/scratch; \
-fi
-if [ ! -d temp/report ]; then \
- mkdir -p temp/report; \
-fi
-endef
-
-dev:
- mamba env update --file environment.dev.yml --prune
-
-core:
- mamba env update --file conda/environment.core.yml --prune
-
-format:
- black --config pyproject.toml . && isort . && ruff . --fix
-
-bump:
- bumpy pyproject
-
-push:
- bumpy pyproject --push
-
-docs-dev:
- mamba env update --file docs/environment.docs.yml --prune
-
-docs:
- activate adadocs && cd docs && make html
-
-bbase:
- docker build . -t krande/ada:base -f images/base.Dockerfile
-
-bdev:
- docker build . -t krande/ada:dev -f images/dev.Dockerfile
-
-bfem:
- docker build . -t krande/ada:femtests -f images/femtests.Dockerfile
-
-mdir:
- mkdir -p temp/report && mkdir temp/scratch
-
-dtest:
- $(check_and_create_dir); \
- docker run --rm $(mount) krande/ada:femtests bash -c "\
- conda run --live-stream -n adadocker \
- pytest . && \
- conda run --live-stream -n adadocker python build_verification_report.py"
-
-dprint:
- docker run --rm $(mount) krande/ada:femtests ls -l
-
-dcheck:
- docker run --rm krande/ada:femtests ls -l /bin/bash
-
-pbase:
- docker push krande/ada:base
-
-run:
- docker run -it --rm -p 8888:8888 krande/adabase:latest
-
-test:
- cd tests && pytest --cov=ada --cov-report=xml --cov-report=html .
diff --git a/action_config.toml b/action_config.toml
index 0d055fae2..99b6d3f9c 100644
--- a/action_config.toml
+++ b/action_config.toml
@@ -1,13 +1,19 @@
[tool.python]
enabled = true
+pre_release_tag = "dev"
+
[tool.python.pip]
enabled = true
[tool.python.conda]
enabled = true
-#pkg_name_override
recipe_dir = "./conda"
-owner = "Krandedev"
+extra_conda_dep_channels = ["krande"]
+
+# Anaconda Related
+use_anaconda_server = true
+owner = "Krande"
+label = "main"
#
# info
#
diff --git a/conda/meta.yaml b/conda/meta.yaml
index 400f92226..bd72cb210 100644
--- a/conda/meta.yaml
+++ b/conda/meta.yaml
@@ -26,6 +26,8 @@ build:
script: python -m pip install . --no-deps -vv
number: {{ build }}
string: {{ variant }}_h{{ PKG_HASH }}_{{ build }}
+ entry_points:
+ - ada-viewer=ada.visit.render_pygfx:main
# Set list of core dependencies (core -> conversion and analysis)
{% set core_deps = load_file_data('conda/environment.core.yml')['dependencies'] %}
diff --git a/environment.dev.yml b/environment.dev.yml
deleted file mode 100644
index d75505969..000000000
--- a/environment.dev.yml
+++ /dev/null
@@ -1,13 +0,0 @@
-name: ada
-channels:
-# - ifcopenshell
- - krande/label/dev
- - krande
- - conda-forge
-dependencies:
- - ada-py
- - pytest
- - calculix
- - nomkl
- - semver
- - tomlkit
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index 4b2459fdb..3f4d3a90e 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,4 +1,4 @@
-name: ada
+name: ada-full
channels:
- conda-forge
- krande
diff --git a/examples/experiments/rendering_pygfx/render_pygfx_fem.py b/examples/experiments/rendering_pygfx/render_pygfx_fem.py
index 89b91884b..cd85c50ae 100644
--- a/examples/experiments/rendering_pygfx/render_pygfx_fem.py
+++ b/examples/experiments/rendering_pygfx/render_pygfx_fem.py
@@ -30,7 +30,7 @@ def main():
a.to_fem("cutout_bm_ufo", "usfos", scratch_dir="temp", overwrite=True)
a.to_gltf("temp/beam.glb")
- render = RendererPyGFX(render_backend=SqLiteBackend("temp/meshes.db"))
+ render = RendererPyGFX(render_backend=SqLiteBackend())
def _on_click(event, mesh_data: MeshInfo):
print(mesh_data, event.pick_info)
diff --git a/examples/experiments/rendering_pygfx/render_pygfx_fem_2.py b/examples/experiments/rendering_pygfx/render_pygfx_fem_2.py
new file mode 100644
index 000000000..de9ed59de
--- /dev/null
+++ b/examples/experiments/rendering_pygfx/render_pygfx_fem_2.py
@@ -0,0 +1,29 @@
+# pip install -U pygfx glfw jupyter_rfb pylinalg
+# or
+# mamba env update -f environment.yml --prune
+#
+import pathlib
+
+import meshio
+
+import ada
+from ada.config import logger
+
+logger.setLevel("INFO")
+_ROOT_DIR = pathlib.Path(__file__).parent.parent.parent.parent
+EX_DIR = _ROOT_DIR / "files" / "fem_files" / "cantilever" / "code_aster"
+
+
+def main():
+ rmed_file = EX_DIR / "eigen_shell_cantilever_code_aster.rmed"
+ # mesh = meshio.read(rmed_file, file_format="med")
+ # meshio.write("temp/eigen_shell_cantilever_code_aster.vtu", mesh, file_format="vtu")
+
+ rmed = ada.from_fem_res(rmed_file)
+ # rmed.to_xdmf("temp/eigen_shell_cantilever_code_aster.xdmf")
+ rmed.to_gltf("temp/eigen_shell_cantilever_code_aster.glb")
+ rmed.to_viewer(1, "modes___DEPL[0] - 13.5363")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/examples/experiments/rendering_pygfx/render_pygfx_part.py b/examples/experiments/rendering_pygfx/render_pygfx_part.py
index de7899dfb..0f8215996 100644
--- a/examples/experiments/rendering_pygfx/render_pygfx_part.py
+++ b/examples/experiments/rendering_pygfx/render_pygfx_part.py
@@ -28,7 +28,10 @@ def main():
bm4 = ada.Beam("my_beam_shell", (1, 1, 0), (1, 1, 1), "IPE300", color="yellow")
bm5 = ada.Beam("my_beam_xyz_shell", (3, 2, 1), (3.5, 2.5, 1.5), "IPE300", color="red")
bm6 = ada.Beam("my_beam_xyz", (1, 2, 1), (1.5, 2.5, 1.5), "IPE300", color="yellow")
- bm7_taper = ada.BeamTapered("my_beam_taper", (2, 2, 1), (2.5, 2.5, 1.5), "IPE600", "IPE300", color="blue")
+ bm7_taper = ada.BeamTapered("my_Ibeam_taper", (2, 2, 1), (2.5, 2.5, 1.5), "IPE600", "IPE300", color="blue")
+ bm8_taper = ada.BeamTapered(
+ "my_BOX_beam_taper", (4, 2, 1), (4.5, 2.5, 1.5), "BG300x200x8x10", "BG200x200x8x10", color="red"
+ )
render_override.update({bm4.guid: GeomRepr.SHELL, bm5.guid: GeomRepr.SHELL})
# All beam profiles as solids
@@ -107,6 +110,7 @@ def main():
sphere1,
rev,
bm7_taper,
+ bm8_taper,
pl1,
pl2,
pl3,
diff --git a/examples/experiments/trame/app_client_view.py b/examples/experiments/trame/app_client_view.py
deleted file mode 100644
index 1d5cdcd3f..000000000
--- a/examples/experiments/trame/app_client_view.py
+++ /dev/null
@@ -1,364 +0,0 @@
-r"""
-Version for trame 1.x - https://github.com/Kitware/trame/blob/release-v1/examples/VTK/Applications/FiniteElementAnalysis/app_client_view.py
-Delta v1..v2 - https://github.com/Kitware/trame/commit/03f28bb0084490acabf218264b96a1dbb3a17f19
-"""
-
-import io
-import os
-
-import numpy as np
-import pandas as pd
-from trame.app import get_server
-from trame.ui.vuetify import SinglePageLayout
-from trame.widgets import trame
-from trame.widgets import vtk as vtk_widgets
-from trame.widgets import vuetify
-from vtkmodules.numpy_interface.dataset_adapter import numpyTovtkDataArray as np2da
-from vtkmodules.util import vtkConstants
-from vtkmodules.vtkCommonCore import vtkIdList, vtkPoints
-from vtkmodules.vtkCommonDataModel import vtkCellArray, vtkUnstructuredGrid
-from vtkmodules.vtkFiltersCore import vtkThreshold
-
-# -----------------------------------------------------------------------------
-# Constants
-# -----------------------------------------------------------------------------
-
-VIEW_INTERACT = [
- {"button": 1, "action": "Rotate"},
- {"button": 2, "action": "Pan"},
- {"button": 3, "action": "Zoom", "scrollEnabled": True},
- {"button": 1, "action": "Pan", "alt": True},
- {"button": 1, "action": "Zoom", "control": True},
- {"button": 1, "action": "Pan", "shift": True},
- {"button": 1, "action": "Roll", "alt": True, "shift": True},
-]
-
-# -----------------------------------------------------------------------------
-# Trame setup
-# -----------------------------------------------------------------------------
-
-server = get_server()
-state, ctrl = server.state, server.controller
-
-# -----------------------------------------------------------------------------
-# VTK pipeline
-# -----------------------------------------------------------------------------
-
-vtk_idlist = vtkIdList()
-vtk_grid = vtkUnstructuredGrid()
-vtk_filter = vtkThreshold()
-vtk_filter.SetInputData(vtk_grid)
-field_to_keep = "my_array"
-
-
-@state.change("nodes_file", "elems_file", "field_file")
-def update_grid(nodes_file, elems_file, field_file, **kwargs):
- state.picking_modes = []
- if not nodes_file:
- return
-
- if not elems_file:
- return
-
- nodes_bytes = nodes_file.get("content")
- elems_bytes = elems_file.get("content")
-
- if isinstance(nodes_bytes, list):
- nodes_bytes = b"".join(nodes_bytes)
-
- if isinstance(elems_bytes, list):
- elems_bytes = b"".join(elems_bytes)
-
- df_nodes = pd.read_csv(
- io.StringIO(nodes_bytes.decode("utf-8")),
- delim_whitespace=True,
- header=None,
- skiprows=1,
- names=["id", "x", "y", "z"],
- )
-
- df_nodes["id"] = df_nodes["id"].astype(int)
- df_nodes = df_nodes.set_index("id", drop=True)
- # fill missing ids in range as VTK uses position (index) to map cells to points
- df_nodes = df_nodes.reindex(np.arange(df_nodes.index.min(), df_nodes.index.max() + 1), fill_value=0)
-
- df_elems = pd.read_csv(
- io.StringIO(elems_bytes.decode("utf-8")),
- skiprows=1,
- header=None,
- delim_whitespace=True,
- engine="python",
- index_col=None,
- ).sort_values(0)
- # order: 0: eid, 1: eshape, 2+: nodes, iloc[:,0] is index
- df_elems.iloc[:, 0] = df_elems.iloc[:, 0].astype(int)
-
- n_nodes = df_elems.iloc[:, 1].map(lambda x: int("".join(i for i in x if i.isdigit())))
- df_elems.insert(2, "n_nodes", n_nodes)
- # fill missing ids in range as VTK uses position (index) to map data to cells
- new_range = np.arange(df_elems.iloc[:, 0].min(), df_elems.iloc[:, 0].max() + 1)
- df_elems = df_elems.set_index(0, drop=False).reindex(new_range, fill_value=0)
-
- # mapping specific to Ansys Mechanical data
- vtk_shape_id_map = {
- "Tet4": vtkConstants.VTK_TETRA,
- "Tet10": vtkConstants.VTK_QUADRATIC_TETRA,
- "Hex8": vtkConstants.VTK_HEXAHEDRON,
- "Hex20": vtkConstants.VTK_QUADRATIC_HEXAHEDRON,
- "Tri6": vtkConstants.VTK_QUADRATIC_TRIANGLE,
- "Quad8": vtkConstants.VTK_QUADRATIC_QUAD,
- "Tri3": vtkConstants.VTK_TRIANGLE,
- "Quad4": vtkConstants.VTK_QUAD,
- "Wed15": vtkConstants.VTK_QUADRATIC_WEDGE,
- }
- df_elems["cell_types"] = np.nan
- df_elems.loc[df_elems.loc[:, 0] > 0, "cell_types"] = df_elems.loc[df_elems.loc[:, 0] > 0, 1].map(
- lambda x: vtk_shape_id_map[x.strip()] if x.strip() in vtk_shape_id_map.keys() else np.nan
- )
- df_elems = df_elems.dropna(subset=["cell_types"], axis=0)
-
- # convert dataframes to vtk-desired format
- points = df_nodes[["x", "y", "z"]].to_numpy()
- cell_types = df_elems["cell_types"].to_numpy()
- n_nodes = df_elems.loc[:, "n_nodes"].to_numpy()
- # subtract starting node id from all grid references in cells to avoid filling from 0 to first used node (in case mesh doesn't start at 1)
- p = df_elems.iloc[:, 3:-1].to_numpy() - df_nodes.index.min()
- # if you need to, re-order nodes here-ish
- a = np.hstack((n_nodes.reshape((len(n_nodes), 1)), p))
- # convert to flat numpy array
- cells = a.ravel()
- # remove nans (due to elements with different no. of nodes)
- cells = cells[np.logical_not(np.isnan(cells))]
- cells = cells.astype(int)
-
- # update grid
- vtk_pts = vtkPoints()
- vtk_pts.SetData(np2da(points))
- vtk_grid.SetPoints(vtk_pts)
-
- vtk_cells = vtkCellArray()
- vtk_cells.SetCells(cell_types.shape[0], np2da(cells, array_type=vtkConstants.VTK_ID_TYPE))
- vtk_grid.SetCells(np2da(cell_types, array_type=vtkConstants.VTK_UNSIGNED_CHAR), vtk_cells)
-
- # Add field if any
- if field_file:
- field_bytes = field_file.get("content")
- if isinstance(field_bytes, list):
- field_bytes = b"".join(field_bytes)
- df_elem_data = pd.read_csv(
- io.StringIO(field_bytes.decode("utf-8")),
- delim_whitespace=True,
- header=None,
- skiprows=1,
- names=["id", "val"],
- )
- df_elem_data = df_elem_data.sort_values("id").set_index("id", drop=True)
- # fill missing ids in range as VTK uses position (index) to map data to cells
- df_elem_data = df_elem_data.reindex(np.arange(df_elems.index.min(), df_elems.index.max() + 1), fill_value=0.0)
- np_val = df_elem_data["val"].to_numpy()
- # assign data to grid with the name 'my_array'
- vtk_array = np2da(np_val, name=field_to_keep)
- vtk_grid.GetCellData().SetScalars(vtk_array)
- state.full_range = vtk_array.GetRange()
- state.threshold_range = list(vtk_array.GetRange())
- state.picking_modes = ["hover"]
-
- ctrl.mesh_update()
-
-
-@state.change("threshold_range")
-def update_filter(threshold_range, **kwargs):
- vtk_filter.SetLowerThreshold(threshold_range[0])
- vtk_filter.SetUpperThreshold(threshold_range[1])
- ctrl.threshold_update()
-
-
-def reset():
- state.update(
- {
- "mesh": None,
- "threshold": None,
- "nodes_file": None,
- "elems_file": None,
- "field_file": None,
- }
- )
-
-
-@state.change("pick_data")
-def update_tooltip(pick_data, pixel_ratio, **kwargs):
- state.tooltip = ""
- state.tooltip_style = {"display": "none"}
- data = pick_data
-
- if data:
- xyx = data["worldPosition"]
- idx = vtk_grid.FindPoint(xyx)
- field = vtk_grid.GetCellData().GetArray(0)
- if idx > -1 and field:
- messages = []
- vtk_grid.GetPointCells(idx, vtk_idlist)
- for i in range(vtk_idlist.GetNumberOfIds()):
- cell_idx = vtk_idlist.GetId(i)
- value = field.GetValue(cell_idx)
- value_str = f"{value:.2f}"
- messages.append(f"Scalar: {value_str}")
-
- if len(messages):
- x, y, z = data["displayPosition"]
- state.tooltip = messages[0]
- state.tooltip_style = {
- "position": "absolute",
- "left": f"{(x / pixel_ratio) + 10}px",
- "bottom": f"{(y / pixel_ratio) + 10}px",
- "zIndex": 10,
- "pointerEvents": "none",
- }
-
-
-# -----------------------------------------------------------------------------
-# Web App setup
-# -----------------------------------------------------------------------------
-
-file_style = {
- "dense": True,
- "hide_details": True,
- "style": "max-width: 200px",
- "class": "mx-2",
- "small_chips": True,
- "clearable": ("false",),
- "accept": ".txt",
-}
-
-state.trame__title = "FEA - Mesh viewer"
-
-with SinglePageLayout(server) as layout:
- layout.title.set_text("Mesh Viewer")
- layout.icon.click = reset
-
- # Let the server know the browser pixel ratio
- trame.ClientTriggers(mounted="pixel_ratio = window.devicePixelRatio")
-
- # Toolbar ----------------------------------------
- with layout.toolbar:
- vuetify.VSpacer()
- vuetify.VRangeSlider(
- thumb_size=16,
- thumb_label=True,
- label="Threshold",
- v_if=("threshold",),
- v_model=("threshold_range", [0, 1]),
- min=("full_range[0]",),
- max=("full_range[1]",),
- dense=True,
- hide_details=True,
- style="max-width: 400px",
- )
- vuetify.VFileInput(
- v_show=("!mesh",),
- prepend_icon="mdi-vector-triangle",
- v_model=("nodes_file", None),
- placeholder="Nodes",
- **file_style,
- )
- vuetify.VFileInput(
- v_show=("!mesh",),
- prepend_icon="mdi-dots-triangle",
- v_model=("elems_file", None),
- placeholder="Elements",
- **file_style,
- )
- vuetify.VFileInput(
- v_show=("!threshold",),
- prepend_icon="mdi-gradient",
- v_model=("field_file", None),
- placeholder="Field",
- **file_style,
- )
- with vuetify.VBtn(v_if=("mesh",), icon=True, click=ctrl.view_reset_camera):
- vuetify.VIcon("mdi-crop-free")
-
- vuetify.VProgressLinear(indeterminate=True, absolute=True, bottom=True, active=("trame__busy",))
-
- trame.ClientStateChange(value="mesh", change=ctrl.view_reset_camera)
-
- # Content ----------------------------------------
- with layout.content:
- with vuetify.VContainer(
- fluid=True,
- classes="pa-0 fill-height",
- style="position: relative",
- ):
- with vtk_widgets.VtkView(
- ref="view",
- background=("[0.8, 0.8, 0.8]",),
- hover="pick_data = $event",
- picking_modes=("picking_modes", []),
- interactor_settings=("interactor_settings", VIEW_INTERACT),
- ) as view:
- ctrl.view_update = view.update
- ctrl.view_reset_camera = view.reset_camera
- with vtk_widgets.VtkGeometryRepresentation(
- v_if=("mesh",),
- property=(
- """{
- representation: threshold ? 1 : 2,
- color: threshold ? [0.3, 0.3, 0.3] : [1, 1, 1],
- opacity: threshold ? 0.2 : 1
- }""",
- ),
- ):
- mesh = vtk_widgets.VtkMesh("mesh", dataset=vtk_grid)
- ctrl.mesh_update = mesh.update
-
- with vtk_widgets.VtkGeometryRepresentation(
- v_if=("threshold",),
- color_data_range=("full_range", [0, 1]),
- ):
- threshold = vtk_widgets.VtkMesh("threshold", dataset=vtk_filter, field_to_keep=field_to_keep)
- ctrl.threshold_update = threshold.update
- with vuetify.VCard(
- style=("tooltip_style", {"display": "none"}),
- elevation=2,
- outlined=True,
- ):
-                    vuetify.VCardText("<pre>{{ tooltip }}</pre>"),
-
-
-# Variables not defined within HTML but used
-state.update(
- {
- "pixel_ratio": 1,
- "pick_data": None,
- "tooltip": "",
- }
-)
-
-# -----------------------------------------------------------------------------
-# Use --data to skip file upload
-# -----------------------------------------------------------------------------
-
-parser = server.cli
-parser.add_argument("--data", help="Unstructured file path", dest="data")
-args = parser.parse_args()
-if args.data:
- from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader
-
- reader = vtkXMLUnstructuredGridReader()
- reader.SetFileName(os.path.abspath(args.data))
- reader.Update()
- vtu = reader.GetOutput()
- vtk_grid.ShallowCopy(vtu)
-
- vtk_array = vtu.GetCellData().GetScalars()
- full_min, full_max = vtk_array.GetRange()
- state.full_range = [full_min, full_max]
- state.threshold_range = [full_min, full_max]
- state.picking_modes = ["hover"]
- ctrl.mesh_update()
- ctrl.threshold_update()
-
-# -----------------------------------------------------------------------------
-
-if __name__ == "__main__":
- server.start()
diff --git a/examples/experiments/trame/dynamic.py b/examples/experiments/trame/dynamic.py
deleted file mode 100644
index 28af561ff..000000000
--- a/examples/experiments/trame/dynamic.py
+++ /dev/null
@@ -1,49 +0,0 @@
-r"""
-Version for trame 1.x - https://github.com/Kitware/trame/blob/release-v1/examples/howdoi/dynamic.py
-Delta v1..v2 - https://github.com/Kitware/trame/commit/3ee54ce5b663bf2af12b3fbdda7aab944fb86298
-"""
-
-import asyncio
-
-from trame.app import asynchronous, get_server
-from trame.ui.vuetify import SinglePageLayout
-from trame.widgets import vuetify
-
-coundown_init = 10
-
-server = get_server()
-state = server.state
-state.trame__title = "Coundown"
-
-
-@asynchronous.task
-async def start_countdown():
- try:
- state.countdown = int(state.countdown)
- except:
- state.countdown = coundown_init
-
- while state.countdown > 0:
- with state:
- await asyncio.sleep(0.5)
- state.countdown -= 1
-
-
-with SinglePageLayout(server) as layout:
- layout.title.set_text("Countdown")
-
- with layout.toolbar:
- vuetify.VSpacer()
- vuetify.VBtn(
- "Start countdown",
- click=start_countdown,
- )
-
- with layout.content:
- vuetify.VTextField(
- v_model=("countdown", coundown_init),
- classes="ma-8",
- )
-
-if __name__ == "__main__":
- server.start()
diff --git a/examples/experiments/trame/environment.yml b/examples/experiments/trame/environment.yml
deleted file mode 100644
index a5fa36d5c..000000000
--- a/examples/experiments/trame/environment.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-name: ada-trame
-channels:
- - conda-forge
-dependencies:
- - pytest
- - websockets
- - trame
- - vtk
- - pandas
\ No newline at end of file
diff --git a/examples/experiments/trame/fem_viewer.py b/examples/experiments/trame/fem_viewer.py
deleted file mode 100644
index e7ff6559b..000000000
--- a/examples/experiments/trame/fem_viewer.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Required for rendering initialization, not necessary for
-# local rendering, but doesn't hurt to include it
-import vtkmodules.vtkRenderingOpenGL2 # noqa
-from trame.app import get_server
-from trame.ui.vuetify import SinglePageLayout
-from trame.widgets import vtk, vuetify
-from vtkmodules.vtkFiltersSources import vtkConeSource
-
-# Required for interactor initialization
-from vtkmodules.vtkInteractionStyle import vtkInteractorStyleSwitch # noqa
-from vtkmodules.vtkIOXML import vtkXMLUnstructuredGridReader
-from vtkmodules.vtkRenderingCore import (
- vtkActor,
- vtkPolyDataMapper,
- vtkRenderer,
- vtkRenderWindow,
- vtkRenderWindowInteractor,
-)
-
-# -----------------------------------------------------------------------------
-# VTK pipeline
-# -----------------------------------------------------------------------------
-
-renderer = vtkRenderer()
-renderWindow = vtkRenderWindow()
-renderWindow.AddRenderer(renderer)
-
-renderWindowInteractor = vtkRenderWindowInteractor()
-renderWindowInteractor.SetRenderWindow(renderWindow)
-renderWindowInteractor.GetInteractorStyle().SetCurrentStyleToTrackballCamera()
-
-# Read a vtu file
-vtu_source = vtkXMLUnstructuredGridReader()
-
-
-cone_source = vtkConeSource()
-mapper = vtkPolyDataMapper()
-mapper.SetInputConnection(cone_source.GetOutputPort())
-actor = vtkActor()
-actor.SetMapper(mapper)
-
-renderer.AddActor(actor)
-renderer.ResetCamera()
-
-# -----------------------------------------------------------------------------
-# Trame
-# -----------------------------------------------------------------------------
-
-server = get_server()
-ctrl = server.controller
-
-with SinglePageLayout(server) as layout:
- layout.title.set_text("Hello trame")
-
- with layout.content:
- with vuetify.VContainer(
- fluid=True,
- classes="pa-0 fill-height",
- ):
- view = vtk.VtkLocalView(renderWindow)
-
-# -----------------------------------------------------------------------------
-# Main
-# -----------------------------------------------------------------------------
-
-if __name__ == "__main__":
- server.start()
diff --git a/examples/experiments/imgui_and_pygfx/environment.yml b/examples/experiments/wgpu_gui/environment.yml
similarity index 100%
rename from examples/experiments/imgui_and_pygfx/environment.yml
rename to examples/experiments/wgpu_gui/environment.yml
diff --git a/examples/experiments/imgui_and_pygfx/pygx_and_pyimgui.py b/examples/experiments/wgpu_gui/pygx_and_pyimgui.py
similarity index 100%
rename from examples/experiments/imgui_and_pygfx/pygx_and_pyimgui.py
rename to examples/experiments/wgpu_gui/pygx_and_pyimgui.py
diff --git a/examples/experiments/imgui_and_pygfx/render_pyimgui.py b/examples/experiments/wgpu_gui/render_pyimgui.py
similarity index 100%
rename from examples/experiments/imgui_and_pygfx/render_pyimgui.py
rename to examples/experiments/wgpu_gui/render_pyimgui.py
diff --git a/examples/experiments/imgui_and_pygfx/testwindow.py b/examples/experiments/wgpu_gui/testwindow.py
similarity index 100%
rename from examples/experiments/imgui_and_pygfx/testwindow.py
rename to examples/experiments/wgpu_gui/testwindow.py
diff --git a/examples/experiments/wgpu_gui/triangle.py b/examples/experiments/wgpu_gui/triangle.py
new file mode 100644
index 000000000..6975082c5
--- /dev/null
+++ b/examples/experiments/wgpu_gui/triangle.py
@@ -0,0 +1,157 @@
+"""
+Example use of the wgpu API to draw a triangle. This example is set up
+so it can be run on canvases provided by any backend. Running this file
+as a script will use the auto-backend (using either glfw or jupyter).
+
+
+Similar example in other languages / API's:
+
+* Rust wgpu:
+ https://github.com/gfx-rs/wgpu-rs/blob/master/examples/hello-triangle/main.rs
+* C wgpu:
+ https://github.com/gfx-rs/wgpu/blob/master/examples/triangle/main.c
+* Python Vulkan:
+ https://github.com/realitix/vulkan/blob/master/example/contribs/example_glfw.py
+
+"""
+
+import wgpu
+
+# %% Shaders
+
+
+shader_source = """
+struct VertexInput {
+ @builtin(vertex_index) vertex_index : u32,
+};
+struct VertexOutput {
+ @location(0) color : vec4,
+ @builtin(position) pos: vec4,
+};
+
+@vertex
+fn vs_main(in: VertexInput) -> VertexOutput {
+ var positions = array, 3>(
+ vec2(0.0, -0.5),
+ vec2(0.5, 0.5),
+ vec2(-0.5, 0.75),
+ );
+ var colors = array, 3>( // srgb colors
+ vec3(1.0, 1.0, 0.0),
+ vec3(1.0, 0.0, 1.0),
+ vec3(0.0, 1.0, 1.0),
+ );
+ let index = i32(in.vertex_index);
+ var out: VertexOutput;
+ out.pos = vec4(positions[index], 0.0, 1.0);
+ out.color = vec4(colors[index], 1.0);
+ return out;
+}
+
+@fragment
+fn fs_main(in: VertexOutput) -> @location(0) vec4 {
+ let physical_color = pow(in.color.rgb, vec3(2.2)); // gamma correct
+ return vec4(physical_color, in.color.a);
+}
+"""
+
+
+# %% The wgpu calls
+
+
+def main(canvas, power_preference="high-performance", limits=None):
+ """Regular function to setup a viz on the given canvas."""
+ # Note: passing the canvas here can (oddly enough) prevent the
+ # adapter from being found. Seen with wx/Linux.
+ adapter = wgpu.request_adapter(canvas=None, power_preference=power_preference)
+ device = adapter.request_device(required_limits=limits)
+ return _main(canvas, device)
+
+
+async def main_async(canvas):
+ """Async function to setup a viz on the given canvas."""
+ adapter = await wgpu.request_adapter_async(canvas=canvas, power_preference="high-performance")
+ device = await adapter.request_device_async(required_limits={})
+ return _main(canvas, device)
+
+
+def _main(canvas, device):
+ shader = device.create_shader_module(code=shader_source)
+
+ # No bind group and layout, we should not create empty ones.
+ pipeline_layout = device.create_pipeline_layout(bind_group_layouts=[])
+
+ present_context = canvas.get_context()
+ render_texture_format = present_context.get_preferred_format(device.adapter)
+ present_context.configure(device=device, format=render_texture_format)
+
+ render_pipeline = device.create_render_pipeline(
+ layout=pipeline_layout,
+ vertex={
+ "module": shader,
+ "entry_point": "vs_main",
+ "buffers": [],
+ },
+ primitive={
+ "topology": wgpu.PrimitiveTopology.triangle_list,
+ "front_face": wgpu.FrontFace.ccw,
+ "cull_mode": wgpu.CullMode.none,
+ },
+ depth_stencil=None,
+ multisample=None,
+ fragment={
+ "module": shader,
+ "entry_point": "fs_main",
+ "targets": [
+ {
+ "format": render_texture_format,
+ "blend": {
+ "color": (
+ wgpu.BlendFactor.one,
+ wgpu.BlendFactor.zero,
+ wgpu.BlendOperation.add,
+ ),
+ "alpha": (
+ wgpu.BlendFactor.one,
+ wgpu.BlendFactor.zero,
+ wgpu.BlendOperation.add,
+ ),
+ },
+ },
+ ],
+ },
+ )
+
+ def draw_frame():
+ current_texture_view = present_context.get_current_texture()
+ command_encoder = device.create_command_encoder()
+
+ render_pass = command_encoder.begin_render_pass(
+ color_attachments=[
+ {
+ "view": current_texture_view,
+ "resolve_target": None,
+ "clear_value": (0, 0, 0, 1),
+ "load_op": wgpu.LoadOp.clear,
+ "store_op": wgpu.StoreOp.store,
+ }
+ ],
+ )
+
+ render_pass.set_pipeline(render_pipeline)
+ # render_pass.set_bind_group(0, no_bind_group, [], 0, 1)
+ render_pass.draw(3, 1, 0, 0)
+ render_pass.end()
+ device.queue.submit([command_encoder.finish()])
+
+ canvas.request_draw(draw_frame)
+ return device
+
+
+if __name__ == "__main__":
+ import wgpu.backends.rs # noqa: F401, Select Rust backend
+ from wgpu.gui.auto import WgpuCanvas, run
+
+ canvas = WgpuCanvas(size=(640, 480), title="wgpu triangle")
+ main(canvas)
+ run()
diff --git a/examples/experiments/wgpu_gui/wxpy.py b/examples/experiments/wgpu_gui/wxpy.py
new file mode 100644
index 000000000..128b0dd72
--- /dev/null
+++ b/examples/experiments/wgpu_gui/wxpy.py
@@ -0,0 +1,40 @@
+"""
+An example demonstrating a wx app with a wgpu viz inside.
+"""
+# run_example = false
+
+import wgpu.backends.rs # noqa: F401, Select Rust backend
+import wx
+from triangle import main
+from wgpu.gui.wx import WgpuWidget
+
+
+class Example(wx.Frame):
+ def __init__(self):
+ super().__init__(None, title="wgpu triangle embedded in a wx app")
+ self.SetSize(640, 480)
+
+ splitter = wx.SplitterWindow(self)
+
+ self.button = wx.Button(self, -1, "Hello world")
+ self.canvas1 = WgpuWidget(splitter)
+ self.canvas2 = WgpuWidget(splitter)
+
+ splitter.SplitVertically(self.canvas1, self.canvas2)
+ splitter.SetSashGravity(0.5)
+
+ sizer = wx.BoxSizer(wx.HORIZONTAL)
+ sizer.Add(self.button, 0, wx.EXPAND)
+ sizer.Add(splitter, 1, wx.EXPAND)
+ self.SetSizer(sizer)
+
+ self.Show()
+
+
+app = wx.App()
+example = Example()
+
+main(example.canvas1)
+main(example.canvas2)
+
+app.MainLoop()
diff --git a/examples/notebooks/basic_parametric_model.ipynb b/examples/notebooks/basic_parametric_model.ipynb
index 090681953..2a9c950e4 100644
--- a/examples/notebooks/basic_parametric_model.ipynb
+++ b/examples/notebooks/basic_parametric_model.ipynb
@@ -2,8 +2,13 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
- "metadata": {},
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2023-12-20T18:39:26.243111300Z",
+ "start_time": "2023-12-20T18:39:23.141930600Z"
+ }
+ },
"outputs": [],
"source": [
"from ada.param_models.basic_module import SimpleStru\n",
@@ -12,15 +17,17 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"a = Assembly(\"ParamModel\")\n",
"\n",
- "for x in range(0, 2):\n",
- " for y in range(0, 2):\n",
- " for z in range(0, 2):\n",
+ "dim = 4\n",
+ "\n",
+ "for x in range(0, dim):\n",
+ " for y in range(0, dim):\n",
+ " for z in range(0, dim):\n",
" props = dict(name=f\"P{x}{y}{z}\", placement=Placement((x * 5, y * 5, z * 3)))\n",
" if z != 0:\n",
" props[\"add_bottom_floor\"] = False\n",
@@ -33,27 +40,18 @@
"metadata": {},
"outputs": [],
"source": [
- "a"
+ "a.show()"
]
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {
"pycharm": {
"is_executing": true
}
},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Beginning writing to IFC file \"C:\\work\\code\\adapy\\examples\\temp\\massive_stru.ifc\" using IfcOpenShell\n",
- "IFC file creation complete\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"a.to_ifc(\"temp/massive_stru.ifc\")"
]
@@ -75,9 +73,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.4"
+ "version": "3.11.6"
}
},
"nbformat": 4,
- "nbformat_minor": 1
+ "nbformat_minor": 4
}
diff --git a/examples/notebooks/cantilever_fem_eigenfrequency.ipynb b/examples/notebooks/cantilever_fem_eigenfrequency.ipynb
index bef999770..afdbc008b 100644
--- a/examples/notebooks/cantilever_fem_eigenfrequency.ipynb
+++ b/examples/notebooks/cantilever_fem_eigenfrequency.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -13,7 +13,7 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -22,7 +22,7 @@
" assembly = ada.Assembly(\"MyAssembly\") / [ada.Part(\"MyPart\") / bm]\n",
" part = bm.parent\n",
" part.fem = bm.to_fem_obj(0.1, geom_repr, options=GmshOptions(Mesh_ElementOrder=1))\n",
- " nodes = bm.bbox.sides.back(return_fem_nodes=True)\n",
+ " nodes = bm.bbox().sides.back(return_fem_nodes=True)\n",
" assembly.fem.add_bc(ada.fem.Bc(\"Fixed\", ada.fem.FemSet(\"bc_nodes\", nodes), [1, 2, 3, 4, 5, 6]))\n",
" assembly.fem.add_step(ada.fem.StepEigen(\"Eigen\", num_eigen_modes=10))\n",
" return assembly"
@@ -30,22 +30,9 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {},
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Removing old files before copying new\n",
- "Created a Calculix input deck at \"c:\\AibelProgs\\ADA\\scratch\\Cantilever_CCX_EIG_sh\"\n",
- "--------------------------------------------------------------------------------\n",
- "Starting Calculix simulation \"Cantilever_CCX_EIG_sh\" (on Windows) using 2 cpus\n",
- "Finished Calculix simulation \"Cantilever_CCX_EIG_sh\"\n",
- "--------------------------------------------------------------------------------\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"a = make_fem(\"shell\")\n",
"ccx_res = a.to_fem(\"Cantilever_CCX_EIG_sh\", \"calculix\", overwrite=True, execute=True)"
@@ -53,582 +40,80 @@
},
{
"cell_type": "code",
- "execution_count": 4,
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "Removing old files before copying new\n",
- "Created a Code_Aster input deck at \"c:\\AibelProgs\\ADA\\scratch\\Cantilever_CA_EIG_sh\"\n",
- "--------------------------------------------------------------------------------\n",
- "Starting CodeAster simulation \"Cantilever_CA_EIG_sh\" (on Windows) using 2 cpus\n",
- "Finished CodeAster simulation \"Cantilever_CA_EIG_sh\"\n",
- "--------------------------------------------------------------------------------\n"
- ]
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
}
- ],
+ },
+ "outputs": [],
"source": [
"a = make_fem(\"shell\")\n",
"ca_res = a.to_fem(\"Cantilever_CA_EIG_sh\", \"code_aster\", overwrite=True, execute=True)"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
- "execution_count": 5,
- "outputs": [
- {
- "data": {
- "text/plain": "",
- "text/html": ""
- },
- "execution_count": 5,
- "metadata": {},
- "output_type": "execute_result"
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
}
- ],
+ },
+ "outputs": [],
"source": [
"scene = ccx_res.to_trimesh(1, \"DISP\", \"DISP\", 1, 20)\n",
"scene.show()"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
- "execution_count": 11,
- "outputs": [
- {
- "data": {
- "text/plain": "",
- "text/html": ""
- },
- "execution_count": 11,
- "metadata": {},
- "output_type": "execute_result"
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
}
- ],
+ },
+ "outputs": [],
"source": [
"scene = ca_res.to_trimesh(\n",
" 5.054232624598001, \"modes___DEPL[0] - 5.05423\", \"modes___DEPL[0] - 5.05423\", 5.054232624598001, 1\n",
")\n",
"scene.show()"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
- "execution_count": 7,
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "NodalFieldData(name='DISP', step=1, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -3.41087e-06],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 3.41087e-06],\n",
- " [ 7.24000e+02, -2.74524e-03, 1.10826e-01, -8.56491e-06],\n",
- " ...,\n",
- " [ 3.99900e+03, -9.31715e-04, 2.00099e-02, -7.00671e-05],\n",
- " [ 4.00000e+03, -1.37202e-03, 1.00144e-01, 5.60019e-06],\n",
- " [ 4.00200e+03, -1.37197e-03, 1.00145e-01, 5.58137e-06]]))\n",
- "NodalFieldData(name='STRESS', step=1, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -2.03936e+08, -1.21381e+07, ..., 2.56137e+07,\n",
- " -2.82903e+06, 4.18204e+06],\n",
- " [ 7.23000e+02, -2.17485e+08, -1.88964e+07, ..., 2.14314e+07,\n",
- " 9.66455e+05, 3.82411e+06],\n",
- " [ 7.24000e+02, -6.35956e+03, -2.79858e+04, ..., 3.27970e+04,\n",
- " -4.91189e+03, 2.93014e+03],\n",
- " ...,\n",
- " [ 3.99900e+03, -7.74873e+07, 1.20800e+04, ..., 2.10305e+06,\n",
- " 1.04429e+06, -1.18072e+05],\n",
- " [ 4.00000e+03, -1.28169e+06, -1.03578e+05, ..., 6.24147e+05,\n",
- " -4.15328e+04, -9.39590e+03],\n",
- " [ 4.00200e+03, -1.12857e+06, 8.71849e+04, ..., 3.13911e+05,\n",
- " -7.05739e+03, -1.84342e+04]]))\n",
- "NodalFieldData(name='FORC', step=1, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 6.53330e+04, -1.07592e+04, -1.53024e+03],\n",
- " [ 7.23000e+02, 7.85515e+04, -1.40050e+04, -1.53024e+03],\n",
- " [ 7.24000e+02, -1.47971e-01, 8.87822e+00, -4.64128e-04],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.50657e-01, 3.23767e+00, -1.12640e-02],\n",
- " [ 4.00000e+03, -2.21856e-01, 1.61935e+01, 8.97857e-04],\n",
- " [ 4.00200e+03, -2.21850e-01, 1.61936e+01, 8.96655e-04]]))\n",
- "NodalFieldData(name='ERROR', step=1, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=2, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -8.68046e-06],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 8.68046e-06],\n",
- " [ 7.24000e+02, -6.08874e-03, -1.57821e-06, -1.09673e-01],\n",
- " ...,\n",
- " [ 3.99900e+03, 3.85751e-03, -2.37439e-05, -2.06486e-02],\n",
- " [ 4.00000e+03, 6.08523e-03, 8.64221e-07, -9.93334e-02],\n",
- " [ 4.00200e+03, 5.68611e-03, -1.48364e-06, -9.93333e-02]]))\n",
- "NodalFieldData(name='STRESS', step=2, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -6.91858e+08, -1.76102e+08, ..., 4.61107e+07,\n",
- " -1.21985e+07, -1.90158e+07],\n",
- " [ 7.23000e+02, -5.51102e+08, -9.80246e+07, ..., 3.30262e+07,\n",
- " -1.00248e+07, -2.37452e+07],\n",
- " [ 7.24000e+02, -2.42211e+06, -1.50922e+06, ..., -2.05543e+05,\n",
- " -1.75751e+05, -2.35575e+05],\n",
- " ...,\n",
- " [ 3.99900e+03, 3.47924e+08, 1.47204e+07, ..., -7.57701e+06,\n",
- " -3.19286e+04, -1.76274e+04],\n",
- " [ 4.00000e+03, 5.66981e+06, -9.14460e+05, ..., -1.30009e+06,\n",
- " -5.22937e+05, -5.13672e+04],\n",
- " [ 4.00200e+03, 6.07139e+06, 1.17821e+06, ..., -1.32627e+06,\n",
- " -5.44291e+05, -1.03326e+04]]))\n",
- "NodalFieldData(name='FORC', step=2, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 2.44747e+05, -4.83912e+04, 2.74414e+03],\n",
- " [ 7.23000e+02, 1.59511e+05, -6.81642e+04, 2.74415e+03],\n",
- " [ 7.24000e+02, -7.11192e+00, -4.65777e-04, -1.29791e+02],\n",
- " ...,\n",
- " [ 3.99900e+03, 9.43569e+00, -5.28246e-02, -4.93390e+01],\n",
- " [ 4.00000e+03, 1.42162e+01, -3.56750e-04, -2.37251e+02],\n",
- " [ 4.00200e+03, 1.38985e+01, -1.24734e-03, -2.37251e+02]]))\n",
- "NodalFieldData(name='ERROR', step=2, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=3, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -1.07644e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 1.07644e-05],\n",
- " [ 7.24000e+02, -8.43192e-04, 1.03928e-01, 4.44451e-02],\n",
- " ...,\n",
- " [ 3.99900e+03, 1.30589e-03, -3.70553e-02, 8.68773e-03],\n",
- " [ 4.00000e+03, 4.39261e-04, -1.00334e-01, 2.17195e-02],\n",
- " [ 4.00200e+03, 4.54340e-04, -9.38277e-02, 2.17188e-02]]))\n",
- "NodalFieldData(name='STRESS', step=3, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -6.13780e+08, -2.92880e+07, ..., 1.31018e+08,\n",
- " -2.18134e+07, 3.09124e+07],\n",
- " [ 7.23000e+02, -7.29195e+08, -7.40408e+07, ..., 8.82852e+07,\n",
- " -1.07050e+07, 3.01101e+07],\n",
- " [ 7.24000e+02, -1.30566e+07, -8.94854e+06, ..., 1.18981e+07,\n",
- " 2.71680e+05, 4.51064e+04],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.07225e+06, 1.38892e+07, ..., -1.35158e+08,\n",
- " -3.64020e+05, -1.64143e+07],\n",
- " [ 4.00000e+03, -4.60724e+07, -1.31136e+07, ..., 5.14154e+07,\n",
- " -3.52024e+06, -3.72914e+06],\n",
- " [ 4.00200e+03, -2.81371e+07, 8.43444e+06, ..., -4.10903e+07,\n",
- " -2.47936e+06, -3.90687e+06]]))\n",
- "NodalFieldData(name='FORC', step=3, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 1.90591e+05, -2.58796e+04, -1.47553e+04],\n",
- " [ 7.23000e+02, 2.91353e+05, -7.56071e+04, -1.47553e+04],\n",
- " [ 7.24000e+02, -9.07562e-01, 1.63314e+02, 4.76854e+01],\n",
- " ...,\n",
- " [ 3.99900e+03, 4.17965e+00, -1.22154e+02, 2.79634e+01],\n",
- " [ 4.00000e+03, 1.42308e+00, -3.16214e+02, 6.99483e+01],\n",
- " [ 4.00200e+03, 1.45537e+00, -3.09220e+02, 6.99475e+01]]))\n",
- "NodalFieldData(name='ERROR', step=3, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=4, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, 2.03003e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, -2.03003e-05],\n",
- " [ 7.24000e+02, -9.44095e-03, 1.10146e-01, -2.94108e-04],\n",
- " ...,\n",
- " [ 3.99900e+03, 1.55703e-03, -6.81696e-02, -5.47525e-04],\n",
- " [ 4.00000e+03, -4.69733e-03, 7.35175e-02, 1.01284e-04],\n",
- " [ 4.00200e+03, -4.69491e-03, 7.35452e-02, 1.00590e-04]]))\n",
- "NodalFieldData(name='STRESS', step=4, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, 1.23044e+09, 7.88113e+07, ..., -1.91663e+08,\n",
- " 1.27573e+07, -1.87561e+07],\n",
- " [ 7.23000e+02, 1.28704e+09, 1.09565e+08, ..., -1.75883e+08,\n",
- " -8.72410e+06, -1.68601e+07],\n",
- " [ 7.24000e+02, 1.39786e+05, -5.44813e+05, ..., 7.49161e+05,\n",
- " -1.85547e+05, 9.10406e+04],\n",
- " ...,\n",
- " [ 3.99900e+03, -4.29278e+08, -1.44316e+06, ..., -2.56752e+07,\n",
- " 6.17004e+06, 1.42517e+06],\n",
- " [ 4.00000e+03, -4.55190e+07, -3.18476e+06, ..., 2.33900e+07,\n",
- " -1.44320e+06, -4.87261e+05],\n",
- " [ 4.00200e+03, -4.08574e+07, 2.68156e+06, ..., 8.71607e+06,\n",
- " -2.22614e+05, -7.84091e+05]]))\n",
- "NodalFieldData(name='FORC', step=4, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, -4.15000e+05, 8.33355e+04, 6.08984e+03],\n",
- " [ 7.23000e+02, -4.73732e+05, 9.18836e+04, 6.08981e+03],\n",
- " [ 7.24000e+02, -1.96657e+01, 3.33268e+02, -6.12414e-01],\n",
- " ...,\n",
- " [ 3.99900e+03, 9.72829e+00, -4.25459e+02, -3.40102e+00],\n",
- " [ 4.00000e+03, -2.93487e+01, 4.59482e+02, 6.23574e-01],\n",
- " [ 4.00200e+03, -2.93386e+01, 4.59545e+02, 6.21826e-01]]))\n",
- "NodalFieldData(name='ERROR', step=4, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=5, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -3.58324e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 3.58324e-05],\n",
- " [ 7.24000e+02, 6.48433e-03, -1.22300e-01, -4.74048e-02],\n",
- " ...,\n",
- " [ 3.99900e+03, 1.08213e-03, -8.38361e-02, 1.89338e-02],\n",
- " [ 4.00000e+03, -3.37486e-03, 9.54567e-02, -1.98935e-02],\n",
- " [ 4.00200e+03, -3.48792e-03, 8.95547e-02, -1.98891e-02]]))\n",
- "NodalFieldData(name='STRESS', step=5, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -2.05462e+09, -1.01466e+08, ..., 4.68157e+08,\n",
- " -6.87334e+07, 1.00161e+08],\n",
- " [ 7.23000e+02, -2.42417e+09, -2.45851e+08, ..., 3.32621e+08,\n",
- " -3.26414e+07, 9.76999e+07],\n",
- " [ 7.24000e+02, 1.01350e+08, 6.85359e+07, ..., -9.57317e+07,\n",
- " -2.03287e+06, -6.46503e+05],\n",
- " ...,\n",
- " [ 3.99900e+03, -6.79769e+08, 7.07168e+07, ..., -1.31830e+08,\n",
- " -4.15670e+06, -1.31568e+07],\n",
- " [ 4.00000e+03, 2.76382e+08, 9.19853e+07, ..., -3.60706e+08,\n",
- " 2.35742e+07, 2.78149e+07],\n",
- " [ 4.00200e+03, 1.50429e+08, -5.74205e+07, ..., 3.35355e+08,\n",
- " 1.76155e+07, 2.87299e+07]]))\n",
- "NodalFieldData(name='FORC', step=5, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 6.52081e+05, -1.02506e+05, -4.71573e+04],\n",
- " [ 7.23000e+02, 9.78615e+05, -2.57949e+05, -4.71570e+04],\n",
- " [ 7.24000e+02, 7.42221e+01, -2.01338e+03, -5.38129e+02],\n",
- " ...,\n",
- " [ 3.99900e+03, 3.68150e+01, -2.93402e+03, 6.47561e+02],\n",
- " [ 4.00000e+03, -1.16226e+02, 3.20226e+03, -6.80242e+02],\n",
- " [ 4.00200e+03, -1.18805e+02, 3.13426e+03, -6.80191e+02]]))\n",
- "NodalFieldData(name='ERROR', step=5, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=6, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, 5.31110e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, -5.31110e-05],\n",
- " [ 7.24000e+02, 1.50900e-02, -1.08129e-01, 1.62417e-03],\n",
- " ...,\n",
- " [ 3.99900e+03, -2.28667e-03, -7.46492e-02, -2.41729e-03],\n",
- " [ 4.00000e+03, 7.38672e-03, -5.00939e-02, -3.20093e-04],\n",
- " [ 4.00200e+03, 7.36676e-03, -5.01737e-02, -3.15106e-04]]))\n",
- "NodalFieldData(name='STRESS', step=6, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, 3.27620e+09, 2.28755e+08, ..., -5.96448e+08,\n",
- " 1.89270e+07, -2.61449e+07],\n",
- " [ 7.23000e+02, 3.33435e+09, 2.73691e+08, ..., -5.89148e+08,\n",
- " -3.44495e+07, -2.17918e+07],\n",
- " [ 7.24000e+02, -3.48328e+06, 7.94904e+05, ..., -2.10669e+06,\n",
- " 1.39063e+06, -5.26766e+05],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.52967e+09, -8.74767e+06, ..., 3.37459e+07,\n",
- " 2.45595e+07, -5.70183e+06],\n",
- " [ 4.00000e+03, 3.14997e+08, 1.76107e+07, ..., -1.68452e+08,\n",
- " 9.43228e+06, 4.21574e+06],\n",
- " [ 4.00200e+03, 2.89520e+08, -1.48147e+07, ..., -4.55247e+07,\n",
- " 9.95467e+05, 6.11768e+06]]))\n",
- "NodalFieldData(name='FORC', step=6, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, -1.16470e+06, 2.73597e+05, 4.66125e+03],\n",
- " [ 7.23000e+02, -1.24340e+06, 2.56104e+05, 4.66071e+03],\n",
- " [ 7.24000e+02, 2.38498e+02, -2.43273e+03, 2.54722e+01],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.08447e+02, -3.52312e+03, -1.13936e+02],\n",
- " [ 4.00000e+03, 3.49967e+02, -2.37737e+03, -1.47692e+01],\n",
- " [ 4.00200e+03, 3.49333e+02, -2.37884e+03, -1.46764e+01]]))\n",
- "NodalFieldData(name='ERROR', step=6, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=7, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -3.92189e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 3.92189e-05],\n",
- " [ 7.24000e+02, 1.91367e-02, 4.53245e-05, 1.03924e-01],\n",
- " ...,\n",
- " [ 3.99900e+03, 4.25719e-03, 9.28204e-05, -7.03186e-02],\n",
- " [ 4.00000e+03, -1.90129e-02, -1.69421e-05, 7.06377e-02],\n",
- " [ 4.00200e+03, -1.77215e-02, 3.68963e-05, 7.06351e-02]]))\n",
- "NodalFieldData(name='STRESS', step=7, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -3.57806e+09, -1.02879e+09, ..., 3.10005e+08,\n",
- " -1.15031e+08, -1.98323e+08],\n",
- " [ 7.23000e+02, -2.24156e+09, -3.05142e+08, ..., 1.37605e+08,\n",
- " -1.14625e+08, -2.16652e+08],\n",
- " [ 7.24000e+02, 7.10809e+07, 4.55847e+07, ..., 6.65950e+06,\n",
- " 5.42165e+06, 6.87061e+06],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.72292e+09, -5.15524e+07, ..., -7.74523e+07,\n",
- " -1.23054e+07, 5.51171e+05],\n",
- " [ 4.00000e+03, -1.72258e+08, 2.26751e+07, ..., 3.52973e+07,\n",
- " 1.18639e+07, 1.84258e+06],\n",
- " [ 4.00200e+03, -1.85438e+08, -2.87345e+07, ..., 3.34330e+07,\n",
- " 1.23389e+07, 7.03712e+05]]))\n",
- "NodalFieldData(name='FORC', step=7, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 1.37441e+06, -1.87160e+05, 2.42695e+04],\n",
- " [ 7.23000e+02, 5.44371e+05, -3.99002e+05, 2.42704e+04],\n",
- " [ 7.24000e+02, 7.14263e+02, 4.13462e-01, 3.84474e+03],\n",
- " ...,\n",
- " [ 3.99900e+03, 3.35341e+02, 8.12720e+00, -5.37031e+03],\n",
- " [ 4.00000e+03, -1.41982e+03, 4.76045e-01, 5.39827e+03],\n",
- " [ 4.00200e+03, -1.38701e+03, 1.13647e+00, 5.39818e+03]]))\n",
- "NodalFieldData(name='ERROR', step=7, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=8, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, -7.21304e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, 7.21304e-05],\n",
- " [ 7.24000e+02, -1.46171e-02, 1.33360e-01, 4.52833e-02],\n",
- " ...,\n",
- " [ 3.99900e+03, -3.86217e-03, -7.04192e-02, 1.48027e-02],\n",
- " [ 4.00000e+03, 7.36951e-03, -7.32333e-02, 1.43736e-02],\n",
- " [ 4.00200e+03, 7.60898e-03, -6.90380e-02, 1.43693e-02]]))\n",
- "NodalFieldData(name='STRESS', step=8, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, -4.16993e+09, -2.16035e+08, ..., 1.04691e+09,\n",
- " -1.26699e+08, 1.94384e+08],\n",
- " [ 7.23000e+02, -4.87217e+09, -4.93936e+08, ..., 7.93731e+08,\n",
- " -5.68941e+07, 1.90096e+08],\n",
- " [ 7.24000e+02, -2.31688e+08, -1.50840e+08, ..., 2.32569e+08,\n",
- " 4.17304e+06, 2.69319e+06],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.57695e+09, 8.87811e+07, ..., 4.91800e+08,\n",
- " -4.92511e+06, 4.45807e+07],\n",
- " [ 4.00000e+03, -2.61274e+08, -1.69201e+08, ..., 6.54300e+08,\n",
- " -3.80327e+07, -5.82587e+07],\n",
- " [ 4.00200e+03, -2.99515e+07, 9.83051e+07, ..., -8.29370e+08,\n",
- " -3.39795e+07, -5.83560e+07]]))\n",
- "NodalFieldData(name='FORC', step=8, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, 1.36693e+06, -2.57885e+05, -8.93751e+04],\n",
- " [ 7.23000e+02, 2.00083e+06, -5.40826e+05, -8.93731e+04],\n",
- " [ 7.24000e+02, -6.37748e+02, 8.20792e+03, 1.94352e+03],\n",
- " ...,\n",
- " [ 3.99900e+03, -5.00912e+02, -9.36708e+03, 1.92865e+03],\n",
- " [ 4.00000e+03, 9.66822e+02, -9.38006e+03, 1.86850e+03],\n",
- " [ 4.00200e+03, 9.87695e+02, -9.19336e+03, 1.86835e+03]]))\n",
- "NodalFieldData(name='ERROR', step=8, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=9, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, 9.43480e-05],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, -9.43480e-05],\n",
- " [ 7.24000e+02, -2.01317e-02, 1.04322e-01, -3.77916e-03],\n",
- " ...,\n",
- " [ 3.99900e+03, -7.02486e-03, -5.53659e-03, -3.79243e-04],\n",
- " [ 4.00000e+03, -9.51981e-03, 2.76243e-02, -1.59914e-04],\n",
- " [ 4.00200e+03, -9.44438e-03, 2.75359e-02, -1.76796e-04]]))\n",
- "NodalFieldData(name='STRESS', step=9, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, 5.98799e+09, 4.73977e+08, ..., -1.23195e+09,\n",
- " -8.88249e+06, 2.77566e+07],\n",
- " [ 7.23000e+02, 5.80473e+09, 4.38789e+08, ..., -1.32554e+09,\n",
- " -9.80516e+07, 3.42355e+07],\n",
- " [ 7.24000e+02, 1.99361e+07, 7.86357e+06, ..., -4.81495e+06,\n",
- " -4.94518e+06, 1.26325e+06],\n",
- " ...,\n",
- " [ 3.99900e+03, -9.67862e+07, 4.60302e+07, ..., 2.07250e+08,\n",
- " 1.16529e+05, -3.87978e+07],\n",
- " [ 4.00000e+03, -1.01980e+09, -3.68609e+07, ..., 5.66710e+08,\n",
- " -2.73301e+07, -1.62892e+07],\n",
- " [ 4.00200e+03, -9.68705e+08, 3.05433e+07, ..., 9.47124e+07,\n",
- " 3.44857e+04, -2.19260e+07]]))\n",
- "NodalFieldData(name='FORC', step=9, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, -2.27447e+06, 6.18947e+05, -2.74584e+04],\n",
- " [ 7.23000e+02, -2.17072e+06, 4.65949e+05, -2.74619e+04],\n",
- " [ 7.24000e+02, -1.15131e+03, 8.32536e+03, -2.11375e+02],\n",
- " ...,\n",
- " [ 3.99900e+03, -1.20642e+03, -9.40614e+02, -6.86726e+01],\n",
- " [ 4.00000e+03, -1.63011e+03, 4.74660e+03, -3.08259e+01],\n",
- " [ 4.00200e+03, -1.62141e+03, 4.74347e+03, -3.19278e+01]]))\n",
- "NodalFieldData(name='ERROR', step=9, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n",
- "NodalFieldData(name='DISP', step=10, components=['D1', 'D2', 'D3', 'ALL'], values=array([[ 7.21000e+02, 0.00000e+00, 0.00000e+00, 1.22593e-04],\n",
- " [ 7.23000e+02, 0.00000e+00, -0.00000e+00, -1.22593e-04],\n",
- " [ 7.24000e+02, -2.26488e-02, 1.35106e-01, 4.00456e-02],\n",
- " ...,\n",
- " [ 3.99900e+03, 8.74643e-03, -6.50017e-03, 2.21745e-03],\n",
- " [ 4.00000e+03, 1.08409e-02, -4.27929e-02, 7.55572e-03],\n",
- " [ 4.00200e+03, 1.11787e-02, -4.06418e-02, 7.56342e-03]]))\n",
- "NodalFieldData(name='STRESS', step=10, components=['SXX', 'SYY', 'SZZ', 'SXY', 'SYZ', 'SZX'], values=array([[ 7.21000e+02, 7.15431e+09, 3.90850e+08, ..., -2.01328e+09,\n",
- " 1.92093e+08, -3.18221e+08],\n",
- " [ 7.23000e+02, 8.26988e+09, 8.39810e+08, ..., -1.62114e+09,\n",
- " 7.96941e+07, -3.12375e+08],\n",
- " [ 7.24000e+02, -3.73894e+08, -2.23411e+08, ..., 4.03383e+08,\n",
- " 5.24595e+06, 6.87734e+06],\n",
- " ...,\n",
- " [ 3.99900e+03, -2.36450e+08, 1.03712e+08, ..., -1.40546e+09,\n",
- " -2.84517e+06, -9.46439e+07],\n",
- " [ 4.00000e+03, 4.70209e+08, -1.84983e+08, ..., 6.63095e+08,\n",
- " -2.69351e+07, -8.05495e+07],\n",
- " [ 4.00200e+03, 7.18251e+08, 9.04470e+07, ..., -1.44744e+09,\n",
- " -4.30182e+07, -7.63412e+07]]))\n",
- "NodalFieldData(name='FORC', step=10, components=['F1', 'F2', 'F3', 'ALL'], values=array([[ 7.21000e+02, -2.43673e+06, 5.48563e+05, 1.41641e+05],\n",
- " [ 7.23000e+02, -3.47498e+06, 9.70739e+05, 1.41631e+05],\n",
- " [ 7.24000e+02, -2.74967e+03, 2.26651e+04, 4.73185e+03],\n",
- " ...,\n",
- " [ 3.99900e+03, 3.15442e+03, -2.42590e+03, 7.86970e+02],\n",
- " [ 4.00000e+03, 3.95451e+03, -1.53097e+04, 2.72263e+03],\n",
- " [ 4.00200e+03, 4.03667e+03, -1.50379e+04, 2.72391e+03]]))\n",
- "NodalFieldData(name='ERROR', step=10, components=['STR(%)'], values=array([[ 721., 0.],\n",
- " [ 723., 0.],\n",
- " [ 724., 0.],\n",
- " ...,\n",
- " [3999., 0.],\n",
- " [4000., 0.],\n",
- " [4002., 0.]]))\n"
- ]
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
}
- ],
+ },
+ "outputs": [],
"source": [
"for cx in ccx_res.iter_results_by_field_value():\n",
" print(cx)"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
- "execution_count": 8,
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "NodalFieldData(name='modes___DEPL[0] - 5.05423', step=5.054232624598001, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -2.47193551e-02, 9.98178713e-01, ...,\n",
- " -1.33139810e-02, 1.31013175e-04, -1.59825780e-19],\n",
- " [ 3.00000000e+00, 2.76803172e-15, 9.98178844e-01, ...,\n",
- " -1.34689994e-02, -3.07917368e-14, 2.74395442e-01],\n",
- " ...,\n",
- " [ 7.18000000e+02, -1.23347475e-02, 8.47167798e-01, ...,\n",
- " 1.23743173e-02, -9.86470873e-05, -2.10067683e-20],\n",
- " [ 7.19000000e+02, -8.39569590e-03, 1.80613230e-01, ...,\n",
- " 7.17880052e-03, -4.55272926e-05, 2.24459492e-20],\n",
- " [ 7.20000000e+02, -1.23526873e-02, 9.02048117e-01, ...,\n",
- " 1.27844112e-02, -8.56076939e-05, 8.81627408e-20]]))\n",
- "NodalFieldData(name='modes___DEPL[1] - 9.75129', step=9.75129233796498, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -3.94961136e-03, 1.95139381e-01, ...,\n",
- " 9.18261499e-01, -4.28243886e-03, 2.92702338e-19],\n",
- " [ 3.00000000e+00, -2.48538907e-15, 1.95138750e-01, ...,\n",
- " 9.22801316e-01, 1.34584205e-14, 5.09347333e-02],\n",
- " ...,\n",
- " [ 7.18000000e+02, 2.00088082e-03, -1.70924160e-01, ...,\n",
- " 8.46655360e-01, -8.80163373e-03, 2.46556010e-19],\n",
- " [ 7.19000000e+02, 1.91711640e-03, -4.51094287e-02, ...,\n",
- " 2.29326010e-01, -9.43719871e-03, -1.14249057e-19],\n",
- " [ 7.20000000e+02, 1.98444957e-03, -1.79770653e-01, ...,\n",
- " 8.82676734e-01, -7.35454335e-03, -7.00491225e-20]]))\n",
- "NodalFieldData(name='modes___DEPL[2] - 19.4904', step=19.490358543863028, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, 5.36708722e-02, 1.24884706e-06, ...,\n",
- " 4.46461847e-03, -2.67864897e-01, 7.91179921e-17],\n",
- " [ 3.00000000e+00, 5.36744806e-02, 1.66532307e-13, ...,\n",
- " 1.30030855e-12, -2.66302526e-01, 2.08687698e-14],\n",
- " ...,\n",
- " [ 7.18000000e+02, -5.35636053e-02, 7.23586444e-06, ...,\n",
- " 3.26003084e-03, -2.69685606e-01, 4.01932133e-17],\n",
- " [ 7.19000000e+02, -3.64413085e-02, 1.95480443e-04, ...,\n",
- " -2.63474876e-04, -1.89065181e-01, -2.04126237e-17],\n",
- " [ 7.20000000e+02, -5.36436798e-02, 2.68381779e-06, ...,\n",
- " 3.49523504e-03, -2.69465208e-01, -2.13409197e-17]]))\n",
- "NodalFieldData(name='modes___DEPL[3] - 31.2829', step=31.282929691021273, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -8.15264622e-02, 9.49722740e-01, ...,\n",
- " -3.35486865e-01, 7.52326961e-03, 5.54626807e-18],\n",
- " [ 3.00000000e+00, 1.45149992e-14, 9.49726923e-01, ...,\n",
- " -3.43649118e-01, -4.54339366e-14, 8.91325411e-01],\n",
- " ...,\n",
- " [ 7.18000000e+02, -3.99333014e-02, 4.53425571e-01, ...,\n",
- " 2.23048541e-01, -1.44342465e-02, -2.84550339e-18],\n",
- " [ 7.19000000e+02, 1.34501567e-02, -5.88985481e-01, ...,\n",
- " -2.95157988e-01, 4.91665985e-03, -5.67088753e-18],\n",
- " [ 7.20000000e+02, -4.05226719e-02, 6.32811887e-01, ...,\n",
- " 2.80488656e-01, -1.13264464e-02, -2.22189299e-18]]))\n",
- "NodalFieldData(name='modes___DEPL[4] - 41.7106', step=41.71058110137183, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -1.45318705e-02, 1.81661927e-01, ...,\n",
- " 7.00805602e-01, -1.54416898e-02, -5.14566313e-18],\n",
- " [ 3.00000000e+00, -8.48526741e-16, 1.81659740e-01, ...,\n",
- " 7.16667395e-01, -5.53523846e-15, 1.84785431e-01],\n",
- " ...,\n",
- " [ 7.18000000e+02, 7.19995424e-03, -9.29466966e-02, ...,\n",
- " 4.43954970e-01, -3.10476359e-02, -2.83895738e-17],\n",
- " [ 7.19000000e+02, -2.33132806e-03, 1.10877436e-01, ...,\n",
- " -5.33084351e-01, 1.07271083e-02, 1.54954440e-17],\n",
- " [ 7.20000000e+02, 7.25570456e-03, -1.25151608e-01, ...,\n",
- " 5.72207164e-01, -2.64286630e-02, -2.63083736e-19]]))\n",
- "NodalFieldData(name='modes___DEPL[5] - 82.0474', step=82.04739582699456, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, 3.88941028e-02, -2.80138423e-01, ...,\n",
- " 5.93696125e-01, -2.36465554e-02, -1.48712996e-17],\n",
- " [ 3.00000000e+00, -4.45708846e-15, -2.80147731e-01, ...,\n",
- " 6.17052343e-01, 9.61378078e-15, -3.91537348e-01],\n",
- " ...,\n",
- " [ 7.18000000e+02, 1.77999701e-02, -4.64959400e-02, ...,\n",
- " -1.81968211e-01, 5.20522089e-02, -2.74689082e-17],\n",
- " [ 7.19000000e+02, -6.05552470e-03, -1.85480100e-01, ...,\n",
- " -8.75605788e-01, -2.85857980e-02, 1.67534863e-17],\n",
- " [ 7.20000000e+02, 1.89536396e-02, -1.29251891e-01, ...,\n",
- " -3.93157614e-01, 4.23813474e-02, 1.31105774e-17]]))\n",
- "NodalFieldData(name='modes___DEPL[6] - 105.024', step=105.02446835346396, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -2.29287971e-02, 1.67210144e-01, ...,\n",
- " 5.15702863e-01, -2.26991946e-02, 1.28158921e-17],\n",
- " [ 3.00000000e+00, -2.16306028e-16, 1.67207548e-01, ...,\n",
- " 5.36820094e-01, 1.03283605e-14, 2.78904706e-01],\n",
- " ...,\n",
- " [ 7.18000000e+02, 1.05700572e-02, -2.91973434e-02, ...,\n",
- " 1.41012225e-01, -4.32376874e-02, 2.84723714e-18],\n",
- " [ 7.19000000e+02, -3.67355249e-03, -1.11025099e-01, ...,\n",
- " 5.00956428e-01, 1.71254097e-02, -1.67646195e-17],\n",
- " [ 7.20000000e+02, 1.12118262e-02, -7.82343796e-02, ...,\n",
- " 3.24618131e-01, -3.87809655e-02, -2.09291254e-17]]))\n",
- "NodalFieldData(name='modes___DEPL[7] - 110', step=110.00003089608711, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, 1.76659532e-01, 3.74690071e-05, ...,\n",
- " 1.35732194e-01, -8.70606631e-01, 7.22861470e-17],\n",
- " [ 3.00000000e+00, 1.76772029e-01, 4.91920283e-14, ...,\n",
- " -5.36290037e-13, -8.20062049e-01, 1.63440585e-13],\n",
- " ...,\n",
- " [ 7.18000000e+02, -1.73176189e-01, 2.22452644e-04, ...,\n",
- " 6.00265648e-02, -9.17004816e-01, 5.61347857e-17],\n",
- " [ 7.19000000e+02, 4.25306032e-02, 1.03271111e-03, ...,\n",
- " -8.84451656e-02, 2.85886412e-01, -1.33665303e-17],\n",
- " [ 7.20000000e+02, -1.75637015e-01, 9.20882336e-05, ...,\n",
- " 8.34039558e-02, -9.16149755e-01, 1.90831927e-17]]))\n",
- "NodalFieldData(name='modes___DEPL[8] - 122.51', step=122.51034460508012, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -1.25921336e-03, 3.78247334e-02, ...,\n",
- " 1.00000000e+00, -4.27237642e-03, 7.08736747e-18],\n",
- " [ 3.00000000e+00, 3.10193440e-15, 3.78168886e-02, ...,\n",
- " 9.97140056e-01, -1.60964541e-14, -4.72682130e-03],\n",
- " ...,\n",
- " [ 7.18000000e+02, -5.88595665e-04, 3.01261091e-02, ...,\n",
- " -9.91055474e-01, -3.91789521e-04, 4.70445140e-17],\n",
- " [ 7.19000000e+02, -6.80354012e-04, 1.62175368e-02, ...,\n",
- " -4.49390160e-01, 6.83606627e-03, -3.04318538e-17],\n",
- " [ 7.20000000e+02, -6.29637368e-04, 3.28863972e-02, ...,\n",
- " -9.89303880e-01, -3.05438288e-04, -2.27388043e-17]]))\n",
- "NodalFieldData(name='modes___DEPL[9] - 129.258', step=129.25770426710713, components=['DX', 'DY', 'DZ', 'DRX', 'DRY', 'DRZ'], values=array([[ 1.00000000e+00, 0.00000000e+00, 0.00000000e+00, ...,\n",
- " 0.00000000e+00, 0.00000000e+00, 0.00000000e+00],\n",
- " [ 2.00000000e+00, -9.75849118e-03, 4.61340891e-02, ...,\n",
- " -6.01935581e-01, 2.15761076e-02, 9.35831665e-17],\n",
- " [ 3.00000000e+00, -1.41872796e-16, 4.61425865e-02, ...,\n",
- " -6.19639830e-01, 1.01025166e-14, 8.30455138e-02],\n",
- " ...,\n",
- " [ 7.18000000e+02, -4.00367909e-03, -1.12171397e-02, ...,\n",
- " 2.25900002e-01, -4.47179626e-02, 6.00139346e-17],\n",
- " [ 7.19000000e+02, -2.59912029e-03, 1.14293738e-02, ...,\n",
- " -1.44501233e-01, -7.17654397e-02, -1.70864929e-17],\n",
- " [ 7.20000000e+02, -4.60738473e-03, 8.43921476e-03, ...,\n",
- " 4.13894340e-01, -3.89851135e-02, -1.78693510e-17]]))\n"
- ]
+ "execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
}
- ],
+ },
+ "outputs": [],
"source": [
"for ca in ca_res.iter_results_by_field_value():\n",
" print(ca)"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
}
],
"metadata": {
@@ -647,9 +132,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.11.0"
+ "version": "3.11.6"
}
},
"nbformat": 4,
- "nbformat_minor": 1
+ "nbformat_minor": 4
}
diff --git a/examples/notebooks/cantilever_fem_extended.ipynb b/examples/notebooks/cantilever_fem_extended.ipynb
index a3a154e83..91ab603af 100644
--- a/examples/notebooks/cantilever_fem_extended.ipynb
+++ b/examples/notebooks/cantilever_fem_extended.ipynb
@@ -70,7 +70,7 @@
" ada.Material(\"S420\", CarbonSteel(\"S420\", plasticity_model=DnvGl16Mat(0.1, \"S420\"))),\n",
")\n",
"a = ada.Assembly(\"MyAssembly\") / [ada.Part(\"MyPart\") / bm]\n",
- "a"
+ "a.show()"
]
},
{
@@ -106,8 +106,8 @@
"origin = np.array([0.2, -0.1, -0.1])\n",
"points = [(0, 0), (0.1, 0), (0.05, 0.1)]\n",
"\n",
- "poly1 = bm.add_penetration(ada.PrimExtrude(\"Poly1\", points, h, normal, origin, xdir))\n",
- "bm"
+ "poly1 = bm.add_boolean(ada.PrimExtrude(\"Poly1\", points, h, normal, origin, xdir))\n",
+ "bm.show()"
]
},
{
@@ -124,8 +124,8 @@
"origin += np.array([0.2, 0, 0])\n",
"points = [(0, 0, r), (0.1, 0, r), (0.05, 0.1, r)]\n",
"\n",
- "poly2 = bm.add_penetration(ada.PrimExtrude(\"Poly2\", points, h, normal, origin, xdir))\n",
- "bm"
+ "poly2 = bm.add_boolean(ada.PrimExtrude(\"Poly2\", points, h, normal, origin, xdir))\n",
+ "bm.show()"
]
},
{
@@ -142,8 +142,8 @@
"origin += np.array([0.2, 0, 0])\n",
"points = [(0, 0, r), (0.1, 0, r), (0.1, 0.2, r), (0.0, 0.2, r)]\n",
"\n",
- "poly3 = bm.add_penetration(ada.PrimExtrude(\"Poly3\", points, h, normal, origin, xdir))\n",
- "bm"
+ "poly3 = bm.add_boolean(ada.PrimExtrude(\"Poly3\", points, h, normal, origin, xdir))\n",
+ "bm.show()"
]
},
{
@@ -160,8 +160,8 @@
"# Cylinder Extrude\n",
"x = origin[0] + 0.2\n",
"\n",
- "cyl = bm.add_penetration(ada.PrimCyl(\"cylinder\", (x, -0.1, 0), (x, 0.1, 0), 0.1))\n",
- "bm"
+ "cyl = bm.add_boolean(ada.PrimCyl(\"cylinder\", (x, -0.1, 0), (x, 0.1, 0), 0.1))\n",
+ "bm.show()"
]
},
{
@@ -178,8 +178,8 @@
"# Box Extrude\n",
"x += 0.2\n",
"\n",
- "box = bm.add_penetration(ada.PrimBox(\"box\", (x, -0.1, -0.1), (x + 0.2, 0.1, 0.1)))\n",
- "bm"
+ "box = bm.add_boolean(ada.PrimBox(\"box\", (x, -0.1, -0.1), (x + 0.2, 0.1, 0.1)))\n",
+ "bm.show()"
]
},
{
@@ -195,7 +195,7 @@
"source": [
"# Export IFC to the Home folder\n",
"a.to_ifc(\"../output/MyBeamWithHoles.ifc\")\n",
- "a"
+ "a.show()"
]
},
{
@@ -213,7 +213,7 @@
"p = a.get_part(\"MyPart\")\n",
"p.fem = bm.to_fem_obj(0.1, \"shell\", options=GmshOptions(Mesh_MeshSizeFromCurvature=True))\n",
"\n",
- "a"
+ "a.show()"
]
},
{
@@ -228,10 +228,10 @@
"outputs": [],
"source": [
"# Create a Static Analysis Step with a Gravity load (multiplied with 800 to get deformation)\n",
- "step = a.fem.add_step(ada.fem.StepImplicit(\"gravity\", nl_geom=True, init_incr=100.0, total_time=100.0))\n",
+ "step = a.fem.add_step(ada.fem.StepImplicitStatic(\"gravity\", nl_geom=True, init_incr=100.0, total_time=100.0))\n",
"step.add_load(ada.fem.LoadGravity(\"grav\", -9.81 * 800))\n",
"\n",
- "nodes = bm.bbox.sides.back(return_fem_nodes=True)\n",
+ "nodes = bm.bbox().sides.back(return_fem_nodes=True)\n",
"a.fem.add_bc(ada.fem.Bc(\"Fixed\", ada.fem.FemSet(\"bc_nodes\", nodes), [1, 2, 3]))"
]
},
@@ -337,9 +337,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.4"
+ "version": "3.11.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
-}
\ No newline at end of file
+}
diff --git a/examples/notebooks/convert_fem.ipynb b/examples/notebooks/convert_fem.ipynb
index ca2534406..ea63e9e02 100644
--- a/examples/notebooks/convert_fem.ipynb
+++ b/examples/notebooks/convert_fem.ipynb
@@ -2,21 +2,27 @@
"cells": [
{
"cell_type": "markdown",
- "source": [
- "# Convert FEM models"
- ],
"metadata": {
"collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
"pycharm": {
"name": "#%% md\n"
}
- }
+ },
+ "source": [
+ "# Convert FEM models"
+ ]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true,
+ "jupyter": {
+ "outputs_hidden": true
+ },
"pycharm": {
"name": "#%%\n"
}
@@ -28,6 +34,15 @@
},
{
"cell_type": "markdown",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
"source": [
"## Sesam FEM to Abaqus inp and Code Aster\n",
"First drag and drop your FEM file onto the explorer window to the left next to the \"convert_fem.ipynb\"\n",
@@ -35,50 +50,47 @@
"\n",
"In addition to Sesam FEM models you can also import\n",
"Abaqus/Calculix (.inp) and Code Aster (.med)."
- ],
- "metadata": {
- "collapsed": false,
- "pycharm": {
- "name": "#%% md\n"
- }
- }
+ ]
},
{
"cell_type": "code",
"execution_count": null,
- "outputs": [],
- "source": [
- "a = ada.from_fem(\"T1.FEM\")\n",
- "a.to_fem(\"MyAbaqusModel\", \"abaqus\", make_zip_file=True, overwrite=True)\n",
- "a.to_fem(\"MyCodeAsterModel\", \"code_aster\", make_zip_file=True, overwrite=True)"
- ],
"metadata": {
"collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ },
"pycharm": {
"name": "#%%\n"
}
- }
+ },
+ "outputs": [],
+ "source": [
+ "a = ada.from_fem(\"T1.FEM\")\n",
+ "a.to_fem(\"MyAbaqusModel\", \"abaqus\", make_zip_file=True, overwrite=True)\n",
+ "a.to_fem(\"MyCodeAsterModel\", \"code_aster\", make_zip_file=True, overwrite=True)"
+ ]
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.6"
+ "pygments_lexer": "ipython3",
+ "version": "3.11.6"
}
},
"nbformat": 4,
- "nbformat_minor": 0
-}
\ No newline at end of file
+ "nbformat_minor": 4
+}
diff --git a/examples/notebooks/convert_genie_xml_to_ifc.ipynb b/examples/notebooks/convert_genie_xml_to_ifc.ipynb
index eca6d16d4..fb06573bd 100644
--- a/examples/notebooks/convert_genie_xml_to_ifc.ipynb
+++ b/examples/notebooks/convert_genie_xml_to_ifc.ipynb
@@ -2,18 +2,24 @@
"cells": [
{
"cell_type": "markdown",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
"source": [
"# Convert FEM models"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
- "collapsed": true
+ "collapsed": true,
+ "jupyter": {
+ "outputs_hidden": true
+ }
},
"outputs": [],
"source": [
@@ -23,47 +29,53 @@
},
{
"cell_type": "markdown",
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
"source": [
"## Genie XML to IFC\n",
"First drag and drop your XML file onto the explorer window to the left next to the \"*.ipynb\" files\n",
"(notebook files). Then change the name from \"\" to whatever the name of your XML file is and what you want the exported IFC to be named."
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
},
{
"cell_type": "code",
"execution_count": null,
+ "metadata": {
+ "collapsed": false,
+ "jupyter": {
+ "outputs_hidden": false
+ }
+ },
"outputs": [],
"source": [
"a = ada.from_genie_xml(\"my_model.xml\")\n",
"a.to_ifc(\"my_model.ifc\")"
- ],
- "metadata": {
- "collapsed": false
- }
+ ]
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.6"
+ "pygments_lexer": "ipython3",
+ "version": "3.11.6"
}
},
"nbformat": 4,
- "nbformat_minor": 0
+ "nbformat_minor": 4
}
diff --git a/examples/notebooks/ifc_revolved_area_solid.ipynb b/examples/notebooks/ifc_revolved_area_solid.ipynb
index 043a04696..4c2653627 100644
--- a/examples/notebooks/ifc_revolved_area_solid.ipynb
+++ b/examples/notebooks/ifc_revolved_area_solid.ipynb
@@ -264,9 +264,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.4"
+ "version": "3.11.6"
}
},
"nbformat": 4,
- "nbformat_minor": 1
+ "nbformat_minor": 4
}
diff --git a/examples/scripts/eigen_analysis.py b/examples/scripts/eigen_analysis.py
index 9adaf924d..e941b53a9 100644
--- a/examples/scripts/eigen_analysis.py
+++ b/examples/scripts/eigen_analysis.py
@@ -27,8 +27,10 @@ def run_ccx():
def run_code_aster():
- a = make_fem("shell")
- res = a.to_fem("Cantilever_CA_EIG_sh", "code_aster", scratch_dir=SCRATCH, overwrite=True, execute=True)
+ geo_repr = "solid"
+ a = make_fem(geo_repr)
+ res = a.to_fem(f"Cantilever_CA_EIG_{geo_repr}", "code_aster", scratch_dir=SCRATCH, overwrite=True, execute=True)
+ res.to_xdmf(res.name.replace(".rmed", ".xdmf"))
for x in res.iter_results_by_field_value():
print(x)
diff --git a/examples/scripts/fem_cube.py b/examples/scripts/fem_cube.py
new file mode 100644
index 000000000..6a5b8bb90
--- /dev/null
+++ b/examples/scripts/fem_cube.py
@@ -0,0 +1,52 @@
+import ada
+from ada.fem.meshing.concepts import GmshOptions
+
+
+def main(name="box", mesh_size=1.0, use_hex=True, elem_order=False, reduced_int=True):
+ options = GmshOptions(Mesh_ElementOrder=elem_order)
+ box = ada.PrimBox(name, (0, 0, 0), (1, 1, 1))
+ p = ada.Part("boxPart") / box
+ p.fem = p.to_fem_obj(mesh_size, use_hex=use_hex, options=options)
+
+ a = ada.Assembly() / p
+ # Create Step
+ step = a.fem.add_step(
+ ada.fem.StepImplicitStatic(
+ "static",
+ init_incr=1,
+ max_incr=25,
+ total_incr=100,
+ )
+ )
+ fe_surf_top = box.bbox().sides.top(return_surface=True, surf_name="top")
+ fe_nodes_btn = box.bbox().sides.bottom(return_fem_nodes=True)
+ fe_btn_set = a.fem.add_set(ada.fem.FemSet("bottom", fe_nodes_btn))
+
+ step.add_load(ada.fem.LoadPressure("pressure", 1e5, fe_surf_top))
+ step.add_bc(ada.fem.Bc("fix", fe_btn_set, (1, 2, 3, 4, 5, 6)))
+ step.add_history_output(ada.fem.HistOutput("displ", fe_btn_set, "node", ["U3"]))
+ field = step.field_outputs[0]
+ field.int_type = field.TYPES_INTERVAL.INTERVAL
+ field.int_value = 2
+
+ if reduced_int is False:
+ p.fem.options.ABAQUS.default_elements.SOLID.HEXAHEDRON = "C3D8"
+ p.fem.options.ABAQUS.default_elements.SOLID.HEXAHEDRON20 = "C3D20"
+ else:
+ p.fem.options.ABAQUS.default_elements.SOLID.HEXAHEDRON = "C3D8R"
+ p.fem.options.ABAQUS.default_elements.SOLID.HEXAHEDRON20 = "C3D20R"
+
+ a.to_fem(name, "abaqus", scratch_dir="temp/scratch", overwrite=True, execute=True)
+
+
+if __name__ == "__main__":
+ # main("hex1Rbox1x1x1", 2, elem_order=1)
+ # main("hex2Rbox1x1x1", 2, elem_order=2)
+ # main("hex1box1x1x1", 2, elem_order=1, reduced_int=False)
+ # main("hex2box1x1x1", 2, elem_order=2, reduced_int=False)
+ # main("tet2Rbox1x1x1", 2, elem_order=2, use_hex=False)
+ main("tet2box1x1x1", 2, elem_order=2, use_hex=False, reduced_int=False)
+
+ # main("hex1Rbox2x2x2", 0.5, elem_order=1)
+ # main("hex2Rbox2x2x2", 0.5, elem_order=2)
+ # main("tet2Rbox2x2x2", 0.5, elem_order=2, use_hex=False)
diff --git a/examples/scripts/param_model_analysis.py b/examples/scripts/param_model_analysis.py
index b86848cc4..7c882454d 100644
--- a/examples/scripts/param_model_analysis.py
+++ b/examples/scripts/param_model_analysis.py
@@ -1,9 +1,13 @@
+import pathlib
+
import ada
from ada.param_models.basic_module import SimpleStru
+SCRATCH = pathlib.Path("temp")
+
def gravity_step():
- step = ada.fem.StepImplicit("gravity", nl_geom=False, init_incr=50.0, total_time=100.0)
+ step = ada.fem.StepImplicitStatic("gravity", nl_geom=False, init_incr=50.0, total_time=100.0)
step.add_load(ada.fem.LoadGravity("grav", -9.81 * 80))
return step
@@ -13,14 +17,32 @@ def eigen_step():
def main():
+ fem_res = "ca_param_model_ca"
+ res_file = (SCRATCH / fem_res / fem_res).with_suffix(".rmed")
+
p = SimpleStru("MyStru")
- p.fem = p.to_fem_obj(0.1, use_quads=True)
+ p.fem = p.to_fem_obj(0.1, use_quads=False)
p.add_bcs()
a = ada.Assembly("ParametricSite") / p
- a.fem.add_step(gravity_step())
- a.to_fem("ca_param_model_ca", "code_aster", overwrite=True, execute=True)
+ # a.fem.add_step(gravity_step())
+ a.fem.add_step(eigen_step())
+ res = a.to_fem(fem_res, "code_aster", overwrite=True, execute=True, scratch_dir=SCRATCH)
+ mesh = res.to_meshio_mesh(make_3xn_dofs=True)
+ mesh.write(res_file.with_suffix(".vtu"))
+ # res.to_vtu()
+
# a.to_fem("ca_param_model_ses", "sesam", overwrite=True, execute=True)
+def read_res():
+ fem_res = "ca_param_model_ca"
+ res_file = (SCRATCH / fem_res / fem_res).with_suffix(".rmed")
+ res = ada.from_fem_res(res_file, "code_aster")
+ mesh = res.to_meshio_mesh(make_3xn_dofs=True)
+ mesh.write(res_file.with_suffix(".vtu"))
+ # res.to_xdmf(res.name + ".xdmf")
+
+
if __name__ == "__main__":
main()
+ # read_res()
diff --git a/examples/scripts/sdof/analyze.py b/examples/scripts/sdof/analyze.py
new file mode 100644
index 000000000..50022ed80
--- /dev/null
+++ b/examples/scripts/sdof/analyze.py
@@ -0,0 +1,229 @@
+import pathlib
+
+import code_aster
+import libaster
+from code_aster.Cata.Commands.assemblage import ASSEMBLAGE
+from code_aster.Cata.Commands.calc_modes import CALC_MODES
+from code_aster.Cata.Commands.impr_fonction import IMPR_FONCTION
+from code_aster.Cata.Commands.impr_table import IMPR_TABLE
+from code_aster.Cata.Commands.meca_statique import MECA_STATIQUE
+from code_aster.Cata.Commands.proj_base import PROJ_BASE
+
+import ada
+from ada.fem.formats.code_aster.results.results_helpers import (
+ export_mesh_data_to_sqlite,
+)
+from ada.fem.formats.code_aster.write.api_helpers import (
+ assign_boundary_conditions,
+ assign_element_characteristics,
+ assign_element_definitions,
+ assign_forces,
+ assign_material_definitions,
+ import_mesh,
+)
+
+USE_STAR = True
+if USE_STAR:
+ from code_aster.Cata.Language.SyntaxObjects import _F
+ from code_aster.Commands import *
+
+else:
+ from code_aster.Commands import (
+ IMPR_RESU,
+ POST_ELEM,
+ CREA_CHAMP,
+ DEFI_LIST_REEL,
+ COMB_MATR_ASSE,
+ DYNA_VIBRA,
+ RECU_FONCTION,
+ )
+
+ from code_aster.Supervis.ExecuteCommand import CO
+
+from ada.fem.formats.code_aster.execute import init_close_code_aster
+from ada.fem.results.sqlite_store import SQLiteFEAStore
+
+
+def basic_debug(debug_dir, operator_store: dict):
+ # write globals to file
+ with open(f"{debug_dir}/sdof1_ca_globals_{USE_STAR}.txt", "w") as f:
+ for key, value in operator_store.items():
+ f.write(f"{key} = {value}\n")
+
+
+@init_close_code_aster(info_level=2, temp_dir="temp")
+def transient_modal_analysis(a: ada.Assembly, scratch_dir):
+ if isinstance(scratch_dir, str):
+ scratch_dir = pathlib.Path(scratch_dir)
+ if not scratch_dir.is_absolute():
+ raise ValueError("Because Code Aster")
+
+ operator_store = globals()
+
+ # Import Mesh
+ mesh = import_mesh(a, scratch_dir=scratch_dir)
+
+ # Assign element definitions
+ model = assign_element_definitions(a, mesh)
+
+ # Assign Materials
+ material_field = assign_material_definitions(a, mesh)
+
+ # Sections
+ elem_car = assign_element_characteristics(a, model)
+
+ # Boundary Conditions
+ fix = assign_boundary_conditions(a, model)
+
+ # Assign Forces
+ forces = assign_forces(a, model)
+
+ # Step Information
+ linear_step: code_aster.ElasticResult = MECA_STATIQUE(
+ MODELE=model, CHAM_MATER=material_field, CARA_ELEM=elem_car, EXCIT=(_F(CHARGE=fix), _F(CHARGE=forces))
+ )
+
+ # Results Information
+ IMPR_RESU(
+ MODELE=model,
+ FORMAT="RESULTAT",
+ RESU=_F(NOM_CHAM="DEPL", GROUP_NO="mass_set", RESULTAT=linear_step, FORMAT_R="1PE12.3"),
+ )
+ massin: libaster.Table = POST_ELEM(
+ MODELE=model,
+ CHAM_MATER=material_field,
+ CARA_ELEM=elem_car,
+ MASS_INER=_F(GROUP_MA=("mass_set", "spring")),
+ TITRE="massin",
+ )
+ IMPR_TABLE(TABLE=massin, NOM_PARA=("LIEU", "MASSE"), FORMAT_R="1PE12.3")
+ ASSEMBLAGE(
+ MODELE=model,
+ CARA_ELEM=elem_car,
+ CHARGE=fix,
+ NUME_DDL=CO("numdof"),
+ MATR_ASSE=(_F(MATRICE=CO("rigidity"), OPTION="RIGI_MECA"), _F(MATRICE=CO("masse"), OPTION="MASS_MECA")),
+ )
+
+ rigidity = operator_store.get("rigidity")
+ masse = operator_store.get("masse")
+ # if nodamp:
+ undamped: code_aster.ModeResult = CALC_MODES(
+ TYPE_RESU="DYNAMIQUE",
+ OPTION="PLUS_PETITE",
+ MATR_RIGI=rigidity,
+ MATR_MASS=masse,
+ CALC_FREQ=_F(NMAX_FREQ=3),
+ )
+
+ IMPR_RESU(
+ MODELE=model,
+ FORMAT="RESULTAT",
+ RESU=_F(
+ RESULTAT=undamped,
+ NOM_PARA=("FREQ", "MASS_GENE", "MASS_EFFE_DX", "MASS_EFFE_DY", "MASS_EFFE_DZ"),
+ FORM_TABL="OUI",
+ ),
+ )
+ IMPR_RESU(FORMAT="MED", UNITE=80, RESU=_F(RESULTAT=undamped, NOM_CHAM="DEPL"))
+
+ numdof = operator_store.get("numdof")
+ # Transient Analysis
+ dsplini: libaster.FieldOnNodesReal = CREA_CHAMP(
+ TYPE_CHAM="NOEU_DEPL_R",
+ NUME_DDL=numdof,
+ OPERATION="AFFE",
+ PROL_ZERO="OUI",
+ MODELE=model,
+ AFFE=_F(GROUP_NO="mass_set", NOM_CMP="DX", VALE=-1),
+ )
+ nbvect = 3
+ PROJ_BASE(
+ BASE=undamped,
+ NB_VECT=nbvect,
+ MATR_ASSE_GENE=(_F(MATRICE=CO("stifGen"), MATR_ASSE=rigidity), _F(MATRICE=CO("massGen"), MATR_ASSE=masse)),
+ VECT_ASSE_GENE=_F(VECTEUR=CO("dispGen"), TYPE_VECT="DEPL", VECT_ASSE=dsplini),
+ )
+ # modal transient analysis
+ # #here we use the previously calculated
+ # natural frequency of the system
+ # to setup the stepping
+ natfreq = 1.59155e-01
+ # we calculate with 384 steps per period
+ number = 384
+ # we calculate over 4 periods
+ nperiod = 4.0
+
+ liste: libaster.ListOfFloats = DEFI_LIST_REEL(
+ DEBUT=0.0, INTERVALLE=_F(JUSQU_A=nperiod / natfreq, NOMBRE=int(number * nperiod))
+ )
+
+ # make 6 calculations with varying damping factor "xi"
+ it = 6
+ amorG = [None] * (it + 1)
+ tranG = [None] * (it + 1)
+ respo = [None] * (it + 1)
+ xi = [0.0, 0.01, 0.1, 0.5, 1.0, 2.0]
+ # set color for the plot
+ col = [1, 8, 4, 3, 11, 2]
+ # [black, purple, blue, green, orange, red]
+ stifGen: code_aster.GeneralizedAssemblyMatrixReal = operator_store.get("stifGen")
+ massGen: code_aster.GeneralizedAssemblyMatrixReal = operator_store.get("massGen")
+ dispGen: code_aster.GeneralizedAssemblyMatrixReal = operator_store.get("dispGen")
+
+ # Define sqlite results database
+ sqlite_file = (scratch_dir / a.name).with_suffix(".sqlite")
+ sql_store = SQLiteFEAStore(sqlite_file, clean_tables=True)
+ export_mesh_data_to_sqlite(0, a.name, mesh, sql_store)
+
+ sql_store.insert_table(
+ "FieldVars",
+ [(0, "U1", "Spatial Displacement"), (1, "V1", "Spatial Velocity"), (2, "A1", "Spatial Acceleration")],
+ )
+
+ for i in range(0, it):
+ sql_store.insert_table("Steps", [(i, "dynamic", f"xi={xi[i]}", "TIME")])
+ amorG[i] = COMB_MATR_ASSE(CALC_AMOR_GENE=_F(RIGI_GENE=stifGen, MASS_GENE=massGen, AMOR_REDUIT=xi[i]))
+ tranG[i] = DYNA_VIBRA(
+ BASE_CALCUL="GENE",
+ TYPE_CALCUL="TRAN",
+ MATR_MASS=massGen,
+ MATR_RIGI=stifGen,
+ MATR_AMOR=amorG[i],
+ ETAT_INIT=_F(DEPL=dispGen),
+ INCREMENT=_F(LIST_INST=liste),
+ SCHEMA_TEMPS=_F(SCHEMA="NEWMARK"),
+ )
+ respo[i] = RECU_FONCTION(
+ RESU_GENE=tranG[i], TOUT_INST="OUI", NOM_CHAM="DEPL", NOM_CMP="DX", GROUP_NO="mass_set"
+ )
+
+ IMPR_FONCTION(
+ FORMAT="TABLEAU", COURBE=_F(FONCTION=respo[i]), UNITE=8, TITRE="DX_endmass", SOUS_TITRE="DX_endmass"
+ )
+
+ displ: libaster.ListOfFloats = RECU_FONCTION(
+ RESU_GENE=tranG[i], TOUT_INST="OUI", NOM_CHAM="DEPL", NOM_CMP="DX", GROUP_NO="mass_set"
+ )
+ speed: libaster.ListOfFloats = RECU_FONCTION(
+ RESU_GENE=tranG[i], TOUT_INST="OUI", NOM_CHAM="VITE", NOM_CMP="DX", GROUP_NO="mass_set"
+ )
+ accel: libaster.ListOfFloats = RECU_FONCTION(
+ RESU_GENE=tranG[i], TOUT_INST="OUI", NOM_CHAM="ACCE", NOM_CMP="DX", GROUP_NO="mass_set"
+ )
+ np_t, np_d, np_v, np_a = (
+ liste.getValuesAsArray(),
+ displ.getValuesAsArray(),
+ speed.getValuesAsArray(),
+ accel.getValuesAsArray(),
+ )
+
+ # Write to sqlite db HistOutput
+ shared_opts = [-1, "NODAL", 0, -1, 2, i]
+ sql_store.insert_table("HistOutput", [(*shared_opts, 0, x, y) for x, y in zip(np_t, np_d[:, 1])])
+ sql_store.insert_table("HistOutput", [(*shared_opts, 1, x, y) for x, y in zip(np_t, np_v[:, 1])])
+ sql_store.insert_table("HistOutput", [(*shared_opts, 2, x, y) for x, y in zip(np_t, np_a[:, 1])])
+
+ undamped.printMedFile((scratch_dir / a.name).with_suffix(".rmed").as_posix())
+ sql_store.conn.close()
+ return sqlite_file
diff --git a/examples/scripts/sdof/main.py b/examples/scripts/sdof/main.py
new file mode 100644
index 000000000..d3fd69e19
--- /dev/null
+++ b/examples/scripts/sdof/main.py
@@ -0,0 +1,18 @@
+import pathlib
+
+from analyze import transient_modal_analysis
+from model import build_model
+from plot import plot_sdof
+
+
+def main():
+ scratch_dir = pathlib.Path("temp/sdof_test").resolve().absolute()
+ scratch_dir.mkdir(exist_ok=True, parents=True)
+
+ a = build_model("sdof_test")
+ sql_file = transient_modal_analysis(a, scratch_dir)
+ plot_sdof(sql_file, scratch_dir)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/examples/scripts/sdof/model.py b/examples/scripts/sdof/model.py
new file mode 100644
index 000000000..36d284790
--- /dev/null
+++ b/examples/scripts/sdof/model.py
@@ -0,0 +1,47 @@
+import ada
+
+
+def build_model(name: str):
+ p = ada.Part("SpringModel")
+ n1 = p.fem.nodes.add(ada.Node((0, 1, 1), nid=1))
+ n2 = p.fem.nodes.add(ada.Node((10, 1, 1), nid=2))
+
+ # Create BC
+ fs_fix = p.fem.add_set(ada.fem.FemSet("fix", [n1]))
+ p.fem.add_bc(ada.fem.Bc("fix_bc", fs_fix, (1, 2, 3, 4, 5, 6)))
+
+ # Create Mass
+ fs_point = p.fem.add_set(ada.fem.FemSet("point", [n2]))
+ p.fem.add_mass(ada.fem.Mass("mass", fs_point, 1, mass_id=2))
+
+ # Create Spring
+ con_section = p.fem.add_connector_section(ada.fem.ConnectorSection("SpringSection", 1, rigid_dofs=[1, 2]))
+ p.fem.add_connector(ada.fem.Connector("spring", 1, n1, n2, "BUSHING", con_section))
+
+ a = ada.Assembly(name) / p
+
+ # Create Step
+ step = a.fem.add_step(
+ ada.fem.StepImplicitDynamic(
+ "dynamic",
+ dyn_type=ada.fem.StepImplicitDynamic.TYPES_DYNAMIC.TRANSIENT_FIDELITY,
+ init_incr=1,
+ max_incr=0.1,
+ total_incr=1000,
+ )
+ )
+ step.add_load(ada.fem.LoadPoint("force", 1, fs_point, 1))
+ step.add_history_output(ada.fem.HistOutput("displ", fs_point, "node", ["U1"]))
+ # step.add_bc(fs_fix)
+
+ field = step.field_outputs[0]
+ field.int_type = field.TYPES_INTERVAL.INTERVAL
+ field.int_value = 100
+
+ # a.to_fem("sdof1_aba", "abaqus", scratch_dir=SCRATCH, overwrite=True)
+ # a.to_fem("sdof1_ca", "code_aster", scratch_dir=SCRATCH, overwrite=True)
+ return a
+
+
+if __name__ == "__main__":
+ build_model("sdof_local_test")
diff --git a/examples/scripts/sdof/plot.py b/examples/scripts/sdof/plot.py
new file mode 100644
index 000000000..91229c4e4
--- /dev/null
+++ b/examples/scripts/sdof/plot.py
@@ -0,0 +1,50 @@
+import pathlib
+
+import pandas as pd
+import plotly.graph_objects as go
+
+from ada.fem.results.sqlite_store import SQLiteFEAStore
+
+
+def plot_sdof(sqlite_file, plot_dir):
+ if isinstance(plot_dir, str):
+ plot_dir = pathlib.Path(plot_dir)
+
+ title = "SDOF: Displacement vs. Time"
+ xaxis_title = "Time [s]"
+ yaxis_title = "Displacement [m]"
+
+ sql_store = SQLiteFEAStore(sqlite_file)
+ steps = sql_store.get_steps()
+ if len(steps) == 0:
+ raise ValueError("It appears that there is no step data")
+ columns = ["Name", "Restype", "PointID", "StepName", "FieldVarName", "Frame", "Value"]
+ fig = go.Figure()
+ for step_id, step_name, step_descr, step_domain_type in steps:
+ # Plot displacement
+ legend = f"{step_name}_{step_descr}_U1"
+ df = pd.DataFrame(sql_store.get_history_data("U1", step_id), columns=columns)
+ fig.add_trace(go.Scatter(x=df["Frame"], y=df["Value"], mode="lines", name=legend))
+
+ # Plot speed
+ legend = f"{step_name}_{step_descr}_V1"
+ df = pd.DataFrame(sql_store.get_history_data("V1", step_id), columns=columns)
+ fig.add_trace(go.Scatter(x=df["Frame"], y=df["Value"], mode="lines", name=legend))
+
+ # Plot acceleration
+ legend = f"{step_name}_{step_descr}_A1"
+ df = pd.DataFrame(sql_store.get_history_data("A1", step_id), columns=columns)
+ fig.add_trace(go.Scatter(x=df["Frame"], y=df["Value"], mode="lines", name=legend))
+
+ fig.update_layout(
+ title=title,
+ xaxis_title=xaxis_title,
+ yaxis_title=yaxis_title,
+ # font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"),
+ )
+ fig.write_html(plot_dir / "results.html")
+ # fig.show()
+
+
+if __name__ == "__main__":
+ plot_sdof("temp")
diff --git a/examples/scripts/sdof/sdof.md b/examples/scripts/sdof/sdof.md
new file mode 100644
index 000000000..bb028009c
--- /dev/null
+++ b/examples/scripts/sdof/sdof.md
@@ -0,0 +1,18 @@
+# Single Degree of Freedom
+
+This Single Degree of Freedom example is based on "mass spring" example in ch. 2.5 from the
+"Beginning DYNA[mics] with code_aster" by Jean-Pierre Aubrey.
+
+The example demonstrates a transient modal analysis of a mass spring system with varying damping ratios.
+
+The sdof displacement results, as shown in the book:
+![sdof displacement results from the book](./sdof_displ_plot_book.png)
+
+The results using ada-py
+![sdof displacement results using ada-py](./sdof_displ_plot_adapy.png)
+
+Remaining work to further improve this example:
+
+- [ ] Create a "transient modal analysis" Step definition in Ada-py as a subclass of `Step`.
+- [ ] Find the equivalent transient modal analysis input file setup for abaqus for a 1:1 comparison.
+
diff --git a/examples/scripts/sdof/sdof_displ_plot_adapy.png b/examples/scripts/sdof/sdof_displ_plot_adapy.png
new file mode 100644
index 000000000..51e3877f5
Binary files /dev/null and b/examples/scripts/sdof/sdof_displ_plot_adapy.png differ
diff --git a/examples/scripts/sdof/sdof_displ_plot_book.png b/examples/scripts/sdof/sdof_displ_plot_book.png
new file mode 100644
index 000000000..5be06a1c6
Binary files /dev/null and b/examples/scripts/sdof/sdof_displ_plot_book.png differ
diff --git a/examples/scripts/static_analysis.py b/examples/scripts/static_analysis.py
index 049686c48..b1fad6f22 100644
--- a/examples/scripts/static_analysis.py
+++ b/examples/scripts/static_analysis.py
@@ -7,7 +7,7 @@
def gravity_step():
- step = ada.fem.StepImplicit("gravity", nl_geom=False, init_incr=50.0, total_time=100.0)
+ step = ada.fem.StepImplicitStatic("gravity", nl_geom=False, init_incr=50.0, total_time=100.0)
step.add_load(ada.fem.LoadGravity("grav", -9.81 * 80))
return step
diff --git a/examples/scripts/units_convert/convert_length_unit_patch.py b/examples/scripts/units_convert/convert_length_unit_patch.py
new file mode 100644
index 000000000..222e1fd95
--- /dev/null
+++ b/examples/scripts/units_convert/convert_length_unit_patch.py
@@ -0,0 +1,90 @@
+# IfcPatch - IFC patching utility
+# Copyright (C) 2020, 2021 Dion Moult
+#
+# This file is part of IfcPatch.
+#
+# IfcPatch is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcPatch is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcPatch. If not, see <https://www.gnu.org/licenses/>.
+
+import ifcopenshell
+import ifcopenshell.api
+import ifcopenshell.api.owner.settings
+import ifcopenshell.util.element
+import ifcopenshell.util.pset
+
+
+class Patcher:
+ def __init__(self, src, file, logger, unit="METERS"):
+ """Converts the length unit of a model to the specified unit
+
+ Allowed metric units include METERS, MILLIMETERS, CENTIMETERS, etc.
+ Allowed imperial units include INCHES, FEET, MILES.
+
+ :param unit: The name of the desired unit.
+ :type unit: str
+
+ Example:
+
+ .. code:: python
+
+ # Convert to millimeters
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["MILLIMETERS"]})
+
+ # Convert to feet
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["FEET"]})
+ """
+ self.src = src
+ self.file = file
+ self.logger = logger
+ self.unit = unit
+
+ def patch(self):
+ unit = {"is_metric": "METERS" in self.unit, "raw": self.unit}
+ self.file_patched = ifcopenshell.api.run("project.create_file", version=self.file.schema)
+ if self.file.schema == "IFC2X3":
+ user = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
+ application = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
+ old_get_user = ifcopenshell.api.owner.settings.get_user
+ old_get_application = ifcopenshell.api.owner.settings.get_application
+ ifcopenshell.api.owner.settings.get_user = lambda ifc: user
+ ifcopenshell.api.owner.settings.get_application = lambda ifc: application
+ project = ifcopenshell.api.run("root.create_entity", self.file_patched, ifc_class="IfcProject")
+ unit_assignment = ifcopenshell.api.run("unit.assign_unit", self.file_patched, **{"length": unit})
+
+ # Is there a better way?
+ for element in self.file.by_type("IfcGeometricRepresentationContext", include_subtypes=False):
+ element.Precision = 1e-8
+
+ # If we don't add openings first, they don't get converted
+ for element in self.file.by_type("IfcOpeningElement"):
+ self.file_patched.add(element)
+
+ for element in self.file:
+ self.file_patched.add(element)
+
+ new_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+ old_length = [
+ u
+ for u in self.file_patched.by_type("IfcProject")[1].UnitsInContext.Units
+ if getattr(u, "UnitType", None) == "LENGTHUNIT"
+ ][0]
+
+ for inverse in self.file_patched.get_inverse(old_length):
+ ifcopenshell.util.element.replace_attribute(inverse, old_length, new_length)
+
+ self.file_patched.remove(old_length)
+ self.file_patched.remove(project)
+
+ if self.file.schema == "IFC2X3":
+ ifcopenshell.api.owner.settings.get_user = old_get_user
+ ifcopenshell.api.owner.settings.get_application = old_get_application
diff --git a/examples/scripts/units_convert/convert_length_unit_patch_info.py b/examples/scripts/units_convert/convert_length_unit_patch_info.py
new file mode 100644
index 000000000..6833a5be5
--- /dev/null
+++ b/examples/scripts/units_convert/convert_length_unit_patch_info.py
@@ -0,0 +1,114 @@
+# IfcPatch - IFC patching utility
+# Copyright (C) 2020, 2021 Dion Moult
+#
+# This file is part of IfcPatch.
+#
+# IfcPatch is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcPatch is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcPatch. If not, see <https://www.gnu.org/licenses/>.
+import pathlib
+
+import ifcopenshell
+import ifcopenshell.api
+import ifcopenshell.api.owner.settings
+import ifcopenshell.util.element
+import ifcopenshell.util.pset
+
+
+class Patcher:
+ def __init__(self, src, file, logger, unit="METERS"):
+ """Converts the length unit of a model to the specified unit
+
+ Allowed metric units include METERS, MILLIMETERS, CENTIMETERS, etc.
+ Allowed imperial units include INCHES, FEET, MILES.
+
+ :param unit: The name of the desired unit.
+ :type unit: str
+
+ Example:
+
+ .. code:: python
+
+ # Convert to millimeters
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["MILLIMETERS"]})
+
+ # Convert to feet
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["FEET"]})
+ """
+ self.src = src
+ self.file = file
+ self.logger = logger
+ self.unit = unit
+
+ def patch(self):
+ unit = {"is_metric": "METERS" in self.unit, "raw": self.unit}
+ self.file_patched = ifcopenshell.api.run("project.create_file", version=self.file.schema)
+ if self.file.schema == "IFC2X3":
+ user = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
+ application = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
+ old_get_user = ifcopenshell.api.owner.settings.get_user
+ old_get_application = ifcopenshell.api.owner.settings.get_application
+ ifcopenshell.api.owner.settings.get_user = lambda ifc: user
+ ifcopenshell.api.owner.settings.get_application = lambda ifc: application
+ project = ifcopenshell.api.run("root.create_entity", self.file_patched, ifc_class="IfcProject")
+ unit_assignment = ifcopenshell.api.run("unit.assign_unit", self.file_patched, **{"length": unit})
+
+ # Is there a better way?
+ for element in self.file.by_type("IfcGeometricRepresentationContext", include_subtypes=False):
+ element.Precision = 1e-8
+
+ # If we don't add openings first, they don't get converted
+ for element in self.file.by_type("IfcOpeningElement"):
+ self.file_patched.add(element)
+
+ for element in self.file:
+ self.file_patched.add(element)
+
+ new_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+ old_length = [
+ u
+ for u in self.file_patched.by_type("IfcProject")[1].UnitsInContext.Units
+ if getattr(u, "UnitType", None) == "LENGTHUNIT"
+ ][0]
+
+ for inverse in self.file_patched.get_inverse(old_length):
+ ifcopenshell.util.element.replace_attribute(inverse, old_length, new_length)
+
+ self.file_patched.remove(old_length)
+ self.file_patched.remove(project)
+
+ if self.file.schema == "IFC2X3":
+ ifcopenshell.api.owner.settings.get_user = old_get_user
+ ifcopenshell.api.owner.settings.get_application = old_get_application
+
+
+def main():
+ ifc_file = "beam-standard-case.ifc"
+ ifc_file_out = pathlib.Path("temp") / "beam-standard-case-re-exported.ifc"
+ ifc_file_out.parent.mkdir(exist_ok=True, parents=True)
+
+ # Convert the units of the IFC file
+ file = ifcopenshell.open(ifc_file)
+
+ # Convert the units of the IFC file
+ task = Patcher(src=ifc_file, file=file, logger=None, unit="METERS")
+ task.patch()
+
+ # Export the IFC file
+ task.file_patched.write(str(ifc_file_out))
+
+ for inverse in task.file_patched.by_type("IFCCARTESIANPOINT"):
+ print(tuple(inverse))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/examples/scripts/units_convert/convert_length_unit_patch_v2.py b/examples/scripts/units_convert/convert_length_unit_patch_v2.py
new file mode 100644
index 000000000..d476e7e67
--- /dev/null
+++ b/examples/scripts/units_convert/convert_length_unit_patch_v2.py
@@ -0,0 +1,128 @@
+# IfcPatch - IFC patching utility
+# Copyright (C) 2020, 2021 Dion Moult
+#
+# This file is part of IfcPatch.
+#
+# IfcPatch is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcPatch is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcPatch. If not, see <https://www.gnu.org/licenses/>.
+
+import ifcopenshell
+import ifcopenshell.api
+import ifcopenshell.api.owner.settings
+import ifcopenshell.express.schema
+import ifcopenshell.util.element
+import ifcopenshell.util.pset
+import ifcopenshell.util.unit
+
+wrap = ifcopenshell.ifcopenshell_wrapper
+
+
+def get_base_type_name(content_type: wrap.named_type | wrap.type_declaration) -> wrap.type_declaration | None:
+ cur_decl = content_type
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+
+ if isinstance(cur_decl, wrap.aggregation_type):
+ res = cur_decl.type_of_element()
+ cur_decl = res.declared_type()
+ if hasattr(cur_decl, "name") and cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+
+ return None
+
+
+class Patcher:
+ def __init__(self, src, file, logger, unit="METERS"):
+ """Converts the length unit of a model to the specified unit
+
+ Allowed metric units include METERS, MILLIMETERS, CENTIMETERS, etc.
+ Allowed imperial units include INCHES, FEET, MILES.
+
+ :param unit: The name of the desired unit.
+ :type unit: str
+
+ Example:
+
+ .. code:: python
+
+ # Convert to millimeters
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["MILLIMETERS"]})
+
+ # Convert to feet
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["FEET"]})
+ """
+ self.src = src
+ self.file: ifcopenshell.file = file
+ self.logger = logger
+ self.unit = unit
+ self.file_patched: ifcopenshell.file
+
+ def patch(self):
+ prefix = "MILLI" if self.unit == "MILLIMETERS" else None
+ self.file_patched = ifcopenshell.api.run("project.create_file", version=self.file.schema)
+ if self.file.schema == "IFC2X3":
+ user = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
+ application = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
+ old_get_user = ifcopenshell.api.owner.settings.get_user
+ old_get_application = ifcopenshell.api.owner.settings.get_application
+ ifcopenshell.api.owner.settings.get_user = lambda ifc: user
+ ifcopenshell.api.owner.settings.get_application = lambda ifc: application
+
+ # Copy all elements from the original file to the patched file
+ for el in self.file:
+ self.file_patched.add(el)
+
+ unit_assignment = ifcopenshell.util.unit.get_unit_assignment(self.file_patched)
+
+ old_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+ new_length = ifcopenshell.api.run("unit.add_si_unit", self.file_patched, unit_type="LENGTHUNIT", prefix=prefix)
+
+ schema = wrap.schema_by_name(self.file.schema)
+ # Traverse all elements and their nested attributes in the file and convert them
+ for element in self.file_patched:
+ entity = schema.declaration_by_name(element.is_a())
+ attrs = entity.all_attributes()
+ for i, (attr, val, is_derived) in enumerate(zip(attrs, list(element), entity.derived())):
+ if is_derived:
+ continue
+ # Get all methods and attributes of the element
+ attr_type = attr.type_of_attribute()
+ base_type = get_base_type_name(attr_type)
+ if base_type is None:
+ continue
+ if val is None:
+ continue
+ if isinstance(val, tuple):
+ new_el = [ifcopenshell.util.unit.convert_unit(v, old_length, new_length) for v in val]
+ setattr(element, attr.name(), tuple(new_el))
+ else:
+ new_el = ifcopenshell.util.unit.convert_unit(val, old_length, new_length)
+ # set the new value
+ setattr(element, attr.name(), new_el)
+
+ self.file_patched.remove(old_length)
+ unit_assignment.Units = tuple([new_length, *unit_assignment.Units])
+
+ if self.file.schema == "IFC2X3":
+ ifcopenshell.api.owner.settings.get_user = old_get_user
+ ifcopenshell.api.owner.settings.get_application = old_get_application
diff --git a/examples/scripts/units_convert/convert_length_unit_patch_v3.py b/examples/scripts/units_convert/convert_length_unit_patch_v3.py
new file mode 100644
index 000000000..df35f9eec
--- /dev/null
+++ b/examples/scripts/units_convert/convert_length_unit_patch_v3.py
@@ -0,0 +1,96 @@
+# IfcPatch - IFC patching utility
+# Copyright (C) 2020, 2021 Dion Moult
+#
+# This file is part of IfcPatch.
+#
+# IfcPatch is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcPatch is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcPatch. If not, see <https://www.gnu.org/licenses/>.
+
+import ifcopenshell
+import ifcopenshell.api
+import ifcopenshell.api.owner.settings
+import ifcopenshell.util.element
+import ifcopenshell.util.pset
+
+
+class Patcher:
+ def __init__(self, src, file, logger, unit="METERS"):
+ """Converts the length unit of a model to the specified unit
+
+ Allowed metric units include METERS, MILLIMETERS, CENTIMETERS, etc.
+ Allowed imperial units include INCHES, FEET, MILES.
+
+ :param unit: The name of the desired unit.
+ :type unit: str
+
+ Example:
+
+ .. code:: python
+
+ # Convert to millimeters
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["MILLIMETERS"]})
+
+ # Convert to feet
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["FEET"]})
+ """
+ self.src = src
+ self.file = file
+ self.logger = logger
+ self.unit = unit
+
+ def patch(self):
+ unit = {"is_metric": "METERS" in self.unit, "raw": self.unit}
+ self.file_patched = ifcopenshell.api.run("project.create_file", version=self.file.schema)
+ if self.file.schema == "IFC2X3":
+ user = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
+ application = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
+ old_get_user = ifcopenshell.api.owner.settings.get_user
+ old_get_application = ifcopenshell.api.owner.settings.get_application
+ ifcopenshell.api.owner.settings.get_user = lambda ifc: user
+ ifcopenshell.api.owner.settings.get_application = lambda ifc: application
+ # get project
+
+ project = ifcopenshell.api.run("root.create_entity", self.file_patched, ifc_class="IfcProject")
+ unit_assignment = ifcopenshell.api.run("unit.assign_unit", self.file_patched, **{"length": unit})
+
+ # Is there a better way?
+ for element in self.file.by_type("IfcGeometricRepresentationContext", include_subtypes=False):
+ element.Precision = 1e-8
+
+ # If we don't add openings first, they don't get converted
+ for opening_element in self.file.by_type("IfcOpeningElement"):
+ print(f"{opening_element=}")
+ self.file_patched.add(opening_element)
+
+ for element in self.file:
+ if element.is_a("IfcProject"):
+ continue
+ print(f"{element=}")
+ self.file_patched.add(element)
+
+ new_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+ old_length = [
+ u
+ for u in self.file_patched.by_type("IfcProject")[1].UnitsInContext.Units
+ if getattr(u, "UnitType", None) == "LENGTHUNIT"
+ ][0]
+
+ for inverse in self.file_patched.get_inverse(old_length):
+ ifcopenshell.util.element.replace_attribute(inverse, old_length, new_length)
+
+ self.file_patched.remove(old_length)
+ self.file_patched.remove(project)
+
+ if self.file.schema == "IFC2X3":
+ ifcopenshell.api.owner.settings.get_user = old_get_user
+ ifcopenshell.api.owner.settings.get_application = old_get_application
diff --git a/examples/scripts/units_convert/ifc_patch_convert_units.py b/examples/scripts/units_convert/ifc_patch_convert_units.py
new file mode 100644
index 000000000..dea20ee0d
--- /dev/null
+++ b/examples/scripts/units_convert/ifc_patch_convert_units.py
@@ -0,0 +1,33 @@
+import pathlib
+
+import ifcopenshell
+
+# from convert_length_unit_patch import Patcher
+from convert_length_unit_patch_v2 import Patcher
+
+# from convert_length_unit_patch_v3 import Patcher
+parents = list(pathlib.Path(__file__).resolve().absolute().parents)
+FILES_DIR = [fp for fp in parents if fp.name == "examples"][0].parent / "files"
+
+
+def main():
+ ifc_file = FILES_DIR / "ifc_files/beams/beam-standard-case.ifc"
+ ifc_file_out = pathlib.Path("temp") / "beam-standard-case-re-exported.ifc"
+ ifc_file_out.parent.mkdir(exist_ok=True, parents=True)
+
+ # Convert the units of the IFC file
+ file = ifcopenshell.open(ifc_file)
+
+ # Convert the units of the IFC file
+ task = Patcher(src=ifc_file, file=file, logger=None, unit="METERS")
+ task.patch()
+
+ # Export the IFC file
+ task.file_patched.write(str(ifc_file_out))
+
+ for inverse in task.file_patched.by_type("IFCCARTESIANPOINT"):
+ print(tuple(inverse))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/examples/scripts/units_convert/ifcva10_0.txt b/examples/scripts/units_convert/ifcva10_0.txt
new file mode 100644
index 000000000..056887b10
--- /dev/null
+++ b/examples/scripts/units_convert/ifcva10_0.txt
@@ -0,0 +1,119 @@
+# packages in environment at C:\miniforge3\envs\ifcva10_0:
+#
+# Name Version Build Channel
+aiohttp 3.9.1 py311ha68e1ae_0 conda-forge
+aiosignal 1.3.1 pyhd8ed1ab_0 conda-forge
+aom 3.5.0 h63175ca_0 conda-forge
+attrs 23.1.0 pyh71513ae_1 conda-forge
+boost-cpp 1.78.0 h9f4b32c_4 conda-forge
+bzip2 1.0.8 hcfcfb64_5 conda-forge
+ca-certificates 2023.11.17 h56e8100_0 conda-forge
+cgal-cpp 5.6 h2ea893f_0 conda-forge
+colorama 0.4.6 pyhd8ed1ab_0 conda-forge
+curl 8.5.0 hd5e4a3a_0 conda-forge
+dav1d 1.2.0 hcfcfb64_0 conda-forge
+double-conversion 3.2.0 h63175ca_1 conda-forge
+eigen 3.4.0 h91493d7_0 conda-forge
+expat 2.5.0 h63175ca_1 conda-forge
+ffmpeg 5.1.2 gpl_h5037a79_109 conda-forge
+font-ttf-dejavu-sans-mono 2.37 hab24e00_0 conda-forge
+font-ttf-inconsolata 3.000 h77eed37_0 conda-forge
+font-ttf-source-code-pro 2.038 h77eed37_0 conda-forge
+font-ttf-ubuntu 0.83 h77eed37_1 conda-forge
+fontconfig 2.14.2 hbde0cde_0 conda-forge
+fonts-conda-ecosystem 1 0 conda-forge
+fonts-conda-forge 1 0 conda-forge
+freeimage 3.18.0 hec5cf28_16 conda-forge
+freetype 2.12.1 hdaf720e_2 conda-forge
+frozenlist 1.4.0 py311ha68e1ae_1 conda-forge
+gettext 0.21.1 h5728263_0 conda-forge
+gl2ps 1.4.2 h0597ee9_0 conda-forge
+glew 2.1.0 h39d44d4_2 conda-forge
+glib 2.78.3 h12be248_0 conda-forge
+glib-tools 2.78.3 h12be248_0 conda-forge
+gst-plugins-base 1.22.7 h001b923_1 conda-forge
+gstreamer 1.22.7 hb4038d2_1 conda-forge
+hdf4 4.2.15 h1334946_6 conda-forge
+hdf5 1.12.2 nompi_h57737ce_101 conda-forge
+icu 73.2 h63175ca_0 conda-forge
+idna 3.6 pyhd8ed1ab_0 conda-forge
+ifcopenshell v0.7.0a10 py311hef85837_0 conda-forge
+imath 3.1.9 h12be248_0 conda-forge
+intel-openmp 2023.2.0 h57928b3_50497 conda-forge
+jsoncpp 1.9.5 h2d74725_1 conda-forge
+jxrlib 1.1 h8ffe710_2 conda-forge
+krb5 1.21.2 heb0366b_0 conda-forge
+lcms2 2.15 h3e3b177_1 conda-forge
+lerc 4.0.0 h63175ca_0 conda-forge
+libaec 1.1.2 h63175ca_1 conda-forge
+libblas 3.9.0 20_win64_mkl conda-forge
+libcblas 3.9.0 20_win64_mkl conda-forge
+libclang 15.0.7 default_h77d9078_3 conda-forge
+libclang13 15.0.7 default_h77d9078_3 conda-forge
+libcurl 8.5.0 hd5e4a3a_0 conda-forge
+libdeflate 1.18 hcfcfb64_0 conda-forge
+libexpat 2.5.0 h63175ca_1 conda-forge
+libffi 3.4.2 h8ffe710_5 conda-forge
+libglib 2.78.3 h16e383f_0 conda-forge
+libhwloc 2.9.1 h51c2c0f_0 conda-forge
+libiconv 1.17 hcfcfb64_1 conda-forge
+libjpeg-turbo 2.1.5.1 hcfcfb64_1 conda-forge
+liblapack 3.9.0 20_win64_mkl conda-forge
+libnetcdf 4.9.1 nompi_h83fa41b_102 conda-forge
+libogg 1.3.4 h8ffe710_1 conda-forge
+libopus 1.3.1 h8ffe710_1 conda-forge
+libpng 1.6.39 h19919ed_0 conda-forge
+libraw 0.21.1 h1334946_1 conda-forge
+libsqlite 3.44.2 hcfcfb64_0 conda-forge
+libssh2 1.11.0 h7dfc565_0 conda-forge
+libtheora 1.1.1 h8d14728_1005 conda-forge
+libtiff 4.5.1 h6c8260b_1 conda-forge
+libvorbis 1.3.7 h0e60522_0 conda-forge
+libwebp-base 1.3.2 hcfcfb64_0 conda-forge
+libxml2 2.10.4 hc3477c8_0 conda-forge
+libzip 1.10.1 h1d365fa_3 conda-forge
+libzlib 1.2.13 hcfcfb64_5 conda-forge
+loguru 0.7.2 py311h1ea47a8_1 conda-forge
+lz4-c 1.9.4 hcfcfb64_0 conda-forge
+mkl 2023.2.0 h6a75c08_50497 conda-forge
+mpfr 4.2.1 h64bf75a_0 conda-forge
+mpir 3.0.0 he025d50_1002 conda-forge
+multidict 6.0.4 py311ha68e1ae_1 conda-forge
+nlohmann_json 3.11.2 h39d44d4_0 conda-forge
+numpy 1.26.2 py311h0b4df5a_0 conda-forge
+occt 7.7.2 all_h165458f_201 conda-forge
+openexr 3.2.1 h5fba010_0 conda-forge
+openh264 2.3.1 h63175ca_2 conda-forge
+openjpeg 2.5.0 ha2aaf27_2 conda-forge
+openssl 3.2.0 hcfcfb64_1 conda-forge
+pcre2 10.42 h17e33f8_0 conda-forge
+pip 23.3.1 pyhd8ed1ab_0 conda-forge
+proj 9.1.1 heca977f_2 conda-forge
+pthreads-win32 2.9.1 hfa6e2cd_3 conda-forge
+pugixml 1.11.4 h63175ca_1 conda-forge
+python 3.11.6 h2628c8c_0_cpython conda-forge
+python_abi 3.11 4_cp311 conda-forge
+qt-main 5.15.8 he5a7383_16 conda-forge
+rapidjson 1.1.0 ha925a31_1002 conda-forge
+setuptools 68.2.2 pyhd8ed1ab_0 conda-forge
+sqlite 3.44.2 hcfcfb64_0 conda-forge
+svt-av1 1.4.1 h63175ca_0 conda-forge
+tbb 2021.9.0 h91493d7_0 conda-forge
+tbb-devel 2021.9.0 h91493d7_0 conda-forge
+tk 8.6.13 h5226925_1 conda-forge
+tzdata 2023c h71feb2d_0 conda-forge
+ucrt 10.0.22621.0 h57928b3_0 conda-forge
+utfcpp 4.0.4 h57928b3_0 conda-forge
+vc 14.3 hcf57466_18 conda-forge
+vc14_runtime 14.38.33130 h82b7239_18 conda-forge
+vs2015_runtime 14.38.33130 hcb4865c_18 conda-forge
+vtk 9.2.6 qt_py311h007783e_201 conda-forge
+wheel 0.42.0 pyhd8ed1ab_0 conda-forge
+win32_setctime 1.1.0 pyhd8ed1ab_0 conda-forge
+wslink 1.12.4 pyhd8ed1ab_0 conda-forge
+x264 1!164.3095 h8ffe710_2 conda-forge
+x265 3.5 h2d74725_3 conda-forge
+xz 5.2.6 h8d14728_0 conda-forge
+yarl 1.9.3 py311ha68e1ae_0 conda-forge
+zlib 1.2.13 hcfcfb64_5 conda-forge
+zstd 1.5.5 h12be248_0 conda-forge
diff --git a/examples/scripts/units_convert/ifcva10_1.txt b/examples/scripts/units_convert/ifcva10_1.txt
new file mode 100644
index 000000000..68045d517
--- /dev/null
+++ b/examples/scripts/units_convert/ifcva10_1.txt
@@ -0,0 +1,123 @@
+# packages in environment at C:\miniforge3\envs\ifcva10_1:
+#
+# Name Version Build Channel
+aiohttp 3.9.1 py311ha68e1ae_0 conda-forge
+aiosignal 1.3.1 pyhd8ed1ab_0 conda-forge
+aom 3.7.1 h63175ca_0 conda-forge
+attrs 23.1.0 pyh71513ae_1 conda-forge
+blosc 1.21.5 hdccc3a2_0 conda-forge
+bzip2 1.0.8 hcfcfb64_5 conda-forge
+ca-certificates 2023.11.17 h56e8100_0 conda-forge
+cgal-cpp 5.6 h1344ace_1 conda-forge
+colorama 0.4.6 pyhd8ed1ab_0 conda-forge
+dav1d 1.2.1 hcfcfb64_0 conda-forge
+double-conversion 3.3.0 h63175ca_0 conda-forge
+eigen 3.4.0 h91493d7_0 conda-forge
+expat 2.5.0 h63175ca_1 conda-forge
+ffmpeg 6.1.0 gpl_h8ec0088_102 conda-forge
+font-ttf-dejavu-sans-mono 2.37 hab24e00_0 conda-forge
+font-ttf-inconsolata 3.000 h77eed37_0 conda-forge
+font-ttf-source-code-pro 2.038 h77eed37_0 conda-forge
+font-ttf-ubuntu 0.83 h77eed37_1 conda-forge
+fontconfig 2.14.2 hbde0cde_0 conda-forge
+fonts-conda-ecosystem 1 0 conda-forge
+fonts-conda-forge 1 0 conda-forge
+freeimage 3.18.0 h609497f_18 conda-forge
+freetype 2.12.1 hdaf720e_2 conda-forge
+frozenlist 1.4.0 py311ha68e1ae_1 conda-forge
+gettext 0.21.1 h5728263_0 conda-forge
+gl2ps 1.4.2 h0597ee9_0 conda-forge
+glew 2.1.0 h39d44d4_2 conda-forge
+glib 2.78.3 h12be248_0 conda-forge
+glib-tools 2.78.3 h12be248_0 conda-forge
+gst-plugins-base 1.22.7 h001b923_1 conda-forge
+gstreamer 1.22.7 hb4038d2_1 conda-forge
+hdf4 4.2.15 h5557f11_7 conda-forge
+hdf5 1.14.3 nompi_h73e8ff5_100 conda-forge
+icu 73.2 h63175ca_0 conda-forge
+idna 3.6 pyhd8ed1ab_0 conda-forge
+ifcopenshell v0.7.0a10 py311h1bbf5e4_1 conda-forge
+imath 3.1.9 h12be248_0 conda-forge
+intel-openmp 2023.2.0 h57928b3_50497 conda-forge
+jsoncpp 1.9.5 h2d74725_1 conda-forge
+jxrlib 1.1 h8ffe710_2 conda-forge
+krb5 1.21.2 heb0366b_0 conda-forge
+lcms2 2.16 h67d730c_0 conda-forge
+lerc 4.0.0 h63175ca_0 conda-forge
+libaec 1.1.2 h63175ca_1 conda-forge
+libblas 3.9.0 20_win64_mkl conda-forge
+libboost 1.82.0 h65993cd_6 conda-forge
+libboost-devel 1.82.0 h91493d7_6 conda-forge
+libboost-headers 1.82.0 h57928b3_6 conda-forge
+libcblas 3.9.0 20_win64_mkl conda-forge
+libclang 15.0.7 default_h77d9078_3 conda-forge
+libclang13 15.0.7 default_h77d9078_3 conda-forge
+libcurl 8.5.0 hd5e4a3a_0 conda-forge
+libdeflate 1.19 hcfcfb64_0 conda-forge
+libexpat 2.5.0 h63175ca_1 conda-forge
+libffi 3.4.2 h8ffe710_5 conda-forge
+libglib 2.78.3 h16e383f_0 conda-forge
+libhwloc 2.9.3 default_haede6df_1009 conda-forge
+libiconv 1.17 hcfcfb64_1 conda-forge
+libjpeg-turbo 3.0.0 hcfcfb64_1 conda-forge
+liblapack 3.9.0 20_win64_mkl conda-forge
+libnetcdf 4.9.2 nompi_h8284064_112 conda-forge
+libogg 1.3.4 h8ffe710_1 conda-forge
+libopus 1.3.1 h8ffe710_1 conda-forge
+libpng 1.6.39 h19919ed_0 conda-forge
+libraw 0.21.1 h5557f11_2 conda-forge
+libsqlite 3.44.2 hcfcfb64_0 conda-forge
+libssh2 1.11.0 h7dfc565_0 conda-forge
+libtheora 1.1.1 h8d14728_1005 conda-forge
+libtiff 4.6.0 h6e2ebb7_2 conda-forge
+libvorbis 1.3.7 h0e60522_0 conda-forge
+libwebp-base 1.3.2 hcfcfb64_0 conda-forge
+libxml2 2.11.6 hc3477c8_0 conda-forge
+libzip 1.10.1 h1d365fa_3 conda-forge
+libzlib 1.2.13 hcfcfb64_5 conda-forge
+loguru 0.7.2 py311h1ea47a8_1 conda-forge
+lz4-c 1.9.4 hcfcfb64_0 conda-forge
+mkl 2023.2.0 h6a75c08_50497 conda-forge
+mpfr 4.2.1 h64bf75a_0 conda-forge
+mpir 3.0.0 he025d50_1002 conda-forge
+multidict 6.0.4 py311ha68e1ae_1 conda-forge
+nlohmann_json 3.11.2 h39d44d4_0 conda-forge
+numpy 1.26.2 py311h0b4df5a_0 conda-forge
+occt 7.7.2 all_h165458f_201 conda-forge
+openexr 3.2.1 h5fba010_0 conda-forge
+openh264 2.4.0 h63175ca_0 conda-forge
+openjpeg 2.5.0 h3d672ee_3 conda-forge
+openssl 3.2.0 hcfcfb64_1 conda-forge
+pcre2 10.42 h17e33f8_0 conda-forge
+pip 23.3.1 pyhd8ed1ab_0 conda-forge
+proj 9.3.0 he13c7e8_2 conda-forge
+pthreads-win32 2.9.1 hfa6e2cd_3 conda-forge
+pugixml 1.14 h63175ca_0 conda-forge
+python 3.11.6 h2628c8c_0_cpython conda-forge
+python_abi 3.11 4_cp311 conda-forge
+qt-main 5.15.8 h9e85ed6_18 conda-forge
+rapidjson 1.1.0 ha925a31_1002 conda-forge
+setuptools 68.2.2 pyhd8ed1ab_0 conda-forge
+snappy 1.1.10 hfb803bf_0 conda-forge
+sqlite 3.44.2 hcfcfb64_0 conda-forge
+svt-av1 1.7.0 h63175ca_0 conda-forge
+tbb 2021.11.0 h91493d7_0 conda-forge
+tbb-devel 2021.11.0 h91493d7_0 conda-forge
+tk 8.6.13 h5226925_1 conda-forge
+tzdata 2023c h71feb2d_0 conda-forge
+ucrt 10.0.22621.0 h57928b3_0 conda-forge
+utfcpp 4.0.4 h57928b3_0 conda-forge
+vc 14.3 hcf57466_18 conda-forge
+vc14_runtime 14.38.33130 h82b7239_18 conda-forge
+vs2015_runtime 14.38.33130 hcb4865c_18 conda-forge
+vtk 9.2.6 qt_py311h1234567_219 conda-forge
+vtk-base 9.2.6 qt_py311h1234567_219 conda-forge
+wheel 0.42.0 pyhd8ed1ab_0 conda-forge
+win32_setctime 1.1.0 pyhd8ed1ab_0 conda-forge
+wslink 1.12.4 pyhd8ed1ab_0 conda-forge
+x264 1!164.3095 h8ffe710_2 conda-forge
+x265 3.5 h2d74725_3 conda-forge
+xz 5.2.6 h8d14728_0 conda-forge
+yarl 1.9.3 py311ha68e1ae_0 conda-forge
+zlib 1.2.13 hcfcfb64_5 conda-forge
+zstd 1.5.5 h12be248_0 conda-forge
diff --git a/src/ada/__init__.py b/src/ada/__init__.py
index c9c5cc137..24aca41a7 100644
--- a/src/ada/__init__.py
+++ b/src/ada/__init__.py
@@ -44,6 +44,7 @@
__author__ = "Kristoffer H. Andersen"
+# A set of convenience name generators for plates and beams
PL_N = Counter(start=1, prefix="PL")
BM_N = Counter(start=1, prefix="BM")
diff --git a/src/ada/api/beams/geom_beams.py b/src/ada/api/beams/geom_beams.py
index ad6137278..dd0ade852 100644
--- a/src/ada/api/beams/geom_beams.py
+++ b/src/ada/api/beams/geom_beams.py
@@ -51,6 +51,11 @@ def straight_tapered_beam_to_geom(beam: BeamTapered, is_solid=True) -> Geometry:
return ibeam_taper_to_geom(beam)
else:
return ibeam_taper_to_face_geom(beam)
+ elif beam.section.type == beam.section.TYPES.BOX:
+ if is_solid:
+ return boxbeam_taper_to_geom(beam)
+ else:
+ raise NotImplementedError("Box beam taper to face geometry not implemented")
else:
raise NotImplementedError(f"Beam section type {beam.section.type} not implemented")
@@ -106,6 +111,15 @@ def section_to_arbitrary_profile_def_with_voids(section: Section, solid=True) ->
return geo_su.ArbitraryProfileDef(profile_type, outer_curve, inner_curves, profile_name=section.name)
+def boxbeam_taper_to_geom(beam: BeamTapered) -> Geometry:
+ profile1 = section_to_arbitrary_profile_def_with_voids(beam.section)
+ profile2 = section_to_arbitrary_profile_def_with_voids(beam.taper)
+
+ place = Axis2Placement3D(location=beam.n1.p, axis=beam.xvec, ref_direction=beam.yvec)
+ geom = geo_so.ExtrudedAreaSolidTapered(profile1, place, beam.length, Direction(0, 0, 1), profile2)
+ return Geometry(beam.guid, geom, beam.color)
+
+
def ibeam_taper_to_geom(beam: BeamTapered) -> Geometry:
profile1 = section_to_arbitrary_profile_def_with_voids(beam.section)
profile2 = section_to_arbitrary_profile_def_with_voids(beam.taper)
diff --git a/src/ada/api/beams/helpers.py b/src/ada/api/beams/helpers.py
index fe243bfa9..e3ba2a1a4 100644
--- a/src/ada/api/beams/helpers.py
+++ b/src/ada/api/beams/helpers.py
@@ -202,11 +202,16 @@ def is_strong_axis_stiffened(beam: Beam, other_beam: Beam) -> bool:
def get_justification(beam: Beam) -> Justification:
"""Justification line"""
# Check if both self.e1 and self.e2 are None
+ if beam.section.type in (beam.section.TYPES.TUBULAR, beam.section.TYPES.CIRCULAR):
+ bm_height = beam.section.r * 2
+ else:
+ bm_height = beam.section.h
+
if beam.e1 is None and beam.e2 is None:
return Justification.NA
elif beam.e1 is None or beam.e2 is None:
return Justification.CUSTOM
- elif beam.e1.is_equal(beam.e2) and beam.e1.is_equal(beam.up * beam.section.h / 2):
+ elif beam.e1.is_equal(beam.e2) and beam.e1.is_equal(beam.up * bm_height / 2):
return Justification.TOS
else:
return Justification.CUSTOM
diff --git a/src/ada/api/containers.py b/src/ada/api/containers.py
index 9d8a7e782..397ff650b 100644
--- a/src/ada/api/containers.py
+++ b/src/ada/api/containers.py
@@ -941,7 +941,9 @@ def min_nid(self) -> int:
def nodes(self) -> list[Node]:
return self._nodes
- def get_by_volume(self, p=None, vol_box=None, vol_cyl=None, tol=Settings.point_tol) -> List[Node]:
+ def get_by_volume(
+ self, p=None, vol_box=None, vol_cyl=None, tol=Settings.point_tol, single_member=False
+ ) -> list[Node]:
"""
:param p: Point
@@ -997,9 +999,20 @@ def eval_p_in_cyl(no):
return no
return None
- return list(filter(None, [eval_p_in_cyl(q) for q in simplesearch]))
+ result = list(filter(None, [eval_p_in_cyl(q) for q in simplesearch]))
else:
- return list(simplesearch)
+ result = list(simplesearch)
+
+ if len(result) == 0:
+ logger.info(f"No vertices found using {p=}, {vol_box=}, {vol_cyl=} and {tol=}")
+ return result
+
+ if single_member:
+ if len(result) != 1:
+ logger.warning(f"Returning member at index=0 despite {len(result)=}. Please check your results")
+ return result[0]
+
+ return result
def add(self, node: Node, point_tol: float = Settings.point_tol, allow_coincident: bool = False) -> Node:
"""Insert node into sorted list"""
diff --git a/src/ada/api/spatial/assembly.py b/src/ada/api/spatial/assembly.py
index 8de2a043d..52de68dae 100644
--- a/src/ada/api/spatial/assembly.py
+++ b/src/ada/api/spatial/assembly.py
@@ -19,11 +19,11 @@
FemSet,
StepEigen,
StepExplicit,
- StepImplicit,
+ StepImplicitStatic,
StepSteadyState,
)
-_step_types = Union[StepSteadyState, StepEigen, StepImplicit, StepExplicit]
+_step_types = Union[StepSteadyState, StepEigen, StepImplicitStatic, StepExplicit]
if TYPE_CHECKING:
import ifcopenshell
@@ -143,6 +143,7 @@ def to_fem(
metadata=None,
execute=False,
run_ext=False,
+ mesh_only=False,
cpus=1,
gpus=None,
overwrite=False,
@@ -151,6 +152,7 @@ def to_fem(
run_in_shell=False,
make_zip_file=False,
return_fea_results=True,
+ model_data_only=False,
) -> FEAResult | None:
"""
Create a FEM input file deck for executing fem analysis in a specified FEM format.
@@ -206,7 +208,9 @@ def to_fem(
scratch_dir = Settings.scratch_dir if scratch_dir is None else pathlib.Path(scratch_dir)
- write_to_fem(self, name, fem_format, overwrite, fem_converter, scratch_dir, metadata, make_zip_file)
+ write_to_fem(
+ self, name, fem_format, overwrite, fem_converter, scratch_dir, metadata, make_zip_file, model_data_only
+ )
# Execute
if execute:
diff --git a/src/ada/api/spatial/part.py b/src/ada/api/spatial/part.py
index 196deaaf7..8dde6f7ec 100644
--- a/src/ada/api/spatial/part.py
+++ b/src/ada/api/spatial/part.py
@@ -252,23 +252,25 @@ def add_section(self, section: Section) -> Section:
section.units = self.units
return self._sections.add(section)
- def add_object(self, obj: Part | Beam | Plate | Wall | Pipe | Shape | Weld):
- from ada import Beam, Part, Pipe, Plate, Shape, Wall, Weld
+ def add_object(self, obj: Part | Beam | Plate | Wall | Pipe | Shape | Weld | Section):
+ from ada import Beam, Part, Pipe, Plate, Section, Shape, Wall, Weld
if isinstance(obj, Beam):
- self.add_beam(obj)
+ return self.add_beam(obj)
elif isinstance(obj, Plate):
- self.add_plate(obj)
+ return self.add_plate(obj)
elif isinstance(obj, Pipe):
- self.add_pipe(obj)
+ return self.add_pipe(obj)
elif issubclass(type(obj), Part):
- self.add_part(obj)
+ return self.add_part(obj)
elif issubclass(type(obj), Shape):
- self.add_shape(obj)
+ return self.add_shape(obj)
elif isinstance(obj, Wall):
- self.add_wall(obj)
+ return self.add_wall(obj)
elif isinstance(obj, Weld):
- self.add_weld(obj)
+ return self.add_weld(obj)
+ elif isinstance(obj, Section):
+ return self.add_section(obj)
else:
raise NotImplementedError(f'"{type(obj)}" is not yet supported for smart append')
@@ -756,11 +758,15 @@ def post_pro(buffer_items, tree):
self.to_trimesh_scene(**kwargs).export(gltf_file, buffer_postprocessor=post_pro)
- def to_trimesh_scene(self, render_override: dict[str, GeomRepr | str] = None, filter_by_guids=None):
+ def to_trimesh_scene(
+ self, render_override: dict[str, GeomRepr | str] = None, filter_by_guids=None, merge_meshes=True
+ ):
from ada.occ.tessellating import BatchTessellator
bt = BatchTessellator()
- return bt.tessellate_part(self)
+ return bt.tessellate_part(
+ self, merge_meshes=merge_meshes, render_override=render_override, filter_by_guids=filter_by_guids
+ )
def to_stp(
self,
@@ -790,6 +796,14 @@ def to_viewer(self, **kwargs):
send_to_viewer(self, **kwargs)
+ def show(self):
+ from ada.occ.tessellating import BatchTessellator
+
+ bt = BatchTessellator()
+ scene = bt.tessellate_part(self)
+
+ return scene.show("notebook")
+
@property
def parts(self) -> dict[str, Part]:
return self._parts
diff --git a/src/ada/base/physical_objects.py b/src/ada/base/physical_objects.py
index 717159e16..061d5a194 100644
--- a/src/ada/base/physical_objects.py
+++ b/src/ada/base/physical_objects.py
@@ -160,6 +160,30 @@ def to_obj_mesh(self, geom_repr: str | GeomRepr = GeomRepr.SOLID, export_config:
return occ_geom_to_poly_mesh(self, geom_repr=geom_repr, export_config=export_config)
+ def show(self):
+ from itertools import groupby
+
+ import trimesh
+
+ from ada.occ.tessellating import BatchTessellator
+ from ada.visit.gltf.optimize import concatenate_stores
+ from ada.visit.gltf.store import merged_mesh_to_trimesh_scene
+
+ bt = BatchTessellator()
+ mesh_stores = list(bt.batch_tessellate([self]))
+
+ scene = trimesh.Scene()
+ mesh_map = []
+
+ for mat_id, meshes in groupby(mesh_stores, lambda x: x.material):
+ meshes = list(meshes)
+
+ merged_store = concatenate_stores(meshes)
+ mesh_map.append((mat_id, meshes, merged_store))
+ merged_mesh_to_trimesh_scene(scene, merged_store, bt.get_mat_by_id(mat_id), mat_id, None)
+
+ return scene.show("notebook")
+
@property
def booleans(self) -> list[Boolean]:
return self._booleans
diff --git a/src/ada/cadit/ifc/convert_length_unit_patch.py b/src/ada/cadit/ifc/convert_length_unit_patch.py
new file mode 100644
index 000000000..78506b9cb
--- /dev/null
+++ b/src/ada/cadit/ifc/convert_length_unit_patch.py
@@ -0,0 +1,124 @@
+# IfcPatch - IFC patching utiliy
+# Copyright (C) 2020, 2021 Dion Moult
+#
+# This file is part of IfcPatch.
+#
+# IfcPatch is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# IfcPatch is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with IfcPatch. If not, see .
+
+import ifcopenshell
+import ifcopenshell.api
+import ifcopenshell.api.owner.settings
+import ifcopenshell.express.schema
+import ifcopenshell.util.element
+import ifcopenshell.util.pset
+import ifcopenshell.util.unit
+
+wrap = ifcopenshell.ifcopenshell_wrapper
+
+
+def get_base_type_name(content_type: wrap.named_type | wrap.type_declaration) -> wrap.type_declaration | None:
+ cur_decl = content_type
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+
+ if isinstance(cur_decl, wrap.aggregation_type):
+ res = cur_decl.type_of_element()
+ cur_decl = res.declared_type()
+ if hasattr(cur_decl, "name") and cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == "IfcLengthMeasure":
+ return cur_decl
+
+ return None
+
+
+class Patcher:
+ def __init__(self, src, file, logger, unit="METERS"):
+ """Converts the length unit of a model to the specified unit
+
+ Allowed metric units include METERS, MILLIMETERS, CENTIMETERS, etc.
+ Allowed imperial units include INCHES, FEET, MILES.
+
+ :param unit: The name of the desired unit.
+ :type unit: str
+
+ Example:
+
+ .. code:: python
+
+ # Convert to millimeters
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["MILLIMETERS"]})
+
+ # Convert to feet
+ ifcpatch.execute({"input": "input.ifc", "file": model, "recipe": "ConvertLengthUnit", "arguments": ["FEET"]})
+ """
+ self.src = src
+ self.file: ifcopenshell.file = file
+ self.logger = logger
+ self.unit = unit
+ self.file_patched: ifcopenshell.file
+
+ def patch(self):
+ self.file_patched = ifcopenshell.api.run("project.create_file", version=self.file.schema)
+ if self.file.schema == "IFC2X3":
+ user = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
+ application = self.file_patched.add(self.file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
+ old_get_user = ifcopenshell.api.owner.settings.get_user
+ old_get_application = ifcopenshell.api.owner.settings.get_application
+ ifcopenshell.api.owner.settings.get_user = lambda ifc: user
+ ifcopenshell.api.owner.settings.get_application = lambda ifc: application
+
+ # Copy all elements from the original file to the patched file
+ for el in self.file:
+ self.file_patched.add(el)
+
+ prefix = "MILLI" if self.unit == "MILLIMETERS" else None
+ new_length = ifcopenshell.api.run("unit.add_si_unit", self.file_patched, unit_type="LENGTHUNIT", prefix=prefix)
+ unit_assignment = ifcopenshell.util.unit.get_unit_assignment(self.file)
+ old_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+
+ schema = wrap.schema_by_name(self.file.schema)
+ # Traverse all elements and their nested attributes in the file and convert them
+ for element in self.file_patched:
+ entity = schema.declaration_by_name(element.is_a())
+ attrs = entity.all_attributes()
+ for i, (attr, val, is_derived) in enumerate(zip(attrs, list(element), entity.derived())):
+ if is_derived:
+ continue
+ # Get all methods and attributes of the element
+ attr_type = attr.type_of_attribute()
+ base_type = get_base_type_name(attr_type)
+ if base_type is None:
+ continue
+ if val is None:
+ continue
+ if isinstance(val, tuple):
+ new_el = [ifcopenshell.util.unit.convert_unit(v, old_length, new_length) for v in val]
+ setattr(element, attr.name(), tuple(new_el))
+ else:
+ new_el = ifcopenshell.util.unit.convert_unit(val, old_length, new_length)
+ # set the new value
+ setattr(element, attr.name(), new_el)
+
+ if self.file.schema == "IFC2X3":
+ ifcopenshell.api.owner.settings.get_user = old_get_user
+ ifcopenshell.api.owner.settings.get_application = old_get_application
diff --git a/src/ada/cadit/ifc/store.py b/src/ada/cadit/ifc/store.py
index 33d4edfb3..a068cd7c8 100644
--- a/src/ada/cadit/ifc/store.py
+++ b/src/ada/cadit/ifc/store.py
@@ -10,7 +10,7 @@
from ada.base.changes import ChangeAction
from ada.base.types import GeomRepr
-from ada.cadit.ifc.units_conversion import convert_units
+from ada.cadit.ifc.units_conversion import convert_file_length_units
from ada.cadit.ifc.utils import assembly_to_ifc_file, default_settings, get_unit_type
from ada.cadit.ifc.write.write_sections import get_profile_class
from ada.cadit.ifc.write.write_user import create_owner_history_from_user
@@ -170,7 +170,7 @@ def load_ifc_content_from_file(
unit_type = get_unit_type(self.f)
if unit_type != self.assembly.units:
- self.f = convert_units(self.assembly.units, self.f)
+ self.f = convert_file_length_units(self.f, self.assembly.units)
if elements2part is None:
self.reader.load_spatial_hierarchy()
diff --git a/src/ada/cadit/ifc/units_conversion.py b/src/ada/cadit/ifc/units_conversion.py
index e74fc0791..6f62b03bd 100644
--- a/src/ada/cadit/ifc/units_conversion.py
+++ b/src/ada/cadit/ifc/units_conversion.py
@@ -1,50 +1,96 @@
+from typing import Any, Iterable
+
import ifcopenshell.api
+import ifcopenshell.util.element
+import ifcopenshell.util.unit
from ada.base.units import Units
+from ada.cadit.ifc.convert_length_unit_patch import Patcher
-def convert_units(units: Units, file: ifcopenshell.file):
- from ada import Units
-
+def convert_units(units: Units, f: ifcopenshell.file):
units_str = "MILLIMETERS" if units == Units.MM else "METERS"
- unit = {"is_metric": True, "raw": units_str}
- file_patched = ifcopenshell.api.run("project.create_file", version=file.schema)
- if file.schema == "IFC2X3":
- user = file_patched.add(file.by_type("IfcProject")[0].OwnerHistory.OwningUser)
- application = file_patched.add(file.by_type("IfcProject")[0].OwnerHistory.OwningApplication)
- old_get_user = ifcopenshell.api.owner.settings.get_user
- old_get_application = ifcopenshell.api.owner.settings.get_application
- ifcopenshell.api.owner.settings.get_user = lambda ifc: user
- ifcopenshell.api.owner.settings.get_application = lambda ifc: application
- project = ifcopenshell.api.run("root.create_entity", file_patched, ifc_class="IfcProject")
- unit_assignment = ifcopenshell.api.run("unit.assign_unit", file_patched, **{"length": unit})
-
- # Is there a better way?
- for element in file.by_type("IfcGeometricRepresentationContext", include_subtypes=False):
- element.Precision = 1e-8
-
- # If we don't add openings first, they don't get converted
- for element in file.by_type("IfcOpeningElement"):
- file_patched.add(element)
-
- for element in file:
- file_patched.add(element)
-
- new_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
- old_length = [
- u
- for u in file_patched.by_type("IfcProject")[1].UnitsInContext.Units
- if getattr(u, "UnitType", None) == "LENGTHUNIT"
- ][0]
-
- for inverse in file_patched.get_inverse(old_length):
- ifcopenshell.util.element.replace_attribute(inverse, old_length, new_length)
- file_patched.remove(old_length)
- file_patched.remove(project)
+ task = Patcher(src=None, file=f, logger=None, unit=units_str)
+ task.patch()
+
+ return task.file_patched
+
+
+def is_attr_type(
+ content_type: ifcopenshell.ifcopenshell_wrapper.named_type | ifcopenshell.ifcopenshell_wrapper.type_declaration,
+ ifc_unit_type_name: str,
+) -> ifcopenshell.ifcopenshell_wrapper.type_declaration | None:
+ cur_decl = content_type
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == ifc_unit_type_name:
+ return cur_decl
+
+ if isinstance(cur_decl, ifcopenshell.ifcopenshell_wrapper.aggregation_type):
+ res = cur_decl.type_of_element()
+ cur_decl = res.declared_type()
+ if hasattr(cur_decl, "name") and cur_decl.name() == ifc_unit_type_name:
+ return cur_decl
+ while hasattr(cur_decl, "declared_type") is True:
+ cur_decl = cur_decl.declared_type()
+ if hasattr(cur_decl, "name") is False:
+ continue
+ if cur_decl.name() == ifc_unit_type_name:
+ return cur_decl
+
+ return None
+
+
+def iter_element_and_attributes_per_type(
+ ifc_file: ifcopenshell.file, attr_type_name: str
+) -> Iterable[tuple[ifcopenshell.entity_instance, ifcopenshell.ifcopenshell_wrapper.attribute, Any, str]]:
+ schema = ifcopenshell.ifcopenshell_wrapper.schema_by_name(ifc_file.schema)
+
+ for element in ifc_file:
+ entity = schema.declaration_by_name(element.is_a())
+ attrs = entity.all_attributes()
+ for i, (attr, val, is_derived) in enumerate(zip(attrs, list(element), entity.derived())):
+ if is_derived:
+ continue
- if file.schema == "IFC2X3":
- ifcopenshell.api.owner.settings.get_user = old_get_user
- ifcopenshell.api.owner.settings.get_application = old_get_application
+ # Get all methods and attributes of the element
+ attr_type = attr.type_of_attribute()
+ base_type = is_attr_type(attr_type, attr_type_name)
+ if base_type is None:
+ continue
+
+ if val is None:
+ continue
+
+ yield element, attr, val
+
+
+def convert_file_length_units(ifc_file: ifcopenshell.file, units: Units) -> ifcopenshell.file:
+ """Converts all units in an IFC file to the specified target units. Returns a new file."""
+ target_units = "MILLIMETERS" if units == Units.MM else "METERS"
+ prefix = "MILLI" if target_units == "MILLIMETERS" else None
+
+ # Copy all elements from the original file to the patched file
+ file_patched = ifcopenshell.file.from_string(ifc_file.wrapped_data.to_string())
+
+ unit_assignment = ifcopenshell.util.unit.get_unit_assignment(file_patched)
+
+ old_length = [u for u in unit_assignment.Units if getattr(u, "UnitType", None) == "LENGTHUNIT"][0]
+ new_length = ifcopenshell.api.run("unit.add_si_unit", file_patched, unit_type="LENGTHUNIT", prefix=prefix)
+
+ # Traverse all elements and their nested attributes in the file and convert them
+ for element, attr, val in iter_element_and_attributes_per_type(file_patched, "IfcLengthMeasure"):
+ if isinstance(val, tuple):
+ new_value = [ifcopenshell.util.unit.convert_unit(v, old_length, new_length) for v in val]
+ setattr(element, attr.name(), tuple(new_value))
+ else:
+ new_value = ifcopenshell.util.unit.convert_unit(val, old_length, new_length)
+ setattr(element, attr.name(), new_value)
+
+ file_patched.remove(old_length)
+ unit_assignment.Units = tuple([new_length, *unit_assignment.Units])
return file_patched
diff --git a/src/ada/cadit/step/read/reader_utils.py b/src/ada/cadit/step/read/reader_utils.py
index e48013f9d..9431ed9ba 100644
--- a/src/ada/cadit/step/read/reader_utils.py
+++ b/src/ada/cadit/step/read/reader_utils.py
@@ -9,6 +9,7 @@
from OCC.Core.TopoDS import TopoDS_Shape, TopoDS_Shell
from ada.base.adacpp_interface import adacpp_switch
+from ada.config import logger
from ada.occ.xcaf_utils import get_color
try:
@@ -89,14 +90,17 @@ def _get_sub_shapes(lab, loc):
color_set = True
if not color_set:
- if (
- color_tool.GetColor(lab_subs, 0, c)
- or color_tool.GetColor(lab_subs, 1, c)
- or color_tool.GetColor(lab_subs, 2, c)
- ):
- color_tool.SetInstanceColor(shape, 0, c)
- color_tool.SetInstanceColor(shape, 1, c)
- color_tool.SetInstanceColor(shape, 2, c)
+ try:
+ if (
+ color_tool.GetColor(lab_subs, 0, c)
+ or color_tool.GetColor(lab_subs, 1, c)
+ or color_tool.GetColor(lab_subs, 2, c)
+ ):
+ color_tool.SetInstanceColor(shape, 0, c)
+ color_tool.SetInstanceColor(shape, 1, c)
+ color_tool.SetInstanceColor(shape, 2, c)
+ except TypeError as e:
+ logger.warning(f"Could not set color for {lab_subs.GetLabelName()}: {e}")
shape_to_disp = BRepBuilderAPI_Transform(shape_sub, loc.Transformation()).Shape()
if shape_to_disp not in output_shapes:
diff --git a/src/ada/fem/__init__.py b/src/ada/fem/__init__.py
index ad83958de..ca5b55435 100644
--- a/src/ada/fem/__init__.py
+++ b/src/ada/fem/__init__.py
@@ -9,7 +9,13 @@
from .outputs import FieldOutput, HistOutput
from .sections import ConnectorSection, FemSection
from .sets import FemSet
-from .steps import StepEigen, StepExplicit, StepImplicit, StepSteadyState
+from .steps import (
+ StepEigen,
+ StepExplicit,
+ StepImplicitDynamic,
+ StepImplicitStatic,
+ StepSteadyState,
+)
from .surfaces import Surface
__all__ = [
@@ -20,7 +26,8 @@
"Interaction",
"StepSteadyState",
"StepEigen",
- "StepImplicit",
+ "StepImplicitStatic",
+ "StepImplicitDynamic",
"StepExplicit",
"Surface",
"Elem",
diff --git a/src/ada/fem/common.py b/src/ada/fem/common.py
index 69254dd7c..5cdfa6b69 100644
--- a/src/ada/fem/common.py
+++ b/src/ada/fem/common.py
@@ -1,7 +1,7 @@
from __future__ import annotations
from dataclasses import dataclass
-from typing import TYPE_CHECKING, List, Union
+from typing import TYPE_CHECKING
import numpy as np
@@ -13,10 +13,20 @@
class FemBase:
- def __init__(self, name, metadata, parent: Union[FEM, Step]):
+ """Base class for all FEM objects
+
+ Args:
+ name (str): Name of the object
+ metadata (dict, optional): Metadata for the object. Defaults to None.
+ parent (FEM, optional): Parent FEM object. Defaults to None.
+ str_override (str, optional): String representation of the object. Will override object writing. Defaults to None.
+ """
+
+ def __init__(self, name, metadata, parent: FEM | Step, str_override: str | None = None):
self.name = name
self.parent = parent
self._metadata = metadata if metadata is not None else dict()
+ self._str_override = str_override
@property
def name(self):
@@ -35,7 +45,7 @@ def name(self, value):
self._name = value.strip()
@property
- def parent(self) -> FEM:
+ def parent(self) -> FEM | Step:
return self._parent
@parent.setter
@@ -43,9 +53,17 @@ def parent(self, value):
self._parent = value
@property
- def metadata(self):
+ def metadata(self) -> dict:
return self._metadata
+ @property
+ def str_override(self) -> str | None:
+ return self._str_override
+
+ @str_override.setter
+ def str_override(self, value):
+ self._str_override = value
+
class CsysSystems:
RECTANGULAR = "RECTANGULAR"
@@ -67,7 +85,7 @@ def __init__(
name,
definition=TYPES_DEFINITIONS.COORDINATES,
system=TYPES_SYSTEM.RECTANGULAR,
- nodes: List[Node] = None,
+ nodes: list[Node] = None,
coords=None,
metadata=None,
parent: FEM = None,
@@ -90,7 +108,7 @@ def system(self):
return self._system
@property
- def nodes(self) -> List[Node]:
+ def nodes(self) -> list[Node]:
return self._nodes
@property
@@ -108,7 +126,7 @@ def __repr__(self):
class Amplitude(FemBase):
- def __init__(self, name: str, x: List[float], y: List[float], smooth=None, metadata=None, parent: FEM = None):
+ def __init__(self, name: str, x: list[float], y: list[float], smooth=None, metadata=None, parent: FEM = None):
super().__init__(name, metadata, parent)
self._x = x
self._y = y
diff --git a/src/ada/fem/concept.py b/src/ada/fem/concept.py
index 512b5ab48..de0dddddf 100644
--- a/src/ada/fem/concept.py
+++ b/src/ada/fem/concept.py
@@ -2,7 +2,7 @@
from dataclasses import dataclass, field
from itertools import chain
-from typing import TYPE_CHECKING, Dict, Iterable, List, Tuple, Union
+from typing import TYPE_CHECKING, Dict, Iterable, List, Union
from ada.api.containers import Nodes
from ada.config import logger
@@ -32,13 +32,13 @@
Spring,
StepEigen,
StepExplicit,
- StepImplicit,
+ StepImplicitStatic,
StepSteadyState,
)
from ada.fem.results.common import Mesh
from ada.fem.steps import Step
-_step_types = Union["StepSteadyState", "StepEigen", "StepImplicit", "StepExplicit"]
+_step_types = Union["StepSteadyState", "StepEigen", "StepImplicitStatic", "StepExplicit"]
@dataclass
@@ -66,7 +66,9 @@ class FEM:
constraints: Dict[str, Constraint] = field(init=False, default_factory=dict)
bcs: List[Bc] = field(init=False, default_factory=list)
- steps: List[Union[StepSteadyState, StepEigen, StepImplicit, StepExplicit]] = field(init=False, default_factory=list)
+ steps: List[Union[StepSteadyState, StepEigen, StepImplicitStatic, StepExplicit]] = field(
+ init=False, default_factory=list
+ )
nodes: Nodes = field(default_factory=Nodes, init=True)
ref_points: Nodes = field(default_factory=Nodes, init=True)
@@ -121,12 +123,12 @@ def add_bc(self, bc: Bc) -> Bc:
self.bcs.append(bc)
return bc
- def add_mass(self, mass: Mass) -> Tuple[Mass, FemSet]:
+ def add_mass(self, mass: Mass) -> Mass:
mass.parent = self
self.elements.add(mass)
elset = self.sets.add(FemSet(mass.name + "_set", [mass], "elset"))
mass.elset = elset
- return mass, elset
+ return mass
def add_set(
self,
@@ -244,12 +246,20 @@ def add_connector_section(self, connector_section: ConnectorSection) -> Connecto
return connector_section
def add_connector(self, connector: Connector) -> Connector:
+ from ada import Assembly
+
connector.parent = self
+ if not isinstance(self.parent, Assembly):
+ logger.warning(
+ "Connector Elements can usually only be added to an Assembly object. Please check your model."
+ )
+
self.elements.add(connector)
connector.csys.parent = self
if connector.con_sec.parent is None:
self.add_connector_section(connector.con_sec)
- self.add_set(FemSet(name=connector.name, members=[connector], set_type="elset"))
+ fs = self.add_set(FemSet(name=connector.name, members=[connector], set_type="elset"))
+ connector.elset = fs
return connector
def add_rp(self, name: str, node: Node):
@@ -407,6 +417,16 @@ def __add__(self, other: FEM):
self.elements += other.elements
self.sections += other.sections
+
+ # Copy any Beam sections to the current FEM parent Part object
+ if self.parent is not None:
+ for sec in self.sections:
+ sec.parent = self
+ if sec.section is None:
+ continue
+ if sec.section.parent != self.parent:
+ self.parent.add_object(sec.section)
+
self.sets += other.sets
for bc in other.bcs:
diff --git a/src/ada/fem/constraints.py b/src/ada/fem/constraints.py
index b0ba28f29..2acc7d078 100644
--- a/src/ada/fem/constraints.py
+++ b/src/ada/fem/constraints.py
@@ -9,6 +9,8 @@
from .surfaces import Surface
if TYPE_CHECKING:
+ from ada import Part
+
from .common import Amplitude
from .concept import FEM
@@ -250,11 +252,7 @@ def magnitude(self):
return self._magnitude
@property
- def initial_state_part(self):
- """
-
- :rtype: ada.Part
- """
+ def initial_state_part(self) -> Part:
return self._initial_state_part
@property
diff --git a/src/ada/fem/containers.py b/src/ada/fem/containers.py
index e32ade8c1..04bda694c 100644
--- a/src/ada/fem/containers.py
+++ b/src/ada/fem/containers.py
@@ -177,7 +177,17 @@ def __add__(self, other: FemElements):
for el in other.elements:
el.parent = self.parent
- return FemElements(chain.from_iterable([self.elements, other.elements]), self.parent)
+ other_num = len(other.elements)
+ self_num = len(self.elements)
+ final_elem = FemElements(chain.from_iterable([self.elements, other.elements]), self.parent)
+ if len(final_elem.elements) != (other_num + self_num):
+ raise ValueError("Unequal length of elements after concatenation")
+
+ self._elements = final_elem.elements
+ self._idmap = final_elem.idmap
+ self._group_by_types()
+ return self
+ # return final_elem
def __repr__(self):
data = {}
@@ -301,7 +311,7 @@ def lines_ecc(self) -> Iterable[Elem]:
@property
def connectors(self) -> Iterable[Connector]:
- return filter(lambda x: isinstance(x.type, Connector), self.elements)
+ return filter(lambda x: isinstance(x, Connector), self.elements)
@property
def masses(self) -> Iterable[Mass]:
diff --git a/src/ada/fem/elements.py b/src/ada/fem/elements.py
index 2f234f42c..583601563 100644
--- a/src/ada/fem/elements.py
+++ b/src/ada/fem/elements.py
@@ -137,7 +137,7 @@ def eccentricity(self, value: Eccentricity):
self._eccentricity = value
@property
- def elset(self):
+ def elset(self) -> FemSet:
return self._elset
@elset.setter
@@ -251,8 +251,6 @@ def __init__(
):
if type(n1) is not Node or type(n2) is not Node:
raise ValueError("Connector Start\\end must be nodes")
- super(Connector, self).__init__(el_id, [n1, n2], ElemType.CONNECTOR_SHAPES.CONNECTOR)
- super(Elem, self).__init__(name, metadata, parent)
self._n1 = n1
self._n2 = n2
self._con_type = con_type
@@ -260,6 +258,9 @@ def __init__(
self._preload = preload
self._csys = csys if csys is not None else Csys(f"{name}_csys")
+ super(Connector, self).__init__(el_id, [n1, n2], ElemType.CONNECTOR_SHAPES.CONNECTOR)
+ super(Elem, self).__init__(name, metadata, parent)
+
@property
def con_type(self):
return self._con_type
@@ -301,7 +302,7 @@ def csys(self, value: Csys):
self._csys = value
def __repr__(self):
- return f'ConnectorElem(ID: {self.id}, Type: {self.type}, End1: "{self.n1}", End2: "{self.n2}")'
+ return f'Connector(ID: {self.id}, Type: {self.type}, End1: "{self.n1}", End2: "{self.n2}")'
class Spring(Elem):
@@ -471,7 +472,7 @@ def point_mass_type(self, value):
self._ptype = value
def __repr__(self) -> str:
- return f"Mass({self.name}, {self.point_mass_type}, [{self.mass}])"
+ return f"Mass(ID: {self._el_id}, {self.name}, {self.point_mass_type}, [{self.mass}])"
def find_element_type_from_list(elements: List[Elem]) -> str:
diff --git a/src/ada/fem/exceptions/model_definition.py b/src/ada/fem/exceptions/model_definition.py
index 8eb75b8ab..846359678 100644
--- a/src/ada/fem/exceptions/model_definition.py
+++ b/src/ada/fem/exceptions/model_definition.py
@@ -15,3 +15,7 @@ def __init__(self, name):
self.name = name
self.message = f"FemSet {name} already exists"
super(FemSetNameExists, self).__init__(self.message)
+
+
+class DoesNotSupportMultiPart(Exception):
+ pass
diff --git a/src/ada/fem/formats/abaqus/write/write_connectors.py b/src/ada/fem/formats/abaqus/write/write_connectors.py
index d1a12d1bc..c68676617 100644
--- a/src/ada/fem/formats/abaqus/write/write_connectors.py
+++ b/src/ada/fem/formats/abaqus/write/write_connectors.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from collections.abc import Iterable
from typing import TYPE_CHECKING
@@ -9,11 +11,11 @@
from ada.fem import Connector, ConnectorSection
-def connectors_str(fem: "FEM") -> str:
+def connectors_str(fem: FEM) -> str:
return "\n".join([connector_str(con, True) for con in fem.elements.connectors])
-def connector_sections_str(fem: "FEM") -> str:
+def connector_sections_str(fem: FEM) -> str:
return "\n".join([connector_section_str(consec) for consec in fem.connector_sections.values()])
@@ -38,53 +40,72 @@ def connector_str(connector: "Connector", written_on_assembly_level: bool) -> st
**"""
-def connector_section_str(con_sec: "ConnectorSection") -> str:
- conn_txt = """*Connector Behavior, name={0}""".format(con_sec.name)
+def connector_elastic_str(con_sec: ConnectorSection) -> str:
elast = con_sec.elastic_comp
- damping = con_sec.damping_comp
- plastic_comp = con_sec.plastic_comp
- rigid_dofs = con_sec.rigid_dofs
- soft_elastic_dofs = con_sec.soft_elastic_dofs
if isinstance(elast, float):
- conn_txt += """\n*Connector Elasticity, component=1\n{0:.3E},""".format(elast)
- else:
- for i, comp in enumerate(elast):
- if isinstance(comp, Iterable) is False:
- conn_txt += """\n*Connector Elasticity, component={1} \n{0:.3E},""".format(comp, i + 1)
- else:
- conn_txt += f"\n*Connector Elasticity, nonlinear, component={i + 1}, DEPENDENCIES=1"
- for val in comp:
- conn_txt += "\n" + ", ".join([f"{x:>12.3E}" if u <= 1 else f",{x:>12d}" for u, x in enumerate(val)])
+ return """\n*Connector Elasticity, component=1\n{0:.3E},""".format(elast)
+
+ conn_txt = ""
+ for i, comp in enumerate(elast):
+ if isinstance(comp, Iterable) is False:
+ conn_txt += """\n*Connector Elasticity, component={1} \n{0:.3E},""".format(comp, i + 1)
+ else:
+ conn_txt += f"\n*Connector Elasticity, nonlinear, component={i + 1}, DEPENDENCIES=1"
+ for val in comp:
+ conn_txt += "\n" + ", ".join([f"{x:>12.3E}" if u <= 1 else f",{x:>12d}" for u, x in enumerate(val)])
+ return conn_txt
+
+
+def connector_plastic_str(con_sec: ConnectorSection) -> str:
+ plastic_comp = con_sec.plastic_comp
+ if plastic_comp is None:
+ return ""
+
+ conn_txt = ""
+ for i, comp in enumerate(plastic_comp):
+ conn_txt += """\n*Connector Plasticity, component={}\n*Connector Hardening, definition=TABULAR""".format(i + 1)
+ for val in comp:
+ force, motion, rate = val
+ conn_txt += "\n{}, {}, {}".format(force, motion, rate)
+
+ return conn_txt
+
+
+def connector_damping_str(con_sec: ConnectorSection) -> str:
+ damping = con_sec.damping_comp
if isinstance(damping, float):
- conn_txt += """\n*Connector Damping, component=1\n{0:.3E},""".format(damping)
- else:
- for i, comp in enumerate(damping):
- if isinstance(comp, float):
- conn_txt += """\n*Connector Damping, component={1} \n{0:.3E},""".format(comp, i + 1)
- else:
- conn_txt += """\n*Connector Damping, nonlinear, component=1, DEPENDENCIES=1"""
- for val in comp:
- conn_txt += "\n" + ", ".join(
- ["{:>12.3E}".format(x) if u <= 1 else ",{:>12d}".format(x) for u, x in enumerate(val)]
- )
-
- # Optional Choices
- if plastic_comp is not None:
- for i, comp in enumerate(plastic_comp):
- conn_txt += """\n*Connector Plasticity, component={}\n*Connector Hardening, definition=TABULAR""".format(
- i + 1
- )
+ return """\n*Connector Damping, component=1\n{0:.3E},""".format(damping)
+
+ conn_txt = ""
+ for i, comp in enumerate(damping):
+ if isinstance(comp, float):
+ conn_txt += """\n*Connector Damping, component={1} \n{0:.3E},""".format(comp, i + 1)
+ else:
+ conn_txt += """\n*Connector Damping, nonlinear, component=1, DEPENDENCIES=1"""
for val in comp:
- force, motion, rate = val
- conn_txt += "\n{}, {}, {}".format(force, motion, rate)
+ conn_txt += "\n" + ", ".join(
+ ["{:>12.3E}".format(x) if u <= 1 else ",{:>12d}".format(x) for u, x in enumerate(val)]
+ )
- if rigid_dofs is not None:
- conn_txt += "\n*Connector Elasticity, rigid\n "
- conn_txt += ", ".join(["{0}".format(x) for x in rigid_dofs])
+ return conn_txt
+
+
+def connector_rigid_str(con_sec: ConnectorSection) -> str:
+ rigid_dofs = con_sec.rigid_dofs
+
+ if rigid_dofs is None:
+ return ""
+
+ return "\n*Connector Elasticity, rigid\n " + ", ".join(["{0}".format(x) for x in rigid_dofs])
+
+
+def connector_section_str(con_sec: "ConnectorSection") -> str:
+ conn_txt = """*Connector Behavior, name={0}""".format(con_sec.name)
- if soft_elastic_dofs is not None:
- for dof in soft_elastic_dofs:
- conn_txt += "\n*Connector Elasticity, component={0}\n 5.0,\n".format(dof)
+ conn_txt += connector_elastic_str(con_sec)
+ conn_txt += connector_damping_str(con_sec)
+ conn_txt += connector_plastic_str(con_sec)
+ conn_txt += connector_rigid_str(con_sec)
return conn_txt
diff --git a/src/ada/fem/formats/abaqus/write/write_main_inp.py b/src/ada/fem/formats/abaqus/write/write_main_inp.py
index 91911ba56..334d2de7b 100644
--- a/src/ada/fem/formats/abaqus/write/write_main_inp.py
+++ b/src/ada/fem/formats/abaqus/write/write_main_inp.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import os
import shutil
from typing import TYPE_CHECKING
@@ -11,9 +13,10 @@
from ada.api.spatial import Assembly, Part
-def write_main_inp_str(assembly: "Assembly", analysis_dir) -> str:
+def write_main_inp_str(assembly: Assembly, analysis_dir) -> str:
part_str = "\n".join(map(part_inp_str, filter(skip_if_this, assembly.get_all_subparts())))
i_str = "\n".join((instance_str(i, analysis_dir) for i in filter(inst_skip, assembly.get_all_subparts()))).rstrip()
+ all_fem_parts = [p.fem for p in assembly.get_all_subparts(include_self=True)]
step_str = "** No Steps added"
incl = "*INCLUDE,INPUT=core_input_files"
@@ -26,7 +29,7 @@ def write_main_inp_str(assembly: "Assembly", analysis_dir) -> str:
step_str = "\n".join(list(map(main_step_inp_str, assembly.fem.steps))).rstrip()
if len(assembly.fem.amplitudes) > 0:
ampl_str = f"{incl}\\amplitude_data.inp"
- if len(assembly.fem.connector_sections) > 0:
+ if len([con for fem_part in all_fem_parts for con in fem_part.connector_sections.values()]) > 0:
consec_str = f"{incl}\\connector_sections.inp"
if len(assembly.fem.intprops) > 0:
iprop_str = f"{incl}\\interaction_prop.inp"
@@ -79,10 +82,12 @@ def instance_str(part: "Part", analysis_dir) -> str:
def skip_if_this(p):
if p.fem.initial_state is not None:
return False
- return len(p.fem.elements)
+
+ return len(p.fem.elements) + len(p.fem.nodes) > 0
def inst_skip(p):
if p.fem.initial_state is not None:
return True
- return len(p.fem.elements)
+
+ return len(p.fem.elements) + len(p.fem.nodes) > 0
diff --git a/src/ada/fem/formats/abaqus/write/write_output_requests.py b/src/ada/fem/formats/abaqus/write/write_output_requests.py
index 953899503..7c293b828 100644
--- a/src/ada/fem/formats/abaqus/write/write_output_requests.py
+++ b/src/ada/fem/formats/abaqus/write/write_output_requests.py
@@ -24,7 +24,15 @@ def hist_output_str(hist_output: HistOutput) -> str:
iname2 = get_instance_name(hist_output.fem_set[0], True)
fem_set_str = f", master={iname1}, slave={iname2}"
else:
- fem_set_str = "" if hist_output.fem_set is None else get_instance_name(hist_output.fem_set, True)
+ if hist_output.fem_set is None:
+ fem_set_str = ""
+ else:
+ instance_name = get_instance_name(hist_output.fem_set, True)
+ if hist_output.type in (HistOutput.TYPES.ENERGY, HistOutput.TYPES.CONTACT):
+ fem_set_str = f", elset={instance_name}"
+ else:
+ fem_set_str = instance_name
+
return f"""*Output, history, {hist_output.int_type}={hist_output.int_value}
** HISTORY OUTPUT: {hist_output.name}
**
diff --git a/src/ada/fem/formats/abaqus/write/write_parts.py b/src/ada/fem/formats/abaqus/write/write_parts.py
index dfaee3912..6d0fd4ae0 100644
--- a/src/ada/fem/formats/abaqus/write/write_parts.py
+++ b/src/ada/fem/formats/abaqus/write/write_parts.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import os
from typing import TYPE_CHECKING
@@ -16,9 +18,9 @@
from ada import Assembly, Part
-def write_all_parts(assembly: "Assembly", analysis_dir):
+def write_all_parts(assembly: Assembly, analysis_dir):
for part in assembly.get_all_subparts():
- if len(part.fem.elements) == 0:
+ if len(part.fem.elements) == 0 and len(part.fem.nodes) == 0:
continue
if assembly.convert_options.hinges_to_coupling is True:
diff --git a/src/ada/fem/formats/abaqus/write/write_steps.py b/src/ada/fem/formats/abaqus/write/write_steps.py
index 710eac1ce..8f83dd71c 100644
--- a/src/ada/fem/formats/abaqus/write/write_steps.py
+++ b/src/ada/fem/formats/abaqus/write/write_steps.py
@@ -8,7 +8,7 @@
StepEigen,
StepEigenComplex,
StepExplicit,
- StepImplicit,
+ StepImplicitStatic,
StepSteadyState,
)
@@ -18,7 +18,7 @@
if TYPE_CHECKING:
from ada import FEM
-_step_types = Union[StepEigen, StepExplicit, StepImplicit, StepSteadyState, StepEigenComplex]
+_step_types = Union[StepEigen, StepExplicit, StepImplicitStatic, StepSteadyState, StepEigenComplex]
def main_step_inp_str(step: _step_types) -> str:
@@ -125,7 +125,7 @@ def restart_request_str(step: _step_types):
return f"*Restart, write, frequency={solver_options.restart_int}"
-def dynamic_implicit_str(step: StepImplicit):
+def dynamic_implicit_str(step: StepImplicitStatic):
return f"""*Step, name={step.name}, nlgeom={bool2text(step.nl_geom)}, inc={step.total_incr}
*Dynamic,application={step.dyn_type}, INITIAL={bool2text(step.options.ABAQUS.init_accel_calc)}
{step.init_incr},{step.total_time},{step.min_incr}, {step.max_incr}"""
@@ -139,7 +139,7 @@ def explicit_str(step: StepExplicit):
0.06, 1.2"""
-def static_step_str(step: StepImplicit):
+def static_step_str(step: StepImplicitStatic):
stabilize_str = ""
solver_options = step.options.ABAQUS
stabilize = solver_options.stabilize
diff --git a/src/ada/fem/formats/abaqus/write/writer.py b/src/ada/fem/formats/abaqus/write/writer.py
index 0aa5c4e45..8c70d6862 100644
--- a/src/ada/fem/formats/abaqus/write/writer.py
+++ b/src/ada/fem/formats/abaqus/write/writer.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import os
from io import StringIO
from typing import TYPE_CHECKING
@@ -8,7 +7,7 @@
from ...tools import FEA_IO, tool_register
from .write_amplitudes import amplitudes_str
from .write_bc import boundary_conditions_str
-from .write_connectors import connector_sections_str, connectors_str
+from .write_connectors import connector_section_str, connector_str
from .write_constraints import constraints_str
from .write_elements import elements_str
from .write_interactions import eval_interactions, int_prop_str
@@ -30,7 +29,9 @@
@tool_register(fem_format=FEATypes.ABAQUS, io=FEA_IO.write)
-def to_fem(assembly: Assembly, name, analysis_dir=None, metadata=None, writable_obj: StringIO = None):
+def to_fem(
+ assembly: Assembly, name, analysis_dir=None, metadata=None, writable_obj: StringIO = None, model_data_only=False
+):
"""Build the Abaqus Analysis input deck"""
# Write part bulk files
@@ -38,21 +39,36 @@ def to_fem(assembly: Assembly, name, analysis_dir=None, metadata=None, writable_
# Write Assembly level files
core_dir = analysis_dir / r"core_input_files"
- os.makedirs(core_dir)
+ core_dir.mkdir(parents=True, exist_ok=True)
afem = assembly.fem
+ all_fem_parts = [p.fem for p in assembly.get_all_subparts(include_self=True)]
# Main Input File
with open(analysis_dir / f"{name}.inp", "w") as d:
d.write(write_main_inp_str(assembly, analysis_dir))
# Connector Sections
+ all_con_sections = [con for fem_part in all_fem_parts for con in fem_part.connector_sections.values()]
with open(core_dir / "connector_sections.inp", "w") as d:
- d.write(connector_sections_str(afem))
+ if len(all_con_sections) > 0:
+ for con_section in all_con_sections:
+ if con_section.str_override is not None:
+ d.write(con_section.str_override)
+ continue
+ d.write(connector_section_str(con_section))
+ else:
+ d.write("** No Connector Sections")
# Connectors
+ all_connectors = [con for fem_part in all_fem_parts for con in fem_part.elements.connectors]
with open(core_dir / "connectors.inp", "w") as d:
- d.write(connectors_str(afem) if len(list(afem.elements.connectors)) > 0 else "**")
+ if len(all_connectors) > 0:
+ for con in all_connectors:
+ d.write(connector_str(con, True))
+ # d.write(connectors_str(afem))
+ else:
+ d.write("** No Connectors")
# Constraints
with open(core_dir / "constraints.inp", "w") as d:
diff --git a/src/ada/fem/formats/calculix/write/write_steps.py b/src/ada/fem/formats/calculix/write/write_steps.py
index c32d3bdc0..be28a5aaa 100644
--- a/src/ada/fem/formats/calculix/write/write_steps.py
+++ b/src/ada/fem/formats/calculix/write/write_steps.py
@@ -1,10 +1,10 @@
from __future__ import annotations
from ada.core.utils import bool2text
-from ada.fem.steps import Step, StepEigen, StepImplicit
+from ada.fem.steps import Step, StepEigen, StepImplicitStatic
-def step_str(step: StepEigen | StepImplicit):
+def step_str(step: StepEigen | StepImplicitStatic):
from .write_loads import load_str
from .writer import bc_str, interactions_str
@@ -60,7 +60,7 @@ def step_str(step: StepEigen | StepImplicit):
*End Step"""
-def static_step(step: StepImplicit):
+def static_step(step: StepImplicitStatic):
return f"""*Step, nlgeom={bool2text(step.nl_geom)}, inc={step.total_incr}
*Static
{step.init_incr}, {step.total_time}, {step.min_incr}, {step.max_incr}"""
diff --git a/src/ada/fem/formats/calculix/write/writer.py b/src/ada/fem/formats/calculix/write/writer.py
index d2fa68112..144761f11 100644
--- a/src/ada/fem/formats/calculix/write/writer.py
+++ b/src/ada/fem/formats/calculix/write/writer.py
@@ -31,7 +31,7 @@
@tool_register(fem_format=FEATypes.CALCULIX, io=FEA_IO.write)
-def to_fem(assembly: Assembly, name, analysis_dir, metadata=None):
+def to_fem(assembly: Assembly, name, analysis_dir, metadata=None, model_data_only=False):
"""Write a Calculix input file stack"""
check_compatibility(assembly)
diff --git a/src/ada/fem/formats/code_aster/common.py b/src/ada/fem/formats/code_aster/common.py
index c19c9ba80..36521795e 100644
--- a/src/ada/fem/formats/code_aster/common.py
+++ b/src/ada/fem/formats/code_aster/common.py
@@ -1,7 +1,14 @@
from enum import Enum
from ada.config import logger
-from ada.fem.shapes.definitions import LineShapes, ShellShapes, SolidShapes
+from ada.fem.shapes.definitions import (
+ ConnectorTypes,
+ LineShapes,
+ MassTypes,
+ ShellShapes,
+ SolidShapes,
+ SpringTypes,
+)
def ada_to_med_type(value):
@@ -30,7 +37,11 @@ def med_to_ada_type(value):
_ada_to_med_type = {
+ MassTypes.MASS: "PO1",
+ SpringTypes.SPRING1: "PO1",
+ SpringTypes.SPRING2: "SE2",
LineShapes.LINE: "SE2",
+ ConnectorTypes.CONNECTOR: "SE2",
LineShapes.LINE3: "SE3",
ShellShapes.TRI: "TR3",
ShellShapes.TRI6: "TR6",
diff --git a/src/ada/fem/formats/code_aster/execute.py b/src/ada/fem/formats/code_aster/execute.py
index f784460de..cd089ea40 100644
--- a/src/ada/fem/formats/code_aster/execute.py
+++ b/src/ada/fem/formats/code_aster/execute.py
@@ -1,4 +1,9 @@
+import os
import pathlib
+import time
+from functools import wraps
+
+from ada.config import logger
from ..utils import LocalExecute
@@ -76,3 +81,80 @@ def write_export_file(name: str, cpus: int):
F rmed {name}.rmed R 80"""
return export_str
+
+
+def clear_temp_files(this_dir):
+ patterns = ["fort*", "glob*", "vola*", "pick.code_aster*"]
+
+ for pattern in patterns:
+ for f in this_dir.glob(pattern):
+ if f.is_file():
+ os.remove(f)
+
+
+def init_close_code_aster(func_=None, *, info_level=1, temp_dir=None):
+ def actual_decorator(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ print("Starting code_aster")
+ start = time.time()
+ conda_dir = pathlib.Path(os.getenv("CONDA_PREFIX"))
+ lib_dir = conda_dir / "lib"
+ lib_aster_dir = lib_dir / "aster"
+ os.environ["LD_LIBRARY_PATH"] = lib_aster_dir.as_posix() + ":" + os.getenv("LD_LIBRARY_PATH", "")
+ os.environ["PYTHONPATH"] = lib_aster_dir.as_posix() + ":" + os.getenv("PYTHONPATH", "")
+ os.environ["ASTER_LIBDIR"] = lib_dir.as_posix()
+ os.environ["ASTER_DATADIR"] = (conda_dir / "share/aster").as_posix()
+ os.environ["ASTER_LOCALEDIR"] = (conda_dir / "share/locale/aster").as_posix()
+ os.environ["ASTER_ELEMENTSDIR"] = lib_aster_dir.as_posix()
+
+ import code_aster
+
+ this_dir = pathlib.Path(".").resolve().absolute()
+
+ nonlocal temp_dir
+ if temp_dir is None:
+                clear_temp_files(this_dir)  # clear_temp_files is defined above in this module
+ else:
+ if isinstance(temp_dir, str):
+ temp_dir = pathlib.Path(temp_dir)
+ temp_dir = temp_dir.resolve().absolute()
+
+ if temp_dir.exists():
+ clear_temp_files(temp_dir)
+
+ temp_dir.mkdir(exist_ok=True, parents=True)
+ logger.info("Changing current directory to keep Code_Aster files away from the code directory")
+ os.chdir(temp_dir)
+
+ print(f"{info_level=}")
+ code_aster.init(INFO=info_level)
+
+ result = None
+ run_issue = None
+ try:
+ result = func(*args, **kwargs)
+ except BaseException as e:
+                # log the failure before re-raising so it is recorded even when the caller swallows it
+ logger.error(e)
+ run_issue = e
+ raise
+ finally:
+ code_aster.close()
+ if temp_dir is not None:
+ # Change back
+ os.chdir(this_dir)
+ end = time.time()
+ print(f"Simulation time: {end - start:.2f}s")
+
+ if result is not None:
+ return result
+
+ raise Exception(run_issue)
+
+ return wrapper
+
+ if func_ is None:
+ return actual_decorator
+ else:
+ return actual_decorator(func_)
diff --git a/src/ada/fem/formats/code_aster/results/results_helpers.py b/src/ada/fem/formats/code_aster/results/results_helpers.py
new file mode 100644
index 000000000..12075ac7a
--- /dev/null
+++ b/src/ada/fem/formats/code_aster/results/results_helpers.py
@@ -0,0 +1,50 @@
+import libaster
+import numpy as np
+
+from ada.fem.results.sqlite_store import SQLiteFEAStore
+
+
+def export_mesh_data_to_sqlite(instance_id, mesh_name: str, mesh: libaster.Mesh, sql_store: SQLiteFEAStore):
+ # Add ModelInstance to SQLite
+ sql_store.insert_table("ModelInstances", [(instance_id, mesh_name)])
+
+ # Get Point Data
+ coords = mesh.getCoordinates()
+ coord_values = np.asarray(coords.getValues()).reshape(-1, 3)
+ point_data = [(instance_id, i, x, y, z) for i, (x, y, z) in enumerate(coord_values, start=1)]
+ sql_store.insert_table("Points", point_data)
+
+ # Get Element Data
+ elem_conn_data = []
+ elem_info = []
+ for elem_index, nodal_conn in enumerate(mesh.getConnectivity(), start=0):
+ cell_type = mesh.getCellTypeName(elem_index)
+ elem_id = elem_index + 1
+ int_points = -1
+ elem_info.append((instance_id, elem_id, cell_type, int_points))
+ for seq, node_index in enumerate(nodal_conn):
+ node_id = node_index + 1
+ elem_conn_data.append((instance_id, elem_id, node_id, seq))
+
+ sql_store.insert_table("ElementConnectivity", elem_conn_data)
+ sql_store.insert_table("ElementInfo", elem_info)
+
+ # Insert Sets
+ set_id = 0
+
+ point_sets = []
+ point_set_names = mesh.getGroupsOfNodes()
+ point_set_nodes = mesh.getNodes(point_set_names)
+ for point_set_name, point_set_nodes in zip(point_set_names, point_set_nodes):
+ point_set = (set_id, point_set_name, instance_id, point_set_nodes)
+ point_sets.append(point_set)
+ set_id += 1
+ sql_store.insert_table("PointSets", point_sets)
+
+ groups_of_cells = mesh.getGroupsOfCells()
+ cell_ids = mesh.getCells(groups_of_cells)
+ cell_sets = []
+ for cell_group_name, cell_id in zip(groups_of_cells, cell_ids):
+ cell_sets.append((set_id, cell_group_name, instance_id, cell_id))
+ set_id += 1
+ sql_store.insert_table("ElementSets", cell_sets)
diff --git a/src/ada/fem/formats/code_aster/write/api_helpers.py b/src/ada/fem/formats/code_aster/write/api_helpers.py
new file mode 100644
index 000000000..eece6441e
--- /dev/null
+++ b/src/ada/fem/formats/code_aster/write/api_helpers.py
@@ -0,0 +1,195 @@
+# Code related to the new 16.4 api of code_aster
+from __future__ import annotations
+
+import pathlib
+from typing import TYPE_CHECKING, Iterable
+
+import code_aster
+from code_aster.Cata.Language.SyntaxObjects import _F
+from code_aster.Commands import AFFE_CARA_ELEM, AFFE_CHAR_MECA, AFFE_MODELE, DEFI_GROUP
+
+import ada.fem
+from ada.fem import Connector, ConnectorSection, Elem, Mass
+from ada.fem.formats.code_aster.write.writer import write_to_med
+from ada.fem.formats.utils import get_fem_model_from_assembly
+
+if TYPE_CHECKING:
+ from ada import FEM, Assembly
+
+DISPL_DOF_MAP = {1: "DX", 2: "DY", 3: "DZ", 4: "DRX", 5: "DRY", 6: "DRZ"}
+FORCE_DOF_MAP = {1: "FX", 2: "FY", 3: "FZ", 4: "FRX", 5: "FRY", 6: "FRZ"}
+
+
+def import_mesh(a: Assembly, scratch_dir):
+ if isinstance(scratch_dir, str):
+ scratch_dir = pathlib.Path(scratch_dir)
+
+ p = get_fem_model_from_assembly(a)
+ med_file = (scratch_dir / a.name).with_suffix(".med")
+ write_to_med(a.name, p, med_file)
+
+ mesh = code_aster.Mesh()
+ mesh.readMedFile(med_file.as_posix(), a.name)
+
+ DEFI_GROUP(MAILLAGE=mesh, reuse=mesh, CREA_GROUP_NO=_F(TOUT_GROUP_MA="OUI"))
+ return mesh
+
+
+def assembly_fem_iterator(a: Assembly) -> Iterable[FEM]:
+ parts_w_fem = [p for p in a.get_all_parts_in_assembly() if not p.fem.is_empty()]
+ if len(parts_w_fem) != 1:
+ raise NotImplementedError("Assemblies with multiple parts containing FEM data is not yet supported")
+ p = parts_w_fem[0]
+ yield a.fem
+ yield p.fem
+
+
+def assembly_element_iterator(a: Assembly) -> Iterable[Elem]:
+ for fem in assembly_fem_iterator(a):
+ for elem in fem.elements:
+ yield elem
+
+
+def assign_element_definitions(a: Assembly, mesh: code_aster.Mesh) -> code_aster.Model | None:
+ discrete_elements = []
+ line_elements = []
+
+ for elem in assembly_element_iterator(a):
+ if isinstance(elem, (Connector, Mass)):
+ discrete_elements.append(elem)
+ elif isinstance(elem, Elem) and elem.fem_sec.type:
+ line_elements.append(elem)
+
+ # Discrete Elements
+ discrete_modelings = []
+ if len(discrete_elements) > 0:
+ elset_names = [el.elset.name for el in discrete_elements]
+ discrete_modelings.append(
+ _F(
+ GROUP_MA=elset_names,
+ PHENOMENE="MECANIQUE",
+ MODELISATION="DIS_T",
+ )
+ )
+
+ model: code_aster.Model = AFFE_MODELE(AFFE=(*discrete_modelings,), MAILLAGE=mesh)
+ return model
+
+
+def assign_material_definitions(a: Assembly, mesh: code_aster.Mesh) -> code_aster.MaterialField:
+ mat_map = {}
+ for elem in assembly_element_iterator(a):
+ if isinstance(elem, Connector):
+ conn_prop = elem.con_sec
+ if conn_prop not in mat_map.keys():
+ mat_map[conn_prop] = []
+ mat_map[conn_prop].append(elem.elset.name)
+ elif isinstance(elem, Mass):
+ continue
+ else:
+ mat = elem.fem_sec.material
+ if mat not in mat_map.keys():
+ mat_map[mat] = []
+ mat_map[mat].append(elem.elset.name)
+
+ material = code_aster.MaterialField(mesh)
+ for mat, element_names in mat_map.items():
+ if isinstance(mat, ConnectorSection):
+ if isinstance(mat.elastic_comp, (float, int)):
+ pass
+ # Todo: figure out where this is supposed to be implemented
+ else:
+ raise NotImplementedError("Currently only supports linear elastic connectors")
+
+ dummy = code_aster.Material()
+ dummy.addProperties("ELAS", E=1, NU=0.3, RHO=1)
+ material.addMaterialOnGroupOfCells(dummy, element_names)
+ else:
+ raise NotImplementedError("")
+
+ material.build()
+ return material
+
+
+def assign_element_characteristics(
+ a: Assembly, model: code_aster.Model, rigid_size=1e8
+) -> code_aster.ElementaryCharacteristics:
+ discrete_elements = []
+
+ for elem in assembly_element_iterator(a):
+ if isinstance(elem, Mass):
+ mass = elem.mass
+ if isinstance(mass, (float, int)):
+ value = mass
+ else:
+ raise NotImplementedError("A non-scalar mass is not yet supported")
+ mass_def = _F(GROUP_MA=elem.elset.name, CARA="M_T_D_N", VALE=value)
+ discrete_elements.append(mass_def)
+ elif isinstance(elem, Connector):
+ con_sec = elem.con_sec
+ if isinstance(con_sec.elastic_comp, (float, int)):
+ value = [con_sec.elastic_comp, con_sec.elastic_comp, con_sec.elastic_comp]
+ else:
+ raise NotImplementedError("Only scalar values are currently accepted for Connector elasticity")
+
+ if isinstance(con_sec.rigid_dofs, list):
+ for index in con_sec.rigid_dofs:
+ value[index] = rigid_size
+
+ con_elem = _F(GROUP_MA=elem.elset.name, CARA="K_T_D_L", VALE=value, REPERE="GLOBAL")
+ discrete_elements.append(con_elem)
+
+ else:
+ raise NotImplementedError(f"Currently unsupported non-discrete element type {elem}")
+
+ elem_car: code_aster.ElementaryCharacteristics = AFFE_CARA_ELEM(MODELE=model, DISCRET=discrete_elements)
+ return elem_car
+
+
+def assign_boundary_conditions(a: Assembly, model: code_aster.Model) -> code_aster.MechanicalLoadReal:
+ imposed_bcs = []
+
+ for fem in assembly_fem_iterator(a):
+ for bc in fem.bcs:
+ # Todo: Need to figure out rules for when code_aster accepts imposing constraints on nodal rotations.
+ skip_rotations = True
+
+ if skip_rotations:
+ dofs = [x for x in bc.dofs if x < 4]
+ else:
+ dofs = bc.dofs
+ dofs_constrained = {DISPL_DOF_MAP[x]: 0 for x in dofs}
+ ca_bc = _F(GROUP_NO=bc.fem_set.name, **dofs_constrained)
+ imposed_bcs.append(ca_bc)
+
+ fix: code_aster.MechanicalLoadReal = AFFE_CHAR_MECA(MODELE=model, DDL_IMPO=imposed_bcs)
+ return fix
+
+
+def assign_forces(a: Assembly, model: code_aster.Model) -> code_aster.MechanicalLoadReal:
+ nodal_loads = []
+ for fem in assembly_fem_iterator(a):
+ for load in fem.get_all_loads():
+ imposed_loads = {
+ FORCE_DOF_MAP[x]: force for x, force in enumerate(load.forces, start=1) if float(force) != 0.0
+ }
+ ca_load = _F(GROUP_NO=load.fem_set.name, **imposed_loads)
+ nodal_loads.append(ca_load)
+
+ forces: code_aster.MechanicalLoadReal = AFFE_CHAR_MECA(MODELE=model, FORCE_NODALE=nodal_loads)
+ return forces
+
+
+def assign_steps(
+ a: Assembly,
+ model: code_aster.Model,
+ fix: code_aster.MechanicalLoadReal,
+ forces: code_aster.MechanicalLoadReal,
+ material_field: code_aster.MaterialField,
+ elem_car: code_aster.ElementaryCharacteristics,
+) -> code_aster.ElasticResult:
+ for step in a.fem.steps:
+ if isinstance(step, ada.fem.StepImplicitDynamic):
+ raise NotImplementedError("Not yet implemented 'StepImplicitDynamic'")
+ elif isinstance(step, ada.fem.StepImplicitStatic):
+ pass
diff --git a/src/ada/fem/formats/code_aster/write/steps/dynamic.py b/src/ada/fem/formats/code_aster/write/steps/dynamic.py
new file mode 100644
index 000000000..d7bf37ac6
--- /dev/null
+++ b/src/ada/fem/formats/code_aster/write/steps/dynamic.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from ada.fem.steps import StepImplicitDynamic
+
+if TYPE_CHECKING:
+ import ada
+
+
+def step_dynamic_str(step: StepImplicitDynamic, part: ada.Part) -> str:
+ return ""
diff --git a/src/ada/fem/formats/code_aster/write/steps/static.py b/src/ada/fem/formats/code_aster/write/steps/static.py
index f42b6edd0..588710a9c 100644
--- a/src/ada/fem/formats/code_aster/write/steps/static.py
+++ b/src/ada/fem/formats/code_aster/write/steps/static.py
@@ -2,7 +2,7 @@
from typing import TYPE_CHECKING
-from ada.fem import StepImplicit
+from ada.fem import StepImplicitStatic
from .static_lin import step_static_lin_str
from .static_nonlin import step_static_nonlin_str
@@ -11,7 +11,7 @@
from ada.api.spatial import Part
-def step_static_str(step: StepImplicit, part: Part) -> str:
+def step_static_str(step: StepImplicitStatic, part: Part) -> str:
if step.nl_geom is True:
return step_static_nonlin_str(step, part)
else:
diff --git a/src/ada/fem/formats/code_aster/write/steps/static_lin.py b/src/ada/fem/formats/code_aster/write/steps/static_lin.py
index 31b0ee161..fec0686df 100644
--- a/src/ada/fem/formats/code_aster/write/steps/static_lin.py
+++ b/src/ada/fem/formats/code_aster/write/steps/static_lin.py
@@ -2,7 +2,7 @@
from typing import TYPE_CHECKING
-from ada.fem import StepImplicit
+from ada.fem import StepImplicitStatic
from ..write_loads import write_load
from .fields import create_field_output_str
@@ -11,7 +11,7 @@
from ada.api.spatial import Part
-def step_static_lin_str(step: StepImplicit, part: Part) -> str:
+def step_static_lin_str(step: StepImplicitStatic, part: Part) -> str:
from ada.fem.exceptions.model_definition import (
NoBoundaryConditionsApplied,
NoLoadsApplied,
diff --git a/src/ada/fem/formats/code_aster/write/steps/static_nonlin.py b/src/ada/fem/formats/code_aster/write/steps/static_nonlin.py
index 7ff2c54ce..46fbbd70f 100644
--- a/src/ada/fem/formats/code_aster/write/steps/static_nonlin.py
+++ b/src/ada/fem/formats/code_aster/write/steps/static_nonlin.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING
-from ada.fem import StepImplicit
+from ada.fem import StepImplicitStatic
from ada.fem.loads import Load
from ..write_loads import write_load
@@ -149,7 +149,7 @@ def write(self):
)"""
-def step_static_nonlin_str(step: StepImplicit, part: Part) -> str:
+def step_static_nonlin_str(step: StepImplicitStatic, part: Part) -> str:
from ada.fem.exceptions.model_definition import NoLoadsApplied
load_str = "\n".join(list(map(write_load, step.loads)))
diff --git a/src/ada/fem/formats/code_aster/write/write_med.py b/src/ada/fem/formats/code_aster/write/write_med.py
index 7512429ed..6d724267f 100644
--- a/src/ada/fem/formats/code_aster/write/write_med.py
+++ b/src/ada/fem/formats/code_aster/write/write_med.py
@@ -1,8 +1,11 @@
+from __future__ import annotations
+
from typing import TYPE_CHECKING
+import h5py
import numpy as np
-from ada.config import Settings, logger
+from ada.config import Settings
from ada.fem.shapes import definitions as shape_def
from ..common import IntType, ada_to_med_type
@@ -12,7 +15,9 @@
from ada.api.spatial import Part
-def med_elements(part: "Part", time_step, profile, families, int_type: IntType = IntType.INT32):
+def med_elements(
+ part: Part, time_step: h5py.Group, profile: str, families: h5py.Group, int_type: IntType = IntType.INT32
+):
"""
Add the following ['FAM', 'NOD', 'NUM'] to the 'MAI' group
@@ -24,13 +29,17 @@ def get_node_ids_from_element(el_):
elements_group = time_step.create_group("MAI")
elements_group.attrs.create("CGT", 1)
+
for group, elements in part.fem.elements.group_by_type():
- if isinstance(group, (shape_def.MassTypes, shape_def.SpringTypes)):
- logger.warning("NotImplemented: Skipping Mass or Spring Elements")
- continue
med_type = ada_to_med_type(group)
elements = list(elements)
- cells = np.array(list(map(get_node_ids_from_element, elements)))
+ if isinstance(group, (shape_def.MassTypes, shape_def.SpringTypes)):
+ cells = np.array([el.members[0].id for el in elements])
+ else:
+ cells = np.array(list(map(get_node_ids_from_element, elements)))
+
+ if med_type in elements_group:
+ raise ValueError(f"med_type {med_type} is already defined. rewrite is needed.")
med_cells = elements_group.create_group(med_type)
med_cells.attrs.create("CGT", 1)
diff --git a/src/ada/fem/formats/code_aster/write/write_sets.py b/src/ada/fem/formats/code_aster/write/write_sets.py
index 590837dc4..92165d8a5 100644
--- a/src/ada/fem/formats/code_aster/write/write_sets.py
+++ b/src/ada/fem/formats/code_aster/write/write_sets.py
@@ -12,6 +12,10 @@
from ada.api.spatial import Part
+def get_node_ids_from_element(el_):
+ return [int(n.id - 1) for n in el_.nodes]
+
+
def _add_cell_sets(cells_group, part: "Part", families):
"""
@@ -33,13 +37,7 @@ def _add_cell_sets(cells_group, part: "Part", families):
res_data = resolve_ids_in_multiple(tags, tags_data, True)
- def get_node_ids_from_element(el_):
- return [int(n.id - 1) for n in el_.nodes]
-
for group, elements in part.fem.elements.group_by_type():
- if isinstance(group, (shape_def.MassTypes, shape_def.SpringTypes)):
- logger.warning("NotImplemented: Skipping Mass or Spring Elements")
- continue
elements = list(elements)
cell_ids = {el.id: i for i, el in enumerate(elements)}
@@ -50,7 +48,11 @@ def get_node_ids_from_element(el_):
for index in list_filtered:
cell_data[index] = t
- cells = np.array(list(map(get_node_ids_from_element, elements)))
+ if isinstance(group, (shape_def.MassTypes, shape_def.SpringTypes)):
+ cells = np.array([el.members[0].id for el in elements])
+ else:
+ cells = np.array(list(map(get_node_ids_from_element, elements)))
+
med_type = ada_to_med_type(group)
med_cells = cells_group.get(med_type)
family = med_cells.create_dataset("FAM", data=cell_data)
@@ -61,11 +63,6 @@ def get_node_ids_from_element(el_):
def _add_node_sets(nodes_group, part: "Part", points, families):
- """
- :param nodes_group:
- :param part:
- :param families:
- """
tags = dict()
nsets = dict()
for key, val in part.fem.nsets.items():
diff --git a/src/ada/fem/formats/code_aster/write/write_steps.py b/src/ada/fem/formats/code_aster/write/write_steps.py
index f9b2bcdf0..2e8c1a620 100644
--- a/src/ada/fem/formats/code_aster/write/write_steps.py
+++ b/src/ada/fem/formats/code_aster/write/write_steps.py
@@ -2,17 +2,17 @@
from typing import TYPE_CHECKING
-from ada.fem import StepEigen, StepImplicit
+from ada.fem import StepEigen, StepImplicitStatic
if TYPE_CHECKING:
from ada.api.spatial import Part
-from .steps import eigen, static
+from .steps import dynamic, eigen, static
-def create_step_str(step: StepEigen | StepImplicit, part: Part) -> str:
+def create_step_str(step: StepEigen | StepImplicitStatic, part: Part) -> str:
st = StepEigen.TYPES
- step_map = {st.STATIC: static.step_static_str, st.EIGEN: eigen.step_eig_str}
+ step_map = {st.STATIC: static.step_static_str, st.EIGEN: eigen.step_eig_str, st.DYNAMIC: dynamic.step_dynamic_str}
step_writer = step_map.get(step.type, None)
diff --git a/src/ada/fem/formats/code_aster/write/writer.py b/src/ada/fem/formats/code_aster/write/writer.py
index 330ffcfbf..f3059c427 100644
--- a/src/ada/fem/formats/code_aster/write/writer.py
+++ b/src/ada/fem/formats/code_aster/write/writer.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import pathlib
from typing import TYPE_CHECKING
import h5py
@@ -21,7 +20,7 @@
from ada.api.spatial import Assembly, Part
-def to_fem(assembly: Assembly, name, analysis_dir, metadata=None):
+def to_fem(assembly: Assembly, name, analysis_dir, metadata=None, model_data_only=False):
"""Write Code_Aster .med and .comm file from Assembly data"""
from ada.materials.utils import shorten_material_names
@@ -35,7 +34,11 @@ def to_fem(assembly: Assembly, name, analysis_dir, metadata=None):
shorten_material_names(assembly)
# TODO: Implement support for multiple parts. Need to understand how submeshes in Salome and Code Aster works.
# for p in filter(lambda x: len(x.fem.elements) != 0, assembly.get_all_parts_in_assembly(True)):
- write_to_med(name, p, analysis_dir)
+
+ filename = (analysis_dir / name).with_suffix(".med")
+ write_to_med(name, p, filename)
+ if model_data_only:
+ return
with open((analysis_dir / name).with_suffix(".comm"), "w") as f:
f.write(create_comm_str(assembly, p))
@@ -118,12 +121,9 @@ def create_comm_str(assembly: Assembly, part: Part) -> str:
return comm_str
-def write_to_med(name, part: Part, analysis_dir):
+def write_to_med(name, part: Part, filename):
"""Custom Method for writing a part directly based on meshio"""
- analysis_dir = pathlib.Path(analysis_dir)
- filename = (analysis_dir / name).with_suffix(".med")
-
with h5py.File(filename, "w") as f:
mesh_name = name if name is not None else part.fem.name
# Strangely the version must be 3.0.x
diff --git a/src/ada/fem/formats/general.py b/src/ada/fem/formats/general.py
index 7b48e9b5e..c5f278f9d 100644
--- a/src/ada/fem/formats/general.py
+++ b/src/ada/fem/formats/general.py
@@ -114,6 +114,7 @@ def write_to_fem(
scratch_dir,
metadata: dict,
make_zip_file,
+ model_data_only=False,
):
from ada.fem.formats.utils import default_fem_res_path, folder_prep, should_convert
@@ -131,7 +132,7 @@ def write_to_fem(
if fem_exporter is None:
raise ValueError(f'FEM export for "{fem_format}" using "{fem_converter}" is currently not supported')
- fem_exporter(assembly, name, analysis_dir, metadata)
+ fem_exporter(assembly, name, analysis_dir, metadata, model_data_only)
if make_zip_file is True:
import shutil
diff --git a/src/ada/fem/formats/mesh_io/writer.py b/src/ada/fem/formats/mesh_io/writer.py
index ec227d5a3..ec146bac8 100644
--- a/src/ada/fem/formats/mesh_io/writer.py
+++ b/src/ada/fem/formats/mesh_io/writer.py
@@ -12,7 +12,7 @@
from ada.fem.shapes.definitions import MassTypes, SpringTypes
-def meshio_to_fem(assembly: Assembly, name: str, scratch_dir=None, metadata=None) -> None:
+def meshio_to_fem(assembly: Assembly, name: str, scratch_dir=None, metadata=None, model_data_only=False) -> None:
"""Convert Assembly information to FEM using Meshio"""
if scratch_dir is None:
scratch_dir = _Settings.scratch_dir
diff --git a/src/ada/fem/formats/sesam/results/read_sif.py b/src/ada/fem/formats/sesam/results/read_sif.py
index fce0c07ac..c2935982d 100644
--- a/src/ada/fem/formats/sesam/results/read_sif.py
+++ b/src/ada/fem/formats/sesam/results/read_sif.py
@@ -430,17 +430,18 @@ def get_sif_mesh(self) -> Mesh:
sif = self.sif
- nodes = FemNodes(coords=sif.nodes[:, 1:], identifiers=sif.node_ids[:, 0])
+ nodes = FemNodes(coords=sif.nodes[:, 1:], identifiers=np.asarray(sif.node_ids[:, 0], dtype=int))
+ sorted_elem_data = sorted(sif.elements, key=lambda x: x[0])
elem_blocks = []
- for eltype, elements in groupby(sif.elements, key=lambda x: x[0]):
+ for eltype, elements in groupby(sorted_elem_data, key=lambda x: x[0]):
elem_type = int(eltype)
elem_data = list(elements)
general_elem_type = sesam_eltype_2_general(elem_type)
num_nodes = ShapeResolver.get_el_nodes_from_type(general_elem_type)
elem_identifiers = np.array([x[1] for x in elem_data], dtype=int)
- elem_node_refs = np.array([x[2][:num_nodes] for x in elem_data], dtype=float)
- res = sesam_eltype_2_general(elem_type)
- elem_info = ElementInfo(type=res, source_software=FEATypes.SESAM, source_type=elem_type)
+ elem_node_refs = np.array([x[2][:num_nodes] for x in elem_data], dtype=int)
+
+ elem_info = ElementInfo(type=general_elem_type, source_software=FEATypes.SESAM, source_type=elem_type)
elem_blocks.append(
ElementBlock(elem_info=elem_info, node_refs=elem_node_refs, identifiers=elem_identifiers)
)
@@ -470,6 +471,10 @@ def get_sif_results(self) -> list[ElementFieldData | NodalFieldData]:
def get_result_name_map(self):
tdresref = self.sif.get_tdresref()
rdresref = self.sif.get_rdresref()
+ if tdresref is None:
+ # No STEP name is defined
+ return {key: key for key, value in rdresref.items()}
+
return {key: tdresref[value[1]][-1] for key, value in rdresref.items()}
def get_nodal_data(self) -> list[NodalFieldData]:
@@ -519,10 +524,12 @@ def _get_line_field_data(self, rv_forces, ires, irforc, elem_type, nsp) -> Eleme
rdforces_map = self.sif.get_rdforces_map()
force_types = [FORCE_MAP[c][0] for c in rdforces_map[irforc]]
data = np.array(list(_iter_line_forces(rv_forces, rdforces_map, nsp)))
+ elem_type_ada = sesam_eltype_2_general(elem_type)
return ElementFieldData(
"FORCES",
int(ires),
components=force_types,
+ elem_type=elem_type_ada,
values=data,
field_pos=ElementFieldData.field_pos.INT,
int_positions=INT_LOCATIONS[elem_type],
@@ -561,9 +568,11 @@ def _get_shell_field_data(self, rv_stresses, ires, irstrs, elem_type: int, nsp)
rdstress_map = self.sif.get_rdstress_map()
stress_types = [STRESS_MAP[c][0] for c in rdstress_map[irstrs]]
data = np.array(list(_iter_shell_stress(rv_stresses, rdstress_map, nsp)))
+ elem_type_ada = sesam_eltype_2_general(elem_type)
return ElementFieldData(
"STRESS",
int(ires),
+ elem_type=elem_type_ada,
components=stress_types,
values=data,
field_pos=ElementFieldData.field_pos.INT,
diff --git a/src/ada/fem/formats/sesam/results/sin2sif.py b/src/ada/fem/formats/sesam/results/sin2sif.py
index 0b6b4d82c..f701aeeb7 100644
--- a/src/ada/fem/formats/sesam/results/sin2sif.py
+++ b/src/ada/fem/formats/sesam/results/sin2sif.py
@@ -31,7 +31,7 @@ def convert_sin_to_sif(sin_file: str | pathlib.Path, use_siu=False) -> None:
run_str = f"{exe_str}\nstart /w %EXEPATH% /INTER=L/COM-FI=run_prepost.jnl {run_params} & {log_params}"
- run_bat_file = sin_file.parent / "run_sin2sif.bat"
+ run_bat_file = (sin_file.parent / "run_sin2sif.bat").resolve().absolute()
with open(run_bat_file, "w") as f:
f.write(run_str)
diff --git a/src/ada/fem/formats/sesam/write/write_sections.py b/src/ada/fem/formats/sesam/write/write_sections.py
index 843a71acb..48a05a4a5 100644
--- a/src/ada/fem/formats/sesam/write/write_sections.py
+++ b/src/ada/fem/formats/sesam/write/write_sections.py
@@ -5,7 +5,6 @@
from ada.core.utils import Counter, make_name_fem_ready
from ada.fem import FemSection
from ada.fem.exceptions.element_support import IncompatibleElements
-from ada.fem.shapes import ElemType
from .write_utils import write_ff
@@ -35,24 +34,24 @@ def sections_str(fem: FEM, thick_map) -> str:
names_str = ""
concept_str = ""
tdsconc_str, sconcept_str, scon_mesh = "", "", ""
-
+ shid.set_i(max(fem.sections.id_map.keys()) + 1)
sec_names = []
- for fem_sec in fem.sections:
- if fem_sec.type == ElemType.LINE:
- sec = create_line_section(fem_sec, sec_names, sec_ids)
- names_str += sec.names_str
- sec_str += sec.sec_str
-
- stru = create_sconcept_str(fem_sec)
- tdsconc_str += stru.tdsconc_str
- sconcept_str += stru.sconcept_str
- scon_mesh += stru.scon_mesh
- elif fem_sec.type == ElemType.SHELL:
- sec_str += create_shell_section_str(fem_sec, thick_map)
- elif fem_sec.type == ElemType.SOLID:
- sec_str += create_solid_section(fem_sec)
- else:
- raise IncompatibleElements(f"Solid element type {fem_sec.type} is not yet supported for writing to Sesam")
+ for sh_sec in fem.sections.shells:
+ sec_str += create_shell_section_str(sh_sec, thick_map)
+
+ for fem_sec in fem.sections.lines:
+ sec = create_line_section(fem_sec, sec_names, sec_ids)
+ names_str += sec.names_str
+ sec_str += sec.sec_str
+
+ stru = create_sconcept_str(fem_sec)
+ tdsconc_str += stru.tdsconc_str
+ sconcept_str += stru.sconcept_str
+ scon_mesh += stru.scon_mesh
+
+ # TODO: Add support for solid elements
+ for fem_sec in fem.sections.solids:
+ sec_str += create_solid_section(fem_sec)
return names_str + sec_str + concept_str + tdsconc_str + sconcept_str + scon_mesh
@@ -62,7 +61,7 @@ def create_shell_section_str(fem_sec: FemSection, thick_map) -> str:
sh_id = next(shid)
thick_map[fem_sec.thickness] = sh_id
else:
- sh_id = thick_map[fem_sec.thickness]
+ return ""
return write_ff("GELTH", [(sh_id, fem_sec.thickness, 5)])
diff --git a/src/ada/fem/formats/sesam/write/write_steps.py b/src/ada/fem/formats/sesam/write/write_steps.py
index 408751fb7..037b37f41 100644
--- a/src/ada/fem/formats/sesam/write/write_steps.py
+++ b/src/ada/fem/formats/sesam/write/write_steps.py
@@ -3,12 +3,12 @@
import datetime
from ada.core.utils import get_current_user
-from ada.fem.steps import Step, StepEigen, StepImplicit
+from ada.fem.steps import Step, StepEigen, StepImplicitStatic
from .templates import sestra_eig_inp_str, sestra_header_inp_str, sestra_static_inp_str
-def write_sestra_inp(name, step: StepEigen | StepImplicit):
+def write_sestra_inp(name, step: StepEigen | StepImplicitStatic):
step_map = {Step.TYPES.EIGEN: write_sestra_eig_str, Step.TYPES.STATIC: write_sestra_static_str}
step_str_writer = step_map.get(step.type, None)
if step_str_writer is None:
@@ -26,5 +26,5 @@ def write_sestra_eig_str(name: str, step: StepEigen):
return sestra_eig_inp_str.format(name=name, modes=step.num_eigen_modes, supnr=1)
-def write_sestra_static_str(name: str, step: StepImplicit):
+def write_sestra_static_str(name: str, step: StepImplicitStatic):
return sestra_static_inp_str.format(name=name, supnr=1)
diff --git a/src/ada/fem/formats/sesam/write/writer.py b/src/ada/fem/formats/sesam/write/writer.py
index c8b80ea0b..c7f96e4b3 100644
--- a/src/ada/fem/formats/sesam/write/writer.py
+++ b/src/ada/fem/formats/sesam/write/writer.py
@@ -7,6 +7,7 @@
from ada.config import logger
from ada.core.utils import Counter, get_current_user
from ada.fem import FEM
+from ada.fem.exceptions.model_definition import DoesNotSupportMultiPart
from .templates import top_level_fem_str
from .write_utils import write_ff
@@ -15,7 +16,7 @@
from ada import Material
-def to_fem(assembly, name, analysis_dir=None, metadata=None):
+def to_fem(assembly, name, analysis_dir=None, metadata=None, model_data_only=False):
from .write_constraints import constraint_str
from .write_elements import elem_str
from .write_loads import loads_str
@@ -31,7 +32,9 @@ def to_fem(assembly, name, analysis_dir=None, metadata=None):
parts = list(filter(lambda x: len(x.fem.nodes) > 0, assembly.get_all_subparts(include_self=True)))
if len(parts) != 1:
- raise ValueError(f"Sesam writer currently only works for a single part. Currently found {len(parts)}")
+ raise DoesNotSupportMultiPart(
+ f"Sesam writer currently only works for a single part. Currently found {len(parts)}"
+ )
if len(assembly.fem.steps) > 1:
logger.error("Sesam writer currently only supports 1 step. Will only use 1st step")
diff --git a/src/ada/fem/formats/usfos/write/writer.py b/src/ada/fem/formats/usfos/write/writer.py
index 7c39e3cca..a186ab257 100644
--- a/src/ada/fem/formats/usfos/write/writer.py
+++ b/src/ada/fem/formats/usfos/write/writer.py
@@ -9,7 +9,7 @@
from .write_profiles import sections_str
-def to_fem(assembly: Assembly, name, analysis_dir=None, metadata=None):
+def to_fem(assembly: Assembly, name, analysis_dir=None, metadata=None, model_data_only=False):
metadata = dict() if metadata is None else metadata
assembly.consolidate_materials()
parts = list(filter(lambda x: len(x.fem.nodes) > 0, assembly.get_all_subparts(include_self=True)))
diff --git a/src/ada/fem/formats/utils.py b/src/ada/fem/formats/utils.py
index 2d55b93ed..5fb617252 100644
--- a/src/ada/fem/formats/utils.py
+++ b/src/ada/fem/formats/utils.py
@@ -209,11 +209,14 @@ def get_exe_path(fea_type: FEATypes):
exe_name = fea_type
env_name = f"ADA_{exe_name}_exe"
+ env_path = os.getenv(env_name, None)
+ if env_path is not None:
+ exe_path = pathlib.Path(env_path)
+ if exe_path.exists():
+ return exe_path
if Settings.fem_exe_paths.get(exe_name, None) is not None:
exe_path = Settings.fem_exe_paths[exe_name]
- elif os.getenv(env_name):
- exe_path = os.getenv(env_name)
elif shutil.which(f"{exe_name}"):
exe_path = shutil.which(f"{exe_name}")
elif shutil.which(f"{exe_name}.exe"):
@@ -289,7 +292,7 @@ def _overwrite_dir(analysis_dir):
send2trash(analysis_dir)
else:
shutil.rmtree(analysis_dir)
- except WindowsError as e:
+ except BaseException as e:
print(f"Failed to delete due to '{e}'")
os.makedirs(analysis_dir, exist_ok=True)
@@ -481,15 +484,19 @@ def convert_shell_elem_to_plates(elem: Elem, parent: Part) -> list[Plate]:
)
)
else:
- plates.append(
- Plate.from_3d_points(
- f"sh{elem.id}",
- [n.p for n in elem.nodes],
- fem_sec.thickness,
- mat=fem_sec.material,
- parent=parent,
+ try:
+ plates.append(
+ Plate.from_3d_points(
+ f"sh{elem.id}",
+ [n.p for n in elem.nodes],
+ fem_sec.thickness,
+ mat=fem_sec.material,
+ parent=parent,
+ )
)
- )
+ except BaseException as e:
+ logger.error(f"Unable to convert {elem.id=} to plate due to {e}")
+
return plates
diff --git a/src/ada/fem/formats/vtu/__init__.py b/src/ada/fem/formats/vtu/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/ada/fem/formats/vtu/write.py b/src/ada/fem/formats/vtu/write.py
new file mode 100644
index 000000000..e3348f5d2
--- /dev/null
+++ b/src/ada/fem/formats/vtu/write.py
@@ -0,0 +1,145 @@
+import base64
+import pathlib
+import struct
+import xml.etree.ElementTree as ET
+
+import numpy as np
+
+from ada.fem.results.common import ElementBlock, FemNodes
+from ada.fem.shapes.definitions import LineShapes, ShellShapes
+
+
+def array_to_binary(array, dtype):
+ binary_data = struct.pack(f"{len(array)}{dtype}", *array)
+ header = struct.pack("I", len(binary_data))
+ return base64.b64encode(header + binary_data).decode()
+
+
+# Mapping between custom shape enums and VTK types
+# https://vtk.org/doc/nightly/html/vtkCellType_8h_source.html
+VTK_TYPE_MAP = {
+ LineShapes.LINE: 3,
+ LineShapes.LINE3: 21,
+ ShellShapes.TRI: 5,
+ ShellShapes.QUAD: 9,
+}
+
+# New mapping dictionary
+numpy_to_vtu_type = {
+ np.dtype(np.float32): "Float32",
+ np.dtype(np.float64): "Float64",
+ np.dtype(np.int8): "Int8",
+ np.dtype(np.int16): "Int16",
+ np.dtype(np.int32): "Int32",
+ np.dtype(np.int64): "Int64",
+ np.dtype(np.uint8): "UInt8",
+ np.dtype(np.uint16): "UInt16",
+ np.dtype(np.uint32): "UInt32",
+ np.dtype(np.uint64): "UInt64",
+}
+
+numpy_to_struct_type = {
+ np.dtype(np.float32): "f",
+ np.dtype(np.float64): "d",
+ np.dtype(np.int8): "b",
+ np.dtype(np.int16): "h",
+ np.dtype(np.int32): "i",
+ np.dtype(np.int64): "q",
+ np.dtype(np.uint8): "B",
+ np.dtype(np.uint16): "H",
+ np.dtype(np.uint32): "I",
+ np.dtype(np.uint64): "Q",
+}
+
+
+def write_to_vtu_object(nodes: FemNodes, element_blocks: list[ElementBlock], point_data: dict, cell_data: dict):
+ all_node_refs = []
+ all_types = []
+ offsets = []
+ offset = 0
+
+ for block in element_blocks:
+ vtk_type = VTK_TYPE_MAP.get(block.elem_info.type)
+ # Block node_refs starts at 1, but VTK starts at 0
+        block_refs = block.node_refs - 1
+        for elem_refs in block_refs:
+            all_node_refs.extend(elem_refs)
+            all_types.append(vtk_type)
+            offset += len(elem_refs)
+            offsets.append(offset)
+
+ root = ET.Element("VTKFile", type="UnstructuredGrid", version="1.0", byte_order="LittleEndian")
+ unstructured_grid = ET.SubElement(root, "UnstructuredGrid")
+ piece = ET.SubElement(
+ unstructured_grid, "Piece", NumberOfPoints=str(nodes.coords.shape[0]), NumberOfCells=str(len(all_types))
+ )
+
+ # Points
+ points_element = ET.SubElement(piece, "Points")
+ data_array = ET.SubElement(points_element, "DataArray", type="Float32", NumberOfComponents="3", format="binary")
+ data_array.text = array_to_binary(nodes.coords.flatten(), "f")
+
+ # Cells
+ cells_element = ET.SubElement(piece, "Cells")
+
+ # Connectivity
+ data_array = ET.SubElement(cells_element, "DataArray", type="Int32", Name="connectivity", format="binary")
+ data_array.text = array_to_binary(all_node_refs, "i")
+
+ # Offsets
+ data_array = ET.SubElement(cells_element, "DataArray", type="Int32", Name="offsets", format="binary")
+ data_array.text = array_to_binary(offsets, "i")
+
+ # Types
+ data_array = ET.SubElement(cells_element, "DataArray", type="UInt8", Name="types", format="binary")
+ data_array.text = array_to_binary(all_types, "B")
+
+ # Point Data
+ point_data_element = ET.SubElement(piece, "PointData")
+ for key, value in point_data.items():
+ data_type = numpy_to_vtu_type[np.dtype(value.dtype)]
+ num_components = str(value.shape[1] if len(value.shape) > 1 else 1)
+ if num_components == "6":
+ num_components = "3"
+ value = value[:, :3]
+ struct_type = numpy_to_struct_type[np.dtype(value.dtype)]
+ data_array = ET.SubElement(
+ point_data_element,
+ "DataArray",
+ type=data_type,
+ NumberOfComponents=num_components,
+ Name=key,
+ format="binary",
+ )
+ data_array.text = array_to_binary(value.flatten(), struct_type)
+
+ # Cell Data
+    cell_data = {}  # FIXME: this discards the cell_data argument, so the CellData section below is always empty — confirm intent
+ cell_data_element = ET.SubElement(piece, "CellData")
+ for key, value_ in cell_data.items():
+ if len(value_) != 1:
+ raise ValueError("Cell data must be a single value per cell")
+ value = value_[0]
+ data_type = numpy_to_vtu_type[np.dtype(value.dtype)]
+ struct_type = numpy_to_struct_type[np.dtype(value.dtype)]
+ num_components = str(value.shape[1] if len(value.shape) > 1 else 1)
+ data_array = ET.SubElement(
+ cell_data_element, "DataArray", type=data_type, NumberOfComponents=num_components, Name=key, format="binary"
+ )
+ data_array.text = array_to_binary(value.flatten(), struct_type)
+
+ return ET.ElementTree(root)
+
+
+def write_to_vtu_file(
+ nodes: FemNodes, element_blocks: list[ElementBlock], point_data: dict, cell_data: dict, filename: str
+):
+ tree = write_to_vtu_object(nodes, element_blocks, point_data, cell_data)
+
+ if isinstance(filename, str):
+ filename = pathlib.Path(filename).resolve().absolute()
+
+ filename.parent.mkdir(parents=True, exist_ok=True)
+ with open(filename, "wb") as f:
+ f.write(b'\n')
+ tree.write(f)
diff --git a/src/ada/fem/outputs.py b/src/ada/fem/outputs.py
index 9e0b409c8..5b1603ba0 100644
--- a/src/ada/fem/outputs.py
+++ b/src/ada/fem/outputs.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, List, Union
+from typing import TYPE_CHECKING
from .common import FemBase
from .sets import FemSet
@@ -81,9 +81,9 @@ class HistOutput(FemBase):
def __init__(
self,
name: str,
- fem_set: Union[FemSet, None, List[Surface]],
+ fem_set: FemSet | None | list[Surface],
set_type: str,
- variables: List[str],
+ variables: list[str],
int_value=1,
int_type=TYPES_INTERVAL.FREQUENCY,
metadata=None,
@@ -103,11 +103,11 @@ def __init__(
self._int_type = int_type
@property
- def parent(self) -> "Step":
+ def parent(self) -> Step:
return self._parent
@parent.setter
- def parent(self, value: "Step"):
+ def parent(self, value: Step):
self._parent = value
@property
diff --git a/src/ada/fem/results/common.py b/src/ada/fem/results/common.py
index 97698b39f..a779743a6 100644
--- a/src/ada/fem/results/common.py
+++ b/src/ada/fem/results/common.py
@@ -13,6 +13,7 @@
from ada.fem.shapes.definitions import LineShapes, MassTypes, ShellShapes, SolidShapes
from ...core.guid import create_guid
+from ...visit.comms import send_to_viewer
from ...visit.gltf.graph import GraphNode, GraphStore
from ...visit.gltf.meshes import GroupReference, MergedMesh, MeshType
from .field_data import ElementFieldData, NodalFieldData, NodalFieldType
@@ -155,7 +156,7 @@ def create_mesh_stores(
except IndexError as e:
logger.error(e)
continue
- if isinstance(elem_shape.type, shape_def.LineShapes):
+ if isinstance(elem_shape.type, (shape_def.LineShapes, shape_def.ConnectorTypes)):
continue
face_s = len(faces)
@@ -287,9 +288,10 @@ def _get_point_and_cell_data(self) -> tuple[dict, dict]:
point_data[name] = res
elif isinstance(x, ElementFieldData) and x.field_pos == x.field_pos.INT:
if isinstance(res, dict):
- cell_data.update(res)
+ for key, value in res.items():
+ cell_data[key] = value
else:
- cell_data[name] = [res]
+ cell_data[name] = res
else:
raise ValueError()
@@ -308,11 +310,31 @@ def _warp_data(self, vertices: np.ndarray, field: str, step, scale: float = 1.0)
result = vertices + data[:, :3] * scale
return result
- def to_meshio_mesh(self) -> meshio.Mesh:
+ def to_meshio_mesh(self, make_3xn_dofs=True) -> meshio.Mesh:
cells = self._get_cell_blocks()
cell_data, point_data = self._get_point_and_cell_data()
- return meshio.Mesh(points=self.mesh.nodes.coords, cells=cells, cell_data=cell_data, point_data=point_data)
+ mesh = meshio.Mesh(points=self.mesh.nodes.coords, cells=cells, cell_data=cell_data, point_data=point_data)
+
+ # RMED has 6xN DOF's vertex vectors, but VTU has 3xN DOF's vectors
+ if make_3xn_dofs:
+ new_fields = {}
+ for key, field in mesh.point_data.items():
+ if field.shape[1] == 6:
+ new_fields[key] = np.array_split(field, 2, axis=1)[0]
+ else:
+ new_fields[key] = field
+
+ mesh.point_data = new_fields
+
+ return mesh
+
+ def to_vtu(self, filepath, make_3xn_dofs=True):
+ from ada.fem.formats.vtu.write import write_to_vtu_file
+
+ cell_data, point_data = self._get_point_and_cell_data()
+
+ write_to_vtu_file(self.mesh.nodes, self.mesh.elements, point_data, cell_data, filepath)
def to_xdmf(self, filepath):
cells = self._get_cell_blocks()
@@ -387,6 +409,11 @@ def to_gltf(self, dest_file, step: int, field: str, warp_field=None, warp_step=N
with open(dest_file, "wb") as f:
scene.export(file_obj=f, file_type=dest_file.suffix[1:])
+ def to_viewer(
+ self, step: int, field: str, warp_field: str = None, warp_step: int = None, warp_scale: float = None, cfunc=None
+ ):
+ send_to_viewer(self.to_trimesh(step, field, warp_field, warp_step, warp_scale, cfunc))
+
def get_eig_summary(self) -> EigenDataSummary:
"""If the results are eigenvalue results, this method will return a summary of the eigenvalues and modes"""
from ada.fem.results.eigenvalue import EigenDataSummary, EigenMode
diff --git a/src/ada/fem/results/concepts.py b/src/ada/fem/results/concepts.py
index 31690e6e3..2fb35f407 100644
--- a/src/ada/fem/results/concepts.py
+++ b/src/ada/fem/results/concepts.py
@@ -1,6 +1,5 @@
from __future__ import annotations
-import json
import os
import pathlib
import subprocess
@@ -22,7 +21,7 @@
if TYPE_CHECKING:
from ada import Assembly
- from ada.visit.concept import PartMesh, VisMesh
+ from ada.visit.concept import PartMesh
class Results:
@@ -100,31 +99,6 @@ def _get_results_from_result_file(self, file_ref, overwrite=False):
return res_reader(self, file_ref, overwrite)
- def save_output(self, dest_file) -> None:
- if self.output is None or self.output.stdout is None:
- print("No output is found")
- return None
- dest_file = pathlib.Path(dest_file)
-
- os.makedirs(dest_file.parent, exist_ok=True)
- with open(dest_file, "w") as f:
- f.write(self.output.stdout)
-
- def save_results_to_json(self, dest_file):
- dest_file = pathlib.Path(dest_file).with_suffix(".json")
- res = dict(
- name=self.name,
- fem_format=self.fem_format,
- eigen_mode_data=self.eigen_mode_data.to_dict(),
- metadata=self.metadata,
- last_modified=self.last_modified,
- )
- with open(dest_file, "w") as f:
- try:
- json.dump(res, f, indent=4)
- except TypeError as e:
- raise TypeError(e)
-
def save_results_to_excel(self, dest_file, filter_components_by_name=None):
"""This method is just a sample for how certain results can easily be exported to Excel"""
@@ -164,17 +138,6 @@ def save_results_to_excel(self, dest_file, filter_components_by_name=None):
workbook.close()
- def to_vis_mesh(self, data_type: str = None, name: str = "AdaFEM") -> VisMesh | None:
- from ada.visualize.concept import VisMesh
-
- name = self.assembly.name if self.assembly is not None else name
- pm = self.result_mesh.to_part_mesh(name=name, data_type=data_type)
- if len(pm.id_map) == 0:
- logger.warning("Created Part mesh contains no object meshes")
- return None
- project = self.assembly.metadata.get("project", "DummyProject") if self.assembly is not None else "DummyProject"
- return VisMesh(name=name, project=project, world=[pm], meta=None)
-
@property
def name(self):
return self._name
@@ -497,49 +460,3 @@ def to_part_mesh(self, name: str, data_type: str = None) -> PartMesh:
)
return PartMesh(name=name, id_map=id_map)
-
-
-def get_fem_stats(fem_file, dest_md_file, data_file="data.json"):
- """
-
- :param fem_file:
- :param dest_md_file:
- :param data_file: Destination of data.json file (keeping track of last modified status etc..)
- """
- import json
- import os
-
- from ada import Assembly
- from ada.fem.utils import get_eldata
-
- dest_md_file = pathlib.Path(dest_md_file)
- data_file = pathlib.Path(data_file)
- a = Assembly()
- a.read_fem(fem_file)
-
- out_str = ""
-
- for name, part in a.parts.items():
- fem = part.fem
- r = get_eldata(fem_source=fem)
- if len(r.keys()) == 0:
- continue
- out_str += f"* **{name}**: ("
-
- el_data = ""
- for el_type, el_num in r.items():
- el_data += f"{el_type}: {el_num}, "
-
- out_str += el_data[:-2] + ")\n"
-
- os.makedirs(dest_md_file.parent, exist_ok=True)
-
- with open(dest_md_file, "w") as f:
- f.write(out_str)
-
- if data_file.exists():
- with open(data_file, "r") as f:
- data = json.load(f)
- else:
- data = dict()
- print(data)
diff --git a/src/ada/fem/results/field_data.py b/src/ada/fem/results/field_data.py
index ad8e06737..6a8e7a3a7 100644
--- a/src/ada/fem/results/field_data.py
+++ b/src/ada/fem/results/field_data.py
@@ -6,6 +6,8 @@
import numpy as np
+from ada.fem.shapes.definitions import LineShapes, ShellShapes, SolidShapes
+
@dataclass
class FieldData:
@@ -50,6 +52,7 @@ class ElementFieldData(FieldData):
"""Values from element integration points"""
field_pos: FieldPosition = FieldPosition.NODAL
+ elem_type: LineShapes | ShellShapes | SolidShapes = None
COLS: ClassVar[list[str]] = ["elem_label", "sec_num"]
int_positions: list[tuple] = None
diff --git a/src/ada/fem/results/resources/results.sql b/src/ada/fem/results/resources/results.sql
new file mode 100644
index 000000000..00d7b5d03
--- /dev/null
+++ b/src/ada/fem/results/resources/results.sql
@@ -0,0 +1,103 @@
+create table ElementConnectivity
+(
+ InstanceID INTEGER,
+ ElemID INTEGER,
+ PointID INTEGER,
+ Seq INTEGER
+);
+
+create table ElementInfo
+(
+ InstanceID INTEGER,
+ ElemID INTEGER,
+ Type TEXT,
+ IntPoints INTEGER
+);
+
+create table ElementSets
+(
+ SetID INTEGER,
+ Name TEXT,
+ InstanceID INTEGER,
+ ElemID INTEGER
+);
+
+create table FieldElem
+(
+ InstanceID INTEGER,
+ ElemID INTEGER,
+ StepID INTEGER,
+ Location TEXT,
+ IntPt INTEGER,
+ FieldVarID INTEGER,
+ Frame REAL,
+ Value REAL
+);
+
+create table FieldNodes
+(
+ InstanceID INTEGER,
+ PointID INTEGER,
+ StepID INTEGER,
+ FieldVarID INTEGER,
+ Frame REAL,
+ Value REAL
+);
+
+create table FieldVars
+(
+ FieldID INTEGER,
+ Name TEXT,
+ Description TEXT
+);
+
+create table HistOutput
+(
+ Region TEXT,
+ ResType TEXT,
+ InstanceID INTEGER,
+ ElemID INTEGER,
+ PointID INTEGER,
+ StepID INTEGER,
+ FieldVarID INTEGER,
+ Frame REAL,
+ Value REAL
+);
+
+create table ModelInstances
+(
+ ID INTEGER,
+ Name TEXT
+);
+
+create table PointSets
+(
+ SetID INTEGER,
+ Name TEXT,
+ InstanceID INTEGER,
+ PointID INTEGER
+);
+
+create table Points
+(
+ InstanceID INTEGER,
+ ID INTEGER,
+ X REAL,
+ Y REAL,
+ Z REAL
+);
+
+create table Steps
+(
+ ID INTEGER,
+ Name TEXT,
+ Description TEXT,
+ DomainType TEXT
+);
+
+create table metadata
+(
+ project TEXT,
+ user TEXT,
+ filename TEXT
+);
\ No newline at end of file
diff --git a/src/ada/fem/results/sqlite_store.py b/src/ada/fem/results/sqlite_store.py
new file mode 100644
index 000000000..e44276f8c
--- /dev/null
+++ b/src/ada/fem/results/sqlite_store.py
@@ -0,0 +1,208 @@
+import pathlib
+import sqlite3
+
+_RESULTS_SCHEMA_PATH = pathlib.Path(__file__).parent / "resources/results.sql"
+
+
+class SQLiteFEAStore:
+ def __init__(self, db_file, clean_tables=False):
+ if isinstance(db_file, str):
+ db_file = pathlib.Path(db_file)
+ clean_start = False
+ if not db_file.exists():
+ clean_start = True
+
+ self.db_file = db_file
+ self.conn = sqlite3.connect(db_file)
+ if clean_start:
+ self._init_db()
+ else:
+ if clean_tables:
+ # clear all tables
+ self.conn.executescript("DELETE FROM HistOutput;")
+ self.conn.executescript("DELETE FROM FieldElem;")
+ self.conn.executescript("DELETE FROM FieldNodes;")
+ self.conn.executescript("DELETE FROM FieldVars;")
+ self.conn.executescript("DELETE FROM ModelInstances;")
+ self.conn.executescript("DELETE FROM Steps;")
+                self.conn.executescript("DELETE FROM FieldVars;")  # NOTE: duplicate — FieldVars already cleared above
+ self.conn.executescript("DELETE FROM Points;")
+ self.conn.executescript("DELETE FROM ElementConnectivity;")
+ self.conn.executescript("DELETE FROM ElementInfo;")
+ self.conn.executescript("DELETE FROM PointSets;")
+ self.conn.executescript("DELETE FROM ElementSets;")
+
+ self.cursor = self.conn.cursor()
+
+ def __del__(self):
+ self.conn.close()
+
+ def _init_db(self):
+ with open(_RESULTS_SCHEMA_PATH, "r") as f:
+ schema = f.read()
+ self.conn.executescript(schema)
+
+ def insert_table(self, table_name: str, data: list[tuple]):
+ if not data:
+ print("No data to insert")
+ return
+
+ num_columns = len(data[0])
+ placeholders = ", ".join(["?" for _ in range(num_columns)])
+ sql_query = f"INSERT INTO {table_name} VALUES ({placeholders})"
+
+ self.cursor.executemany(sql_query, data)
+ self.conn.commit()
+
+ def get_steps(self):
+ query = """SELECT * FROM Steps"""
+ self.cursor.execute(query)
+ results = self.cursor.fetchall()
+ return results
+
+ def get_field_vars(self):
+ query = """SELECT * FROM FieldVars"""
+ self.cursor.execute(query)
+ results = self.cursor.fetchall()
+ return results
+
+ def get_history_data(
+ self, field_var=None, step_id=None, instance_id=None, point_id=None, elem_id=None, return_df=False
+ ):
+ base_query = """SELECT mi.Name,
+ ho.ResType,
+ ho.Region,
+ ho.PointID,
+ ho.ElemID,
+ st.Name,
+ fv.Name,
+ ho.Frame,
+ ho.Value
+ FROM FieldVars as fv
+ INNER JOIN HistOutput ho ON fv.FieldID = ho.FieldVarID
+ INNER JOIN ModelInstances as mi on ho.InstanceID = mi.ID
+ INNER JOIN Steps as st on ho.StepID = st.ID
+
+ """
+ params = []
+
+ add_queries = []
+ if field_var is not None:
+ add_queries += ["fv.Name == ?"]
+ params = [field_var]
+
+ if step_id is not None:
+ add_queries += ["ho.StepID = ?"]
+ params.append(step_id)
+
+ if instance_id is not None:
+ add_queries += ["ho.InstanceID = ?"]
+ params.append(instance_id)
+
+ if point_id is not None:
+ add_queries += ["ho.PointID = ?"]
+ params.append(point_id)
+
+ if elem_id is not None:
+ add_queries += ["ho.ElemID = ?"]
+ params.append(elem_id)
+
+ if len(add_queries) > 0:
+ base_query += "WHERE " + add_queries[0]
+ if len(add_queries) > 1:
+                extra_queries = "".join([f" AND {x}" for x in add_queries[1:]])
+ base_query += extra_queries
+
+ self.cursor.execute(base_query, params)
+ results = self.cursor.fetchall()
+ if return_df:
+ import pandas as pd
+
+ columns = [
+ "Name",
+ "Restype",
+ "Region",
+ "PointID",
+ "ElemID",
+ "StepName",
+ "FieldVarName",
+ "Frame",
+ "Value",
+ ]
+ df = pd.DataFrame(results, columns=columns)
+ return df
+ return results
+
+ def get_field_elem_data(self, name, step_id=None, instance_id=None, elem_id=None, int_point=None):
+ """This returns a join from the FieldVars table and the FieldElem tables."""
+ base_query = """SELECT mi.Name,
+ fe.ElemID,
+ st.Name,
+ fv.Name,
+ fe.IntPt,
+ fe.Frame,
+ fe.Value
+ FROM FieldVars as fv
+ INNER JOIN FieldElem fe ON fv.FieldID = fe.FieldVarID
+ INNER JOIN ModelInstances as mi on fe.InstanceID = mi.ID
+ INNER JOIN Steps as st on fe.StepID = st.ID
+
+ WHERE fv.Name = ?"""
+
+ params = [name]
+
+ if step_id is not None:
+ base_query += " AND fe.StepID = ?"
+ params.append(step_id)
+
+ if instance_id is not None:
+ base_query += " AND fe.InstanceID = ?"
+ params.append(instance_id)
+
+ if elem_id is not None:
+ base_query += " AND fe.ElemID = ?"
+ params.append(elem_id)
+
+ if int_point is not None:
+ base_query += " AND fe.IntPt = ?"
+ params.append(int_point)
+
+ self.cursor.execute(base_query, params)
+ results = self.cursor.fetchall()
+ return results
+
+ def get_field_nodal_data(self, name, step_id=None, instance_id=None, point_id=None):
+ """This returns a join from the FieldVars table and the FieldNodes tables."""
+ base_query = """SELECT mi.Name,
+ fn.PointID,
+ st.Name,
+ fv.Name,
+ fn.Frame,
+ fn.Value
+ FROM FieldVars as fv
+ INNER JOIN FieldNodes fn ON fv.FieldID = fn.FieldVarID
+ INNER JOIN ModelInstances as mi on fn.InstanceID = mi.ID
+ INNER JOIN Steps as st on fn.StepID = st.ID
+
+ WHERE fv.Name = ?"""
+
+ params = [name]
+
+ if step_id is not None:
+ base_query += " AND fn.StepID = ?"
+ params.append(step_id)
+
+ if instance_id is not None:
+ base_query += " AND fn.InstanceID = ?"
+ params.append(instance_id)
+
+ if point_id is not None:
+ base_query += " AND fn.PointID = ?"
+ params.append(point_id)
+
+ self.cursor.execute(base_query, params)
+ results = self.cursor.fetchall()
+ return results
+
+ def __repr__(self):
+ return f"SQLiteFEAStore({self.db_file})"
diff --git a/src/ada/fem/sections.py b/src/ada/fem/sections.py
index 6aebd5f31..153c7e3c6 100644
--- a/src/ada/fem/sections.py
+++ b/src/ada/fem/sections.py
@@ -1,20 +1,19 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any, List, Tuple, Union
+from typing import TYPE_CHECKING, List, TypeVar, Union
import numpy as np
from ada.base.types import GeomRepr
from ada.config import logger
from ada.core.utils import Counter
+from ada.core.vector_transforms import normal_to_points_in_plane
from ada.core.vector_utils import calc_yvec, calc_zvec, unit_vector, vector_length
+from ada.fem.common import FemBase
+from ada.fem.shapes import ElemType
from ada.materials import Material
from ada.sections import Section
-from ..core.vector_transforms import normal_to_points_in_plane
-from .common import FemBase
-from .shapes import ElemType
-
if TYPE_CHECKING:
from ada import Beam, Plate
from ada.fem import FemSet
@@ -90,6 +89,10 @@ def type(self):
def id(self):
return self._id
+ @id.setter
+ def id(self, value):
+ self._id = value
+
@property
def elset(self):
return self._elset
@@ -204,14 +207,6 @@ def int_points(self):
def refs(self) -> List[Union[Beam, Plate]]:
return self._refs
- def unique_fem_section_permutation(self) -> Tuple[int, Material, Section, tuple, tuple, float]:
- if self.type == self.SEC_TYPES.LINE:
- return self.id, self.material, self.section, tuple(self.local_x), tuple(self.local_z), self.thickness
- elif self.type == self.SEC_TYPES.SHELL:
- return self.id, self.material, self.section, (None,), tuple(self.local_z), self.thickness
- else:
- return self.id, self.material, self.section, (None,), tuple(self.local_z), 0.0
-
def has_equal_props(self, other: FemSection):
equal_mat = self.material == other.material
if self.type == self.SEC_TYPES.SHELL:
@@ -243,17 +238,63 @@ def __repr__(self):
)
+# Todo: This should be improved
+_A = TypeVar("_A", float, int)
+_T_E = TypeVar(
+ "_T_E",
+ float,
+ list[float | int],
+ # Tabular form: nested pairs of numeric scalars. The original listed this
+ # constraint twice; the duplicate is redundant and flagged by type checkers.
+ list[tuple[tuple[float | int, float | int]]],
+)
+_T_D = TypeVar(
+ "_T_D",
+ float,
+ list[float],
+ list[int],
+ list[tuple[tuple[float, float]]],
+ list[tuple[tuple[int, int]]],
+)
+_T_P = TypeVar(
+ "_T_P",
+ float,
+ list[float],
+ list[int],
+ list[tuple[tuple[float, float]]],
+ list[tuple[tuple[int, int]]],
+)
+_T_R = TypeVar(
+ "_T_R",
+ float,
+ list[float],
+ list[int],
+ list[tuple[tuple[float, float]]],
+ list[tuple[tuple[int, int]]],
+)
+
+
class ConnectorSection(FemBase):
- """A Connector Section"""
+ """A connector section.
+
+ All *_comp properties can be one of the following
+
+ * scalar stiffness value (assumed linear stiffness in all degrees of freedom)
+ * Tabular stiffness values where the structure is as follows
+
+ list[ # Tabular data for stiffness in all Degrees of Freedom
+ list[ # Tabular data for stiffness in Degree of Freedom i
+ list[numeric scalar, numeric scalar] # Tabular scalars [Force, Displacement]
+ ]
+ ]
+ """
def __init__(
self,
name,
- elastic_comp: Union[None, float, List[Any]] = None,
- damping_comp: Union[None, float, List[Any]] = None,
- plastic_comp: Union[None, float, List[Any]] = None,
- rigid_dofs: Union[None, float, List[Any]] = None,
- soft_elastic_dofs=None,
+ elastic_comp: _T_E = None,
+ damping_comp: _T_D = None,
+ plastic_comp: _T_P = None,
+ rigid_dofs: _T_R = None,
metadata=None,
parent=None,
):
@@ -262,10 +303,9 @@ def __init__(
self._damping_comp = damping_comp if damping_comp is not None else []
self._plastic_comp = plastic_comp
self._rigid_dofs = rigid_dofs
- self._soft_elastic_dofs = soft_elastic_dofs
@property
- def elastic_comp(self):
+ def elastic_comp(self) -> _T_E:
return self._elastic_comp
@elastic_comp.setter
@@ -273,17 +313,13 @@ def elastic_comp(self, value):
self._elastic_comp = value
@property
- def damping_comp(self):
+ def damping_comp(self) -> _T_D:
return self._damping_comp
@property
- def plastic_comp(self):
+ def plastic_comp(self) -> _T_P:
return self._plastic_comp
@property
- def rigid_dofs(self):
+ def rigid_dofs(self) -> _T_R:
return self._rigid_dofs
-
- @property
- def soft_elastic_dofs(self):
- return self._soft_elastic_dofs
diff --git a/src/ada/fem/shapes/definitions.py b/src/ada/fem/shapes/definitions.py
index c324aae4f..bad8d2b5b 100644
--- a/src/ada/fem/shapes/definitions.py
+++ b/src/ada/fem/shapes/definitions.py
@@ -7,8 +7,6 @@
from ada.base.types import GeomRepr
from ada.config import logger
-# The element names are based on the naming scheme by meshio
-
class UnsupportedFeaShapeException(Exception):
pass
@@ -121,6 +119,7 @@ def to_geom_repr(el_type):
raise ValueError(f'Unrecognized Shape Type: "{el_type}"')
+# todo: clean up elem shape types. Mass and connector shapes should be removed (they are either Point or Line shapes) now that they are subclasses of Elem.
class ElemType:
SHELL = GeomRepr.SHELL
SOLID = GeomRepr.SOLID
@@ -209,7 +208,7 @@ def elem_type_group(self):
return ElemType.SOLID
elif isinstance(self.type, ShellShapes):
return ElemType.SHELL
- elif isinstance(self.type, LineShapes):
+ elif isinstance(self.type, (LineShapes, ConnectorTypes)):
return ElemType.LINE
elif isinstance(self.type, SpringTypes):
return ElemType.LINE
diff --git a/src/ada/fem/steps.py b/src/ada/fem/steps.py
index 93f2e6e4b..e4e62dc74 100644
--- a/src/ada/fem/steps.py
+++ b/src/ada/fem/steps.py
@@ -186,12 +186,10 @@ def hist_outputs(self) -> list[HistOutput]:
return self._hist_outputs
def __repr__(self):
- return f"{self.__class__.name}({self.name}, type={self.type}, nl_geom={self.nl_geom})"
+ return f"{self.__class__.__name__}({self.name}, type={self.type}, nl_geom={self.nl_geom})"
-class StepImplicit(Step):
- TYPES_DYNAMIC = _DynStepType
-
+class StepImplicitStatic(Step):
def __init__(
self,
name,
@@ -202,7 +200,6 @@ def __init__(
init_incr=100.0,
min_incr=1e-8,
max_incr=100.0,
- dyn_type=TYPES_DYNAMIC.QUASI_STATIC,
**kwargs,
):
"""
@@ -223,12 +220,8 @@ def __init__(
else:
total_time = init_incr
- super(StepImplicit, self).__init__(name, implicit_type, total_time=total_time, nl_geom=nl_geom, **kwargs)
+ super(StepImplicitStatic, self).__init__(name, implicit_type, total_time=total_time, nl_geom=nl_geom, **kwargs)
- if dyn_type not in _DynStepType.all:
- raise ValueError(f'Dynamic input type "{dyn_type}" is not supported')
-
- self._dyn_type = dyn_type
self._total_incr = total_incr
self._init_incr = init_incr
self._min_incr = min_incr
@@ -250,6 +243,39 @@ def min_incr(self):
def max_incr(self):
return self._max_incr
+
+class StepImplicitDynamic(StepImplicitStatic):
+ TYPES_DYNAMIC = _DynStepType
+
+ def __init__(
+ self,
+ name,
+ dyn_type=TYPES_DYNAMIC.QUASI_STATIC,
+ nl_geom=False,
+ total_time=100.0,
+ total_incr=1000,
+ init_incr=100.0,
+ min_incr=1e-8,
+ max_incr=100.0,
+ **kwargs,
+ ):
+ if dyn_type not in _DynStepType.all:
+ raise ValueError(f'Dynamic input type "{dyn_type}" is not supported')
+
+ self._dyn_type = dyn_type
+
+ super().__init__(
+ name=name,
+ implicit_type=Step.TYPES.DYNAMIC,
+ nl_geom=nl_geom,
+ total_time=total_time,
+ total_incr=total_incr,
+ init_incr=init_incr,
+ min_incr=min_incr,
+ max_incr=max_incr,
+ **kwargs,
+ )
+
@property
def dyn_type(self):
return self._dyn_type
diff --git a/src/ada/occ/exceptions.py b/src/ada/occ/exceptions.py
index 7ad711f8b..9471069c5 100644
--- a/src/ada/occ/exceptions.py
+++ b/src/ada/occ/exceptions.py
@@ -2,6 +2,10 @@ class UnableToCreateSolidOCCGeom(Exception):
pass
+class UnableToCreateCurveOCCGeom(Exception):
+ pass
+
+
class UnableToCreateTesselationFromSolidOCCGeom(Exception):
pass
diff --git a/src/ada/occ/geom/curves.py b/src/ada/occ/geom/curves.py
index 2c8803dbb..b704f4c90 100644
--- a/src/ada/occ/geom/curves.py
+++ b/src/ada/occ/geom/curves.py
@@ -5,6 +5,7 @@
from ada.geom import curves as geo_cu
from ada.geom.surfaces import PolyLoop
+from ada.occ.exceptions import UnableToCreateCurveOCCGeom
from ada.occ.utils import point3d
@@ -30,7 +31,7 @@ def segments_to_wire(segments: list[geo_cu.Line | geo_cu.ArcLine]) -> TopoDS_Wir
try:
return wire.Wire()
except RuntimeError:
- raise ValueError("Segments do not form a closed loop")
+ raise UnableToCreateCurveOCCGeom("Segments do not form a closed loop")
def make_wire_from_indexed_poly_curve_geom(
diff --git a/src/ada/occ/store.py b/src/ada/occ/store.py
index d8317cf80..784df3243 100644
--- a/src/ada/occ/store.py
+++ b/src/ada/occ/store.py
@@ -47,7 +47,7 @@ def shape_iterator(
if isinstance(geom_repr, str):
geom_repr = GeomRepr.from_str(geom_repr)
- def safe_geom(obj_):
+ def safe_geom(obj_, name_ref=None):
geo_repr = render_override.get(obj_.guid, geom_repr)
try:
if geo_repr == GeomRepr.SOLID:
@@ -63,7 +63,10 @@ def safe_geom(obj_):
occ_geom = BRepBuilderAPI_Transform(occ_geom, trsf, True).Shape()
return occ_geom
except RuntimeError as e:
- logger.warning(f"Failed to add shape {obj.name} due to {e}")
+ logger.warning(f'Failed to add shape {obj.name} due to "{e}" from {name_ref}')
+ return None
+ except BaseException as e:
+ logger.warning(f'Failed to add shape {obj.name} due to "{e}" from {name_ref}')
return None
if isinstance(part, StepStore):
@@ -76,11 +79,11 @@ def safe_geom(obj_):
geom_repr = GeomRepr.from_str(geom_repr)
if issubclass(type(obj), ada.Shape):
- geom = safe_geom(obj)
+ geom = safe_geom(obj, part.name)
elif isinstance(obj, (ada.Beam, ada.Plate, ada.Wall)):
- geom = safe_geom(obj)
+ geom = safe_geom(obj, part.name)
elif isinstance(obj, (ada.PipeSegStraight, ada.PipeSegElbow)):
- geom = safe_geom(obj)
+ geom = safe_geom(obj, part.name)
else:
logger.error(f"Geometry type {type(obj)} not yet implemented")
geom = None
diff --git a/src/ada/occ/tessellating.py b/src/ada/occ/tessellating.py
index 76d7b11b6..de6e89260 100644
--- a/src/ada/occ/tessellating.py
+++ b/src/ada/occ/tessellating.py
@@ -51,10 +51,9 @@ def tessellate_shape(shape: TopoDS_Shape, quality=1.0, render_edges=False, paral
# first, compute the tesselation
try:
tess = ShapeTesselator(shape)
+ tess.Compute(compute_edges=render_edges, mesh_quality=quality, parallel=parallel)
except RuntimeError as e:
- raise UnableToCreateTesselationFromSolidOCCGeom(e)
-
- tess.Compute(compute_edges=render_edges, mesh_quality=quality, parallel=parallel)
+ raise UnableToCreateTesselationFromSolidOCCGeom(f'Failed to tessellate OCC geometry due to "{e}"')
# get vertices and normals
vertices_position = tess.GetVerticesPositionAsTuple()
@@ -178,7 +177,9 @@ def batch_tessellate(
logger.error(e)
continue
- def tessellate_part(self, part: Part, filter_by_guids=None, render_override=None) -> trimesh.Scene:
+ def tessellate_part(
+ self, part: Part, filter_by_guids=None, render_override=None, merge_meshes=True
+ ) -> trimesh.Scene:
graph = part.get_graph_store()
scene = trimesh.Scene(base_frame=graph.top_level.name)
@@ -189,8 +190,12 @@ def tessellate_part(self, part: Part, filter_by_guids=None, render_override=None
all_shapes = sorted(shapes_tess_iter, key=lambda x: x.material)
for mat_id, meshes in groupby(all_shapes, lambda x: x.material):
- merged_store = concatenate_stores(meshes)
- merged_mesh_to_trimesh_scene(scene, merged_store, self.get_mat_by_id(mat_id), mat_id, graph)
+ if merge_meshes:
+ merged_store = concatenate_stores(meshes)
+ merged_mesh_to_trimesh_scene(scene, merged_store, self.get_mat_by_id(mat_id), mat_id, graph)
+ else:
+ for mesh_store in meshes:
+ merged_mesh_to_trimesh_scene(scene, mesh_store, self.get_mat_by_id(mat_id), mat_id, graph)
shell_color = Color.from_str("white")
shell_color_id = self.add_color(shell_color)
diff --git a/src/ada/param_models/fem_models.py b/src/ada/param_models/fem_models.py
index f007f8b84..daf9c0a3a 100644
--- a/src/ada/param_models/fem_models.py
+++ b/src/ada/param_models/fem_models.py
@@ -1,7 +1,7 @@
import numpy as np
from ada import Assembly, Beam, Material, Part, PrimBox, PrimCyl, PrimExtrude, User
-from ada.fem import Bc, FemSet, Load, StepImplicit
+from ada.fem import Bc, FemSet, Load, StepImplicitStatic
from ada.fem.shapes import ElemType
from ada.fem.utils import get_beam_end_nodes
from ada.materials.metals import CarbonSteel, DnvGl16Mat
@@ -53,7 +53,7 @@ def beam_ex1(p1=(0, 0, 0), p2=(1.5, 0, 0), profile="IPE400", geom_repr=ElemType.
# Add a set containing ALL elements (necessary for Calculix loads).
fs = p.fem.add_set(FemSet("Eall", [el for el in p.fem.elements], FemSet.TYPES.ELSET))
- step = a.fem.add_step(StepImplicit("gravity", nl_geom=False, init_incr=100.0, total_time=100.0))
+ step = a.fem.add_step(StepImplicitStatic("gravity", nl_geom=False, init_incr=100.0, total_time=100.0))
step.add_load(Load("grav", Load.TYPES.GRAVITY, -9.81 * 800, fem_set=fs))
fix_set = p.fem.add_set(FemSet("bc_nodes", get_beam_end_nodes(bm), FemSet.TYPES.NSET))
diff --git a/src/ada/visit/comms.py b/src/ada/visit/comms.py
index e40e96a1e..df5db517d 100644
--- a/src/ada/visit/comms.py
+++ b/src/ada/visit/comms.py
@@ -6,6 +6,8 @@
import sys
import time
+import trimesh
+
import ada
from ada.config import logger
from ada.visit.websocket_server import is_server_running, start_server
@@ -14,14 +16,16 @@
WEBSOCKET_EXE_PY = pathlib.Path(__file__).parent / "websocket_server.py"
-def send_to_viewer(part: ada.Part, host="localhost", port=8765, origins: list[str] = None, meta: dict = None):
+def send_to_viewer(
+ part: ada.Part | trimesh.Scene, host="localhost", port=8765, origins: list[str] = None, meta: dict = None
+):
if origins is None:
send_to_local_viewer(part, host=host, port=port)
else:
send_to_web_viewer(part, port=port, origins=origins, meta=meta)
-def send_to_local_viewer(part: ada.Part, host="localhost", port=8765):
+def send_to_local_viewer(part: ada.Part | trimesh.Scene, host="localhost", port=8765):
"""Send a part to the viewer. This will start the viewer if it is not already running."""
from ada.visit.websocket_server import WebSocketServer
@@ -41,12 +45,15 @@ def send_to_local_viewer(part: ada.Part, host="localhost", port=8765):
while ws.check_server_running() is False:
time.sleep(0.1)
- start = time.time()
- data = io.BytesIO()
- part.to_trimesh_scene().export(data, file_type="glb")
- end = time.time()
- logger.info(f"Exported to glb in {end - start:.2f} seconds")
- ws.send(data.getvalue())
+ with io.BytesIO() as data:
+ start = time.time()
+ if isinstance(part, trimesh.Scene):
+ part.export(data, file_type="glb")
+ else:
+ part.to_trimesh_scene().export(data, file_type="glb")
+ end = time.time()
+ logger.info(f"Exported to glb in {end - start:.2f} seconds")
+ ws.send(data.getvalue())
def send_to_web_viewer(part: ada.Part, port=8765, origins: list[str] = None, meta: dict = None):
diff --git a/src/ada/visit/concept.py b/src/ada/visit/concept.py
index 9036ca66e..1ec053920 100644
--- a/src/ada/visit/concept.py
+++ b/src/ada/visit/concept.py
@@ -1,8 +1,5 @@
from __future__ import annotations
-import datetime
-import os
-import pathlib
from dataclasses import dataclass, field
import numpy as np
@@ -10,153 +7,6 @@
from ada.config import logger
-from .colors import VisColor
-
-
-@dataclass
-class VisMesh:
- """Visual Mesh"""
-
- name: str
- project: str = None
- world: list[PartMesh] = field(default_factory=list, repr=False)
- meshes: dict[str, VisNode] = field(default_factory=dict, repr=False)
- meta: None | dict = field(default=None, repr=False)
- created: str = None
- translation: np.ndarray = None
- cache_file: pathlib.Path = field(default=pathlib.Path(".cache/meshes.h5"), repr=False)
- overwrite_cache: bool = False
- colors: dict[str, VisColor] = field(default_factory=dict)
- merged: bool = False
-
- def __post_init__(self):
- if self.created is None:
- self.created = datetime.datetime.utcnow().strftime("%m/%d/%Y, %H:%M:%S")
-
- def move_objects_to_center(self, override_center=None):
- self.translation = override_center if override_center is not None else -self.vol_center
- for pm in self.world:
- pm.move_objects_to_center(self.translation)
-
- def add_mesh(self, guid, parent_guid, position, indices, normals=None, matrix=None, color_ref=None):
- obj_group = self._h5cache_group.create_group(guid)
- obj_group.attrs.create("COLOR", color_ref)
- if matrix is not None:
- obj_group.attrs.create("MATRIX", matrix)
- obj_group.create_dataset("POSITION", data=position)
- obj_group.create_dataset("NORMAL", data=normals)
- obj_group.create_dataset("INDEX", data=indices)
- self.meshes[guid] = VisNode(guid, parent_guid)
-
- @property
- def vol_center(self) -> np.ndarray:
- return (self.bbox[0] + self.bbox[1]) / 2
-
- @property
- def bbox(self) -> tuple[np.ndarray, np.ndarray]:
- res = np.concatenate([np.array(x.bbox) for x in self.world])
- return res.min(0), res.max(0)
-
- @property
- def num_polygons(self):
- return sum([x.num_polygons for x in self.world])
-
- def _convert_to_trimesh(self, embed_meta=True) -> trimesh.Scene:
- scene = trimesh.Scene()
- meta_set = set(self.meta.keys())
-
- id_sequence = dict()
-
- for world in self.world:
- world_map_set = set(world.id_map.keys())
- res = meta_set - world_map_set
- if self.merged is False:
- for spatial_node in res:
- spatial_name, spatial_parent = self.meta.get(spatial_node)
- scene.graph.update(
- frame_to=spatial_name, frame_from=spatial_parent if spatial_parent != "*" else None
- )
-
- for key, obj in world.id_map.items():
- if self.merged is False:
- name, parent_guid = self.meta.get(key)
- parent_name, _ = self.meta.get(parent_guid)
- else:
- name = key
- parent_name = "world"
-
- for i, new_mesh in enumerate(obj.to_trimesh()):
- name = name if i == 0 else f"{name}_{i:02d}"
- scene.add_geometry(new_mesh, node_name=name, geom_name=name, parent_node_name=parent_name)
- id_sequence[name] = obj.id_sequence
-
- if embed_meta:
- scene.metadata["meta"] = self.meta
- scene.metadata["id_sequence"] = id_sequence
-
- return scene
-
- def _export_using_trimesh(self, mesh: trimesh.Scene, dest_file: pathlib.Path):
- os.makedirs(dest_file.parent, exist_ok=True)
- print(f'Writing Visual Mesh to "{dest_file}"')
- with open(dest_file, "wb") as f:
- mesh.export(file_obj=f, file_type=dest_file.suffix[1:])
-
- def to_stl(self, dest_file):
- dest_file = pathlib.Path(dest_file).with_suffix(".stl")
- mesh: trimesh.Trimesh = self._convert_to_trimesh()
- self._export_using_trimesh(mesh, dest_file)
-
- def to_gltf(self, dest_file, only_these_guids: list[str] = None, embed_meta=False):
- from ..core.vector_transforms import rot_matrix
-
- dest_file = pathlib.Path(dest_file).with_suffix(".glb")
- mesh: trimesh.Trimesh = self._convert_to_trimesh(embed_meta=embed_meta)
-
- # Trimesh automatically transforms by setting up = Y. This will counteract that transform
- m3x3 = rot_matrix((0, -1, 0))
- m3x3_with_col = np.append(m3x3, np.array([[0], [0], [0]]), axis=1)
- m4x4 = np.r_[m3x3_with_col, [np.array([0, 0, 0, 1])]]
- mesh.apply_transform(m4x4)
-
- self._export_using_trimesh(mesh, dest_file)
-
- def merge_objects_in_parts_by_color(self) -> VisMesh:
- to_be_merged_part = None
- for pmesh in self.world:
- if to_be_merged_part is None:
- to_be_merged_part = pmesh
- continue
- to_be_merged_part += pmesh
- if to_be_merged_part is None:
- logger.error(f"{self.name} has no parts!?. returning empty model")
- merged_part = []
- else:
- merged_part = to_be_merged_part.merge_by_color()
-
- return VisMesh(
- name=self.name,
- created=self.created,
- project=self.project,
- world=[merged_part],
- meta=self.meta,
- translation=self.translation,
- merged=True,
- )
-
- def __add__(self, other: VisMesh):
- new_meta = dict()
- if self.meta is not None:
- new_meta.update(self.meta)
- if other.meta is not None:
- new_meta.update(other.meta)
- return VisMesh(
- name=self.name,
- project=self.project,
- world=self.world + other.world,
- meta=new_meta,
- )
-
@dataclass
class PartMesh:
@@ -346,12 +196,6 @@ def __add__(self, other: ObjectMesh):
return self
-@dataclass
-class VisNode:
- guid: str
- parent: str
-
-
def get_shape(np_array: np.ndarray) -> int:
if len(np_array.shape) == 1:
shape = len(np_array.shape)
diff --git a/src/ada/visit/gltf/store.py b/src/ada/visit/gltf/store.py
index b8f9684d2..04a40d90c 100644
--- a/src/ada/visit/gltf/store.py
+++ b/src/ada/visit/gltf/store.py
@@ -188,7 +188,11 @@ def get_buffer_data(self, accessor_index: int) -> np.ndarray:
def merged_mesh_to_trimesh_scene(
- scene: trimesh.Scene, merged_mesh: MergedMesh, pbr_mat: dict | Color, buffer_id: int, graph_store: GraphStore
+ scene: trimesh.Scene,
+ merged_mesh: MergedMesh | MeshStore,
+ pbr_mat: dict | Color,
+ buffer_id: int,
+ graph_store: GraphStore,
):
vertices = merged_mesh.position.reshape(int(len(merged_mesh.position) / 3), 3)
if merged_mesh.type == MeshType.TRIANGLES:
@@ -200,6 +204,7 @@ def merged_mesh_to_trimesh_scene(
f"mat{buffer_id}", baseColorFactor=pbr_mat.rgb255, doubleSided=True
)
mesh.visual = trimesh.visual.TextureVisuals(material=pbr_mat)
+ mesh.visual.uv = np.zeros((len(mesh.vertices), 2))
elif merged_mesh.type == MeshType.LINES:
entities = [Line(x) for x in merged_mesh.indices.reshape(int(len(merged_mesh.indices) / 2), 2)]
mesh = trimesh.path.Path3D(entities=entities, vertices=vertices)
@@ -223,14 +228,15 @@ def merged_mesh_to_trimesh_scene(
mesh,
node_name=f"node{buffer_id}",
geom_name=f"node{buffer_id}",
- parent_node_name=graph_store.top_level.name,
+ parent_node_name=graph_store.top_level.name if graph_store else None,
)
- id_sequence = dict()
- for group in merged_mesh.groups:
- n = graph_store.nodes.get(group.node_id)
- if n is None:
- raise ValueError(f"Node {group.node_id} not found in graph store")
- id_sequence[n.hash] = (group.start, group.start + group.length - 1)
+ if graph_store:
+ id_sequence = dict()
+ for group in merged_mesh.groups:
+ n = graph_store.nodes.get(group.node_id)
+ if n is None:
+ raise ValueError(f"Node {group.node_id} not found in graph store")
+ id_sequence[n.hash] = (group.start, group.start + group.length - 1)
- scene.metadata[f"id_sequence{buffer_id}"] = id_sequence
+ scene.metadata[f"id_sequence{buffer_id}"] = id_sequence
diff --git a/src/ada/visit/interface.py b/src/ada/visit/interface.py
deleted file mode 100644
index 6b90a6a11..000000000
--- a/src/ada/visit/interface.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import ifcopenshell.geom
-import numpy as np
-
-from ada.core.guid import create_guid
-from ada.visit.concept import ObjectMesh, PartMesh, VisMesh
-
-if TYPE_CHECKING:
- from ada import Part
-
-
-def part_to_vis_mesh2(part: Part, auto_sync_ifc_store=True, cpus: int = 1) -> VisMesh:
- ifc_store = part.get_assembly().ifc_store
- if auto_sync_ifc_store:
- ifc_store.sync()
-
- settings = ifcopenshell.geom.settings()
- settings.set(settings.USE_PYTHON_OPENCASCADE, False)
- settings.set(settings.SEW_SHELLS, False)
- settings.set(settings.WELD_VERTICES, True)
- settings.set(settings.INCLUDE_CURVES, False)
- settings.set(settings.USE_WORLD_COORDS, True)
- settings.set(settings.VALIDATE_QUANTITIES, False)
-
- id_map = dict()
-
- res = list(ifc_store.assembly.get_all_physical_objects())
-
- if len(res) > 0:
- iterator = ifc_store.get_ifc_geom_iterator(settings, cpus=cpus)
- iterator.initialize()
- while True:
- shape = iterator.get()
- if shape:
- obj_mesh = product_to_obj_mesh(shape)
- id_map[shape.guid] = obj_mesh
-
- if not iterator.next():
- break
-
- pm = PartMesh(name=part.name, id_map=id_map)
- meta = {
- p.guid: (p.name, p.parent.name if p.parent is not None else "*")
- for p in part.get_all_subparts(include_self=True)
- }
- parts_d = {p.guid: (p.name, p.parent.guid) for p in part.get_all_physical_objects()}
- meta.update(parts_d)
-
- for p in part.get_all_subparts(include_self=True):
- if p.fem.is_empty() is True:
- continue
-
- mesh = p.fem.to_mesh()
- coords = mesh.nodes.coords
- edges, faces = mesh.get_edges_and_faces_from_mesh()
- guid = create_guid()
- name = p.fem.name
- meta.update({guid: (name, p.guid)})
-
- id_map.update({guid: ObjectMesh(guid, faces, coords, edges=edges)})
-
- return VisMesh(part.name, world=[pm], meta=meta)
-
-
-def product_to_obj_mesh(shape: ifcopenshell.ifcopenshell_wrapper.TriangulationElement) -> ObjectMesh:
- geometry = shape.geometry
- vertices = np.array(geometry.verts, dtype="float32").reshape(int(len(geometry.verts) / 3), 3)
- faces = np.array(geometry.faces, dtype=int)
- normals = np.array(geometry.normals) if len(geometry.normals) != 0 else None
-
- if normals is not None and len(normals) > 0:
- normals = normals.astype(dtype="float32").reshape(int(len(normals) / 3), 3)
-
- mats = geometry.materials
- if len(mats) == 0:
- # colour = [1.0, 0.0, 0.0, 1.0]
- colour = None
- else:
- mat0 = mats[0]
- opacity = 1.0 - mat0.transparency
- if mat0.diffuse == (0.0, 0.0, 0.0):
- colour = None
- else:
- colour = [*mat0.diffuse, opacity]
-
- return ObjectMesh(shape.guid, faces, vertices, normals, color=colour)
diff --git a/src/ada/visit/render_base.py b/src/ada/visit/render_base.py
deleted file mode 100644
index 06af45bda..000000000
--- a/src/ada/visit/render_base.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Using optional renderer pygfx
-
-
-class JupyterRenderer:
- def __init__(self, obj):
- super().__init__()
- self.obj = obj
-
- def render(self):
- return self.obj._repr_html_()
-
- def update(self):
- pass
diff --git a/src/ada/visit/render_pygfx.py b/src/ada/visit/render_pygfx.py
index 78a065249..901964f2c 100644
--- a/src/ada/visit/render_pygfx.py
+++ b/src/ada/visit/render_pygfx.py
@@ -90,7 +90,10 @@ def _init_scene(self):
def _get_scene_meshes(self, scene: trimesh.Scene, tag: str) -> Iterable[gfx.Mesh]:
for key, m in scene.geometry.items():
mesh = gfx_utils.gfx_mesh_from_mesh(m)
- buffer_id = int(float(key.replace("node", "")))
+ if "node" in key:
+ buffer_id = int(float(key.replace("node", "")))
+ else:
+ buffer_id = len(self._mesh_map)
self._mesh_map[mesh.id] = (tag, buffer_id)
yield mesh
diff --git a/src/ada/visit/render_pygfx_helpers.py b/src/ada/visit/render_pygfx_helpers.py
index 6f70547b9..bc7c6d6ef 100644
--- a/src/ada/visit/render_pygfx_helpers.py
+++ b/src/ada/visit/render_pygfx_helpers.py
@@ -12,6 +12,7 @@
)
from pygfx.utils import Color
+from ada.config import logger
from ada.visit.colors import Color as AdaColor
from ada.visit.gltf.meshes import MeshStore
@@ -73,7 +74,7 @@ def __init__(self, size=1.0, thickness=2):
line_positions *= line_size
geometry = Geometry(positions=line_positions, colors=colors)
- material = LineSegmentMaterial(vertex_colors=True, thickness=thickness, aa=True)
+ material = LineSegmentMaterial(thickness=thickness, aa=True, color_mode="vertex")
super().__init__(geometry, material)
@@ -188,7 +189,10 @@ def __init__(
def tri_mat_to_gfx_mat(
tri_mat: trimesh.visual.material.PBRMaterial,
) -> gfx.MeshPhongMaterial | gfx.MeshBasicMaterial:
- color = gfx.Color(*[x / 255 for x in tri_mat.baseColorFactor[:3]])
+ if tri_mat.baseColorFactor is None:
+ color = gfx.Color(1, 1, 1)
+ else:
+ color = gfx.Color(*[x / 255 for x in tri_mat.baseColorFactor[:3]])
return gfx.MeshPhongMaterial(color=color, flat_shading=True)
@@ -257,7 +261,19 @@ def gfx_mesh_from_mesh(
positions=np.ascontiguousarray(mesh.vertices, dtype="f4"),
indices=np.ascontiguousarray(mesh.faces, dtype="i4"),
)
- mat = tri_mat_to_gfx_mat(mesh.visual.material) if material is None else material
+ if material is None:
+ # This seems to have broken with newer versions of pygfx
+ if hasattr(mesh.visual, "material"):
+ mat = tri_mat_to_gfx_mat(mesh.visual.material)
+ else:
+ logger.warning(
+ "No material found for mesh, using default color. Maybe related to changes in trimesh>4?"
+ )
+ color = mesh.visual.main_color
+ mat = gfx.MeshPhongMaterial(color=color, flat_shading=True)
+ else:
+ mat = material
+
mesh = gfx.Mesh(geom, material=mat)
return mesh
diff --git a/tests/core/fem/formats/calculix/test_io_fem_calculix.py b/tests/core/fem/formats/calculix/test_io_fem_calculix.py
index a829d5a3d..f9a048d56 100644
--- a/tests/core/fem/formats/calculix/test_io_fem_calculix.py
+++ b/tests/core/fem/formats/calculix/test_io_fem_calculix.py
@@ -1,7 +1,7 @@
import pytest
from ada import Assembly
-from ada.fem import LoadGravity, StepImplicit
+from ada.fem import LoadGravity, StepImplicitStatic
@pytest.fixture
@@ -20,7 +20,7 @@ def test_read_C3D20(example_files):
def test_write_test_model(test_shell_beam, test_calculix_dir):
a = test_shell_beam
- my_step = StepImplicit("static", total_time=1, max_incr=1, init_incr=1, nl_geom=True)
+ my_step = StepImplicitStatic("static", total_time=1, max_incr=1, init_incr=1, nl_geom=True)
my_step.add_load(LoadGravity("Gravity"))
a.fem.add_step(my_step)
diff --git a/tests/core/fem/formats/vtu/__init__.py b/tests/core/fem/formats/vtu/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/core/fem/formats/vtu/test_vtu_write.py b/tests/core/fem/formats/vtu/test_vtu_write.py
new file mode 100644
index 000000000..0ade11ce9
--- /dev/null
+++ b/tests/core/fem/formats/vtu/test_vtu_write.py
@@ -0,0 +1,50 @@
+import numpy as np
+
+from ada.fem.formats.general import FEATypes
+from ada.fem.formats.vtu.write import write_to_vtu_file
+from ada.fem.results.common import ElementBlock, ElementInfo, FemNodes
+from ada.fem.shapes.definitions import LineShapes, ShellShapes
+
+
+def test_basic_vtu_write():
+ """Write a minimal two-triangle shell mesh with point and cell data to a .vtu file."""
+ # Sample usage
+ elem_info = ElementInfo(type=ShellShapes.TRI, source_software=FEATypes.CODE_ASTER, source_type="ELGA3")
+ element_block = ElementBlock(
+ elem_info=elem_info,
+ node_refs=np.array([[0, 1, 3], [1, 2, 3]], dtype=np.int32),
+ identifiers=np.array([1, 2], dtype=np.int32),
+ )
+ # Four nodes forming a unit square in the z=0 plane; the two triangles
+ # above reference them by zero-based index.
+ fem_nodes = FemNodes(
+ coords=np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], dtype=np.float32),
+ identifiers=np.array([1, 2, 3, 4], dtype=np.int32),
+ )
+
+ # One scalar per node (4) and one per element (2), matching the mesh sizes.
+ point_data = {"Temperature": np.array([30.5, 32.5, 34.0, 36.0], dtype=np.float32)}
+ cell_data = {"Stress": np.array([1.0, 2.0], dtype=np.float32)}
+
+ # NOTE(review): assumes write_to_vtu_file creates the "temp/vtu" directory — confirm.
+ write_to_vtu_file(fem_nodes, [element_block], point_data, cell_data, "temp/vtu/basic_mesh.vtu")
+
+
+def test_mixed_mesh():
+ """Write a mesh mixing a shell (TRI) block and a line block to a .vtu file."""
+ # Sample usage
+ element_blocks = [
+ ElementBlock(
+ elem_info=ElementInfo(type=ShellShapes.TRI, source_software=FEATypes.GMSH, source_type="your_source_type"),
+ node_refs=np.array([[0, 1, 2], [2, 3, 0]]),
+ identifiers=np.array([1, 2]),
+ ),
+ ElementBlock(
+ elem_info=ElementInfo(type=LineShapes.LINE, source_software=FEATypes.GMSH, source_type="your_source_type"),
+ node_refs=np.array([[0, 1], [1, 2]]),
+ identifiers=np.array([3, 4]),
+ ),
+ ]
+ # Four shared nodes; both blocks index into the same coordinate array.
+ fem_nodes = FemNodes(
+ coords=np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], dtype=np.float32),
+ identifiers=np.array([1, 2, 3, 4]),
+ )
+
+ # One scalar per node (4) and one per element (2 TRI + 2 LINE = 4 total).
+ point_data = {"Temperature": np.array([30.5, 32.5, 34.0, 36.0], dtype=np.float32)}
+ cell_data = {"Stress": np.array([1.0, 2.0, 0.5, 0.8], dtype=np.float32)}
+
+ # NOTE(review): assumes write_to_vtu_file creates the "temp/vtu" directory — confirm.
+ write_to_vtu_file(fem_nodes, element_blocks, point_data, cell_data, "temp/vtu/mixed_mesh.vtu")
diff --git a/tests/core/fem/test_surfaces.py b/tests/core/fem/test_surfaces.py
index ae5f7b0c3..fd3882969 100644
--- a/tests/core/fem/test_surfaces.py
+++ b/tests/core/fem/test_surfaces.py
@@ -31,7 +31,7 @@ def build_box_model(geom_repr: str | GeomRepr, use_hex_quad):
p.fem = p.to_fem_obj(0.5, shp_repr=geom_repr, interactive=False, **props)
# Add Step
- step = a.fem.add_step(ada.fem.StepImplicit("MyStep"))
+ step = a.fem.add_step(ada.fem.StepImplicitStatic("MyStep"))
# Add Boundary condition
btn_nodes = box.bbox().sides.bottom(return_fem_nodes=True)
@@ -91,7 +91,7 @@ def test_surface_beam(surfaces_test_dir):
p.fem = p.to_fem_obj(0.10, "solid", interactive=False, options=GmshOptions(Mesh_ElementOrder=2))
# Add Step
- step = a.fem.add_step(ada.fem.StepImplicit("MyStep"))
+ step = a.fem.add_step(ada.fem.StepImplicitStatic("MyStep"))
# Add Boundary Condition
start_of_beam = bm.bbox().sides.back(return_fem_nodes=True)
diff --git a/tests/core/parametric_modelling/test_param_models.py b/tests/core/parametric_modelling/test_param_models.py
index 0634ef4ad..b04bbb52e 100644
--- a/tests/core/parametric_modelling/test_param_models.py
+++ b/tests/core/parametric_modelling/test_param_models.py
@@ -17,7 +17,7 @@ def test_to_fem(param_models_test_dir):
cog = param_model.fem.elements.calc_cog()
tol = 0.01
- my_step = a.fem.add_step(ada.fem.StepImplicit("static", total_time=1, max_incr=1, init_incr=1, nl_geom=False))
+ my_step = a.fem.add_step(ada.fem.StepImplicitStatic("static", total_time=1, max_incr=1, init_incr=1, nl_geom=False))
my_step.add_load(ada.fem.Load("Gravity", "gravity", -9.81))
# a.to_fem("SimpleStru_ca", fem_format="code_aster", overwrite=True, execute=False)
diff --git a/tests/fem/test_fem_static_cantilever.py b/tests/fem/test_fem_static_cantilever.py
index 2694c84de..b4ae67bc5 100644
--- a/tests/fem/test_fem_static_cantilever.py
+++ b/tests/fem/test_fem_static_cantilever.py
@@ -62,7 +62,7 @@ def test_fem_static(
props = dict(use_hex=use_hex_quad) if geom_repr == GeomRepr.SOLID else dict(use_quads=use_hex_quad)
- step = a.fem.add_step(ada.fem.StepImplicit("gravity", nl_geom=nl_geom, init_incr=100.0, total_time=100.0))
+ step = a.fem.add_step(ada.fem.StepImplicitStatic("gravity", nl_geom=nl_geom, init_incr=100.0, total_time=100.0))
step.add_load(ada.fem.LoadGravity("grav", -9.81 * 80))
if overwrite is False:
diff --git a/tests/full/rendering_pygfx/test_read_scene.py b/tests/full/rendering_pygfx/test_read_scene.py
index 117051111..b3539f8a5 100644
--- a/tests/full/rendering_pygfx/test_read_scene.py
+++ b/tests/full/rendering_pygfx/test_read_scene.py
@@ -7,7 +7,15 @@ def test_read_fem_object():
p = ada.Part("part") / bm
p.fem = p.to_fem_obj(0.1, "line")
a = ada.Assembly() / p
- a.to_gltf("beam_wMesh.glb")
+ # a.to_gltf("temp/beam_wMesh.glb")
+
+ renderer = RendererPyGFX(no_gui=True)
+ renderer.add_trimesh_scene(a.to_trimesh_scene(), "myFEM")
+
+
+def test_visualize_box_geom():
+ bm = ada.BeamTapered("bm", (0, 0, 0), (1, 0, 0), "BG300x200x8x10", "BG200x200x8x10")
+ a = ada.Assembly() / (ada.Part("part") / bm)
renderer = RendererPyGFX(no_gui=True)
renderer.add_trimesh_scene(a.to_trimesh_scene(), "myFEM")