MAINT: Bump pyvista[trame] from 0.43.8 to 0.43.9 #8247

Workflow file for this run

name: CI
on:
pull_request:
workflow_dispatch:
inputs:
run_all_tests:
description: 'Run all extended MAPDL build tests'
required: true
type: boolean
push:
tags:
- "*"
branches:
- main
schedule:
# * is a special character in YAML so you have to quote this string
- cron: '30 4 * * *'
env:
PROJECT_NAME: 'PyMAPDL'
MAIN_PYTHON_VERSION: '3.10'
PACKAGE_NAME: 'ansys-mapdl-core'
PACKAGE_NAMESPACE: 'ansys.mapdl.core'
DOCUMENTATION_CNAME: 'mapdl.docs.pyansys.com'
MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }}
MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }}
PYANSYS_OFF_SCREEN: True
DPF_START_SERVER: False
DPF_PORT: 21004
MAPDL_PACKAGE: ghcr.io/ansys/mapdl
MAPDL_IMAGE_VERSION_DOCS_BUILD: v24.1-ubuntu-student
ON_CI: True
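# Shared pytest flags: -vvv (very verbose), --durations=10 (report the 10 slowest tests),
# --maxfail=3 (abort after 3 failures), --reruns 3 --reruns-delay 4 (retry flaky tests via
# pytest-rerunfailures), --cov/--cov-report (coverage via pytest-cov).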
PYTEST_ARGUMENTS: '-vvv --durations=10 --maxfail=3 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html'
# Changing any of the following env vars "resets" the corresponding cache
# by changing the cache key, which is rendered as ...-v%RESET_XXX%-...
# Always increase the number: if you decrease it (or reuse a previous value)
# you might end up restoring an old cache that hasn't been deleted yet.
# GitHub applies a 7-day retention policy to caches by default.
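# For example, with RESET_EXAMPLES_CACHE: 2 the examples cache key is rendered as
# Examples-v2-<PyMAPDL version>-<commit SHA> (see the "Cache examples" step below).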
RESET_EXAMPLES_CACHE: 2
RESET_DOC_BUILD_CACHE: 2
RESET_AUTOSUMMARY_CACHE: 2
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
permissions:
contents: write
packages: read
pull-requests: write
issues: write
jobs:
doc-style:
name: "Documentation style check"
runs-on: ubuntu-latest
steps:
- name: "Ansys documentation style checks"
uses: ansys/actions/doc-style@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
vale-version: "3.4.1"
smoke-tests:
name: Build and smoke test (${{ matrix.os }} | Python ${{ matrix.python-version }}) (Release=${{ matrix.should-release }})
runs-on: ${{ matrix.os }}
if: github.ref != 'refs/heads/main'
timeout-minutes: 20
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ['3.9', '3.10', '3.11', '3.12']
# Only perform wheelhouse builds for macOS when releasing
should-release:
- ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }}
exclude:
- should-release: false
os: macos-latest
steps:
- name: "Build wheelhouse and perform smoke test"
uses: ansys/actions/build-wheelhouse@v6
with:
library-name: ${{ env.PACKAGE_NAME }}
operating-system: ${{ matrix.os }}
python-version: ${{ matrix.python-version }}
- name: "Importing library"
run: |
python -c "from ansys.mapdl import core as pymapdl; print(pymapdl.Report())"
- name: "Checking plotting support"
run:
python -c "from pyvista.plotting import system_supports_plotting; print('System support plotting ' + str(system_supports_plotting()))"
docs-build:
name: "Build documentation"
runs-on: ubuntu-latest
needs: doc-style
timeout-minutes: 60
outputs:
PYMAPDL_VERSION: ${{ steps.version.outputs.PYMAPDL_VERSION }}
env:
PYMAPDL_PORT: 21000 # default won't work on GitHub runners
PYMAPDL_DB_PORT: 21001 # default won't work on GitHub runners
PYMAPDL_START_INSTANCE: FALSE
ON_DOCUMENTATION: TRUE
steps:
- name: "Install Git and checkout project"
uses: actions/checkout@v4.1.6
- name: "Login in Github container registry"
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: "Pull, launch, and validate MAPDL service"
id: start_mapdl
env:
LICENSE_SERVER: ${{ secrets.LICENSE_SERVER }}
MAPDL_VERSION: ${{ env.MAPDL_IMAGE_VERSION_DOCS_BUILD }}
DISTRIBUTED_MODE: "dmp"
run: |
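# Launch the MAPDL container in the background and record its PID; the
# "Waiting for the services to be up" step later blocks until it is ready.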
export INSTANCE_NAME=MAPDL_0
.ci/start_mapdl.sh &> mapdl_launch.log & export DOCKER_PID=$!
echo "Launching MAPDL service at PID: $DOCKER_PID"
echo "DOCKER_PID=$(echo $DOCKER_PID)" >> $GITHUB_OUTPUT
- name: "DPF server activation"
run: |
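# Pull and start the DPF server container in the background so it does not block
# the following steps.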
$(docker pull ghcr.io/ansys/dpf-core:22.2dev && docker run -d --name dpfserver -p ${{ env.DPF_PORT }}:50052 ghcr.io/ansys/dpf-core:22.2dev && echo "DPF Server active on port ${{ env.DPF_PORT }}.") &
- name: "Getting files change filters"
uses: dorny/paths-filter@v3
id: changes
with:
filters: |
workflows:
- '.github/workflows/**'
- 'pyproject.toml'
examples:
- 'examples/**'
- 'pyproject.toml'
- name: "Setup Python with cache"
uses: actions/setup-python@v5
if: steps.changes.outputs.workflows != 'true'
with:
cache: 'pip'
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Setup Python without cache"
uses: actions/setup-python@v5
if: steps.changes.outputs.workflows == 'true'
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Install OS packages"
run: |
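# pandoc and the texlive/latexmk packages are needed for the PDF documentation build,
# xvfb provides a virtual display for off-screen plotting, and graphviz renders diagrams.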
sudo apt update
sudo apt install zip pandoc libgl1-mesa-glx xvfb texlive-latex-extra latexmk graphviz texlive-xetex texlive-fonts-extra qpdf xindy
- name: "Test virtual framebuffer"
run: |
pip install -r .ci/requirements_test_xvfb.txt
xvfb-run python .ci/display_test.py
- name: "Install ansys-mapdl-core"
run: |
pip install .
xvfb-run python -c "from ansys.mapdl import core as pymapdl; print(pymapdl.Report())"
- name: "Retrieve PyMAPDL version"
id: version
run: |
echo "PYMAPDL_VERSION=$(python -c 'from ansys.mapdl.core import __version__; print(__version__)')" >> $GITHUB_OUTPUT
echo "PyMAPDL version is: $(python -c "from ansys.mapdl.core import __version__; print(__version__)")"
- name: "Cache examples"
uses: actions/cache@v4
if: steps.changes.outputs.examples != 'true' || (github.ref == 'refs/heads/main' && !contains(github.ref, 'refs/tags'))
with:
path: doc/source/examples
key: Examples-v${{ env.RESET_EXAMPLES_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}-${{ github.sha }}
restore-keys: |
Examples-v${{ env.RESET_EXAMPLES_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}
- name: "Cache docs build directory"
uses: actions/cache@v4
if: steps.changes.outputs.workflows != 'true' || (github.ref == 'refs/heads/main' && !contains(github.ref, 'refs/tags'))
with:
path: doc/_build
key: doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}-${{ github.sha }}
restore-keys: |
doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}
- name: "Cache autosummary"
uses: actions/cache@v4
if: steps.changes.outputs.workflows != 'true' || (github.ref == 'refs/heads/main' && !contains(github.ref, 'refs/tags'))
with:
path: doc/source/**/_autosummary/*.rst
key: autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}-${{ github.sha }}
restore-keys: |
autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYMAPDL_VERSION }}
- name: "Install docs build requirements"
run: |
pip install .[doc]
- name: "Waiting for the services to be up"
run: |
.ci/waiting_services.sh
- name: "Build documentation"
run: |
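# SPHINXOPTS: -j auto builds in parallel, -W turns warnings into errors, and
# --keep-going continues past the first warning so all of them get reported.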
xvfb-run make -C doc html SPHINXOPTS="-j auto -W --keep-going"
- name: "Substitute defective GIF"
run: |
.ci/substitute_defective_gif.sh
- name: "Upload HTML Documentation"
uses: actions/upload-artifact@v4
with:
name: documentation-html
path: doc/_build/html
retention-days: 7
- name: "Build PDF Documentation"
working-directory: doc
run: make pdf
- name: "Show latex dir"
working-directory: doc
run: ls _build/latex
- name: "Upload PDF documentation"
uses: actions/upload-artifact@v4
with:
name: documentation-pdf
path: doc/_build/latex/pymapdl*.pdf
retention-days: 7
- name: 'Upload minimal requirements file'
# To include it in the release
uses: actions/upload-artifact@v4
with:
name: minimum_requirements.txt
path: ./minimum_requirements.txt
- name: "Collect logs on failure"
if: always()
env:
MAPDL_VERSION: ${{ env.MAPDL_IMAGE_VERSION_DOCS_BUILD }}
MAPDL_INSTANCE: MAPDL_0
LOG_NAMES: logs-build-docs
run: |
.ci/collect_mapdl_logs.sh
- name: "Upload logs to GitHub"
if: always()
uses: actions/upload-artifact@master
with:
name: logs-build-docs.tgz
path: ./logs-build-docs.tgz
- name: "Display files structure"
if: always()
env:
MAPDL_INSTANCE: mapdl
LOG_NAMES: logs-build-docs
run: |
.ci/display_logs.sh
build-test:
name: "Remote: Build & test MAPDL ${{ matrix.mapdl-version }} (Extended testing ${{ matrix.extended_testing }})"
runs-on: ubuntu-latest
if: github.ref != 'refs/heads/main' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
timeout-minutes: 35
strategy:
fail-fast: false
matrix:
mapdl-version: [
'v22.2.1',
'v22.2-ubuntu',
'v23.1.0',
'v23.1-ubuntu',
'v23.2.0',
'v23.2-ubuntu',
'v24.1.0',
'v24.1-ubuntu',
'v24.1-ubuntu-student',
'v24.2.0',
'latest-ubuntu',
'latest-ubuntu-student',
]
extended_testing:
- ${{ github.event_name == 'schedule' || ( github.event_name == 'workflow_dispatch' && inputs.run_all_tests ) || ( github.event_name == 'push' && contains(github.ref, 'refs/tags') ) }}
exclude:
# In PRs, skip all MAPDL versions except the student ones (their exclude entries are commented out below).
- extended_testing: false
mapdl-version: 'v22.2.1'
- extended_testing: false
mapdl-version: 'v22.2-ubuntu'
- extended_testing: false
mapdl-version: 'v23.1.0'
- extended_testing: false
mapdl-version: 'v23.1-ubuntu'
- extended_testing: false
mapdl-version: 'v23.2.0'
- extended_testing: false
mapdl-version: 'v23.2-ubuntu'
- extended_testing: false
mapdl-version: 'v24.1.0'
- extended_testing: false
mapdl-version: 'v24.1-ubuntu'
# - extended_testing: false
# mapdl-version: 'v24.1-ubuntu-student'
- extended_testing: false
mapdl-version: 'v24.2.0'
- extended_testing: false
mapdl-version: 'latest-ubuntu'
# - extended_testing: false
# mapdl-version: 'latest-ubuntu-student'
env:
PYMAPDL_PORT: 21000 # default won't work on GitHub runners
PYMAPDL_PORT2: 21001 # for the pool testing and default won't work on GitHub runners
PYMAPDL_DB_PORT: 21002 # default won't work on GitHub runners
PYMAPDL_DB_PORT2: 21003 # default won't work on GitHub runners
PYMAPDL_START_INSTANCE: FALSE
ON_LOCAL: FALSE
ON_UBUNTU: FALSE
outputs:
DISTRIBUTED_MODE: ${{ steps.distributed_mode.outputs.distributed_mode }}
steps:
- name: "Install Git and checkout project"
uses: actions/checkout@v4.1.6
- name: "Login in Github container registry"
uses: docker/login-action@v3.2.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: "Getting SMP/DMP mode"
id: distributed_mode
run: |
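# Image tags containing ".1." (e.g. v23.1.0 or v24.1.0) run in distributed (DMP) mode;
# all other images default to shared-memory parallel (SMP).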
image=${{ matrix.mapdl-version }}
export distributed_mode="smp"
if [[ $image == *".1."* ]]; then
export distributed_mode="dmp";
fi
echo "Distributed mode: $distributed_mode"
echo "distributed_mode=$(echo $distributed_mode)" >> $GITHUB_OUTPUT
- name: "Pull, launch, and validate MAPDL service"
id: start_mapdl
env:
LICENSE_SERVER: ${{ secrets.LICENSE_SERVER }}
MAPDL_VERSION: ${{ matrix.mapdl-version }}
DISTRIBUTED_MODE: ${{ steps.distributed_mode.outputs.distributed_mode }}
run: |
echo "Launching first MAPDL instance..."
export INSTANCE_NAME=MAPDL_0
.ci/start_mapdl.sh &> mapdl_launch_0.log & export DOCKER_PID_0=$!
echo "Launching a second instance for MAPDL pool testing..."
export PYMAPDL_PORT=${{ env.PYMAPDL_PORT2 }}
export PYMAPDL_DB_PORT=${{ env.PYMAPDL_DB_PORT2 }}
export INSTANCE_NAME=MAPDL_1
.ci/start_mapdl.sh &> mapdl_launch_1.log & export DOCKER_PID_1=$!
echo "Launching MAPDL service 0 at PID: $DOCKER_PID_0"
echo "Launching MAPDL service 1 at PID: $DOCKER_PID_2"
echo "DOCKER_PID_0=$(echo $DOCKER_PID_0)" >> $GITHUB_OUTPUT
echo "DOCKER_PID_1=$(echo $DOCKER_PID_1)" >> $GITHUB_OUTPUT
- name: "DPF server activation"
run: |
$(docker pull ghcr.io/ansys/dpf-core:22.2dev && docker run -d --name dpfserver -p ${{ env.DPF_PORT }}:50052 ghcr.io/ansys/dpf-core:22.2dev && echo "DPF Server active on port ${{ env.DPF_PORT }}.") &
- name: "Getting files change filters"
uses: dorny/paths-filter@v3
id: changes
with:
filters: |
workflows:
- '.github/workflows/**'
- name: "Setup Python with cache"
uses: actions/setup-python@v5
if: steps.changes.outputs.workflows != 'true'
with:
cache: 'pip'
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Setup Python without cache"
uses: actions/setup-python@v5
if: steps.changes.outputs.workflows == 'true'
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Install os packages"
run: |
sudo apt update
sudo apt install libgl1-mesa-glx xvfb
- name: "Test virtual framebuffer"
run: |
pip install -r .ci/requirements_test_xvfb.txt
xvfb-run python .ci/display_test.py
- name: Install ansys-mapdl-core
run: |
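# Build the wheel and sdist, then install the wheel so the packaged distribution
# (checked later with "twine check dist/*") is what actually gets tested.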
python -m pip install build
python -m build
python -m pip install dist/*.whl
xvfb-run python -c "from ansys.mapdl import core as pymapdl; print(pymapdl.Report())"
- name: "Unit testing requirements installation"
run: |
python -m pip install .[tests]
- name: "Waiting for the services to be up"
run: |
.ci/waiting_services.sh
- name: "Unit testing"
env:
DISTRIBUTED_MODE: ${{ steps.distributed_mode.outputs.distributed_mode }}
run: |
if [[ "${{ matrix.mapdl-version }}" == *"ubuntu"* ]]; then export ON_UBUNTU=true; else export ON_UBUNTU=false; fi
echo "ON_UBUNTU: $ON_UBUNTU"
xvfb-run pytest \
${{ env.PYTEST_ARGUMENTS }} \
--ignore_image_cache \
--cov-report=xml:centos-${{ matrix.mapdl-version }}-remote.xml
- uses: codecov/codecov-action@v4
name: "Upload coverage to Codecov"
with:
token: ${{ secrets.CODECOV_TOKEN }} # required
name: centos-${{ matrix.mapdl-version }}-remote.xml
flags: centos,remote,${{ matrix.mapdl-version }}
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
with:
name: centos-${{ matrix.mapdl-version }}-remote.xml
path: ./centos-${{ matrix.mapdl-version }}-remote.xml
- name: "Check package"
run: |
pip install twine
twine check dist/*
- name: "Upload wheel and binaries"
uses: actions/upload-artifact@v4
with:
name: PyMAPDL-packages-${{ matrix.mapdl-version }}
path: dist/
retention-days: 7
- name: "Collect logs on failure"
if: always()
env:
MAPDL_VERSION: ${{ matrix.mapdl-version }}
MAPDL_INSTANCE: mapdl
LOG_NAMES: logs-${{ matrix.mapdl-version }}
run: |
.ci/collect_mapdl_logs.sh
- name: "Upload logs to GitHub"
if: always()
uses: actions/upload-artifact@v4
with:
name: logs-${{ matrix.mapdl-version }}.tgz
path: ./logs-${{ matrix.mapdl-version }}.tgz
- name: "Display files structure"
if: always()
env:
MAPDL_INSTANCE: mapdl
LOG_NAMES: logs-${{ matrix.mapdl-version }}
run: |
.ci/display_logs.sh
build-test-ubuntu:
name: "Local: Build & test on Ubuntu MAPDL ${{ matrix.mapdl-image }} (Extended testing ${{ matrix.extended_testing }})"
runs-on: ubuntu-latest
if: github.ref != 'refs/heads/main' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
timeout-minutes: 55
outputs:
pushed: ${{ steps.attatch-to-pr.outputs.pushed }}
committed: ${{ steps.attatch-to-pr.outputs.committed }}
commit_long_sha: ${{ steps.attatch-to-pr.outputs.commit_long_sha }}
strategy:
fail-fast: false
matrix:
mapdl-image: [
'v23.2-ubuntu',
'v24.1-ubuntu',
'v24.1-ubuntu-student',
'latest-ubuntu',
'latest-ubuntu-student',
]
extended_testing:
- ${{ github.event_name == 'schedule' || ( github.event_name == 'workflow_dispatch' && inputs.run_all_tests ) || ( github.event_name == 'push' && contains(github.ref, 'refs/tags') ) }}
exclude:
- extended_testing: false
mapdl-image: 'v23.2-ubuntu'
- extended_testing: false
mapdl-image: 'v24.1-ubuntu'
- extended_testing: false
mapdl-image: 'latest-ubuntu'
container:
image: ghcr.io/ansys/mapdl:${{ matrix.mapdl-image }}
options: -u=0:0 --oom-kill-disable --memory=6656MB --memory-swap=16896MB --shm-size=1gb --entrypoint /bin/bash
credentials:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
env:
ON_LOCAL: true
ON_UBUNTU: true
ON_STUDENT: true
P_SCHEMA: "/ansys_inc/v241/ansys/ac4/schema"
PYTEST_TIMEOUT: 120 # seconds. Limit the duration for each unit test
steps:
- name: "Install Git and checkout project"
uses: actions/checkout@v4.1.6
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
- name: "Setup Python"
uses: actions/setup-python@v5
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Checking Python"
run: |
python --version
python -m pip install --upgrade pip
python -m venv ./.venv
source ./.venv/bin/activate
- name: "Install OS packages"
run: |
apt update
apt install -y libgl1-mesa-glx xvfb libgomp1
- name: "Test virtual framebuffer"
run: |
python -m pip install -r .ci/requirements_test_xvfb.txt
xvfb-run python .ci/display_test.py
- name: "Install ansys-mapdl-core"
run: |
python -m pip install build
python -m build
python -m pip install dist/*.whl
xvfb-run python -c "from ansys.mapdl import core as pymapdl; print(pymapdl.Report())"
- name: "Unit testing requirements installation"
run: |
python -m pip install .[tests]
- name: "Unit testing"
env:
ANSYSLMD_LICENSE_FILE: "1055@${{ secrets.LICENSE_SERVER }}"
run: |
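# Unset the remote-connection variables so PyMAPDL launches MAPDL locally inside
# the container; AWP_ROOT222 points at the MAPDL installation root.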
unset PYMAPDL_PORT
unset PYMAPDL_START_INSTANCE
export AWP_ROOT222=/ansys_inc
xvfb-run pytest -k "not test_dpf" \
${{ env.PYTEST_ARGUMENTS }} \
--reset_only_failed --add_missing_images \
--cov-report=xml:${{ matrix.mapdl-image }}-local.xml
- name: "Adding the directory as safe directory for later step"
run: |
git config --global --add safe.directory $GITHUB_WORKSPACE
- name: "Attaching modified files to PR"
id: attatch-to-pr
uses: EndBug/add-and-commit@v9
with:
message: Update the image cache
committer_name: GitHub Actions
committer_email: actions@github.com
add: './tests/.image_cache/*.png'
- name: "PR comment with reactions"
if: ${{ steps.attatch-to-pr.outputs.pushed == 'true' }}
uses: thollander/actions-comment-pull-request@v2
with:
message: |
Hello! :wave:
Your PR is changing the image cache. So I am attaching the new image cache in a new [commit](https://github.com/ansys/pymapdl/commit/${{ steps.attatch-to-pr.outputs.commit_long_sha }}).
This commit does not re-run the CICD workflows (since no changes are made to the codebase), so you will see those checks with the status `Expected — Waiting for status to be reported`. Do not worry. Your commit's workflow is still running [here](https://github.com/ansys/pymapdl/pull/${{ github.event.pull_request.number }}/checks?sha=${{ github.event.pull_request.head.sha }}) :smile:
You might want to rerun the tests to make sure that everything is passing. You can retrigger the CICD by pushing an empty commit: `git commit -m "Empty commit to trigger CICD" --allow-empty`.
You will see this message every time your commit changes the image cache without attaching the updated cache. :nerd_face:
reactions: rocket
- uses: codecov/codecov-action@v4
name: "Upload coverage to Codecov"
with:
token: ${{ secrets.CODECOV_TOKEN }} # required
root_dir: ${{ github.workspace }}
name: ${{ matrix.mapdl-image }}-local.xml
flags: ubuntu,local,${{ matrix.mapdl-image }}
- name: 'Upload coverage artifacts'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.mapdl-image }}-local.xml
path: ./${{ matrix.mapdl-image }}-local.xml
build-test-ubuntu-minimal:
name: "Local: Build & test minimal setup on Ubuntu MAPDL ${{ matrix.mapdl-image }} (Extended testing ${{ matrix.extended_testing }})"
runs-on: ubuntu-latest
if: github.ref != 'refs/heads/main' || github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
timeout-minutes: 55
strategy:
fail-fast: false
matrix:
mapdl-image: [
'v23.2-ubuntu',
'v24.1-ubuntu',
'v24.1-ubuntu-student',
'latest-ubuntu',
'latest-ubuntu-student',
]
extended_testing:
- ${{ github.event_name == 'schedule' || ( github.event_name == 'workflow_dispatch' && inputs.run_all_tests ) || ( github.event_name == 'push' && contains(github.ref, 'refs/tags') ) }}
exclude:
- extended_testing: false
mapdl-image: 'v23.2-ubuntu'
- extended_testing: false
mapdl-image: 'v24.1-ubuntu'
- extended_testing: false
mapdl-image: 'latest-ubuntu'
container:
image: ghcr.io/ansys/mapdl:${{ matrix.mapdl-image }}
options: -u=0:0 --oom-kill-disable --memory=6656MB --memory-swap=16896MB --shm-size=1gb --entrypoint /bin/bash
credentials:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
env:
ON_LOCAL: true
ON_UBUNTU: true
ON_STUDENT: true
steps:
- name: "Install Git and checkout project"
uses: actions/checkout@v4.1.6
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
- name: "Installing missing package"
run: |
sudo apt-get update
sudo apt-get install -y libgomp1
- name: "Setup Python"
uses: actions/setup-python@v5
with:
python-version: ${{ env.MAIN_PYTHON_VERSION }}
- name: "Checking Python"
run: |
python --version
python -m pip install --upgrade pip
- name: "Install ansys-mapdl-core"
run: |
python -m pip install . --no-deps
python -m pip install -r minimum_requirements.txt
python -c "from ansys.mapdl import core as pymapdl; print('Import successfull')"
- name: "Unit testing requirements installation"
run: |
python -m pip install pytest pytest-rerunfailures pytest-cov
- name: "Unit testing"
env:
ANSYSLMD_LICENSE_FILE: "1055@${{ secrets.LICENSE_SERVER }}"
run: |
# Because 'ansys-tools-path' is not installed, we need to provide the
# MAPDL executable path through the env var PYMAPDL_MAPDL_EXEC.
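# Derive the version digits from the image tag, e.g. "v24.1-ubuntu":
# head -c 5 -> "v24.1", tail -c 4 -> "24.1", tr -d '.' -> "241".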
if [[ "${{ matrix.mapdl-image }}" == *"latest-ubuntu"* ]] ; then
version="242"
else
version=$(echo "${{ matrix.mapdl-image }}" | head -c 5 | tail -c 4 | tr -d '.')
fi;
echo "$version"
export PYMAPDL_MAPDL_EXEC=/ansys_inc/v"$version"/ansys/bin/ansys"$version"
echo "$PYMAPDL_MAPDL_EXEC"
unset PYMAPDL_START_INSTANCE
pytest -k "not test_dpf" \
${{ env.PYTEST_ARGUMENTS }} \
--cov-report=xml:${{ matrix.mapdl-image }}-minimal.xml
- uses: codecov/codecov-action@v4
name: "Upload coverage to Codecov"
with:
token: ${{ secrets.CODECOV_TOKEN }} # required
root_dir: ${{ github.workspace }}
name: ${{ matrix.mapdl-image }}-minimal.xml
flags: ubuntu,local,${{ matrix.mapdl-image }},minimal
- name: 'Upload coverage artifacts'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.mapdl-image }}-minimal.xml
path: ./${{ matrix.mapdl-image }}-minimal.xml
test-windows:
# Skipped
if: github.repository == ''
name: "Local: Build & test on Windows"
runs-on: [self-hosted, Windows, pymapdl]
timeout-minutes: 30
env:
ON_LOCAL: TRUE
steps:
- uses: actions/checkout@v4.1.6
# Skipping because it is installed locally.
# - name: Setup Python
# uses: actions/setup-python@v5
# with:
# python-version: 3.9
- name: "Checking python_"
shell: powershell
run: |
python -m pip install --upgrade pip
- name: "Creating python venv"
shell: powershell
run: |
python -m venv .\.venv
.\.venv\Scripts\activate
- name: "Install ansys-mapdl-core"
shell: powershell
run: |
python -m pip install build
python -m build
$FILE_=Resolve-Path '.\dist\*.whl'
python -m pip install $FILE_.Path --upgrade
python -c "from ansys.mapdl import core as pymapdl; print(pymapdl.Report())"
- name: "Unit testing requirements installation"
shell: powershell
run: |
python -m pip install .[tests]
# - name: DPF Server Activation
# run: |
# docker pull ghcr.io/ansys/dpf-core:22.2dev
# docker run -d --name dpfserver -p ${{ env.DPF_PORT }}:50052 ghcr.io/ansys/dpf-core:22.2dev && echo "DPF Server active on port ${{ env.DPF_PORT }}."
- name: "Unit testing"
shell: powershell
run: |
# Clear these variables so PyMAPDL starts a local MAPDL instance (PowerShell syntax)
$env:PYMAPDL_PORT = $null
$env:PYMAPDL_START_INSTANCE = $null
python -m pytest -k "not test_database and not test_dpf" `
${{ env.PYTEST_ARGUMENTS }} `
--ignore_image_cache `
--cov-report=xml:windows-v22.2.0-local.xml
- uses: codecov/codecov-action@v4
name: "Upload coverage to Codecov"
with:
token: ${{ secrets.CODECOV_TOKEN }} # required
name: windows-v22.2.0-local.xml
flags: windows,local,v22.2.0
- name: Upload coverage artifacts
uses: actions/upload-artifact@v4
with:
name: windows-v22.2.0-local.xml
path: ./windows-v22.2.0-local.xml
release:
name: "Release"
if: github.event_name == 'push' && contains(github.ref, 'refs/tags')
needs: [smoke-tests, docs-build, build-test, build-test-ubuntu, build-test-ubuntu-minimal]
runs-on: ubuntu-latest
steps:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.9
- uses: actions/download-artifact@v4
- name: Copy files, overwriting files with the same name
# (to avoid upload issue)
run: |
mkdir artifacts
find . -iname 'ansys_mapdl_core-*.tar.gz' -exec cp \{\} ./artifacts/ \;
find . -iname 'ansys_mapdl_core-*-py3-none-any.whl' -exec cp \{\} ./artifacts/ \;
find . -iname 'minimum_requirements.txt' -exec cp \{\} ./artifacts/ \;
- name: Display structure of downloaded files
run: ls -Rla
- name: "Release to GitHub"
uses: softprops/action-gh-release@v2
with:
files: |
./**/artifacts/ansys_mapdl_core-*-py3-none-any.whl
./**/artifacts/ansys_mapdl_core-*.tar.gz
./**/artifacts/minimum_requirements.txt
./**/pymapdl-Documentation-*.pdf
./**/ansys-mapdl-core*wheelhouse*.zip
- name: Upload to Public PyPi
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
run: |
pip install twine
twine upload --skip-existing ./**/*.whl
twine upload --skip-existing ./**/*.tar.gz
upload-docs-release:
name: "Upload release documentation"
if: github.event_name == 'push' && contains(github.ref, 'refs/tags')
runs-on: ubuntu-latest
needs: [release]
steps:
- name: Deploy the stable documentation
# TODO: testing SEO improvements. This branch avoids creating
# sitemap.xml pages, in contrast to v5.
uses: ansys/actions/doc-deploy-stable@feat/seo-improvements
with:
cname: ${{ env.DOCUMENTATION_CNAME }}
token: ${{ secrets.GITHUB_TOKEN }}
render-last: '5'
doc-index-stable:
name: "Deploy stable docs index"
runs-on: ubuntu-latest
needs: upload-docs-release
steps:
- name: "Install Git and clone project"
uses: actions/checkout@v4.1.6
- name: "Install the package requirements"
run: pip install -e .
- name: "Get the version to PyMeilisearch"
run: |
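# For a hypothetical __version__ of "0.68.2" this yields VERSION="0.68" and VERSION_MEILI="0-68".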
VERSION=$(python -c "from ansys.mapdl.core import __version__; print('.'.join(__version__.split('.')[:2]))")
VERSION_MEILI=$(python -c "from ansys.mapdl.core import __version__; print('-'.join(__version__.split('.')[:2]))")
echo "Calculated VERSION: $VERSION"
echo "Calculated VERSION_MEILI: $VERSION_MEILI"
echo "VERSION=$VERSION" >> $GITHUB_ENV
echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV
- name: "Deploy the latest documentation index"
uses: ansys/actions/doc-deploy-index@v6
with:
cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }}
index-name: pymapdl-v${{ env.VERSION_MEILI }}
host-url: ${{ vars.MEILISEARCH_HOST_URL }}
api-key: ${{ env.MEILISEARCH_API_KEY }}
upload-dev-docs:
name: Upload dev documentation
if: github.ref == 'refs/heads/main' && !contains(github.ref, 'refs/tags')
runs-on: ubuntu-latest
needs: [docs-build]
steps:
- name: Deploy the latest documentation
# TODO: testing SEO improvements. This branch reuses the "index.html" from the stable version
uses: ansys/actions/doc-deploy-dev@feat/seo-improvements
with:
cname: ${{ env.DOCUMENTATION_CNAME }}
token: ${{ secrets.GITHUB_TOKEN }}
doc-index-dev:
name: "Deploy dev index docs"
runs-on: ubuntu-latest
needs: upload-dev-docs
steps:
- name: "Deploy the latest documentation index"
uses: ansys/actions/doc-deploy-index@v6
with:
cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev
index-name: pymapdl-vdev
host-url: ${{ vars.MEILISEARCH_HOST_URL }}
api-key: ${{ env.MEILISEARCH_API_KEY }}
notify:
name: Notify failed build
needs: [smoke-tests, docs-build, build-test, build-test-ubuntu, build-test-ubuntu-minimal]
if: failure() && github.event_name == 'schedule'
runs-on: ubuntu-latest
steps:
- name: Open issue
uses: jayqi/failed-build-issue-action@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
title-template: "Failed scheduled build"
label-name: "Build failed"
test_julia:
name: "Test julia implementation"
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
julia-version: ['1.10.2'] # '1.6.7'
os: [ubuntu-latest, windows-latest]
steps:
- uses: actions/checkout@v4.1.6
- name: "Set up Julia"
uses: julia-actions/setup-julia@v2
with:
version: ${{ matrix.julia-version }}
- name: "Getting python interpreter"
id: get_python
run: |
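# Ask PyCall which Python interpreter it is built against, so PyMAPDL can be
# installed into that same environment in the next step.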
pycallpython=$(julia -e 'using Pkg;Pkg.add("PyCall");using PyCall;println(PyCall.python)')
echo "pythonpath=$(echo $pycallpython)" >> $GITHUB_OUTPUT
- name: Installing pymapdl
env:
PYTHON_PATH: ${{ steps.get_python.outputs.pythonpath }}
run: |
"$PYTHON_PATH" -m pip install -e .
- name: "Starting julia"
shell: julia {0}
run: |
using Pkg
Pkg.add("PyCall")
using PyCall
pymapdl = pyimport("ansys.mapdl.core")
print(pymapdl.__version__)