Skip to content

Commit c07e5dd

Browse files
committed
ci: adding ci testing and release
1 parent 4ce99b7 commit c07e5dd

File tree

5 files changed

+370
-3
lines changed

5 files changed

+370
-3
lines changed

.github/workflows/ci.yaml

Lines changed: 161 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,161 @@
# CI pipeline: run tests on every push/PR; on pushes to the release
# branch additionally build wheels + sdist, tag the commit, create a
# GitHub release, and publish the distributions to PyPI.
name: numflow.py CI

on:
  push:
    branches: [release, main]
  pull_request:
    branches: [main]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: Set up Python 3.11
        uses: actions/setup-python@v3
        with:
          # Quoted on purpose: an unquoted version is parsed as a YAML
          # float (e.g. 3.10 -> 3.1), which selects the wrong Python.
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pytest pytest-cov

      - name: Build numflow.py package
        run: |
          pip install .
          pip install numpy

      - name: Test with pytest
        run: |
          pytest ./tests/* --cov=numflow

      - name: Upload to Coveralls
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_TOKEN }}
        run: |
          pip install coveralls
          coveralls

  make-wheels:
    name: Make ${{ matrix.os }} wheels
    # Release artifacts are only built on pushes to the release branch.
    if: github.event_name == 'push' && contains(github.ref, 'release')
    needs: test
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ["macos-latest", "ubuntu-latest", "windows-latest"]
    steps:
      - name: "Checkout repo"
        uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: "Build wheels"
        uses: pypa/cibuildwheel@v2.9.0

      # Fail if the wheel build modified any tracked files.
      - name: Verify clean directory
        run: git diff --exit-code
        shell: bash

      - name: "Upload wheel as artifact"
        uses: actions/upload-artifact@v3
        with:
          name: artifact-${{ matrix.os }}-wheel
          path: ./wheelhouse/*.whl

  make-sdist:
    name: Make source distribution
    if: github.event_name == 'push' && contains(github.ref, 'release')
    needs: test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: Build SDist
        run: pipx run build --sdist

      - name: Check metadata
        run: pipx run twine check dist/*

      - uses: actions/upload-artifact@v3
        with:
          name: artifact-source-dist
          path: dist/*.tar.gz

  tag-and-release:
    name: Tag commit and create a release
    needs: [make-wheels, make-sdist]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: Set up Python 3.11
        uses: actions/setup-python@v3
        with:
          # Quoted for the same float-parsing reason as in the test job.
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install scikit-build

      # Derive the tag (vX.Y.Z) from the package version reported by
      # setup.py; `tail -1` drops any build noise printed before it.
      - name: "Get release version"
        run: |
          CURRENT_VERSION=$(python3 setup.py --version | tail -1)
          echo "Current version: $CURRENT_VERSION"
          echo "CURRENT_VERSION=v$CURRENT_VERSION" >> $GITHUB_ENV

      - uses: rickstaa/action-create-tag@v1
        with:
          tag: ${{ env.CURRENT_VERSION }}

      - uses: ncipollo/release-action@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          generateReleaseNotes: true
          tag: ${{ env.CURRENT_VERSION }}

  upload:
    name: Upload to PyPI
    needs: tag-and-release
    runs-on: ubuntu-latest
    environment:
      name: pypi
      url: https://pypi.org/p/numflow
    permissions:
      # Required for PyPI trusted publishing (OIDC token exchange).
      id-token: write
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v3
        with:
          path: dist

      # download-artifact places each artifact in its own subdirectory;
      # flatten everything into dist/ for the publish step.
      - name: Copy artifacts to dist/ folder
        run: |
          echo "* Downloaded artifacts:"
          ls dist/
          cd dist/
          echo "* Copying artifacts to dist/ folder:"
          find . -mindepth 2 -maxdepth 2 -type f -print -exec mv {} . \;
          echo "* Deleting empty directories:"
          find . -maxdepth 1 -type d -empty -print -exec rmdir {} +
          cd ..
          echo "* Prepared artifacts:"
          ls dist/

      - name: Publish package distributions to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1

.github/workflows/wheel.yaml

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
# Manually triggered workflow: builds wheels on all three platforms
# plus an sdist, then gathers everything into a flat dist/ layout so
# the artifacts can be inspected (nothing is published here).
name: "Create Python distributions"

on:
  # Manual trigger only; no automatic runs.
  workflow_dispatch: {}

jobs:
  make-wheels:
    name: Make ${{ matrix.os }} wheels
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ["macos-latest", "ubuntu-latest", "windows-latest"]
    steps:
      - name: "Checkout repo"
        uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: "Build wheels"
        uses: pypa/cibuildwheel@v2.9.0

      # Fail if the wheel build left the working tree dirty.
      - name: Verify clean directory
        run: git diff --exit-code
        shell: bash

      - name: "Upload wheel as artifact"
        uses: actions/upload-artifact@v3
        with:
          name: artifact-${{ matrix.os }}-wheel
          path: ./wheelhouse/*.whl

  make-sdist:
    name: Make source distribution
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: "recursive"

      - name: Build SDist
        run: pipx run build --sdist

      - name: Check metadata
        run: pipx run twine check dist/*

      - uses: actions/upload-artifact@v3
        with:
          name: artifact-source-dist
          path: dist/*.tar.gz

  upload:
    name: Checkout Artifacts
    needs: [make-wheels, make-sdist]
    runs-on: ubuntu-latest
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v3
        with:
          path: dist

      # Each artifact lands in its own subdirectory; flatten them into
      # dist/ and remove the now-empty directories.
      - name: Copy artifacts to dist/ folder
        run: |
          echo "* Downloaded artifacts:"
          ls dist/
          cd dist/
          echo "* Copying artifacts to dist/ folder:"
          find . -mindepth 2 -maxdepth 2 -type f -print -exec mv {} . \;
          echo "* Deleting empty directories:"
          find . -maxdepth 1 -type d -empty -print -exec rmdir {} +
          cd ..
          echo "* Prepared artifacts:"
          ls dist/

README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22

33
C++ based tool for converting vector field data into models for rendering.
44

5+
I wrote a [blog post describing the design, along with some example outputs — see more here!](https://vojtatom.github.io/numflow.py)
6+
57
## Dev
68

79
Developing the package locally is recommended inside a devcontainer — see the `.devcontainer` folder.

tests/test_kernels.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,15 +32,19 @@ def test_glyph_kernel(rectilinear_csv_data: str):
3232
field = dataset_kernel(rectilinear_csv_data)
3333
points = points_kernel([0.1, 0.1, -10], [60, 60, -10], [10, 10, 1])
3434
glyphs = glyph_kernel(field, points)
35-
assert glyphs.shape == (100, 3)
35+
assert np.shape(glyphs) == (100, 3)
3636

3737

3838
def test_streamline_kernel(rectilinear_csv_data: str):
3939
field = dataset_kernel(rectilinear_csv_data)
4040
points = points_kernel([0.1, 0.1, -10], [60, 60, -10], [10, 10, 1])
4141
streamlines = stream_kernel(field, points, 0, 10)
4242
assert len(streamlines.l()) == 100
43+
assert np.shape(streamlines.l()) == (100,)
4344
assert len(streamlines.t()) == sum(streamlines.l())
44-
assert len(streamlines.y()) == sum(streamlines.l()) * 3
45-
assert len(streamlines.f()) == sum(streamlines.l()) * 3
45+
assert np.shape(streamlines.t()) == (sum(streamlines.l()),)
46+
assert len(streamlines.y()) == sum(streamlines.l())
47+
assert np.shape(streamlines.y()) == (sum(streamlines.l()), 3)
48+
assert len(streamlines.f()) == sum(streamlines.l())
49+
assert np.shape(streamlines.f()) == (sum(streamlines.l()), 3)
4650

tests/test_visualization.py

Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
1+
from numflow.compute import RectilinearField3D
2+
from numflow import Dataset, Visualization, points, random_points
3+
import json
4+
import base64
5+
import numpy as np
6+
import math
7+
8+
def nround(x, base=5):
    """Round ``x`` upward to the nearest multiple of ``base``.

    Used by the visualization tests to predict base64-encoded buffer
    lengths, which are always padded up to a multiple of 4.
    """
    multiple_count = math.ceil(x / base)
    return multiple_count * base
10+
11+
def test_visualization(rectilinear_csv_data: str):
    """End-to-end test of the Visualization export pipeline.

    Builds a layer, streamlines and glyphs from the rectilinear CSV
    fixture, saves the scene to ``test.flow`` (a JSON document) and
    verifies the sizes of the base64-encoded data buffers it contains.
    """
    dataset = Dataset(rectilinear_csv_data)
    assert dataset.file_name == rectilinear_csv_data
    assert type(dataset.data) is RectilinearField3D

    # Expected lines of dataset.info() for this fixture.
    dataset_name = "rectilinear.csv"
    size = "Size: 72000 elements"
    shape = "Shape: 60 x 60 x 20"
    memory = "Memory consumed: 288000 bytes"
    x_range = "X range: 0.100000 - 60.000000"
    y_range = "Y range: 0.100000 - 60.000000"
    z_range = "Z range: -20.000000 - 0.500000"

    info = dataset.info().split("\n")
    assert info[0][-len(dataset_name):] == dataset_name
    assert info[1] == size
    assert info[2] == shape
    assert info[3] == memory
    assert info[4] == x_range
    assert info[5] == y_range
    assert info[6] == z_range

    # Build one scene with a planar layer, streamlines seeded on a
    # coarser grid, and glyphs on the layer grid.
    layer_z = -10
    vis = Visualization()
    layer = points([0.1, 0.1, layer_z], [60, 60, layer_z], [1000, 1000, 1])
    vis.layer(dataset, layer)
    layerS = points([0.1, 0.1, layer_z], [60, 60, layer_z], [200, 200, 1])
    vis.streamlines(dataset, layerS, tbound=0.01, size=0.2, appearance='transparent', sampling=3, divisions=3)
    # NOTE(review): "glphys" looks like a typo for "glyphs", but it is
    # the library's public method name, so it is called as-is here.
    vis.glphys(dataset, layer, size=0.2, appearance='transparent')
    vis.save("test.flow")  # was a pointless f-string with no placeholders

    # Context manager guarantees the file is closed even if json.load
    # raises (the original left the handle open on failure).
    with open("test.flow") as json_file:
        data = json.load(json_file)

    # there is only one scene
    assert len(data) == 1

    scene = data[0]
    assert len(scene) == 4

    glyph = scene['glyphs']
    layer = scene['layer']
    streamlines = scene['streamlines']
    stats = scene['stats']

    # test glyphs
    # there is only one glyph group
    assert len(glyph) == 1
    glyph_group = glyph[0]

    assert set(glyph_group.keys()) == {'meta', 'points', 'values'}
    gpoints = glyph_group['points']
    gvalues = glyph_group['values']

    # expected size of glyphs is (1000 * 1000 * 3 * 4) bytes
    # also add the base64 encoding - 4/3 * size = 16000000
    assert len(gpoints) == 16000000
    assert len(gvalues) == 16000000

    # test layer
    # there is only one layer group
    assert len(layer) == 1
    layer_group = layer[0]

    assert set(layer_group.keys()) == {'meta', 'values', 'points'}
    lpoints = layer_group['points']
    lvalues = layer_group['values']

    # expected size of layer is (1000 * 1000 * 3 * 4) bytes
    # also add the base64 encoding - 4/3 * size = 16000000
    # (nround to a multiple of 4: base64 output is padded to 4)
    assert len(lpoints) == nround(1000 * 1000 * 3 * 4 * 4 / 3, 4)
    assert len(lvalues) == nround(1000 * 1000 * 3 * 4 * 4 / 3, 4)

    # test streamlines
    # there is only one streamlines group
    assert len(streamlines) == 1
    streamlines_group = streamlines[0]

    assert set(streamlines_group.keys()) == {'values', 'times', 'meta', 'lengths', 'points'}
    spoints = streamlines_group['points']
    svalues = streamlines_group['values']
    stimes = streamlines_group['times']
    slengths = streamlines_group['lengths']

    # expected size of lengths is (200 * 200 * 4) bytes
    # also add the base64 encoding - 4/3 * size =~ 213336 with corrections
    assert len(slengths) == nround(200 * 200 * 4 * 4 / 3, 4)

    # decode lengths and sum total
    lengths = base64.b64decode(slengths)
    lengths = np.frombuffer(lengths, dtype=np.int32)
    sum_length = np.sum(lengths)

    # points/values hold 3 float32 components per sample; times hold one.
    expected_point_size = nround(sum_length * 3 * 4 * 4 / 3, 4)
    assert len(spoints) == expected_point_size
    assert len(svalues) == expected_point_size
    expected_time_size = nround(sum_length * 4 * 4 / 3, 4)
    assert len(stimes) == expected_time_size

    # test stats
    assert set(stats.keys()) == {'points', 'values'}
    spoints = stats['points']
    svalues = stats['values']

    # expected statistical keys
    assert set(spoints.keys()) == {'max', 'center', 'min', 'scale_factor'}
    assert set(svalues.keys()) == {'y', 'x', 'z', 'xyz'}

0 commit comments

Comments
 (0)