Skip to content

Commit 1ce53f3

Browse files
committed
Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main
2 parents f69e491 + f2ac602 commit 1ce53f3

File tree

13 files changed

+733
-387
lines changed

13 files changed

+733
-387
lines changed

.github/workflows/development.yaml

Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
name: Development
on:
  pull_request:
  push:
    tags:
      - '*.*.*'
jobs:
  # Validate that CHANGELOG.md parses before anything is built or released.
  test-changelog:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Get changelog entry
        id: changelog_reader
        uses: guzman-raphael/changelog-reader-action@v5
        with:
          path: ./CHANGELOG.md
      - name: Verify changelog parsing
        env:
          TAG_NAME: ${{steps.changelog_reader.outputs.version}}
          RELEASE_NAME: Release ${{steps.changelog_reader.outputs.version}}
          BODY: ${{steps.changelog_reader.outputs.changes}}
          PRERELEASE: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
          DRAFT: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
        run: |
          echo "TAG_NAME=${TAG_NAME}"
          echo "RELEASE_NAME=${RELEASE_NAME}"
          echo "BODY=${BODY}"
          echo "PRERELEASE=${PRERELEASE}"
          echo "DRAFT=${DRAFT}"
  # Build the docker image and pip distributions, saved as workflow artifacts.
  build:
    needs: test-changelog
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          # Quoted so the version stays a string (unquoted 3.8 is a float
          # and the `if:` comparisons below compare against the string '3.8').
          - py_ver: "3.8"
            distro: alpine
            image: djbase
    env:
      PY_VER: ${{matrix.py_ver}}
      DISTRO: ${{matrix.distro}}
      IMAGE: ${{matrix.image}}
      DOCKER_CLIENT_TIMEOUT: "120"
      COMPOSE_HTTP_TIMEOUT: "120"
    steps:
      - uses: actions/checkout@v2
      - name: Compile image
        run: |
          export PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
          export PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
          export HOST_UID=$(id -u)
          docker-compose -f docker-compose-build.yaml up --exit-code-from element --build
          IMAGE=$(docker images --filter "reference=datajoint/${PKG_NAME}*" \
            --format "{{.Repository}}")
          TAG=$(docker images --filter "reference=datajoint/${PKG_NAME}*" --format "{{.Tag}}")
          docker save "${IMAGE}:${TAG}" | \
            gzip > "image-${PKG_NAME}-${PKG_VERSION}-py${PY_VER}-${DISTRO}.tar.gz"
          # Export for later steps in this job.
          echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
          echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
      - name: Add image artifact
        uses: actions/upload-artifact@v2
        with:
          name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
          path: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}.tar.gz
          retention-days: 1
      # Only publish pip artifacts from the canonical build configuration.
      - if: matrix.py_ver == '3.8' && matrix.distro == 'alpine'
        name: Add pip artifacts
        uses: actions/upload-artifact@v2
        with:
          name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
          path: dist
          retention-days: 1
  # Publish to test PyPI and create a GitHub release; runs only on tag pushes.
  publish-release:
    if: github.event_name == 'push'
    needs: build
    runs-on: ubuntu-latest
    env:
      TWINE_USERNAME: ${{secrets.twine_test_username}}
      TWINE_PASSWORD: ${{secrets.twine_test_password}}
    outputs:
      release_upload_url: ${{steps.create_gh_release.outputs.upload_url}}
    steps:
      - uses: actions/checkout@v2
      - name: Determine package version
        run: |
          PKG_NAME=$(python3 -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])")
          SDIST_PKG_NAME=$(echo ${PKG_NAME} | sed 's|_|-|g')
          PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__ = / {print $2}')
          echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_ENV
          echo "PKG_VERSION=${PKG_VERSION}" >> $GITHUB_ENV
          echo "SDIST_PKG_NAME=${SDIST_PKG_NAME}" >> $GITHUB_ENV
      - name: Get changelog entry
        id: changelog_reader
        uses: guzman-raphael/changelog-reader-action@v5
        with:
          path: ./CHANGELOG.md
          version: ${{env.PKG_VERSION}}
      - name: Create GH release
        id: create_gh_release
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        with:
          tag_name: ${{steps.changelog_reader.outputs.version}}
          release_name: Release ${{steps.changelog_reader.outputs.version}}
          body: ${{steps.changelog_reader.outputs.changes}}
          prerelease: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
          draft: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
      - name: Fetch image artifact
        uses: actions/download-artifact@v2
        with:
          name: image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py3.8-alpine
      - name: Fetch pip artifacts
        uses: actions/download-artifact@v2
        with:
          name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}
          path: dist
      - name: Publish pip release
        run: |
          export HOST_UID=$(id -u)
          docker load < "image-${{env.PKG_NAME}}-${{env.PKG_VERSION}}-py3.8-alpine.tar.gz"
          docker-compose -f docker-compose-build.yaml run \
            -e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} element \
            sh -lc "pip install twine && python -m twine upload --repository testpypi dist/*"
      - name: Determine pip artifact paths
        run: |
          echo "PKG_WHEEL_PATH=$(ls dist/${PKG_NAME}-*.whl)" >> $GITHUB_ENV
          echo "PKG_SDIST_PATH=$(ls dist/${SDIST_PKG_NAME}-*.tar.gz)" >> $GITHUB_ENV
      - name: Upload pip wheel asset to release
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        with:
          upload_url: ${{steps.create_gh_release.outputs.upload_url}}
          asset_path: ${{env.PKG_WHEEL_PATH}}
          asset_name: pip-${{env.PKG_NAME}}-${{env.PKG_VERSION}}.whl
          asset_content_type: application/zip
      - name: Upload pip sdist asset to release
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
        with:
          upload_url: ${{steps.create_gh_release.outputs.upload_url}}
          asset_path: ${{env.PKG_SDIST_PATH}}
          asset_name: pip-${{env.SDIST_PKG_NAME}}-${{env.PKG_VERSION}}.tar.gz
          asset_content_type: application/gzip

.gitignore

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,7 @@ celerybeat-schedule
8686

8787
# dotenv
8888
./.env
89+
.env
8990

9091
# virtualenv
9192
.venv
@@ -113,4 +114,5 @@ dj_local_conf_old.json
113114
**/*~
114115
**/#*#
115116
**/.#*
116-
docker-compose.yml
117+
118+
docker-compose.y*ml

CHANGELOG.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# Changelog
2+
3+
Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention.
4+
5+
## [0.1.0a3] - 2021-04-26
6+
### Added
7+
+ Added GitHub Action release process
8+
+ `probe` and `ephys` elements
9+
+ Readers for: `SpikeGLX`, `Open Ephys`, `Kilosort`
10+
+ Probe table supporting: Neuropixels probes 1.0 - 3A, 1.0 - 3B, 2.0 - SS, 2.0 - MS
11+
12+
[0.1.0a3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0a3

Dockerfile

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# Build arguments consumed by the FROM line (must be declared before FROM).
ARG PY_VER
ARG DISTRO
ARG IMAGE
ARG PKG_NAME
ARG PKG_VERSION

FROM datajoint/${IMAGE}:py${PY_VER}-${DISTRO}
# ARGs declared before FROM go out of scope inside the build stage; the ones
# referenced below must be re-declared or ${PKG_NAME} expands to the empty
# string in the COPY instructions.
ARG PKG_NAME
COPY --chown=dja:anaconda ./requirements.txt ./setup.py /main/
COPY --chown=dja:anaconda ./${PKG_NAME} /main/${PKG_NAME}
# Install the package into the image, then remove the build-context copy.
RUN \
    cd /main && \
    pip install . && \
    rm -R /main/*
WORKDIR /main

docker-compose-build.yaml

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
# Usage:
#   PY_VER=3.8 IMAGE=djbase DISTRO=alpine \
#   PKG_NAME=$(python -c "print([p for p in __import__('setuptools').find_packages() if '.' not in p][0])") \
#   PKG_VERSION=$(cat ${PKG_NAME}/version.py | awk -F\' '/__version__/ {print $2}') \
#   HOST_UID=$(id -u) \
#   docker-compose -f docker-compose-build.yaml up --exit-code-from element --build
#
# Intended for updating dependencies and docker image.
# Used to build release artifacts.
version: "2.4"
services:
  element:
    build:
      # Build from the repository root using the local Dockerfile.
      context: .
      args:
        - PY_VER
        - DISTRO
        - IMAGE
        - PKG_NAME
        - PKG_VERSION
    image: datajoint/${PKG_NAME}:${PKG_VERSION}
    # Run as the invoking host user so generated artifacts are owned by them.
    user: ${HOST_UID}:anaconda
    volumes:
      - .:/main
    # Clean any previous build output, then produce wheel + sdist in ./dist.
    command:
      - sh
      - -lc
      - |
        set -e
        rm -R build dist *.egg-info || echo "No prev build"
        python setup.py bdist_wheel sdist

element_array_ephys/__init__.py

Lines changed: 55 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,56 @@
1-
__author__ = "DataJoint NEURO"
2-
__date__ = "December 15, 2020"
3-
__version__ = "0.0.1"
1+
import datajoint as dj
2+
import pathlib
43

5-
__all__ = ['__author__', '__version__', '__date__']
4+
5+
dj.config['enable_python_native_blobs'] = True
6+
7+
8+
def find_full_path(root_directories, relative_path):
    """
    Given a relative path, search the provided potential root directories
    (in the given order) and return the first existing full path.

    :param root_directories: str, pathlib.Path, or a sequence of them -
        potential root directories to search
    :param relative_path: the relative path to resolve against the roots
    :return: full path (pathlib.Path object), i.e. root_dir / relative_path
    :raises FileNotFoundError: if the path exists under none of the roots
    """
    relative_path = pathlib.Path(relative_path)

    # If the path already resolves on its own (absolute, or relative to the
    # current working directory), return it unchanged.
    if relative_path.exists():
        return relative_path

    # Turn into a list if only a single root directory is provided.
    if isinstance(root_directories, (str, pathlib.Path)):
        root_directories = [root_directories]

    for root_dir in root_directories:
        full_path = pathlib.Path(root_dir) / relative_path
        if full_path.exists():
            return full_path

    raise FileNotFoundError(
        f'No valid full-path found (from {root_directories}) for {relative_path}')
31+
32+
33+
def find_root_directory(root_directories, full_path):
    """
    Given multiple potential root directories and a full path, search and
    return the first root directory that is a parent of the given path.

    :param root_directories: str, pathlib.Path, or a sequence of them -
        potential root directories to search
    :param full_path: the full path whose root directory is to be found
    :return: root directory (pathlib.Path object)
    :raises FileNotFoundError: if full_path does not exist, or if no provided
        root directory is an ancestor of it
    """
    full_path = pathlib.Path(full_path)

    if not full_path.exists():
        raise FileNotFoundError(f'{full_path} does not exist!')

    # Turn into a list if only a single root directory is provided.
    if isinstance(root_directories, (str, pathlib.Path)):
        root_directories = [root_directories]

    try:
        # A root qualifies when it is an exact (non-resolved) ancestor of
        # full_path, i.e. appears among full_path.parents.
        return next(pathlib.Path(root_dir) for root_dir in root_directories
                    if pathlib.Path(root_dir) in set(full_path.parents))
    except StopIteration:
        raise FileNotFoundError(
            f'No valid root directory found (from {root_directories}) for {full_path}')

0 commit comments

Comments
 (0)