diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 00000000..691907f5
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,59 @@
+name: 🐞 Bug report
+description: Create a report to help us improve (use this to report bugs only).
+title: "[BUG]
"
+labels: [🐞 bug]
+body:
+ - type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please search to see if an issue already exists for the bug you encountered.
+ options:
+ - label: I have searched the existing issues
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe the bug
+ description: A clear and concise description of what the bug is. Include images if relevant.
+ placeholder: A bug happened!
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Screenshots [optional]
+ description: |
+ Add screenshots to help explain your problem. You can also add a video here.
+
+ Tip: You can attach images or video files by clicking this area to highlight it and then dragging files in.
+ validations:
+ required: false
+ - type: textarea
+ attributes:
+ label: Steps To Reproduce
+ description: Steps to reproduce the bug.
+ placeholder: |
+ 1. Visit '...'
+ 2. Click on '...'
+ 3. Scroll to '...'
+ 4. See error
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Expected behavior
+ description: A clear and concise description of what you expected to happen.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Additional context
+ description: |
+ Links? References? Anything that will give us more context about the issue you are encountering!
+ validations:
+ required: false
+ - type: dropdown
+ id: assign
+ attributes:
+ label: Are you working on this?
+ options:
+ - "Yes"
+ - "No"
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 00000000..368ba171
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,39 @@
+name: ✨ Idea [Feature request]
+description: Tell us about the idea you have!
+title: "[Feature] "
+labels: [idea]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Your suggestions are highly valuable.
+ - type: textarea
+ attributes:
+ label: Feature description
+ description: |
+ Is your feature request related to a problem? Give a clear and concise description of what the feature is.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe the solution
+ description: A clear and concise description of what you want to happen.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Describe alternatives
+ description: |
+ A clear and concise description of any alternative solutions or features you have considered.
+ - type: textarea
+ attributes:
+ label: Additional context
+ description: |
+ Add any additional context or screenshots about the feature request here.
+ - type: dropdown
+ id: assign
+ attributes:
+ label: Are you working on this?
+ options:
+ - "Yes"
+ - "No"
diff --git a/.github/actions/download-artifact/action.yml b/.github/actions/download-artifact/action.yml
new file mode 100644
index 00000000..f97c3c01
--- /dev/null
+++ b/.github/actions/download-artifact/action.yml
@@ -0,0 +1,39 @@
+---
+name: Download Artifact
+description: Download an artifact from the same or a different workflow
+
+inputs:
+ name:
+ description: Artifact to be downloaded
+ required: true
+ type: string
+
+runs:
+ using: composite
+ steps:
+ - name: Download Artifact
+ uses: actions/github-script@v6
+ with:
+ script: |
+ const inputs = ${{ toJSON(inputs) }}
+ const artifactName = inputs['name']
+ let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ run_id: context.payload.workflow_run.id,
+ });
+ let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
+ return artifact.name == artifactName
+ })[0];
+ let download = await github.rest.actions.downloadArtifact({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ artifact_id: matchArtifact.id,
+ archive_format: 'zip',
+ });
+ let fs = require('fs');
+ fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/${artifactName}.zip`, Buffer.from(download.data));
+
+ - name: 'Unzip artifact'
+ run: unzip ${{ inputs.name }}.zip
+ shell: bash
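+
+ # Example usage from a workflow_run-triggered workflow (as in documentation.yml):
+ # - uses: ./.github/actions/download-artifact
+ # with:
+ # name: 'Figures'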
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000..2e179097
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,13 @@
+### Describe the changes you have made in this PR
+
+_A clear and concise description of the changes you have made_
+
+### Link this PR to an issue [optional]
+
+Fixes _#ISSUE-NUMBER_
+
+### Checklist
+
+- [ ] Self-review of changed code
+- [ ] Added automated tests where applicable
+- [ ] Updated Docs & Guides
\ No newline at end of file
diff --git a/.github/workflows/analysis.yml b/.github/workflows/analysis.yml
new file mode 100644
index 00000000..55f59cf5
--- /dev/null
+++ b/.github/workflows/analysis.yml
@@ -0,0 +1,173 @@
+name: Algorithm Analysis
+
+on:
+ push:
+ branches:
+ - 'main'
+ - 'analysis/**'
+
+jobs:
+ algorithms:
+ runs-on: ubuntu-latest
+ outputs: # expose the step output as a job output so downstream jobs can read it
+ algorithms: ${{ steps.algorithms.outputs.algorithms }}
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ id: setup_python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Cache pip
+ uses: actions/cache@v3
+ id: pip-cache
+ with:
+ key: ${{ runner.os }}-${{ env.pythonLocation }}-pip-${{ hashFiles('**/requirements.txt') }}
+ path: ${{ env.pythonLocation }}
+
+ - name: Install dependencies
+ run: |
+ pip install -r requirements.txt
+
+ - name: Read algorithms
+ id: algorithms
+ run: |
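+ # Multiline step output: everything between the 'algorithms<<EOF' marker and
+ # the closing 'EOF' line becomes the step output named 'algorithms'.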
+ echo 'algorithms<<EOF' >> $GITHUB_OUTPUT
+ cat ./tests/IVIMmodels/unit_tests/algorithms.json >> $GITHUB_OUTPUT
+ echo 'EOF' >> $GITHUB_OUTPUT
+ - name: Log algorithms
+ run: |
+ echo "${{fromJson(steps.algorithms.outputs.algorithms)}}"
+ echo "${{fromJson(steps.algorithms.outputs.algorithms).algorithms}}"
+ - name: Log algorithms file
+ run: cat ./tests/IVIMmodels/unit_tests/algorithms.json
+
+ build:
+ runs-on: ubuntu-latest
+ needs: algorithms
+ continue-on-error: false
+ strategy:
+ fail-fast: false
+ matrix:
+ algorithm: ${{fromJson(needs.algorithms.outputs.algorithms).algorithms}}
+ SNR: [10, 30, 50, 100, 200]
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Restore cache
+ id: python-cache
+ uses: actions/cache@v3
+ with:
+ key: ${{ runner.os }}-${{ env.pythonLocation }}-pip-${{ hashFiles('**/requirements.txt') }}
+ path: ${{ env.pythonLocation }}
+
+ - name: Generate fitting data
+ run: |
+ python -m pytest -m slow --selectAlgorithm ${{ matrix.algorithm }} --saveFileName test_output_${{ matrix.algorithm }}_${{ matrix.SNR }}.csv --SNR ${{ matrix.SNR }} --fitCount 300 --saveDurationFileName test_duration_${{ matrix.algorithm }}_${{ matrix.SNR }}.csv
+ - name: Upload raw data
+ uses: actions/upload-artifact@v3
+ with:
+ name: Working_Data
+ retention-days: 1
+ path: |
+ test_output_${{ matrix.algorithm }}_${{ matrix.SNR }}.csv
+ test_duration_${{ matrix.algorithm }}_${{ matrix.SNR }}.csv
+
+ merge:
+ runs-on: ubuntu-latest
+ needs: build
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ path: artifacts
+ - name: Merge fitting results
+ run: |
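+ # Keep the header row from one shard, then append the data rows of every
+ # shard (tail -q -n +2 skips each file's header line).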
+ head -n 1 $(ls artifacts/Working_Data/test_output_*.csv | head -n 1) > test_output.csv
+ tail -q -n +2 artifacts/Working_Data/test_output_*.csv >> test_output.csv
+ - name: Merge timing results
+ run: |
+ head -n 1 $(ls artifacts/Working_Data/test_duration_*.csv | head -n 1) > test_duration.csv
+ tail -q -n +2 artifacts/Working_Data/test_duration_*.csv >> test_duration.csv
+ - name: Upload merged artifacts
+ uses: actions/upload-artifact@v3
+ with:
+ name: Data
+ path: |
+ test_output.csv
+ test_duration.csv
+
+ analyze:
+ runs-on: ubuntu-latest
+ needs: merge
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up R
+ uses: r-lib/actions/setup-r@v2
+ with:
+ use-public-rspm: true
+ - name: Install R dependencies
+ uses: r-lib/actions/setup-r-dependencies@v2
+ with:
+ packages: |
+ any::plyr
+ any::dplyr
+ any::tidyverse
+ any::data.table
+ any::ggplot2
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: Data
+ - name: Generate figures
+ run: Rscript --vanilla tests/IVIMmodels/unit_tests/analyze.r test_output.csv test_duration.csv
+ - name: Upload figures
+ uses: actions/upload-artifact@v3
+ if: always()
+ with:
+ name: Figures
+ path: |
+ D.pdf
+ f.pdf
+ Dp.pdf
+ D_limited.pdf
+ f_limited.pdf
+ Dp_limited.pdf
+ durations.pdf
+ curve_plot.pdf
+ fitted_curves.pdf
+
+ compare:
+ runs-on: ubuntu-latest
+ needs: merge
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up R
+ uses: r-lib/actions/setup-r@v2
+ with:
+ use-public-rspm: true
+ - name: Install R dependencies
+ uses: r-lib/actions/setup-r-dependencies@v2
+ with:
+ packages: |
+ any::tidyverse
+ any::assertr
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: Data
+ - name: Test against previous results
+ run: Rscript --vanilla tests/IVIMmodels/unit_tests/compare.r test_output.csv test_reference.csv tests/IVIMmodels/unit_tests/reference_output.csv test_results.csv
+ - name: Upload data
+ uses: actions/upload-artifact@v3
+ if: always()
+ with:
+ name: Comparison
+ path: |
+ test_reference.csv
+ test_results.csv
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
new file mode 100644
index 00000000..05ddf537
--- /dev/null
+++ b/.github/workflows/documentation.yml
@@ -0,0 +1,66 @@
+name: Build & Deploy Documentation
+
+on:
+ workflow_run:
+ workflows: [Algorithm Analysis]
+ types:
+ - completed
+permissions:
+ contents: read
+ pages: write
+ id-token: write
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Setup Pages
+ uses: actions/configure-pages@v4
+ - name: Set up Python
+ id: setup_python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Cache pip
+ uses: actions/cache@v3
+ id: pip-cache
+ with:
+ key: ${{ runner.os }}-${{ env.pythonLocation }}-pip-${{ hashFiles('**/requirements.txt') }}
+ path: ${{ env.pythonLocation }}
+
+ - name: Install dependencies
+ run: |
+ pip install -r requirements.txt
+
+ # Action to download artifacts from a different workflow (analysis.yml)
+ - name: 'Download artifact'
+ if: ${{ github.event.workflow_run.conclusion == 'success' }}
+ uses: ./.github/actions/download-artifact
+ with:
+ name: 'Figures'
+
+ - name: Build html
+ run: |
+ mkdir docs/_static
+ mv *.pdf docs/_static/
+ sphinx-apidoc -o docs src
+ cd docs/
+ make html
+
+ - name: Upload docs artifact
+ uses: actions/upload-pages-artifact@v3
+ with:
+ path: 'docs/_build/html'
+
+ deploy:
+ needs: build
+ environment:
+ name: github-pages
+ url: ${{ steps.deployment.outputs.page_url }}
+
+ runs-on: ubuntu-latest
+ steps:
+ - name: Deploy to GitHub Pages
+ id: deployment
+ uses: actions/deploy-pages@v4
diff --git a/.github/workflows/unit_test.yml b/.github/workflows/unit_test.yml
index 65f19297..34880070 100644
--- a/.github/workflows/unit_test.yml
+++ b/.github/workflows/unit_test.yml
@@ -18,11 +18,12 @@ jobs:
# - os: windows-latest
# python-version: "3.7"
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
+ cache: 'pip'
# You can test your matrix by printing the current Python version
- name: Display Python version
run: python -c "import sys; print(sys.version)"
@@ -33,4 +34,4 @@ jobs:
- name: Test with pytest
run: |
pip install pytest pytest-cov
- python -m pytest --doctest-modules --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html
+ python -m pytest --doctest-modules --junitxml=junit/test-results.xml --cov=. --cov-report=xml --cov-report=html
diff --git a/.gitignore b/.gitignore
index d27b6ffb..2df670a5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,24 @@
.idea*
-.github*
+.github/*
+.github/ISSUE_TEMPLATE/
+.github/pull_request_template.md
+__pycache__/
*.nii.gz
+*.nii
+*.dcm
+*.mat
+*.raw
+bvals.txt
+download
+md5sums.txt
+*.gz
+*.zip
+*.tmp
+
+# Unit test / coverage reports
+.tox/
+.coverage
+.cache
+nosetests.xml
+coverage.xml
+*.pyc
\ No newline at end of file
diff --git a/Docker/.dockerignore b/Docker/.dockerignore
new file mode 100644
index 00000000..31bf0749
--- /dev/null
+++ b/Docker/.dockerignore
@@ -0,0 +1,16 @@
+# Ignore editor and system files
+.vscode/
+.git/
+.gitignore
+*.md
+
+# Exclude development and test artifacts
+__pycache__
+conftest
+
+# Docs
+docs
+doc
+
+# Others
+
diff --git a/Docker/Dockerfile b/Docker/Dockerfile
new file mode 100644
index 00000000..a5537f95
--- /dev/null
+++ b/Docker/Dockerfile
@@ -0,0 +1,17 @@
+FROM python:3.11-slim
+
+WORKDIR /usr/src/app
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ build-essential \
+ libssl-dev \
+ && apt-get clean \
+ && rm -rf /var/lib/apt/lists/*
+
+COPY requirements.txt ./
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+ENTRYPOINT ["python3", "-m", "WrapImage.nifti_wrapper"]
diff --git a/Docker/README.md b/Docker/README.md
new file mode 100644
index 00000000..32f5305d
--- /dev/null
+++ b/Docker/README.md
@@ -0,0 +1,61 @@
+# TF2.4_IVIM-MRI_CodeCollection
+
+This project is designed to run the `nifti_wrapper` script using a Docker container. Below are the steps to build and run the Docker image.
+
+## Prerequisites
+
+- Docker must be installed on your system.
+
+## Directory Structure
+
+```
+~/TF2.4_IVIM-MRI_CodeCollection/
+│
+├── Docker/
+│ └── Dockerfile
+│
+├── WrapImage/
+│ └── nifti_wrapper.py
+│
+└── requirements.txt
+```
+
+## Options
+
+Before running the Docker container, here are the available options for the `nifti_wrapper` script:
+
+- `input_file`: Path to the input 4D NIfTI file.
+- `bvec_file`: Path to the b-vector file.
+- `bval_file`: Path to the b-value file.
+- `--affine`: Affine matrix for NIfTI image (optional).
+- `--algorithm`: Select the algorithm to use (default is "OJ_GU_seg").
+- `--algorithm_args`: Additional arguments for the algorithm (optional).
+
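+For reference, the container's entrypoint is `python3 -m WrapImage.nifti_wrapper`, so the arguments above map onto a plain Python invocation like this sketch (file names are placeholders; assumes the packages in `requirements.txt` are installed locally):
+
+```sh
+python3 -m WrapImage.nifti_wrapper brain.nii.gz brain.bvec brain.bval --algorithm OJ_GU_seg
+```
+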
+## Building the Docker Image
+
+1. Open a terminal and navigate to the project directory:
+
+ ```sh
+ cd ~/TF2.4_IVIM-MRI_CodeCollection
+ ```
+
+2. Build the Docker image using the `docker build` command:
+
+ ```sh
+ sudo docker build -t tf2.4_ivim-mri_codecollection -f Docker/Dockerfile .
+ ```
+
+## Running the Docker Container
+
+1. Once the image is built, run the Docker container with the `docker run` command, passing the input files:
+
+ ```sh
+ sudo docker run -it --rm --name TF2.4_IVIM-MRI_CodeCollection \
+ -v ~/TF2.4_IVIM-MRI_CodeCollection:/usr/src/app \
+ -v ~/TF2.4_IVIM-MRI_CodeCollection:/usr/app/output \
+ tf2.4_ivim-mri_codecollection Downloads/brain.nii.gz Downloads/brain.bvec Downloads/brain.bval
+ ```
+
+ Replace `brain.nii.gz`, `brain.bvec`, and `brain.bval` with the actual file names you want to use; the paths are resolved inside the container relative to `/usr/src/app`, the mounted project directory.
+
+---
diff --git a/README.md b/README.md
index 5cb41858..bf18eb68 100644
--- a/README.md
+++ b/README.md
@@ -4,9 +4,9 @@ The ISMRM Open Science Initiative for Perfusion Imaging (OSIPI) is an initiative
>“promote the sharing of perfusion imaging software in order to eliminate the practice of duplicate development, improve the reproducibility of perfusion imaging research, and speed up the translation into tools for discovery science, drug development, and clinical practice”
-This **IVIM code collection** code library is maintained by OSIPI [Taskforce 2.4](https://www.osipi.org/task-force-2-4/) (*currently not available*) and aims to collect, test and share open-source code related to intravoxel incoherent motion (IVIM) analysis of diffusion encoded MRI data to be used in research and software development. Code contributions can include any code related IVIM analysis (denoising, motion correction, model fitting, etc.), but at an initial phase, development of tests and other features of the repository will predominantly focus on fitting algorithms. A future goal of the IVIM OSIPI task force is to develop a fully tested and harmonized code library, building upon the contributions obtained through this initiative.
+This **IVIM code collection** library is maintained by OSIPI [Taskforce 2.4](https://www.osipi.org/task-force-2-4/) (*currently not available*) and aims to collect, test and share open-source code related to intravoxel incoherent motion (IVIM) analysis of diffusion encoded MRI data to be used in research and software development. Code contributions can include any code related to IVIM analysis (denoising, motion correction, model fitting, etc.), but in the initial phase, development of tests and other features of the repository will predominantly focus on fitting algorithms. A goal of the IVIM OSIPI task force is to develop a fully tested and harmonized code library, building upon the contributions obtained through this initiative. Documentation and analysis are available on the [OSIPI TF2.4 documentation site](https://osipi.github.io/TF2.4_IVIM-MRI_CodeCollection/).
-## How to contibute
+## How to contribute
 If you would like to get involved in OSIPI and work within the task force, please email the contacts listed on our website.
@@ -30,4 +30,6 @@ The **test** folder contains the test files corresponding to the contributed cod
The **utils** folder contains various helpful tools.
## View Testing Reports
-*to be added*
+[![Unit tests](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/unit_test.yml/badge.svg)](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/unit_test.yml)
+[![Algorithm Analysis](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/analysis.yml/badge.svg)](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/analysis.yml)
+[![Build & Deploy Documentation](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/documentation.yml/badge.svg)](https://github.com/OSIPI/TF2.4_IVIM-MRI_CodeCollection/actions/workflows/documentation.yml)
diff --git a/WrapImage/nifti_wrapper.py b/WrapImage/nifti_wrapper.py
new file mode 100644
index 00000000..5eae67ee
--- /dev/null
+++ b/WrapImage/nifti_wrapper.py
@@ -0,0 +1,131 @@
+import argparse
+import json
+import os
+import nibabel as nib
+from src.wrappers.OsipiBase import OsipiBase
+import numpy as np
+from tqdm import tqdm
+
+
+def read_nifti_file(input_file):
+ """
+ For reading the 4D NIfTI image
+ """
+ nifti_img = nib.load(input_file)
+ return nifti_img.get_fdata(), nifti_img.header
+
+def read_json_file(json_file):
+ """
+ For reading the json file
+ """
+
+ if not os.path.exists(json_file):
+ raise FileNotFoundError(f"File '{json_file}' not found.")
+
+ with open(json_file, "r") as f:
+ try:
+ json_data = json.load(f)
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Error decoding JSON in file '{json_file}': {e}")
+
+ return json_data
+
+def read_bval_file(bval_file):
+ """
+ For reading the bval file
+ """
+ if not os.path.exists(bval_file):
+ raise FileNotFoundError(f"File '{bval_file}' not found.")
+
+ bval_data = np.genfromtxt(bval_file, dtype=float)
+ return bval_data
+
+def read_bvec_file(bvec_file):
+ """
+ For reading the bvec file
+ """
+ if not os.path.exists(bvec_file):
+ raise FileNotFoundError(f"File '{bvec_file}' not found.")
+
+ bvec_data = np.genfromtxt(bvec_file)
+ bvec_data = np.transpose(bvec_data) # Transpose the array
+ return bvec_data
+
+def save_nifti_file(data, output_file, affine=None, **kwargs):
+ """
+ For saving the 3D NIfTI images of the algorithm output
+ """
+ if affine is None:
+ affine = np.eye(data.ndim + 1)
+ output_img = nib.nifti1.Nifti1Image(data, affine, **kwargs)
+ nib.save(output_img, output_file)
+
+def loop_over_first_n_minus_1_dimensions(arr):
+ """
+ Loops over the first n-1 dimensions of a numpy array.
+
+ Args:
+ arr: A numpy array.
+
+ Yields:
+ A tuple containing the indices for the current iteration and a flattened view of the remaining dimensions.
+ """
+ n = arr.ndim
+ for idx in np.ndindex(*arr.shape[:n-1]):
+ flat_view = arr[idx].flatten()
+ yield idx, flat_view
+
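+# Usage sketch (hypothetical array): for arr = np.arange(12).reshape(2, 2, 3),
+# the generator yields 4 items; the first is ((0, 0), array([0, 1, 2])), i.e.
+# one index tuple per leading position plus a 1D view along the last axis.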
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Read a 4D NIfTI phantom file along with BIDS JSON, b-vector, and b-value files.")
+ parser.add_argument("input_file", type=str, help="Path to the input 4D NIfTI file.")
+ parser.add_argument("bvec_file", type=str, help="Path to the b-vector file.")
+ parser.add_argument("bval_file", type=str, help="Path to the b-value file.")
+ parser.add_argument("--affine", type=float, nargs="+", help="Affine matrix for NIfTI image.")
+ parser.add_argument("--algorithm", type=str, default="OJ_GU_seg", help="Select the algorithm to use.")
+ parser.add_argument("--algorithm_args", nargs=argparse.REMAINDER, help="Additional arguments for the algorithm.")
+
+ args = parser.parse_args()
+
+ try:
+ # Read the 4D NIfTI file
+ data, _ = read_nifti_file(args.input_file)
+
+ # Read the b-vector, and b-value files
+ bvecs = read_bvec_file(args.bvec_file)
+ bvals = read_bval_file(args.bval_file)
+
+ # Pass additional arguments to the algorithm
+
+ fit = OsipiBase(algorithm=args.algorithm)
+ f_image = []
+ Dp_image = []
+ D_image = []
+
+ # This is necessary for tqdm to display the progress bar.
+ n = data.ndim
+ total_iteration = np.prod(data.shape[:n-1])
+ for idx, view in tqdm(loop_over_first_n_minus_1_dimensions(data), desc=f"{args.algorithm} is fitting", dynamic_ncols=True, total=total_iteration):
+ [f_fit, Dp_fit, D_fit] = fit.osipi_fit(view, bvals)
+ f_image.append(f_fit)
+ Dp_image.append(Dp_fit)
+ D_image.append(D_fit)
+
+ # Convert lists to NumPy arrays
+ f_image = np.array(f_image)
+ Dp_image = np.array(Dp_image)
+ D_image = np.array(D_image)
+
+ # Reshape arrays if needed
+ f_image = f_image.reshape(data.shape[:data.ndim-1])
+ Dp_image = Dp_image.reshape(data.shape[:data.ndim-1])
+ D_image = D_image.reshape(data.shape[:data.ndim-1])
+
+ save_nifti_file(f_image, "f.nii.gz", args.affine)
+ save_nifti_file(Dp_image, "dp.nii.gz", args.affine)
+ save_nifti_file(D_image, "d.nii.gz", args.affine)
+
+ except Exception as e:
+ print(f"Error: {e}")
+
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 00000000..086585c2
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,180 @@
+import pytest
+import pathlib
+import json
+import csv
+# import datetime
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--SNR",
+ default=[100],
+ nargs="+",
+ type=int,
+ help="Evaluation test SNR",
+ )
+ parser.addoption(
+ "--ricianNoise",
+ default=False,
+ type=bool,
+ help="Use Rician noise, non-rician is gaussian",
+ )
+ parser.addoption(
+ "--usePrior",
+ default=False,
+ type=bool,
+ help="Use a prior where accepted",
+ )
+ parser.addoption(
+ "--algorithmFile",
+ default="tests/IVIMmodels/unit_tests/algorithms.json",
+ type=str,
+ help="Algorithm file name",
+ )
+ parser.addoption(
+ "--dataFile",
+ default="tests/IVIMmodels/unit_tests/generic.json",
+ type=str,
+ help="Default data file name",
+ )
+ parser.addoption(
+ "--saveFileName",
+ default="",
+ type=str,
+ help="Saved results file name",
+ )
+ parser.addoption(
+ "--rtol",
+ default=1,
+ type=float,
+ help="Relative tolerance",
+ )
+ parser.addoption(
+ "--atol",
+ default=1,
+ type=float,
+ help="Absolute tolerance",
+ )
+ parser.addoption(
+ "--fitCount",
+ default=10,
+ type=int,
+ help="Number of fits to perform on the same parameters",
+ )
+ parser.addoption(
+ "--saveDurationFileName",
+ default="",
+ type=str,
+ help="Saved duration results file name",
+ )
+ parser.addoption(
+ "--selectAlgorithm",
+ default=[""],
+ nargs="+",
+ type=str,
+ help="Drop all algorithms except for these from the list"
+ )
+ parser.addoption(
+ "--dropAlgorithm",
+ default=[""],
+ nargs="+",
+ type=str,
+ help="Drop this algorithm from the list"
+ )
+
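+# Example invocation combining these options (mirrors the analysis workflow):
+#   python -m pytest -m slow --selectAlgorithm OJ_GU_seg --SNR 10 30 \
+#       --fitCount 300 --saveFileName test_output.csv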
+
+@pytest.fixture(scope="session")
+def save_file(request):
+ filename = request.config.getoption("--saveFileName")
+ if filename:
+ current_folder = pathlib.Path.cwd()
+ filename = current_folder / filename
+ # print(filename)
+ # filename.unlink(missing_ok=True)
+ filename = filename.as_posix()
+
+ data = data_list(request.config.getoption("--dataFile")) # TODO: clean up this hacky way to get bvalues
+ [_, bvalues, _] = next(data)
+ bvalue_string = ["bval_" + str(bvalue) for bvalue in bvalues]
+ # bvalue_string = ["b_0.0","b_1.0","b_2.0","b_5.0","b_10.0","b_20.0","b_30.0","b_50.0","b_75.0","b_100.0","b_150.0","b_250.0","b_350.0","b_400.0","b_550.0","b_700.0","b_850.0","b_1000.0"]
+
+ with open(filename, "w") as csv_file:
+ writer = csv.writer(csv_file, delimiter=',')
+ writer.writerow(("Algorithm", "Region", "SNR", "index", "f", "Dp", "D", "f_fitted", "Dp_fitted", "D_fitted", *bvalue_string))
+ yield writer
+ # writer.writerow(["", datetime.datetime.now()])
+ else:
+ yield None
+ # return filename
+
+@pytest.fixture(scope="session")
+def save_duration_file(request):
+ filename = request.config.getoption("--saveDurationFileName")
+ if filename:
+ current_folder = pathlib.Path.cwd()
+ filename = current_folder / filename
+ # print(filename)
+ # filename.unlink(missing_ok=True)
+ filename = filename.as_posix()
+ with open(filename, "w") as csv_file:
+ writer = csv.writer(csv_file, delimiter=',')
+ writer.writerow(("Algorithm", "Region", "SNR", "Duration [us]", "Count"))
+ yield writer
+ # writer.writerow(["", datetime.datetime.now()])
+ else:
+ yield None
+ # return filename
+
+@pytest.fixture(scope="session")
+def rtol(request):
+ return request.config.getoption("--rtol")
+
+@pytest.fixture(scope="session")
+def atol(request):
+ return request.config.getoption("--atol")
+
+@pytest.fixture(scope="session")
+def fit_count(request):
+ return request.config.getoption("--fitCount")
+
+@pytest.fixture(scope="session")
+def rician_noise(request):
+ return request.config.getoption("--ricianNoise")
+
+@pytest.fixture(scope="session")
+def use_prior(request):
+ return request.config.getoption("--usePrior")
+
+
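+# pytest hook: parametrizes tests over SNR values, algorithms (from the JSON
+# algorithm file, after select/drop filtering), and data regions, so each
+# algorithm/region/SNR combination is reported as a separate test item.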
+def pytest_generate_tests(metafunc):
+ if "SNR" in metafunc.fixturenames:
+ metafunc.parametrize("SNR", metafunc.config.getoption("SNR"))
+ if "ivim_algorithm" in metafunc.fixturenames:
+ algorithms = algorithm_list(metafunc.config.getoption("algorithmFile"), metafunc.config.getoption("selectAlgorithm"), metafunc.config.getoption("dropAlgorithm"))
+ metafunc.parametrize("ivim_algorithm", algorithms)
+ if "ivim_data" in metafunc.fixturenames:
+ data = data_list(metafunc.config.getoption("dataFile"))
+ metafunc.parametrize("ivim_data", data)
+
+
+def algorithm_list(filename, selected, dropped):
+ current_folder = pathlib.Path.cwd()
+ algorithm_path = current_folder / filename
+ with algorithm_path.open() as f:
+ algorithm_information = json.load(f)
+ algorithms = set(algorithm_information["algorithms"])
+ algorithms = algorithms - set(dropped)
+ if len(selected) > 0 and selected[0]:
+ algorithms = algorithms & set(selected)
+ return list(algorithms)
+
+def data_list(filename):
+ current_folder = pathlib.Path.cwd()
+ data_path = current_folder / filename
+ with data_path.open() as f:
+ all_data = json.load(f)
+
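+ # 'config' holds the shared b-values; every other top-level key is a named
+ # region whose signal data is yielded as a separate test case.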
+ bvals = all_data.pop('config')
+ bvals = bvals['bvalues']
+ for name, data in all_data.items():
+ yield name, bvals, data
diff --git a/doc/code_contributions_record.csv b/doc/code_contributions_record.csv
index e69de29b..54a56759 100644
--- a/doc/code_contributions_record.csv
+++ b/doc/code_contributions_record.csv
@@ -0,0 +1,20 @@
+Technique,Category,Subcategory,Notes,Subfolder,Link to source code,Authors,Institution,Function/module,DOI,Tester,Test status,Wrapped
+IVIM,Fitting,LSQ fitting,,OGC_AmsterdamUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/OGC_AmsterdamUMC/,Oliver Gurney-Champion,Amsterdam UMC,fit_least_squares/fit_least_squares_array,,tbd,,
+IVIM,Fitting,segmented LSQ fitting,,OGC_AmsterdamUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/OGC_AmsterdamUMC/,Oliver Gurney-Champion,Amsterdam UMC,fit_segmented/fit_segmented_array,,tbd,,OGC_AmsterdamUMC_biexp
+Tri-exponential,Fitting,LSQ fitting,,OGC_AmsterdamUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/OGC_AmsterdamUMC/,Oliver Gurney-Champion,Amsterdam UMC,fit_least_squares_tri_exp/fit_least_squares_array_tri_exp,,tbd,,
+Tri-exponential,Fitting,Segmented LSQ fitting,,OGC_AmsterdamUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/OGC_AmsterdamUMC/,Oliver Gurney-Champion,Amsterdam UMC,fit_segmented_tri_exp/fit_segmented_array_tri_exp,https://doi.org/10.3389/fphys.2022.942495,tbd,,OGC_AmsterdamUMC_biexp_segmented
+IVIM,Fitting,Bayesian,,OGC_AmsterdamUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/OGC_AmsterdamUMC/,Oliver Gurney-Champion/Sebastiano Barbieri,Amsterdam UMC,fit_bayesian_array,https://doi.org/10.1002/mrm.28852,tbd,,OGC_AmsterdamUMC_Bayesian_biexp
+IVIM,Fitting,two-step segmented fit approach,also includes ADC calculation as a separate function,PvH_KB_NKI,TF2.4_IVIM-MRI_CodeCollection/src/original/PvH_KB_NKI/,Petra van Houdt/Stefan Zijlema/Koen Baas,the Netherlands Cancer Institute,DWI_functions_standalone.py,https://doi.org/10.3389/fonc.2021.705964,tbd,,PvH_KB_NKI_IVIMfit
+IVIM,Fitting,two-step (segmented) LSQ fitting, cut-off chosen for brain data; option to fit IVIM with inversion recovery or without IR,PV_MUMC,TF2.4_IVIM-MRI_CodeCollection/src/original/PV_MUMC/,Paulien Voorter,Maastricht University Medical Center,two_step_IVIM_fit.py,,tbd,,PV_MUMC_biexp
+IVIM,Fitting,bi-exponential NLLS,Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_biexp.py,Ivan A. Rashid,Lund University,IvimModelBiexp,tba,tbd,,IAR_LU_biexp
+IVIM,Fitting,2-step segmented NLLS,First estimates and fixes D before a bi-exponential NLLS fit. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_segmented_2step.py,Ivan A. Rashid,Lund University,IvimModelSegmented2Step,tba,tbd,,IAR_LU_segmented_2step
+IVIM,Fitting,3-step segmented NLLS,First estimates and fixes D followed by an estimate of D* followed by a bi-exponential NLLS fit. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_segmented_3step.py,Ivan A. Rashid,Lund University,IvimModelSegmented3Step,tba,tbd,,IAR_LU_segmented_3step
+IVIM,Fitting,2-step segmented NLLS,First estimates and fixes D. Subtracts the diffusion signal and estimated D*. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_subtracted.py,Ivan A. Rashid,Lund University,IvimModelSubtracted,tba,tbd,,IAR_LU_subtracted
+IVIM,Fitting,Variable projection,See referenced article. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_modified_mix.py,Farooq et al. Modified by Ivan A. Rashid,Lund University,IvimModelVP,https://doi.org/10.1038/srep38927,tbd,,IAR_LU_modified_mix
+IVIM,Fitting,Variable projection,See referenced article. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_modified_topopro.py,Fadnavis et al. Modified by Ivan A. Rashid,Lund University,IvimModelTopoPro,https://doi.org/10.3389/fnins.2021.779025,tbd,,IAR_LU_modified_topopro
+IVIM,Fitting,Linear fit,Linear fit for D with extrapolation for f. Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_modified_linear.py,Modified by Ivan A. Rashid,Lund University,IvimModelLinear,tba,tbd,,
+IVIM,Fitting,sIVIM fit,NLLS of the simplified IVIM model (sIVIM). Supports units in mm2/s and µm2/ms,IAR_LundUniversity,TF2.4_IVIM-MRI_CodeCollection/src/original/IAR_LundUniversity/ivim_fit_method_modified_sivim.py,Modified by Ivan A. Rashid,Lund University,IvimModelsIVIM,tba,tbd,,
+IVIM,Fitting,Segmented NLLS fitting,MATLAB code,OJ_GU,TF2.4_IVIM-MRI_CodeCollection/src/original/OJ_GU/,Oscar Jalnefjord,University of Gothenburg,IVIM_seg,https://doi.org/10.1007/s10334-018-0697-5,tbd,,OJ_GU_seg
+IVIM,Fitting,Bayesian,MATLAB code,OJ_GU,TF2.4_IVIM-MRI_CodeCollection/src/original/OJ_GU/,Oscar Jalnefjord,University of Gothenburg,IVIM_bayes,https://doi.org/10.1002/mrm.26783,tbd,,
+IVIM,Fitting,Segmented NLLS fitting,Specifically tailored algorithm for NLLS segmented fitting,OJ_GU,TF2.4_IVIM-MRI_CodeCollection/src/original/OJ_GU/,Oscar Jalnefjord,University of Gothenburg,seg,https://doi.org/10.1007/s10334-018-0697-5,tbd,,
+IVIM,Fitting,Linear fit,Linear fit for D and D* and f. Intended to be extremely fast but not always accurate,ETP_SRI,TF2.4_IVIM-MRI_CodeCollection/src/original/ETP_SRI/LinearFitting.py,Eric Peterson,SRI International,,,tbd,,ETP_SRI_LinearFitting
\ No newline at end of file
diff --git a/doc/guidelines_for_contributions.md b/doc/guidelines_for_contributions.md
index 40c93ac9..dfe51e8a 100644
--- a/doc/guidelines_for_contributions.md
+++ b/doc/guidelines_for_contributions.md
@@ -7,5 +7,5 @@ Follow the [general instructions for code constributions](create_local_copy_of_r
For each contribution make sure to list the names and affiliations of all contributors of the particular code in the message of the pull request.
## Source code format
-Contributions of must be place in a folder named "Initials_Institution" under src/original, e.g. "src/original/OGC_AmsterdamUMC"
+Contributions must be placed in a folder named "Initials_Institution" under src/original, e.g. "src/original/OGC_AmsterdamUMC"
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 00000000..41c270bb
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 00000000..e268db23
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,36 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath(".."))
+
+project = 'TF2.4 IVIM MRI Code Collection'
+copyright = '2024, OSIPI'
+author = 'OSIPI team'
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["sphinx.ext.todo", "sphinx.ext.viewcode", "sphinx.ext.autodoc"]
+
+templates_path = ['_templates']
+exclude_patterns = ['Thumbs.db', '.DS_Store']
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+html_show_sphinx = False
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+html_show_copyright = False
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = 'sphinx_rtd_theme'
+html_static_path = ['_static']
diff --git a/docs/figures.rst b/docs/figures.rst
new file mode 100644
index 00000000..1291f6aa
--- /dev/null
+++ b/docs/figures.rst
@@ -0,0 +1,45 @@
+Algorithm Analysis Figures
+==========================
+
+.. raw:: html
+
+ <!-- Placeholder for the embedded figure viewers (markup not recovered). Captions:
+ "Algorithm Analysis Figures"; "Diffusion grid for limited algorithms:" -->