
Commit a6f71aa

jgray-19, Copilot, and JoschD authored
Xsuite Conversion to Tbt Objects (#23)
* First attempt at xtrack implementation
* Add xtrack to the toml
* Toml again?
* Add setuptools to toml
* License update (for xtrack)
* More setuptools xsuite stuff
* Add xpart
* Fix typo
* Fix floating-point representation in example_line fixture
* Add platform check for xtrack kernel compilation in test_convert_xsuite
* Enhance MAD-NG and XTRACK modules with improved error handling and additional functionality
  - Added support for loading TBT data from various input types in the MAD-NG module.
  - Updated the XTRACK module to check for the presence of the xtrack package and handle errors accordingly.
  - Introduced a read_tbt function in the XTRACK module, currently not implemented, to match the interface.
  - Improved type hints and documentation across both modules for better clarity.
* Refactor variable names for clarity in MAD-NG and enhance documentation in XTRACK
* Improve documentation in MAD-NG module with clearer descriptions and additional details for functions
* Update turn_by_turn/madng.py
  Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* Update pyproject.toml
  Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* Update turn_by_turn/xtrack.py
  Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
* Remove unnecessary TYPE_CHECKING import and adjust type hint for convert_to_tbt function
* Refactor tests and modules to improve consistency and clarity; update dependencies in pyproject.toml and enhance logging practices across multiple files.
* Update documentation and improve code clarity; disable display_version in conf.py, correct module reference in index.rst, enhance example_fake_tbt fixture in conftest.py, refactor MAD-NG and xtrack_line modules for better error handling and type hints.
* Enhance documentation for turn_by_turn; add usage examples for read_tbt and convert_to_tbt functions, clarify writing data process, and detail supported formats and options.
* Remove load_tbt_data import from package namespace
* Fix ImportError handling for tfs package in write_tbt function
* Enhance docstring for example_line fixture to clarify its purpose and origin
* Some ruff formatting
* Refactor documentation in index.rst and io.py for clarity and structure; enhance usage examples and supported modules section.
* Reorder import statements in __init__.py for consistency
* minor stuff
* added API header
* Improve formatting in test_xtrack.py and xtrack_line.py. Add additional check for lost particles
* Refactor particle ID handling in convert_to_tbt for clarity and consistency
* Clarify type annotations in convert_to_tbt functions for consistency and accuracy

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: JoschD <26184899+JoschD@users.noreply.github.com>

1 parent cd27bef commit a6f71aa
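
The headline change is the in-memory conversion path for xtrack: a tracked xtrack.Line can now be turned into a TbtData object without going through a file. A minimal sketch of that workflow, assuming the xtrack optional extra is installed and that convert_to_tbt accepts a tracked Line directly (the file name, particle coordinates and monitor setup below are illustrative, not taken from this commit):

    import xtrack as xt

    from turn_by_turn import convert_to_tbt

    # Build or load a Line containing BPM observation points (the JSON file is hypothetical).
    line = xt.Line.from_json("machine_with_bpms.json")
    line.build_tracker()

    # Track a couple of particles with turn-by-turn monitoring enabled.
    particles = line.build_particles(x=[1e-3, -1e-3], y=[-1e-3, 1e-3])
    line.track(particles, num_turns=3, turn_by_turn_monitor=True)

    # Convert the tracked Line into the standard TbtData structure.
    tbt = convert_to_tbt(line)
    print(tbt.nturns, tbt.bunch_ids)

The exact requirements on the Line (which elements act as observation points, how lost particles are handled) are defined in the new turn_by_turn.xtrack_line module added by this commit.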

File tree

20 files changed, +679 -152 lines


doc/conf.py

Lines changed: 38 additions & 13 deletions
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 #
 # TFS-Pandas documentation build configuration file, created by
 # sphinx-quickstart on Tue Feb 6 12:10:18 2018.
@@ -90,7 +89,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 
 # Override link in 'Edit on Github'
 rst_prolog = f"""
-:github_url: {ABOUT_TBT['__url__']}
+:github_url: {ABOUT_TBT["__url__"]}
 """
 
 # The version info for the project you're documenting, acts as replacement for
@@ -120,6 +119,9 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = True
 
+# Activate nitpicky mode for sphinx to warn about missing references
+# nitpicky = True
+
 # -- Options for HTML output ----------------------------------------------
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
@@ -130,7 +132,7 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 html_logo = "_static/img/omc_logo.svg"
 html_static_path = ["_static"]
 html_context = {
-    # "css_files": ["_static/css/custom.css"],
+    # "css_files": ["_static/css/custom.css"],
     "display_github": True,
     # the following are only needed if :github_url: is not set
     "github_user": author,
@@ -141,17 +143,18 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
     "css/custom.css",
 ]
 
-smartquotes_action = "qe"  # renders only quotes and ellipses (...) but not dashes (option: D)
+# renders only quotes and ellipses (...) but not dashes (option: D)
+smartquotes_action = "qe"
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
 #
 html_theme_options = {
-    'collapse_navigation': False,
-    'display_version': True,
-    'logo_only': True,
-    'navigation_depth': 1,
+    "collapse_navigation": False,
+    "version_selector": True,  # sphinx-rtd-theme>=3.0, formerly 'display_version'
+    "logo_only": True,
+    "navigation_depth": 2,
 }
 
 # Add any paths that contain custom static files (such as style sheets) here,
@@ -163,11 +166,11 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 # pages. Single values can also be put in this dictionary using the
 # -A command-line option of sphinx-build.
 html_context = {
-    'display_github': True,
+    "display_github": True,
     # the following are only needed if :github_url: is not set
-    'github_user': author,
-    'github_repo': project,
-    'github_version': 'master/doc/',
+    "github_user": author,
+    "github_repo": project,
+    "github_version": "master/doc/",
 }
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -207,7 +210,13 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, "turn_by_turn.tex", "turn_by_turn Documentation", "pyLHC/OMC-TEAM", "manual"),
+    (
+        master_doc,
+        "turn_by_turn.tex",
+        "turn_by_turn Documentation",
+        "pyLHC/OMC-TEAM",
+        "manual",
+    ),
 ]
 
 # -- Options for manual page output ---------------------------------------
@@ -232,3 +241,19 @@ def about_package(init_posixpath: pathlib.Path) -> dict:
         "Miscellaneous",
     ),
 ]
+
+# -- Instersphinx Configuration ----------------------------------------------
+
+# Example configuration for intersphinx: refer to the Python standard library.
+# use in refs e.g:
+# :ref:`comparison manual <python:comparisons>`
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3/", None),
+    "numpy": ("https://numpy.org/doc/stable/", None),
+    "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
+    "matplotlib": ("https://matplotlib.org/stable/", None),
+    "scipy": ("https://docs.scipy.org/doc/scipy/", None),
+    "cpymad": ("https://hibtc.github.io/cpymad/", None),
+    "tfs": ("https://pylhc.github.io/tfs/", None),
+    "sdds": ("https://pylhc.github.io/sdds/", None),
+}

doc/index.rst

Lines changed: 35 additions & 3 deletions
@@ -5,8 +5,41 @@ Welcome to turn_by_turn' documentation!
 
 It provides a custom dataclass ``TbtData`` to do so, with attributes corresponding to the relevant measurements information.
 
+How to Use turn_by_turn
+-----------------------
+
+There are two main ways to create a ``TbtData`` object:
+
+1. **Reading from file (disk):**
+   Use ``read_tbt`` to load measurement data from a file on disk. This is the standard entry point for working with measurement files in supported formats.
+
+2. **In-memory conversion:**
+   Use ``convert_to_tbt`` to convert data that is already loaded in memory (such as a pandas DataFrame, tfs DataFrame, or xtrack.Line) into a ``TbtData`` object. This is useful for workflows where you generate or manipulate data in Python before standardizing it.
+
+Both methods produce a ``TbtData`` object, which can then be used for further analysis or written out to supported formats.
+
+Supported Modules and Limitations
+---------------------------------
+
+Different modules support different file formats and workflows (disk reading vs. in-memory conversion). For a detailed table of which modules support which features, and any important limitations, see the documentation for the :mod:`turn_by_turn.io` module.
+
+- Only ``madng`` and ``xtrack`` support in-memory conversion.
+- Most modules are for disk reading only.
+- Some modules (e.g., ``esrf``) are experimental or have limited support.
+- For writing, see the next section.
+
+Writing Data
+------------
+
+To write a ``TbtData`` object to disk, use the ``write_tbt`` function. This function supports writing in the LHC SDDS format by default, as well as other supported formats depending on the ``datatype`` argument. The output format is determined by the ``datatype`` you specify, but for most workflows, SDDS is the standard output.
+
+Example::
+
+    from turn_by_turn.io import write_tbt
+    write_tbt("output.sdds", tbt_data)
+
 Package Reference
-=================
+-----------------
 
 .. toctree::
     :caption: Modules
@@ -24,9 +57,8 @@ Package Reference
 
 
 Indices and tables
-==================
+------------------
 
 * :ref:`genindex`
 * :ref:`modindex`
 * :ref:`search`
-
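
The disk-reading entry point described in the added documentation can be sketched like this (the file name is illustrative; the "madng" datatype matches the MAD-NG tests further down, and readers for the other supported formats are selected the same way):

    from turn_by_turn import read_tbt

    # Read a MAD-NG tracking output (TFS format) from disk into a TbtData object.
    tbt = read_tbt("fodo_track.tfs", datatype="madng")
    print(tbt.nturns, tbt.bunch_ids)

The in-memory path uses convert_to_tbt instead, as sketched under the commit message above.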

doc/modules/index.rst

Lines changed: 0 additions & 1 deletion
@@ -16,4 +16,3 @@
 .. automodule:: turn_by_turn.utils
     :members:
     :noindex:
-

doc/readers/index.rst

Lines changed: 4 additions & 0 deletions
@@ -33,5 +33,9 @@
     :noindex:
 
 .. automodule:: turn_by_turn.madng
+    :members:
+    :noindex:
+
+.. automodule:: turn_by_turn.xtrack_line
     :members:
     :noindex:

pyproject.toml

Lines changed: 15 additions & 2 deletions
@@ -29,7 +29,6 @@ requires-python = ">=3.10"
 classifiers = [
     "Development Status :: 5 - Production/Stable",
     "Intended Audience :: Science/Research",
-    "License :: OSI Approved :: MIT License",
     "Natural Language :: English",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3 :: Only",
@@ -48,14 +47,26 @@ dependencies = [
     "pandas >= 2.1",
     "sdds >= 0.4",
     "h5py >= 2.9",
-    "tfs-pandas >= 4.0.0",  # for madng (could be an optional dependency)
 ]
 
 [project.optional-dependencies]
+madng = [
+    "tfs-pandas >= 4.0.0",  # for reading MAD-NG files (Could do everything in memory with just pandas)
+]
+
+xtrack = [
+    "xtrack >= 0.84.7",  # for xtrack
+    "setuptools >= 65",  # for xtrack
+    "xpart >= 0.23.0",  # for xtrack
+]
+
 test = [
     "pytest>=7.0",
     "pytest-cov>=2.9",
+    "turn_by_turn[madng]",
+    "turn_by_turn[xtrack]",
 ]
+
 doc = [
     "sphinx >= 7.0",
     "sphinx_rtd_theme >= 2.0",
@@ -64,6 +75,8 @@ doc = [
 all = [
     "turn_by_turn[test]",
     "turn_by_turn[doc]",
+    "turn_by_turn[madng]",
+    "turn_by_turn[xtrack]",
 ]
 
 [project.urls]
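
With xtrack and tfs-pandas now living in optional extras, the modules that need them have to handle their absence; the commit message mentions checking for the presence of the xtrack package and fixing the ImportError handling for tfs in write_tbt. A rough sketch of that guard pattern, using a hypothetical helper name rather than the exact code of this commit:

    try:
        import xtrack as xt  # provided by the optional extra turn_by_turn[xtrack]
    except ImportError:
        xt = None


    def _require_xtrack() -> None:
        """Hypothetical helper: fail with a clear message when the extra is missing."""
        if xt is None:
            raise ImportError(
                "xtrack is not installed. Install the optional extra, e.g. "
                "pip install 'turn_by_turn[xtrack]', to use the xtrack_line converter."
            )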

tests/conftest.py

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+import numpy as np
+import pandas as pd
+import pytest
+from turn_by_turn.structures import TbtData, TransverseData
+
+@pytest.fixture(scope="session")
+def example_fake_tbt():
+    """
+    Returns a TbtData object using simulation data taken from MAD-NG.
+    This data is also used for the tests in xtrack, so change the numbers
+    at your own risk.
+
+    It is possible to run the MAD-NG in the inputs folder to regenerate the data.
+    Also, xtrack produces the same data, so you can use the xtrack test fixture
+    `example_line`.
+    """
+    names = np.array(["BPM1", "BPM3", "BPM2"])
+    # First BPM
+    bpm1_p1_x = np.array([ 1e-3, 0.002414213831, -0.0009999991309])
+    bpm1_p1_y = np.array([-1e-3, 0.0004142133507, 0.001000000149])
+    bpm1_p2_x = np.array([-1e-3, -0.002414213831, 0.0009999991309])
+    bpm1_p2_y = np.array([ 1e-3, -0.0004142133507, -0.001000000149])
+
+    # Second BPM
+    bpm3_p1_x = np.array([ 0.002414213831, -0.0009999991309, -0.002414214191])
+    bpm3_p1_y = np.array([ 0.0004142133507, 0.001000000149, -0.0004142129907])
+    bpm3_p2_x = np.array([-0.002414213831, 0.0009999991309, 0.002414214191])
+    bpm3_p2_y = np.array([-0.0004142133507, -0.001000000149, 0.0004142129907])
+
+    # Third BPM
+    bpm2_p1_x = np.array([-0.0009999999503, -0.0004142138307, 0.0009999998012])
+    bpm2_p1_y = np.array([ 0.00100000029, -0.002414213351, -0.001000001159])
+    bpm2_p2_x = np.array([ 0.0009999999503, 0.0004142138307, -0.0009999998012])
+    bpm2_p2_y = np.array([-0.00100000029, 0.002414213351, 0.001000001159])
+
+    matrix = [
+        TransverseData(  # first particle
+            X=pd.DataFrame(index=names, data=[bpm1_p1_x, bpm2_p1_x, bpm3_p1_x]),
+            Y=pd.DataFrame(index=names, data=[bpm1_p1_y, bpm2_p1_y, bpm3_p1_y]),
+        ),
+        TransverseData(  # second particle
+            X=pd.DataFrame(index=names, data=[bpm1_p2_x, bpm2_p2_x, bpm3_p2_x]),
+            Y=pd.DataFrame(index=names, data=[bpm1_p2_y, bpm2_p2_y, bpm3_p2_y]),
+        ),
+    ]
+    return TbtData(matrices=matrix, bunch_ids=[0, 1], nturns=3)

tests/test_madng.py

Lines changed: 15 additions & 55 deletions
@@ -1,86 +1,46 @@
-
-from datetime import datetime
-
-import numpy as np
-import pandas as pd
 import pytest
+from pathlib import Path
 
 from tests.test_lhc_and_general import INPUTS_DIR, compare_tbt
 from turn_by_turn import madng, read_tbt, write_tbt
-from turn_by_turn.structures import TbtData, TransverseData
+from turn_by_turn.structures import TbtData
 
 
-def test_read_ng(_ng_file):
-    original = _original_simulation_data()
-
+def test_read_ng(_ng_file: Path, example_fake_tbt: TbtData):
     # Check directly from the module
     new = madng.read_tbt(_ng_file)
-    compare_tbt(original, new, no_binary=True)
+    compare_tbt(example_fake_tbt, new, no_binary=True)
 
     # Check from the main function
     new = read_tbt(_ng_file, datatype="madng")
-    compare_tbt(original, new, no_binary=True)
+    compare_tbt(example_fake_tbt, new, no_binary=True)
 
-def test_write_ng(_ng_file, tmp_path):
-    original_tbt = _original_simulation_data()
-
+def test_write_ng(_ng_file: Path, tmp_path: Path, example_fake_tbt: TbtData):
     # Write the data
     from_tbt = tmp_path / "from_tbt.tfs"
-    madng.write_tbt(from_tbt, original_tbt)
+    madng.write_tbt(from_tbt, example_fake_tbt)
 
     # Read the written data
    new_tbt = madng.read_tbt(from_tbt)
-    compare_tbt(original_tbt, new_tbt, no_binary=True)
+    compare_tbt(example_fake_tbt, new_tbt, no_binary=True)
 
     # Check from the main function
-    original_tbt = read_tbt(_ng_file, datatype="madng")
-    write_tbt(from_tbt, original_tbt, datatype="madng")
+    written_tbt = read_tbt(_ng_file, datatype="madng")
+    write_tbt(from_tbt, written_tbt, datatype="madng")
 
     new_tbt = read_tbt(from_tbt, datatype="madng")
-    compare_tbt(original_tbt, new_tbt, no_binary=True)
-    assert original_tbt.date == new_tbt.date
+    compare_tbt(written_tbt, new_tbt, no_binary=True)
+    assert written_tbt.date == new_tbt.date
 
-def test_error_ng(_error_file):
+def test_error_ng(_error_file: Path):
     with pytest.raises(ValueError):
         read_tbt(_error_file, datatype="madng")
 
-# ---- Helpers ---- #
-def _original_simulation_data() -> TbtData:
-    # Create a TbTData object with the original data
-    names = np.array(["BPM1", "BPM3", "BPM2"])
-    bpm1_p1_x = np.array([ 1e-3, 0.002414213831, -0.0009999991309])
-    bpm1_p1_y = np.array([-1e-3, 0.0004142133507, 0.001000000149])
-    bpm1_p2_x = np.array([-1e-3, -0.002414213831, 0.0009999991309])
-    bpm1_p2_y = np.array([ 1e-3, -0.0004142133507, -0.001000000149])
-
-    bpm2_p1_x = np.array([-0.0009999999503, -0.0004142138307, 0.0009999998012])
-    bpm2_p1_y = np.array([ 0.00100000029, -0.002414213351, -0.001000001159])
-    bpm2_p2_x = np.array([ 0.0009999999503, 0.0004142138307, -0.0009999998012])
-    bpm2_p2_y = np.array([-0.00100000029, 0.002414213351, 0.001000001159])
-
-    bpm3_p1_x = np.array([ 0.002414213831, -0.0009999991309, -0.002414214191])
-    bpm3_p1_y = np.array([ 0.0004142133507, 0.001000000149, -0.0004142129907])
-    bpm3_p2_x = np.array([-0.002414213831, 0.0009999991309, 0.002414214191])
-    bpm3_p2_y = np.array([-0.0004142133507, -0.001000000149, 0.0004142129907])
-
-    matrix = [
-        TransverseData(  # first particle
-            X=pd.DataFrame(index=names, data=[bpm1_p1_x, bpm2_p1_x, bpm3_p1_x]),
-            Y=pd.DataFrame(index=names, data=[bpm1_p1_y, bpm2_p1_y, bpm3_p1_y]),
-        ),
-        TransverseData(  # second particle
-            X=pd.DataFrame(index=names, data=[bpm1_p2_x, bpm2_p2_x, bpm3_p2_x]),
-            Y=pd.DataFrame(index=names, data=[bpm1_p2_y, bpm2_p2_y, bpm3_p2_y]),
-        ),
-    ]
-    return TbtData(matrices=matrix, bunch_ids=[1, 2], nturns=3)
-
-
 # ---- Fixtures ---- #
 @pytest.fixture
-def _ng_file(tmp_path):
+def _ng_file(tmp_path: Path) -> Path:
     return INPUTS_DIR / "madng" / "fodo_track.tfs"
 
 @pytest.fixture
-def _error_file(tmp_path):
+def _error_file(tmp_path: Path) -> Path:
    return INPUTS_DIR / "madng" / "fodo_track_error.tfs"
