From e272b067664898c8a533a9e105c1920318f679fe Mon Sep 17 00:00:00 2001 From: John Hennig Date: Mon, 20 Oct 2025 17:52:48 +0200 Subject: [PATCH 01/14] Always use 79-character limit when breaking lines. So far, we've stuck to PEP 8, which states: > For flowing long blocks of text with fewer structural restrictions > (docstrings or comments), the line length should be limited to 72 > characters. Let's just go with 79 characters everywhere. Makes it easier to remember and also needs only one vertical line as a visual aid in the editor. --- PyPI.md | 32 ++-- ReadMe.md | 33 ++-- demos/ReadMe.md | 4 +- demos/compact_models.py | 6 +- demos/worker_pool.py | 11 +- docs/ReadMe.md | 6 +- docs/demonstrations.md | 332 +++++++++++++++++++--------------------- docs/index.md | 32 ++-- docs/installation.md | 26 ++-- docs/limitations.md | 147 +++++++++--------- docs/tutorial.md | 262 +++++++++++++++---------------- mph/client.py | 46 +++--- mph/config.py | 26 ++-- mph/discovery.py | 54 +++---- mph/model.py | 149 +++++++++--------- mph/node.py | 191 +++++++++++------------ mph/server.py | 65 ++++---- mph/session.py | 63 ++++---- tests/ReadMe.md | 19 ++- tests/test_node.py | 26 ++-- tools/ReadMe.md | 13 +- tools/codecov.py | 7 +- tools/coverage.py | 20 +-- tools/test.py | 33 ++-- 24 files changed, 772 insertions(+), 831 deletions(-) diff --git a/PyPI.md b/PyPI.md index 9ce0c0e..2e122a4 100644 --- a/PyPI.md +++ b/PyPI.md @@ -1,23 +1,23 @@ *Pythonic scripting interface for Comsol Multiphysics* -[Comsol] is a commercial software application that is widely used in -science and industry for research and development. It excels at modeling -almost any (multi-)physics problem by solving the governing set of -partial differential equations via the finite-element method. It comes -with a modern graphical user interface to set up simulation models and -can be scripted from Matlab or its native Java API. 
+[Comsol] is a commercial software application that is widely used in science +and industry for research and development. It excels at modeling almost any +(multi-)physics problem by solving the governing set of partial differential +equations via the finite-element method. It comes with a modern graphical user +interface to set up simulation models and can be scripted from Matlab or its +native Java API. -MPh brings the dearly missing power of Python to the world of Comsol. -It leverages the Java bridge provided by [JPype] to access the Comsol -API and wraps it in a layer of pythonic ease-of-use. The Python wrapper -covers common scripting tasks, such as loading a model from a file, -modifying parameters, importing data, to then run the simulation, -evaluate the results, and export them. +MPh brings the dearly missing power of Python to the world of Comsol. It +leverages the Java bridge provided by [JPype] to access the Comsol API and +wraps it in a layer of pythonic ease-of-use. The Python wrapper covers common +scripting tasks, such as loading a model from a file, modifying parameters, +importing data, to then run the simulation, evaluate the results, and export +them. -Comsol models are marked by their `.mph` file extension, which stands -for multi-physics. Hence the name of this library. It is open-source -and in no way affiliated with Comsol Inc., the company that develops -and sells the simulation software. +Comsol models are marked by their `.mph` file extension, which stands for +multi-physics. Hence the name of this library. It is open-source and in no way +affiliated with Comsol Inc., the company that develops and sells the simulation +software. Find the full [documentation on Read-the-Docs][docs]. 
diff --git a/ReadMe.md b/ReadMe.md index fe55eaa..2809abb 100644 --- a/ReadMe.md +++ b/ReadMe.md @@ -1,24 +1,25 @@ # MPh *Pythonic scripting interface for Comsol Multiphysics* -[Comsol] is a commercial software application that is widely used in -science and industry for research and development. It excels at modeling -almost any (multi-)physics problem by solving the governing set of -partial differential equations via the finite-element method. It comes -with a modern graphical user interface to set up simulation models and -can be scripted from Matlab or its native Java API. +[Comsol] is a commercial software application that is widely used in science +and industry for research and development. It excels at modeling almost any +(multi-)physics problem by solving the governing set of partial differential +equations via the finite-element method. It comes with a modern graphical user +interface to set up simulation models and can be scripted from Matlab or its +native Java API. -MPh brings the dearly missing power of Python to the world of Comsol. -It leverages the Java bridge provided by [JPype] to access the Comsol -API and wraps it in a layer of pythonic ease-of-use. The Python wrapper -covers common scripting tasks, such as loading a model from a file, -modifying parameters, importing data, to then run the simulation, -evaluate the results, and export them. +MPh brings the dearly missing power of Python to the world of Comsol. It +leverages the Java bridge provided by [JPype] to access the Comsol API and +wraps it in a layer of pythonic ease-of-use. The Python wrapper covers common +scripting tasks, such as loading a model from a file, modifying parameters, +importing data, to then run the simulation, evaluate the results, and export +them. + +Comsol models are marked by their `.mph` file extension, which stands for +multi-physics. Hence the name of this library. 
It is open-source and in no way +affiliated with Comsol Inc., the company that develops and sells the simulation +software. -Comsol models are marked by their `.mph` file extension, which stands -for multi-physics. Hence the name of this library. It is open-source -and in no way affiliated with Comsol Inc., the company that develops -and sells the simulation software. Find the full [documentation on Read-the-Docs][docs]. diff --git a/demos/ReadMe.md b/demos/ReadMe.md index abf40e9..2ccf6ff 100644 --- a/demos/ReadMe.md +++ b/demos/ReadMe.md @@ -1,4 +1,4 @@ ## Demonstration scripts -The scripts here demonstrate simple use cases and are referenced in -the "Demonstrations" chapter of the documentation. +The scripts here demonstrate simple use cases and are referenced in the +"Demonstrations" chapter of the documentation. diff --git a/demos/compact_models.py b/demos/compact_models.py index a15c5c9..2ad691b 100644 --- a/demos/compact_models.py +++ b/demos/compact_models.py @@ -1,9 +1,9 @@ """ Compacts Comsol models in the working directory. -Loads each Comsol model (`.mph` file) in the current folder, removes -solution and mesh data, resets the modeling history, then saves the -model file under its original name, effectively compacting its size. +Loads each Comsol model (`.mph` file) in the current folder, removes solution +and mesh data, resets the modeling history, then saves the model file under its +original name, effectively compacting its size. """ import mph diff --git a/demos/worker_pool.py b/demos/worker_pool.py index b81c7d7..02351c7 100644 --- a/demos/worker_pool.py +++ b/demos/worker_pool.py @@ -1,12 +1,11 @@ """ Distributes a parameter sweep over multiple worker processes. -This demonstration works around the limitation that only a single -Comsol client can run inside one Python process. 
It leverages the -`multiprocessing` module from Python's standard library to create -several independent subprocesses ("workers") that communicate with -the parent process ("boss") via inter-process queues to pass job -instructions and results back and forth. +This demonstration works around the limitation that only a single Comsol client +can run inside one Python process. It leverages the `multiprocessing` module +from Python's standard library to create several independent subprocesses +("workers") that communicate with the parent process ("boss") via inter-process +queues to pass job instructions and results back and forth. """ import mph # Comsol interface diff --git a/docs/ReadMe.md b/docs/ReadMe.md index 43c2474..b679c0a 100644 --- a/docs/ReadMe.md +++ b/docs/ReadMe.md @@ -1,5 +1,5 @@ ## Documentation source -The documentation hosted on Read-the-Docs is built from this folder. -The start page corresponds to `index.md`. The build process is -configured in `.readthedocs.yaml` in the project root folder. +The documentation hosted on Read-the-Docs is built from this folder. The start +page corresponds to `index.md`. The build process on Read-the-Docs is +configured in `.readthedocs.yaml`. diff --git a/docs/demonstrations.md b/docs/demonstrations.md index a236b1e..57080e3 100644 --- a/docs/demonstrations.md +++ b/docs/demonstrations.md @@ -2,21 +2,20 @@ ## Access the full Comsol API -The primary focus of MPh is to automate the simulation workflow, like -running parameter sweeps or optimization routines with customized, -Python-powered post-processing. Navigating and altering models via its -Python API is possible, see [next section](#navigate-and-alter-the-model), -but has some limitations. 
- -However, any and all functionality offered by the [Comsol API] -is accessible via the "pythonized" Java layer provided by [JPype], -which is exposed as the `.java` attribute of [`Client`](#Client) -instances, mapping to Comsol's [`ModelUtil`], as well as of -[`Model`](#Model) instances, mapping to Comsol's [`model`]. - -Let's take this Comsol blog post as an example: ["Automate your modeling -tasks with the Comsol API for use with Java"][blog]. It starts with the -following Java code: +The primary focus of MPh is to automate the simulation workflow, like running +parameter sweeps or optimization routines with customized, Python-powered +post-processing. Navigating and altering models via its Python API is possible, +see [next section](#navigate-and-alter-the-model), but has some limitations. + +However, any and all functionality offered by the [Comsol API] is accessible +via the "pythonized" Java layer provided by [JPype], which is exposed as the +`.java` attribute of [`Client`](#Client) instances, mapping to Comsol's +[`ModelUtil`], as well as of [`Model`](#Model) instances, mapping to Comsol's +[`model`]. + +Let's take this Comsol blog post as an example: ["Automate your modeling tasks +with the Comsol API for use with Java"][blog]. It starts with the following +Java code: ```java import com.comsol.model.*; import com.comsol.model.util.*; @@ -39,8 +38,8 @@ public class HelloWorld { } ``` -What it does is, it creates a model, which contains a 3d geometry -component that is just a block 0.1 by 0.2 by 0.5 meters in size. +What it does is, it creates a model, which contains a 3d geometry component +that is just a block 0.1 by 0.2 by 0.5 meters in size. 
In Python, we would achieve the same like so: ```python @@ -57,56 +56,54 @@ model.geom("geom1").feature("blk1").set("size", ["0.1", "0.2", "0.5"]); model.geom("geom1").run("fin"); ``` -Note how the *functional* Java code (excluding the Java boilerplate) -was essentially copied and pasted, even the semicolons, which Python -simply ignores. We named the Python wrapper `pymodel` and assigned -`model` to the underlying Java object just so we could do this. We had -to replace `new String[]{"0.1", "0.2", "0.5"}` because Python does not -know what [`new`] means. There, Java expects a list of three strings. -So we replaced the expression with `["0.1", "0.2", "0.5"]`, the Python -equivalent of just that: a list of these three strings. +Note how the *functional* Java code (excluding the Java boilerplate) was +essentially copied and pasted, even the semicolons, which Python simply +ignores. We named the Python wrapper `pymodel` and assigned `model` to the +underlying Java object just so we could do this. We had to replace +`new String[]{"0.1", "0.2", "0.5"}` because Python does not know what [`new`] +means. There, Java expects a list of three strings. So we replaced the +expression with `["0.1", "0.2", "0.5"]`, the Python equivalent of just that: a +list of these three strings. Occasionally when translating Java (or Matlab) code you find in the -documentation, or a blog post as the case was here, you will have to -amend code lines such as the one above. But they are few and far between. -The error messages you might receive should point you in the right -direction and the [JPype] documentation would offer help on issues with -type conversion. +documentation, or a blog post as the case was here, you will have to amend code +lines such as the one above. But they are few and far between. The error +messages you might receive should point you in the right direction and the +[JPype] documentation would offer help on issues with type conversion. 
The advantage of using Python over Java is: * You don't really need to know Java. Just a little, to understand that - occasionally we have to take care of type conversions that JPype - cannot handle all by itself. Which is rare. -* You don't need to install Java. It just ships with Comsol. You also - don't need to bother with compiling Java source code to Java classes - via `comsolcompile`. -* You can use Python introspection to understand how Comsol models - are "created in code". The Comsol documentation explains a lot of - things, but not every little detail. Either use Python's built-in - [`dir()`](#dir) or call [`mph.inspect()`](#inspect) to print a - pretty-fied representation of a Java object in the model tree. + occasionally we have to take care of type conversions that JPype cannot + handle all by itself. Which is rare. +* You don't need to install Java. It just ships with Comsol. You also don't + need to bother with compiling Java source code to Java classes via + `comsolcompile`. +* You can use Python introspection to understand how Comsol models are "created + in code". The Comsol documentation explains a lot of things, but not every + little detail. Either use Python's built-in [`dir()`](#dir) or call + [`mph.inspect()`](#inspect) to print a pretty-fied representation of a Java + object in the model tree. To save the model created in the above example, we do: ```python pymodel.save('model') ``` -This stores a file named `model.mph` in the working directory, which -may then be opened in the Comsol GUI or be used in any other Python, -Java, or Matlab project. +This stores a file named `model.mph` in the working directory, which may then +be opened in the Comsol GUI or be used in any other Python, Java, or Matlab +project. -Comsol can be very helpful in creating the Java code corresponding to -changes we make to a model. 
Not only does the GUI provide a function to -"copy as code to clipboard" on any node, it also lets us save the entire -model as a Java source file, in which the new code can conveniently be -found at the bottom. +Comsol can be very helpful in creating the Java code corresponding to changes +we make to a model. Not only does the GUI provide a function to "copy as code +to clipboard" on any node, it also lets us save the entire model as a Java +source file, in which the new code can conveniently be found at the bottom. ## Navigate and alter the model -The example from the previous section can be expressed in much more -idiomatic Python syntax if we ignore the Java layer and only use -methods from the [`Model`](#Model) class. +The example from the previous section can be expressed in much more idiomatic +Python syntax if we ignore the Java layer and only use methods from the +[`Model`](#Model) class. ```python import mph client = mph.start() @@ -117,10 +114,10 @@ model.property('geometries/Geometry 1/Block 1', 'size', ('0.1', '0.2', '0.5')) model.build('Geometry 1') ``` -This, again, hides all tags in application code. Instead, we refer to -nodes in the model tree by name. In the example, these names were -generated automatically, in the same way the Comsol GUI does it. We -could also supply names of our choosing. +This, again, hides all tags in application code. Instead, we refer to nodes in +the model tree by name. In the example, these names were generated +automatically, in the same way the Comsol GUI does it. We could also supply +names of our choosing. 
```python import mph client = mph.start() @@ -131,17 +128,17 @@ model.property('geometries/geometry/ice block', 'size', ('0.1', '0.2', '0.5')) model.build('geometry') ``` -If [`model.create()`](#Model.create) receives a reference to a node that -does not exist yet, such as `geometries/geometry` in the example, it -creates that node in its parent group, here the built-in group -`geometries`, and gives it the name we supplied, here `geometry`. +If [`model.create()`](#Model.create) receives a reference to a node that does +not exist yet, such as `geometries/geometry` in the example, it creates that +node in its parent group, here the built-in group `geometries`, and gives it +the name we supplied, here `geometry`. So far, we have used strings to refer to nodes. We could also use the -[`Node`](#Node) class, which offers more flexibility and extra -functionality. Instances of that class are returned by `model.create()` -for convenience. But they can be generated from scratch by string -concatenation with the division operator — much like -[`pathlib.Path`](#pathlib.Path) objects from Python's standard library. +[`Node`](#Node) class, which offers more flexibility and extra functionality. +Instances of that class are returned by `model.create()` for convenience. But +they can be generated from scratch by string concatenation with the division +operator — much like [`pathlib.Path`](#pathlib.Path) objects from Python's +standard library. ```python import mph client = mph.start() @@ -153,9 +150,9 @@ block.property('size', ('0.1', '0.2', '0.5')) model.build(geometry) ``` -The division operator is the Swiss army knife for accessing nodes in -the model tree. It even works with `client` as root. Within that last -example, the following notations +The division operator is the Swiss army knife for accessing nodes in the model +tree. It even works with `client` as root. 
Within that last example, the +following notations ```python client/'block of ice'/'geometries'/'geometry'/'ice block' model/'geometries'/'geometry'/'ice block' @@ -164,19 +161,18 @@ geometry/'ice block' block ``` -all refer to the same geometry element in the model. We could also -include the forward slash in a string expression instead of using it as -an operator, just like we did in the first and second example. +all refer to the same geometry element in the model. We could also include the +forward slash in a string expression instead of using it as an operator, just +like we did in the first and second example. ```python model/'geometries/geometry/ice block' ``` -The model's root node can be referenced with either `model/''` or -`model/None`. If any of the node names in the hierarchy contain a -forward slash themselves, that forward slash can be escaped (i.e., -marked to be interpreted literally) by doubling it. For instance, -a geometry node displayed as "ice/frozen water" in the Comsol GUI -would be referred to as `geometry/'ice//frozen water'`. +The model's root node can be referenced with either `model/''` or `model/None`. +If any of the node names in the hierarchy contain a forward slash themselves, +that forward slash can be escaped (i.e., marked to be interpreted literally) by +doubling it. For instance, a geometry node displayed as "ice/frozen water" in +the Comsol GUI would be referred to as `geometry/'ice//frozen water'`. The example model created above ends up having the following model [`tree`](#mph.tree): @@ -213,24 +209,21 @@ block of ice └─ exports ``` -The parameter group, model component, default view and coordinate -system were created by Comsol automatically. We could rename these -nodes if we wanted to. Most built-in groups are still empty, waiting -for features to be created. +The parameter group, model component, default view and coordinate system were +created by Comsol automatically. We could rename these nodes if we wanted to. 
+Most built-in groups are still empty, waiting for features to be created. -The demo script [`create_capacitor.py`] shows how to create more -advanced features than in the simple example here: It generates -the demonstration model used in the [](#tutorial) entirely from -Python code. +The demo script [`create_capacitor.py`] shows how to create more advanced +features than in the simple example here: It generates the demonstration model +used in the [](#tutorial) entirely from Python code. ## Busbar example -["Electrical Heating in a Busbar"][busbar] is an example model featured -in the tutorial of ["Introduction to Comsol Multiphysics"][intro] and -explained there in great detail. The section "Getting the Maximum and -Minimum Temperature" demonstrates how to obtain the two temperature -extremes within the Comsol GUI. +["Electrical Heating in a Busbar"][busbar] is an example model featured in the +tutorial of ["Introduction to Comsol Multiphysics"][intro] and explained there +in great detail. The section "Getting the Maximum and Minimum Temperature" +demonstrates how to obtain the two temperature extremes within the Comsol GUI. The following Python code does the same thing programmatically: ```python @@ -259,15 +252,13 @@ or width `wbb` of the busbar. ## Compacting models -We usually save models to disk after we have solved them, which -includes the solution and mesh data in the file. This is convenient -so that we can come back to the model later, but don't have to run -the simulation again, which may take a long time. However, the files -then occupy a lot of disk space. After a while, we may want to archive -the models, but trim the fat before we do that. +We usually save models to disk after we have solved them, which includes the +solution and mesh data in the file. This is convenient so that we can come back +to the model later, but don't have to run the simulation again, which may take +a long time. However, the files then occupy a lot of disk space. 
After a while, +we may want to archive the models, but trim the fat before we do that. -To compact all model files in the current working directory, we can -do this: +To compact all model files in the current working directory, we can do this: ```python import mph from pathlib import Path @@ -280,48 +271,45 @@ for file in Path.cwd().glob('*.mph'): model.save() ``` -The script [`compact_models.py`] in the `demos` folder of the -source-code repository is a refined version of the above code. It -displays more status information and also resets the modeling history. +The script [`compact_models.py`] in the `demos` folder of the source-code +repository is a refined version of the above code. It displays more status +information and also resets the modeling history. -Note that we could easily go through all sub-directories recursively -by replacing [`glob`](#pathlib.Path.glob) with -[`rglob`](#pathlib.Path.rglob). However, this should be used -with caution so as to not accidentally modify models in folders that -were not meant to be included. +Note that we could easily go through all sub-directories recursively by +replacing [`glob`](#pathlib.Path.glob) with [`rglob`](#pathlib.Path.rglob). +However, this should be used with caution so as to not accidentally modify +models in folders that were not meant to be included. ## Multiple processes -As explained in chapter "[](#limitations)", we cannot run more than -one Comsol session inside the same Python process. But we *can* start -multiple Python processes in parallel if we leverage the -[`multiprocessing`](#multiprocessing) module from the standard library. +As explained in chapter "[](#limitations)", we cannot run more than one Comsol +session inside the same Python process. But we *can* start multiple Python +processes in parallel if we leverage the [`multiprocessing`](#multiprocessing) +module from the standard library. 
```python import mph import multiprocessing import queue ``` -Additionally, we have imported the [`queue`](#queue) module, also from -the standard library, though only for the [`queue.Empty`](#queue.Empty) -exception type that it provides. +Additionally, we have imported the [`queue`](#queue) module, also from the +standard library, though only for the [`queue.Empty`](#queue.Empty) exception +type that it provides. -In this demonstration, we will solve the model [`capacitor.mph`] from -the [](#tutorial). We want to sweep the electrode distance *d* and -calculate the capacitance *C* for each value of the distance, ranging -from 0.5 to 5 mm. +In this demonstration, we will solve the model [`capacitor.mph`] from the +[](#tutorial). We want to sweep the electrode distance *d* and calculate the +capacitance *C* for each value of the distance, ranging from 0.5 to 5 mm. ```python values = [0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0] ``` -Next, we define the function that we intend to run in every process, -i.e. the "worker". The function sets up the Comsol session when the -process starts, then keeps solving the model for every distance value -that it receives via a `jobs` queue. Each time, it evaluates the -solution and returns the capacitance via a `results` queue. It does -so until the `jobs` queue is exhausted, upon which the function -terminates, and with it Comsol session and Python process. +Next, we define the function that we intend to run in every process, i.e. the +"worker". The function sets up the Comsol session when the process starts, then +keeps solving the model for every distance value that it receives via a `jobs` +queue. Each time, it evaluates the solution and returns the capacitance via a +`results` queue. It does so until the `jobs` queue is exhausted, upon which the +function terminates, and with it Comsol session and Python process. 
```python def worker(jobs, results): client = mph.start(cores=1) @@ -337,41 +325,39 @@ def worker(jobs, results): results.put((d, C)) ``` -Each worker will only use one of the processor cores available on the -machine, as that's the whole point: We want to achieve maximum speed-up -of, say, a parameter sweep, by having each core work on a job -corresponding to one of the many parameter values, which it can do -independently of work being done for any other value. - -We could also solve this sequentially, one parameter value at a time. -Comsol's solver could then make use of all cores and would also employ -some parallelization strategy in its internal computation. But the -speed-up would not scale linearly with the number of cores, especially -for large numbers of them. - -We might use a "parametric sweep", a feature that Comsol does offer. -But by doing this in Python we retain full programmatic control of -which parameter is solved for and when. The parameter values don't -have to be hard-coded, they could come from user input or be generated -depending on the outcome of previous simulations. For example, this -approach lends itself to iterative optimization schemes such as the -[genetic algorithm] or [differential evolution], where a batch of -simulations would be run for each new "generation". - -Note how the returned results also contain the input parameter. As -the worker processes will run asynchronously in parallel, we cannot -take for granted that output is returned in input order. - -Before we start the computation, we add all parameter values to the -`jobs` queue: +Each worker will only use one of the processor cores available on the machine, +as that's the whole point: We want to achieve maximum speed-up of, say, a +parameter sweep, by having each core work on a job corresponding to one of the +many parameter values, which it can do independently of work being done for any +other value. 
+ +We could also solve this sequentially, one parameter value at a time. Comsol's +solver could then make use of all cores and would also employ some +parallelization strategy in its internal computation. But the speed-up would +not scale linearly with the number of cores, especially for large numbers of +them. + +We might use a "parametric sweep", a feature that Comsol does offer. But by +doing this in Python we retain full programmatic control of which parameter is +solved for and when. The parameter values don't have to be hard-coded, they +could come from user input or be generated depending on the outcome of previous +simulations. For example, this approach lends itself to iterative optimization +schemes such as the [genetic algorithm] or [differential evolution], where a +batch of simulations would be run for each new "generation". + +Note how the returned results also contain the input parameter. As the worker +processes will run asynchronously in parallel, we cannot take for granted that +output is returned in input order. + +Before we start the computation, we add all parameter values to the `jobs` +queue: ```python jobs = multiprocessing.Queue() for d in values: jobs.put(d) ``` -We also have to provide the `results` queue, which is of course empty -at first. +We also have to provide the `results` queue, which is of course empty at first. ```python results = multiprocessing.Queue() ``` @@ -383,28 +369,27 @@ for _ in range(4): process.start() ``` -It may be a good idea to hold on to the `process` objects and add them -to a list `processes`, just so that Python's garbage collection won't -accidentally delete them while the external processes are running. +It may be a good idea to hold on to the `process` objects and add them to a +list `processes`, just so that Python's garbage collection won't accidentally +delete them while the external processes are running. -Finally, still in the main process that starts all the workers, we can -collect the results. 
We use a `for` loop and exploit the fact that -there will be as many results as there were jobs to begin with. +Finally, still in the main process that starts all the workers, we can collect +the results. We use a `for` loop and exploit the fact that there will be as +many results as there were jobs to begin with. ```python for _ in values: (d, C) = results.get() ``` -We would then display them, plot them, save them to a file, or whatever -it is we do with simulation results. +We would then display them, plot them, save them to a file, or whatever it is +we do with simulation results. -The complete script [`worker_pool.py`], which implements all of the -above and also irons out some wrinkles not covered here for the sake -of brevity, can be found in the `demos` folder of the source-code -repository. As it runs, it displays a live plot such as the one that -follows. It is reproduced here preserving the real time from a run with -two workers. Observe how the first two data points do in fact come in -out of order. +The complete script [`worker_pool.py`], which implements all of the above and +also irons out some wrinkles not covered here for the sake of brevity, can be +found in the `demos` folder of the source-code repository. As it runs, it +displays a live plot such as the one that follows. It is reproduced here +preserving the real time from a run with two workers. Observe how the first two +data points do in fact come in out of order. ```{image} images/worker_pool.gif :alt: Live plot of worker pool demo @@ -413,13 +398,12 @@ out of order. A more advanced implementation may use a class derived from [`multiprocessing.Process`](#multiprocessing.Process) instead of a mere -function, just to be able to save state. For long-running simulations, -it would make sense to store jobs and results on disk, rather than in -memory, so that the execution of the queue may be resumed after a -possible interruption. 
In that case one may, or may not, find the -[`subprocess`](#subprocess) module more convenient for starting the -external processes. The worker implementation would then be in a -separate module that is run as a script. +function, just to be able to save state. For long-running simulations, it would +make sense to store jobs and results on disk, rather than in memory, so that +the execution of the queue may be resumed after a possible interruption. In +that case one may, or may not, find the [`subprocess`](#subprocess) module more +convenient for starting the external processes. The worker implementation would +then be in a separate module that is run as a script. [`capacitor.mph`]: https://github.com/MPh-py/MPh/blob/main/demos/capacitor.mph diff --git a/docs/index.md b/docs/index.md index 1444bbb..351b733 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,24 +1,24 @@ # MPh *Pythonic scripting interface for Comsol Multiphysics* -[Comsol] is a commercial software application that is widely used in -science and industry for research and development. It excels at modeling -almost any (multi-)physics problem by solving the governing set of -partial differential equations via the finite-element method. It comes -with a modern graphical user interface to set up simulation models and -can be scripted from Matlab or its native Java API. +[Comsol] is a commercial software application that is widely used in science +and industry for research and development. It excels at modeling almost any +(multi-)physics problem by solving the governing set of partial differential +equations via the finite-element method. It comes with a modern graphical user +interface to set up simulation models and can be scripted from Matlab or its +native Java API. -MPh brings the dearly missing power of Python to the world of Comsol. -It leverages the Java bridge provided by [JPype] to access the Comsol -API and wraps it in a layer of pythonic ease-of-use. 
The Python wrapper -covers common scripting tasks, such as loading a model from a file, -modifying parameters, importing data, to then run the simulation, -evaluate the results, and export them. +MPh brings the dearly missing power of Python to the world of Comsol. It +leverages the Java bridge provided by [JPype] to access the Comsol API and +wraps it in a layer of pythonic ease-of-use. The Python wrapper covers common +scripting tasks, such as loading a model from a file, modifying parameters, +importing data, to then run the simulation, evaluate the results, and export +them. -Comsol models are marked by their `.mph` file extension, which stands -for multi-physics. Hence the name of this library. It is open-source -and in no way affiliated with Comsol Inc., the company that develops -and sells the simulation software. +Comsol models are marked by their `.mph` file extension, which stands for +multi-physics. Hence the name of this library. It is open-source and in no way +affiliated with Comsol Inc., the company that develops and sells the simulation +software. [Comsol]: https://www.comsol.com [JPype]: https://jpype.readthedocs.io diff --git a/docs/installation.md b/docs/installation.md index 7a693f8..258f53b 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -53,19 +53,19 @@ For most users who already have Comsol installed, MPh will work out of the box. ## Licenses Comsol offers a number of [license options] for its products. Generally -speaking, MPh wants nothing to do with that complication, but rather -assumes that, whichever license you use, things will "just work". That is, -you are able to start the Comsol GUI or invoke any of its command-line -tools without extra configuration. Because Comsol's license management -handles that in one way or another. This is true for the most common -license types: "CPU-Locked" and "Floating Network". - -For more outlandish license types, that may not be the case. 
For example, -the "Class Kit" license requires users to pass the command-line argument -`-ckl` when starting Comsol. In this particular case, you can tell MPh -to do the same, by setting [`mph.option('classkit', True)`](#option) -before calling [`mph.start()`](#start). In other such cases, [open an -issue] if you want to add support to the code base. +speaking, MPh wants nothing to do with that complication, but rather assumes +that, whichever license you use, things will "just work". That is, you are able +to start the Comsol GUI or invoke any of its command-line tools without extra +configuration. Because Comsol's license management handles that in one way or +another. This is true for the most common license types: "CPU-Locked" and +"Floating Network". + +For more outlandish license types, that may not be the case. For example, the +"Class Kit" license requires users to pass the command-line argument `-ckl` +when starting Comsol. In this particular case, you can tell MPh to do the same, +by setting [`mph.option('classkit', True)`](#option) before calling +[`mph.start()`](#start). In other such cases, [open an issue] if you want to +add support to the code base. [available on PyPI]: https://pypi.python.org/pypi/mph diff --git a/docs/limitations.md b/docs/limitations.md index f8392c9..b96758c 100644 --- a/docs/limitations.md +++ b/docs/limitations.md @@ -2,56 +2,54 @@ ## Java bridge -MPh is built on top of the Python-to-Java bridge [JPype]. It is -JPype that allows us to look at Comsol's [Programming Manual] or its -[API reference] and run the same commands from Python. - -Unfortunately, the Comsol API does not support running more than one -client at a time, i.e. within the same Java program. Meanwhile, JPype -cannot manage more than one Java virtual machine within the same Python -process. If it could, it would be easy to work around Comsol's limitation. 
-(There is an alternative Java bridge, [pyJNIus], which is not limited -to one virtual machine, but then fails in another regard: A number of -Java methods exposed by Comsol are inexplicably missing from the Python -encapsulation.) - -Therefore, if several simulations are to be run in parallel, distributed -over independent processor cores in an effort to achieve maximum speed-up -of a parameter sweep, they have to be started as separate Python -subprocesses. Refer to section ["Multiple -processes"](demonstrations.md#multiple-processes) for a demonstration. - -Additionally, there are some known, but unresolved issues with JPype's -shutdown of the Java virtual machine. Notably, pressing Ctrl+C -to interrupt an ongoing operation will usually crash the Python session. -So do not rely on catching [`KeyboardInterrupt`](#KeyboardInterrupt) -exceptions in application code. +MPh is built on top of the Python-to-Java bridge [JPype]. It is JPype that +allows us to look at Comsol's [Programming Manual] or its [API reference] and +run the same commands from Python. + +Unfortunately, the Comsol API does not support running more than one client at +a time, i.e. within the same Java program. Meanwhile, JPype cannot manage more +than one Java virtual machine within the same Python process. If it could, it +would be easy to work around Comsol's limitation. (There is an alternative Java +bridge, [pyJNIus], which is not limited to one virtual machine, but then fails +in another regard: A number of Java methods exposed by Comsol are inexplicably +missing from the Python encapsulation.) + +Therefore, if several simulations are to be run in parallel, distributed over +independent processor cores in an effort to achieve maximum speed-up of a +parameter sweep, they have to be started as separate Python subprocesses. Refer +to section ["Multiple processes"](demonstrations.md#multiple-processes) for a +demonstration. 
+ +Additionally, there are some known, but unresolved issues with JPype's shutdown +of the Java virtual machine. Notably, pressing Ctrl+C to interrupt +an ongoing operation will usually crash the Python session. So do not rely on +catching [`KeyboardInterrupt`](#KeyboardInterrupt) exceptions in application +code. ## Platform differences -The Comsol API offers two distinct ways to run a simulation session on -the local machine. We can either start a "stand-alone" client, which -does not require a Comsol server. Or we first start a server and then -have a "thin" client connect to it via a loop-back network socket. The -first approach is more lightweight and more reliable, especially on -Windows, as it keeps everything inside the same process. The second -approach is slower to start up and relies on the inter-process -communication to be robust, but would also work across the network, -i.e., for remote sessions where the client runs locally and delegates -the heavy lifting to a server running on another machine. If we -instantiate the [`Client`](#Client) class without providing a value -for the network port, it will create a stand-alone client. Otherwise -it will run as a thin client in client–server mode. - -On Linux and macOS, however, the stand-alone mode does not work out of -the box. This is due to a limitation of Unix-like operating systems -and explained in more detail in [GitHub issue #8]. On these platforms, -if all you did was install MPh, starting the client in stand-alone mode -will raise a `java.lang.UnsatisfiedLinkError` because required external -libraries cannot be found. You would have to add the full paths of -certain shared-library folders to an environment variable named -`LD_LIBRARY_PATH` on Linux and `DYLD_LIBRARY_PATH` on macOS. +The Comsol API offers two distinct ways to run a simulation session on the +local machine. We can either start a "stand-alone" client, which does not +require a Comsol server. 
Or we first start a server and then have a "thin" +client connect to it via a loop-back network socket. The first approach is more +lightweight and more reliable, especially on Windows, as it keeps everything +inside the same process. The second approach is slower to start up and relies +on the inter-process communication to be robust, but would also work across the +network, i.e., for remote sessions where the client runs locally and delegates +the heavy lifting to a server running on another machine. If we instantiate the +[`Client`](#Client) class without providing a value for the network port, it +will create a stand-alone client. Otherwise it will run as a thin client in +client–server mode. + +On Linux and macOS, however, the stand-alone mode does not work out of the box. +This is due to a limitation of Unix-like operating systems and explained in +more detail in [GitHub issue #8]. On these platforms, if all you did was +install MPh, starting the client in stand-alone mode will raise a +`java.lang.UnsatisfiedLinkError` because required external libraries cannot be +found. You would have to add the full paths of certain shared-library folders +to an environment variable named `LD_LIBRARY_PATH` on Linux and +`DYLD_LIBRARY_PATH` on macOS. For example, for an installation of Comsol 6.3 on Linux, you would add the following lines at the end of the shell configuration file `.bashrc`. @@ -70,38 +68,35 @@ On macOS, `ComsolDir` would be `/Applications/COMSOL63/Multiphysics`. The above is [as documented for Comsol 6.3][comsol63_libpath] — make sure to consult the corresponding documentation for your Comsol version. -Requiring the environment variable to be set correctly limits the -possibility of selecting a specific Comsol version from within MPh, -as adding multiple installations to that search path will lead to name -collisions. 
One could work around the issue by wrapping a Python program -using MPh in a shell script that sets the environment variable before -starting the Python process. That's effectively what Comsol itself does -to launch its GUI on one of these platforms. Or we could have a Python -program that sets the environment variable and then runs MPh in a second -Python subprocess. Clearly, none of this is ideal. Starting the client -should work without any of these detours. +Requiring the environment variable to be set correctly limits the possibility +of selecting a specific Comsol version from within MPh, as adding multiple +installations to that search path will lead to name collisions. One could work +around the issue by wrapping a Python program using MPh in a shell script that +sets the environment variable before starting the Python process. That's +effectively what Comsol itself does to launch its GUI on one of these +platforms. Or we could have a Python program that sets the environment variable +and then runs MPh in a second Python subprocess. Clearly, none of this is +ideal. Starting the client should work without any of these detours. The function [`mph.start()`](#start) exists to mitigate these platform -differences. On Windows, it starts a stand-alone client in order to -profit from the better start-up performance. On Linux and macOS, it -creates a local session in client–server mode so that no shell -configuration is required up front. This behavior is reflected in the -configuration option `'session'`, accessible via -[`mph.option()`](#option), which is set to `'platform-dependent'` by -default. It could also be set to `'stand-alone'` or `'client-server'` -before calling `start()` in order to override the default behavior. - -Performance in client–server mode is noticeably worse in certain -scenarios, not just at start-up. 
If functions access the Java API -frequently, such as when navigating the model tree, perhaps even -recursively as [`mph.tree()`](#tree) does, then client–server mode can -be slower by a large factor compared to a stand-alone client. Rest -assured however that simulation run-times are not affected. - -Conversely, setting up stand-alone mode on Linux or macOS is also -not a robust solution. Image exports, for example, are known to crash -due to some conflict with external libraries. As opposed to Windows, -where this works reliably. +differences. On Windows, it starts a stand-alone client in order to profit from +the better start-up performance. On Linux and macOS, it creates a local session +in client–server mode so that no shell configuration is required up front. This +behavior is reflected in the configuration option `'session'`, accessible via +[`mph.option()`](#option), which is set to `'platform-dependent'` by default. +It could also be set to `'stand-alone'` or `'client-server'` before calling +`start()` in order to override the default behavior. + +Performance in client–server mode is noticeably worse in certain scenarios, not +just at start-up. If functions access the Java API frequently, such as when +navigating the model tree, perhaps even recursively as [`mph.tree()`](#tree) +does, then client–server mode can be slower by a large factor compared to a +stand-alone client. Rest assured however that simulation run-times are not +affected. + +Conversely, setting up stand-alone mode on Linux or macOS is also not a robust +solution. Image exports, for example, are known to crash due to some conflict +with external libraries. As opposed to Windows, where this works reliably. 
[JPype]: https://github.com/jpype-project/jpype diff --git a/docs/tutorial.md b/docs/tutorial.md index 5e33af5..5d95c51 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -1,16 +1,16 @@ # Tutorial -To follow along with this tutorial in an interactive Python session, -if you wish to do so, make sure you have downloaded the demonstration -model [`capacitor.mph`] from MPh's source-code repository. Save it in -the same folder from which you run Python. - -It is a model of a non-ideal, inhomogeneous, parallel-plate capacitor, -in that its electrodes are of finite extent, the edges are rounded -to avoid excessive electric-field strengths, and *two* media of -different dielectric permittivity fill the separate halves of the -electrode gap. Running the model only requires a license for the core -Comsol platform, but not for any add-on module beyond that. +To follow along with this tutorial in an interactive Python session, if you +wish to do so, make sure you have downloaded the demonstration model +[`capacitor.mph`] from MPh's source-code repository. Save it in the same folder +from which you run Python. + +It is a model of a non-ideal, inhomogeneous, parallel-plate capacitor, in that +its electrodes are of finite extent, the edges are rounded to avoid excessive +electric-field strengths, and *two* media of different dielectric permittivity +fill the separate halves of the electrode gap. Running the model only requires +a license for the core Comsol platform, but not for any add-on module beyond +that. ```{image} images/capacitor.png :alt: Screen-shot of demonstration model "capacitor" in Comsol GUI @@ -20,57 +20,54 @@ Comsol platform, but not for any add-on module beyond that. ## Starting Comsol -In the beginning was the client. And the client was with Comsol. And -the client was Comsol. So let there be a Comsol client. +In the beginning was the client. And the client was with Comsol. And the client +was Comsol. So let there be a Comsol client. 
```pycon
>>> import mph
>>> client = mph.start()
```

-The [`start()`](#start) function returns a client object, i.e. an
-instance of the [`Client`](#Client) class. It takes roughly ten
-seconds for the client to spin up.
+The [`start()`](#start) function returns a client object, i.e. an instance of
+the [`Client`](#Client) class. It takes roughly ten seconds for the client to
+spin up.

-We could instantiate the client with `client = mph.start(cores=1)`
-instead to have it use but one processor core. Restricting this
-resource is useful when other simulations are running in parallel
-on the same machine. Note, however, that within the same Java and
-therefore Python session, only one Comsol client can run at a time.
-Therefore, the `Client` class cannot be instantiated more than once.
-If you wish to work around this limitation imposed by Comsol, and
-realize the full parallelization potential of your compute hardware,
-you will need to [run multiple Python
-processes](demonstrations.md#multiple-processes), one for each client.
+We could instantiate the client with `client = mph.start(cores=1)` instead to
+have it use but one processor core. Restricting this resource is useful when
+other simulations are running in parallel on the same machine. Note, however,
+that within the same Java and therefore Python session, only one Comsol client
+can run at a time. Therefore, the `Client` class cannot be instantiated more
+than once. If you wish to work around this limitation imposed by Comsol, and
+realize the full parallelization potential of your compute hardware, you will
+need to [run multiple Python processes](demonstrations.md#multiple-processes),
+one for each client.


## Managing models

-Now that we have the client up and running, we can tell it to load a
-model file.
+Now that the client is up and running, we can tell it to load a model file.
```pycon
>>> model = client.load('capacitor.mph')
```

-It returns a model object, i.e. 
an instance of the [`Model`](#Model) -class. We will learn what to do with it further down. For now, it was -simply loaded into memory. We can list the names of all models the -client currently manages. +It returns a model object, i.e. an instance of the [`Model`](#Model) class. We +will learn what to do with it further down. For now, it was simply loaded into +memory. We can list the names of all models the client currently manages. ```pycon >>> client.names() ['capacitor'] ``` -If we were to load more models, that list would be longer. Note that -the above simply displays the names of the models. The actual model -objects can be recalled as follows: +If we were to load more models, that list would be longer. Note that the above +simply displays the names of the models. The actual model objects can be +recalled as follows: ```pycon >>> client.models() [Model('capacitor')] ``` -We will generally not need to bother with these lists, as we would -rather hold on to the `model` reference we received from the client -in the first place. But to free up memory, we could remove a specific +We will generally not need to bother with these lists, as we would rather hold +on to the `model` reference we received from the client in the first place. But +to free up memory, we could remove a specific model. ```pycon >>> client.remove(model) @@ -122,40 +119,40 @@ To solve the model, we will run these studies: ['static', 'relaxation', 'sweep'] ``` -Notice something? All features are referred to by their names, also -known as labels, such as `medium 1`. But *not* by their tags, such as -`mat1`, which litter not just the Comsol programming interface, but, -depending on display settings, its graphical user interface as well. +Notice something? All features are referred to by their names, also known as +labels, such as `medium 1`. 
But *not* by their tags, such as `mat1`, which
+litter not just the Comsol programming interface, but, depending on display
+settings, its graphical user interface as well.

-Tags are an implementation detail. An unnecessary annoyance to anyone
-who has ever scripted a Comsol model from either Matlab or Java.
-Unnecessary because names/labels are equally enforced to be unique,
-so tags are not needed for disambiguation. And annoying because we
-cannot freely change a tag. Say, we remove a feature, but then realize
-we need it after all, and thus recreate it. It may now have a different
-tag. And any code that references it has to adapted.
+Tags are an implementation detail. An unnecessary annoyance to anyone who has
+ever scripted a Comsol model from either Matlab or Java. Unnecessary because
+names/labels are equally enforced to be unique, so tags are not needed for
+disambiguation. And annoying because we cannot freely change a tag. Say, we
+remove a feature, but then realize we need it after all, and thus recreate it.
+It may now have a different tag. And any code that references it has to be
+adapted.

-This is Python though. We hide implementation details as much as we
-can. Abstract them out. So refer to things in the model tree by what
-you name them in the model tree. If you remove a feature and then put
-it back in, just give it the same name, and nothing has changed. You
-may also set up different models to be automated by the same script.
-No problem, as long as your naming scheme is consistent throughout.
+This is Python though. We hide implementation details as much as we can.
+Abstract them out. So refer to things in the model tree by what you name them
+in the model tree. If you remove a feature and then put it back in, just give
+it the same name, and nothing has changed. You may also set up different models
+to be automated by the same script. No problem, as long as your naming scheme
+is consistent throughout. 
## Modifying parameters -As we have learned from the list above, the model defines a parameter -named `d` that denotes the electrode spacing. If we know a parameter's -name, we can access its value directly. +As we have learned from the list above, the model defines a parameter named `d` +that denotes the electrode spacing. If we know a parameter's name, we can +access its value directly. ```pycon >>> model.parameter('d') '2[mm]' ``` -If we pass in not just the name, but also a value, that same method -modifies it. +If we pass in not just the name, but also a value, that same method modifies +it. ```pycon >>> model.parameter('d', '1[mm]') >>> model.parameter('d') @@ -167,9 +164,9 @@ This particular model's only geometry sequence >>> model.geometries() ['geometry'] ``` -is set up to depend on that very value. So it will effectively change -the next time it is rebuilt. This will happen automatically once we -solve the model. But we may also trigger the geometry build right away. +is set up to depend on that very value. So it will effectively change the next +time it is rebuilt. This will happen automatically once we solve the model. But +we may also trigger the geometry build right away. ```pycon >>> model.build() ``` @@ -177,29 +174,28 @@ solve the model. But we may also trigger the geometry build right away. ## Running simulations To solve the model, we need to create a mesh. This would also happen -automatically, but let's make sure this critical step passes without -a hitch. +automatically, but let's make sure this critical step passes without a hitch. ```pycon >>> model.mesh() ``` -Now run the first study, the one set up to compute the electrostatic -solution, i.e. the instantaneous and purely capacitive response to the -applied voltage, before leakage currents have any time to set in. +Now run the first study, the one set up to compute the electrostatic solution, +i.e. 
the instantaneous and purely capacitive response to the applied voltage, +before leakage currents have any time to set in. ```pycon >>> model.solve('static') ``` -This modest simulation should not take longer than a few seconds. -While we are at it, we may as well solve the remaining two studies, -one time-dependent, the other a parameter sweep. +This modest simulation should not take longer than a few seconds. While we are +at it, we may as well solve the remaining two studies, one time-dependent, the +other a parameter sweep. ```pycon >>> model.solve('relaxation') >>> model.solve('sweep') ``` -They take a little longer, but not much. We could have solved all three -studies at once, or rather, all of the studies defined in the model. +They take a little longer, but not much. We could have solved all three studies +at once, or rather, all of the studies defined in the model. ```pycon >>> model.solve() ``` @@ -207,16 +203,16 @@ studies at once, or rather, all of the studies defined in the model. ## Evaluating results -Let's see what we found out and evaluate the electrostatic capacitance, -i.e. at zero time or infinite frequency. +Let's see what we found out and evaluate the electrostatic capacitance, i.e. at +zero time or infinite frequency. ```pycon >>> model.evaluate('2*es.intWe/U^2', 'pF') array(1.31948342) ``` -All results are returned as [NumPy arrays](#numpy.array). Though -"global" evaluations such as this one could be readily cast to a -regular Python [`float`](#float). +All results are returned as [NumPy arrays](#numpy.array). Though "global" +evaluations such as this one could be readily cast to a regular Python +[`float`](#float). We might also ask where the electric field is strongest and have [`evaluate()`](#Model.evaluate) perform a "local" evaluation. @@ -229,28 +225,28 @@ We might also ask where the electric field is strongest and have (-0.000503768636204733, -0.004088126064370979) ``` -Note how this time we did not specify any units. 
When left out, values -are returned in the model's default units. Here specifically, the field -strength in V/m and its coordinates in meters. +Note how this time we did not specify any units. When left out, values are +returned in the model's default units. Here specifically, the field strength in +V/m and its coordinates in meters. -We also did not specify the dataset, even though there are three -different studies that have separate solutions and datasets associated -along with them. When not named specifically, Comsol will use what it -considers the default dataset. That generally refers to the study -defined first, here "static". The default dataset is the one resulting -from that study, here (inconsistently) named "electrostatic". +We also did not specify the dataset, even though there are three different +studies that have separate solutions and datasets associated along with them. +When not named specifically, Comsol will use what it considers the default +dataset. That generally refers to the study defined first, here "static". The +default dataset is the one resulting from that study, here (inconsistently) +named "electrostatic". ```pycon >>> model.datasets() ['electrostatic', 'time-dependent', 'parametric sweep', 'sweep//solution'] ``` -Now let's look at the time dependence. The two media in this model -have a small, but finite conductivity, leading to leakage currents in -the long run. As the two conductivities also differ in value, charges -will accumulate at the interface between the media. This interface -charge leads to a gradual relaxation of the electric field over time, -and thus to a change of the capacitance as well. We can tell that from -its value at the first and last time step. +Now let's look at the time dependence. The two media in this model have a +small, but finite conductivity, leading to leakage currents in the long run. As +the two conductivities also differ in value, charges will accumulate at the +interface between the media. 
This interface charge leads to a gradual +relaxation of the electric field over time, and thus to a change of the +capacitance as well. We can tell that from its value at the first and last time +step. ```pycon >>> C = '2*ec.intWe/U^2' >>> model.evaluate(C, 'pF', 'time-dependent', 'first') @@ -269,11 +265,11 @@ second, respectively. 1.0 ``` -Obviously, the capacitance also varies if we change the distance -between the electrodes. In the model, a parameter sweep was used to -study that. These "outer" solutions, just like the time-dependent -"inner" solutions, are referenced by indices, i.e. integer numbers, -each of which corresponds to a particular parameter value. +Obviously, the capacitance also varies if we change the distance between the +electrodes. In the model, a parameter sweep was used to study that. These +"outer" solutions, just like the time-dependent "inner" solutions, are +referenced by indices, i.e. integer numbers, each of which corresponds to a +particular parameter value. ```pycon >>> (indices, values) = model.outer('parametric sweep') >>> indices @@ -288,12 +284,11 @@ array(0.73678535) array(0.52865775) ``` -Then again, with a scripting interface such as this one, we may as -well run the time-dependent study a number of times and change the -parameter value from one run to the next. General parameter sweeps -can get quite complicated in terms of how they map to indices as -soon as combinations of parameters are allowed. Support for this may -therefore be limited. +Then again, with a scripting interface such as this one, we may as well run the +time-dependent study a number of times and change the parameter value from one +run to the next. General parameter sweeps can get quite complicated in terms of +how they map to indices as soon as combinations of parameters are allowed. +Support for this may therefore be limited. 
## Exporting results @@ -304,25 +299,24 @@ Two exports are defined in the demonstration model: ['data', 'image'] ``` -The first exports the solution of the electrostatic field as text data. -The second renders an image of the plot featured in the screen-shot at -the top of the page. +The first exports the solution of the electrostatic field as text data. The +second renders an image of the plot featured in the screen-shot at the top of +the page. We can trigger all exports at once by calling -[`model.export()`](#Model.export). Or we can be more selective and just -export one: `model.export('image')`. The exported files will end up in -the same folder as the model file itself and have the names that were -assigned in the model's export nodes. Unless we supply custom file names -or paths by adding them as the second argument. +[`model.export()`](#Model.export). Or we can be more selective and just export +one: `model.export('image')`. The exported files will end up in the same folder +as the model file itself and have the names that were assigned in the model's +export nodes. Unless we supply custom file names or paths by adding them as the +second argument. ```pycon >>> model.export('image', 'static field.png') ``` -The idea here is to first set up sensible exports in the GUI, such as -images that illustrate the simulation results, and then trigger them -from a script for a particular simulation run. The results then may -depend on parameter values, which you could include as part of the -file name. +The idea here is to first set up sensible exports in the GUI, such as images +that illustrate the simulation results, and then trigger them from a script for +a particular simulation run. The results then may depend on parameter values, +which you could include as part of the file name. 
## Saving results @@ -332,34 +326,32 @@ To save the model we just solved, along with its solution, just do: >>> model.save() ``` -This would overwrite the existing file we loaded the model from. To -avoid this, we could specify a different file name. +This would overwrite the existing file we loaded the model from. To avoid this, +we could specify a different file name. ```pycon >>> model.save('capacitor_solved') ``` -The `.mph` extension will be added automatically if it is not included -in the first place. +The `.mph` extension will be added automatically if it is not included in the +first place. -Maybe we don't actually need to keep the solution and mesh data around. -The model was quick enough to solve, and we do like free disk space. -We would just like to be able to look up modeling details somewhere -down the line. Comsol also keeps track of the modeling history: a log -of which features were created, deleted, modified, and in which order. -More often than not, such details are irrelevant. We can prune them by -resetting that record. +Maybe we don't actually need to keep the solution and mesh data around. The +model was quick enough to solve, and we do like free disk space. We would just +like to be able to look up modeling details somewhere down the line. Comsol +also keeps track of the modeling history: a log of which features were created, +deleted, modified, and in which order. More often than not, such details are +irrelevant. We can prune them by resetting that record. ```pycon >>> model.clear() >>> model.reset() >>> model.save('capacitor_compacted') ``` -Most functionality that the library offers is covered in this tutorial. -The few things that were left out can be gleaned from the [API -documentation](#api). A number of use-case examples are showcased in -chapter "[](#demonstrations)". That chapter and the API -documentation also explain how to go beyond the scope of this library -and access the full Comsol API from Python, if needed. 
+Most functionality that the library offers is covered in this tutorial. The few +things that were left out can be gleaned from the [API documentation](#api). A +number of use-case examples are showcased in chapter "[](#demonstrations)". +That chapter and the API documentation also explain how to go beyond the scope +of this library and access the full Comsol API from Python, if needed. [`capacitor.mph`]: https://github.com/MPh-py/MPh/blob/main/demos/capacitor.mph diff --git a/mph/client.py b/mph/client.py index 55eb6d7..50dbbd6 100755 --- a/mph/client.py +++ b/mph/client.py @@ -18,13 +18,12 @@ from jpype import JClass -# The following look-up table is used by the `modules()` method. It is -# based on the table on page 41 of Comsol 6.0's Programming Reference -# Manual, with the two columns swapped. It thus maps vendor strings to -# product names (add-on modules), except that we also shorten the names -# somewhat (drop "Module" everywhere) and leave out the pointless -# trademark symbols. The vendor strings are what we need to query the -# `ModelUtil.hasProduct()` Java method. +# The following look-up table is used by the `modules()` method. It is based on +# the table on page 41 of Comsol 6.0's Programming Reference Manual, with the +# two columns swapped. It thus maps vendor strings to product names (add-on +# modules), except that we also shorten the names somewhat (drop "Module" +# everywhere) and leave out the pointless trademark symbols. The vendor strings +# are what we need to query the `ModelUtil.hasProduct()` Java method. modules = { 'COMSOL': 'Comsol core', 'ACDC': 'AC/DC', @@ -332,14 +331,13 @@ def caching(self, state=None): """ Enables or disables caching of previously loaded models. - Caching means that the [`load`](#Client.load) method will check - if a model has been previously loaded from the same file-system - path and, if so, return the in-memory model object instead of - reloading it from disk. By default (at start-up) caching is - disabled. 
+ Caching means that the [`load`](#Client.load) method will check if a + model has been previously loaded from the same file-system path and, + if so, return the in-memory model object instead of reloading it from + disk. By default (at start-up) caching is disabled. - Pass `True` to enable caching, `False` to disable it. If no - argument is passed, the current state is returned. + Pass `True` to enable caching, `False` to disable it. If no argument is + passed, the current state is returned. """ if state is None: return option('caching') @@ -354,8 +352,8 @@ def create(self, name: str = None) -> Model: """ Creates a new model and returns it as a [`Model`](#Model) instance. - An optional `name` can be supplied. Otherwise the model will - retain its automatically generated name, like "Model 1". + An optional `name` can be supplied. Otherwise the model will retain its + automatically generated name, like "Model 1". """ java = self.java.createUnique('model') model = Model(java) @@ -407,12 +405,12 @@ def connect(self, port: int, host: str = 'localhost'): """ Connects the client to a server. - The Comsol server must be listening at the given `port` for - client connections. The `host` address defaults to `'localhost'`, - but could be any domain name or IP address. + The Comsol server must be listening at the given `port` for client + connections. The `host` address defaults to `'localhost'`, but could be + any domain name or IP address. - This will fail for stand-alone clients or if the client is - already connected to a server. In the latter case, call + This will fail for stand-alone clients or if the client is already + connected to a server. In the latter case, call [`disconnect()`](#disconnect) first. """ if self.standalone: @@ -432,9 +430,9 @@ def disconnect(self): """ Disconnects the client from the server. - Note that the [`server`](#Server), unless started with the - option `multi` set to `'on'`, will shut down as soon as the - client disconnects. 
+ Note that the [`server`](#Server), unless started with the option + `multi` set to `'on'`, will shut down as soon as the client + disconnects. """ if self.port: log.debug('Disconnecting from server.') diff --git a/mph/config.py b/mph/config.py index 42ab9ba..43dd817 100644 --- a/mph/config.py +++ b/mph/config.py @@ -42,9 +42,9 @@ def option(name=None, value=None): """ Sets or returns the value of a configuration option. - If called without arguments, returns all configuration options as - a dictionary. Returns an option's value if only called with the - option's `name`. Otherwise sets the option to the given `value`. + If called without arguments, returns all configuration options as a + dictionary. Returns an option's value if only called with the option's + `name`. Otherwise sets the option to the given `value`. """ if name is None: return options @@ -66,10 +66,9 @@ def location() -> Path: """ Returns the default location of the configuration file. - The folder returned by this function is platform-specific. It is - inside the user's `AppData` folder on Windows, inside `.config` - in the home directory on Linux, and in `Application Support` on - macOS. + The folder returned by this function is platform-specific. It is inside the + user's `AppData` folder on Windows, inside `.config` in the home directory + on Linux, and in `Application Support` on macOS. """ if system == 'Windows': return Path(os.environ['APPDATA'])/'MPh' @@ -85,11 +84,10 @@ def load(file: Path | str = None): """ Loads the configuration from the given `.ini` file. - If `file` is not given, looks for a configuration file named - `MPh.ini` in the current directory, or in the folder inside the - user profile as returned by [`location()`](#location), or in this - library's folder, in that order. If no such file is found, the - hard-coded default values are used. 
+ If `file` is not given, looks for a configuration file named `MPh.ini` in + the current directory, or in the folder inside the user profile as returned + by [`location()`](#location), or in this library's folder, in that order. + If no such file is found, the hard-coded default values are used. """ if not file: folders = [Path.cwd(), location(), Path(__file__).parent] @@ -124,8 +122,8 @@ def save(file: Path | str = None): """ Saves the configuration in the given `.ini` file. - If `file` is not given, saves the configuration in `MPh.ini` - inside the default folder returned by [`location()`](#location). + If `file` is not given, saves the configuration in `MPh.ini` inside the + default folder returned by [`location()`](#location). """ if not file: file = location()/'MPh.ini' diff --git a/mph/discovery.py b/mph/discovery.py index 8469bb6..d3bf9a2 100644 --- a/mph/discovery.py +++ b/mph/discovery.py @@ -1,25 +1,25 @@ """ Discovers Comsol installations. -This is an internal helper module that is not part of the public API. -It retrieves information about installed Comsol versions, i.e. -available simulation back-ends, and locates the installation folders. +This is an internal helper module that is not part of the public API. It +retrieves information about installed Comsol versions, i.e. available +simulation back-ends, and locates the installation folders. On Windows, the discovery mechanism relies on the Registry to provide -information about install locations. On Linux and macOS, Comsol is -expected to be installed at its respective default location. Though the -folder `.local` in the user's home directory is also searched to allow -symbolic linking to a custom location. +information about install locations. On Linux and macOS, Comsol is expected to +be installed at its respective default location. Though the folder `.local` in +the user's home directory is also searched to allow symbolic linking to a +custom location. 
-Additionally, we also run the shell command `where comsol` (on Windows) -or `which comsol` (on Linux and macOS) to find a Comsol installation -that isn't in a default location, but for which the Comsol executable -was added to the executable search path. +Additionally, we also run the shell command `where comsol` (on Windows) or +`which comsol` (on Linux and macOS) to find a Comsol installation that isn't in +a default location, but for which the Comsol executable was added to the +executable search path. Note that duplicate installations will be ignored. That is, a Comsol -installation found in a later step that reports the same version as one -found in an earlier step will be ignored, regardless of install location. -The one found on the search path, if any, will be prioritized. +installation found in a later step that reports the same version as one found +in an earlier step will be ignored, regardless of install location. The one +found on the search path, if any, will be prioritized. """ from __future__ import annotations @@ -82,13 +82,13 @@ def parse(version: str) -> tuple[str, int, int, int, int]: """ Parses version information as returned by Comsol executable. - Returns `(name, major, minor, patch, build)` where `name` is a - string and the rest are numbers. The name is a short-hand based - on the major, minor, and patch version numbers, e.g. `'5.3a'`. + Returns `(name, major, minor, patch, build)` where `name` is a string and + the rest are numbers. The name is a short-hand based on the major, minor, + and patch version numbers, e.g. `'5.3a'`. - Raises `ValueError` if the input string deviates from the expected - format, i.e., the format in which the Comsol executable returns - version information. + Raises `ValueError` if the input string deviates from the expected format, + i.e., the format in which the Comsol executable returns version + information. """ # Separate version number from preceding program name. 
@@ -304,10 +304,10 @@ def find_backends() -> list[Backend]: for comsol in executables: log.debug(f'Checking executable "{comsol}".') - # The Java VM is configured in a file named "comsol.ini". - # That file is usually in the same folder as the Comsol executable. - # Though on Linux and macOS, the executable may also be a script - # that sits one folder up (for some reason). + # The Java VM is configured in a file named "comsol.ini". That file is + # usually in the same folder as the Comsol executable. Though on Linux + # and macOS, the executable may also be a script that sits one folder + # up (for some reason). ini_name = 'comsol.ini' for ini in [comsol.parent/ini_name, comsol.parent/arch/ini_name]: if ini.is_file(): @@ -430,9 +430,9 @@ def backend(version: str = None) -> Backend: """ Returns information about the Comsol back-end. - A specific Comsol `version` can be selected by name if several - are installed, for example `version='6.0'`. Otherwise the latest - version is used. + A specific Comsol `version` can be selected by name if several are + installed, for example `version='6.0'`. Otherwise the latest version is + used. """ backends = find_backends() if not backends: diff --git a/mph/model.py b/mph/model.py index 2cb2450..ef624cb 100644 --- a/mph/model.py +++ b/mph/model.py @@ -17,13 +17,13 @@ from numpy import int32, float64 -# The following look-up table is used by the `modules()` method. It maps -# the product names (add-on modules) returned by `model.getUsedProducts()` -# to the same sanitized names used in the look-up table in the `clients` -# module. So it essentially drops the Unicode trademark symbols as well -# as the redundant "Module". The strings returned by `getUsedProducts()` -# seem to correspond exactly to the product names in the left column -# of the table on page 41 of Comsol 6.0's Programming Reference Manual. +# The following look-up table is used by the `modules()` method. 
It maps the +# product names (add-on modules) returned by `model.getUsedProducts()` to the +# same sanitized names used in the look-up table in the `clients` module. So it +# essentially drops the Unicode trademark symbols as well as the redundant +# "Module". The strings returned by `getUsedProducts()` seem to correspond +# exactly to the product names in the left column of the table on page 41 of +# Comsol 6.0's Programming Reference Manual. modules = { 'AC/DC Module': 'AC/DC', 'Acoustics Module': 'Acoustics', @@ -83,8 +83,8 @@ class Model: """ Represents a Comsol model. - The class is not intended to be instantiated directly. Rather, the - model would be loaded from a file by the [client](#Client). + The class is not intended to be instantiated directly. Rather, the model + would be loaded from a file by the [client](#Client). Example usage: ```python @@ -98,25 +98,24 @@ class Model: print(f'capacitance C = {C:.3f} pF') ``` - The focus of the functionality exposed by this class is to - inspect an existing model, possibly change parameters, solve the - model, then evaluate the results. The intention is not *per se* - to create the model from scratch or to extensively modify its - structure, though some such functionality is offered here, and - even more of it through the [`Node`](#Node) class. - - This class is a wrapper around the [`com.comsol.model.Model`][1] - Java class, which itself is wrapped by JPype and can be accessed - directly via the `.java` attribute. The full Comsol functionality - is thus available if needed. - - The `parent` argument to the constructor is usually that internal - Java object. But in order to simplify extending the class with - custom functionality, the constructor also accepts instances of - this very class or a child class. 
In that case, it will preserve - the original `.java` reference throughout the class hierarchy so - that it is possible to "type-cast" an existing `Model` instance - (as loaded by the client) to a derived child class. + The focus of the functionality exposed by this class is to inspect an + existing model, possibly change parameters, solve the model, then evaluate + the results. The intention is not *per se* to create the model from scratch + or to extensively modify its structure, though some such functionality is + offered here, and even more of it through the [`Node`](#Node) class. + + This class is a wrapper around the [`com.comsol.model.Model`][1] Java + class, which itself is wrapped by JPype and can be accessed directly via + the `.java` attribute. The full Comsol functionality is thus available if + needed. + + The `parent` argument to the constructor is usually that internal Java + object. But in order to simplify extending the class with custom + functionality, the constructor also accepts instances of this very class + or a child class. In that case, it will preserve the original `.java` + reference throughout the class hierarchy so that it is possible to + "type-cast" an existing `Model` instance (as loaded by the client) to a + derived child class. [1]: https://doc.comsol.com/6.0/doc/com.comsol.help.comsol/api\ /com/comsol/model/Model.html @@ -244,11 +243,10 @@ def problems(self) -> list[dict[str, str | Node]]: """ Returns problems reported by nodes in the model. - This method lets users check if any problems are reported - throughout the model by testing `if model.problems():` in - application code, to then act accordingly. See - [`Node.problems()`](#Node.problems) on how problems - (error/warning messages and their origin) are returned. + This method lets users check if any problems are reported throughout + the model by testing `if model.problems():` in application code, to + then act accordingly. 
See [`Node.problems()`](#Node.problems) on how + problems (error/warning messages and their origin) are returned. """ return (self/None).problems() @@ -353,10 +351,10 @@ def inner(self, """ Returns the indices and values of inner solutions. - These are the solution indices and time values in - time-dependent studies, returned as a tuple of an integer - array and a floating-point array. A `dataset` name may be - specified. Otherwise the default dataset is used. + These are the solution indices and time values in time-dependent + studies, returned as a tuple of an integer array and a floating-point + array. A `dataset` name may be specified. Otherwise the default dataset + is used. """ # Validate dataset argument. if dataset is not None: @@ -398,9 +396,8 @@ def outer(self, Returns the indices and values of outer solutions. These are the solution indices and values in parametric sweeps, - returned as a tuple of an integer array and a floating-point - array. A `dataset` name may be specified. Otherwise the default - dataset is used. + returned as a tuple of an integer array and a floating-point array. A + `dataset` name may be specified. Otherwise the default dataset is used. """ # Validate dataset argument. if dataset is not None: @@ -654,18 +651,17 @@ def parameter( """ Returns or sets the parameter of the given name. - Returns the value of parameter `name` if no `value` is given. - Otherwise sets the value. + Returns the value of parameter `name` if no `value` is given. Otherwise + sets the value. - Values are accepted as expressions (strings, possibly including - the unit inside square brackets) or as numerical values - (referring to default units). + Values are accepted as expressions (strings, possibly including the + unit inside square brackets) or as numerical values (referring to + default units). - By default, values are returned as strings, i.e. the expression - as entered in the user interface. That expression may include - the unit, again inside brackets. 
If the option `evaluate` is set - to `True`, the numerical value that the expression evaluates to - is returned. + By default, values are returned as strings, i.e. the expression as + entered in the user interface. That expression may include the unit, + again inside brackets. If the option `evaluate` is set to `True`, the + numerical value that the expression evaluates to is returned. """ if value is None: if not evaluate: @@ -701,13 +697,12 @@ def parameters(self, evaluate=False): """ Returns the global model parameters. - The parameters are returned as a dictionary indexed by the - parameter names and mapping to the parameter values. + The parameters are returned as a dictionary indexed by the parameter + names and mapping to the parameter values. - Value are returned as string expressions, i.e. as entered by - the user, unless `evaluate` is set to `True`, in which case - the expressions are evaluated and the corresponding numbers - are returned. + Value are returned as string expressions, i.e. as entered by the user, + unless `evaluate` is set to `True`, in which case the expressions are + evaluated and the corresponding numbers are returned. """ if not evaluate: return {str(name): str(self.java.param().get(name)) @@ -724,8 +719,8 @@ def description(self, name, text=None): """ Returns or sets the description of the named parameter. - If no `text` is given, returns the text description of - parameter `name`. Otherwise sets it. + If no `text` is given, returns the text description of parameter + `name`. Otherwise sets it. """ if text is not None: value = self.parameter(name) @@ -753,8 +748,8 @@ def property(self, node, name, value = None): """ Returns or changes the value of the named node property. - If no `value` is given, returns the value of property `name`. - Otherwise sets the property to the given value. + If no `value` is given, returns the value of property `name`. Otherwise + sets the property to the given value. 
""" return (self/node).property(name, value) @@ -807,9 +802,8 @@ def import_(self, node: Node | str, file: Path | str): """ Imports external data from a file and assigns it to the node. - Note the trailing underscore in the method name. It is needed - so that the Python parser does not treat the name as an - `import` statement. + Note the trailing underscore in the method name. It is needed so that + the Python parser does not treat the name as an `import` statement. """ if isinstance(node, str): node = self/node @@ -823,10 +817,9 @@ def export(self, node: Node | str | None = None, file: Path | str = None): """ Runs the export node, either given by name or node reference. - A `file` name can be specified. Otherwise the file name defined - in the node's properties will be used. If called without any - arguments, all export nodes defined in the model are run using - the default file names. + A `file` name can be specified. Otherwise the file name defined in the + node's properties will be used. If called without any arguments, all + export nodes defined in the model are run using the default file names. """ if node is None: for node in self/'exports': @@ -905,19 +898,19 @@ def save(self, path: Path | str = None, format: str = None): """ Saves the model at the given file-system path. - If `path` is not given, the original file name is used, i.e. - the one from which the model was loaded to begin with. If - the path contains no folder information, the current folder - (working directory) is used. If the path points to a folder, - the model name is used to name the file inside that folder. + If `path` is not given, the original file name is used, i.e. the one + from which the model was loaded to begin with. If the path contains no + folder information, the current folder (working directory) is used. If + the path points to a folder, the model name is used to name the file + inside that folder. 
- A `format` can be specified as either "Comsol", "Java", - "Matlab", or "VBA". If no format is given, it will be deduced - from the file's ending, being either `.mph`, `.java`, `.m`, or - `.vba`, respectively. No file ending implies "Comsol" format. + A `format` can be specified as either "Comsol", "Java", "Matlab", or + "VBA". If no format is given, it will be deduced from the file's + ending, being either `.mph`, `.java`, `.m`, or `.vba`, respectively. + No file ending implies "Comsol" format. - Imposes the correct file ending for the format. Overwrites - existing files. + Imposes the correct file ending for the format. Overwrites existing + files. """ # Coerce paths given as string to Path objects. diff --git a/mph/node.py b/mph/node.py index e55d8b1..9b44718 100644 --- a/mph/node.py +++ b/mph/node.py @@ -30,14 +30,14 @@ class Node: """ Represents a model node. - This class makes it possible to navigate the model tree, inspect a - node, namely its properties, and manipulate it, like toggling it - on/off, creating child nodes, or "running" it. + This class makes it possible to navigate the model tree, inspect a node, + namely its properties, and manipulate it, like toggling it on/off, creating + child nodes, or "running" it. Instances of this class reference a node in the model tree and work similarly to [`Path`](#pathlib.Path) objects from Python's standard - library. They support string concatenation to the right with the - division operator in order to reference child nodes: + library. They support string concatenation to the right with the division + operator in order to reference child nodes: ```python >>> node = model/'functions' >>> node @@ -46,17 +46,16 @@ class Node: Node('functions/step') ``` - Note how the [`model`](#Model) object also supports the division - operator in order to generate node references. 
As mere references, - nodes must must not necessarily exist in the model tree: + Note how the [`model`](#Model) object also supports the division operator + in order to generate node references. As mere references, nodes must must + not necessarily exist in the model tree: ```python >>> (node/'new function').exists() False ``` - In interactive sessions, the convenience function - [`mph.tree()`](#tree) may prove useful to see the node's branch in - the model tree at a glance: + In interactive sessions, the convenience function [`mph.tree()`](#tree) may + prove useful to see the node's branch in the model tree at a glance: ```console >>> mph.tree(model/'physics') physics @@ -74,10 +73,10 @@ class Node: └─ cathode ``` - In rare cases, the node name itself might contain a forward slash, - such as the dataset `sweep/solution` that happens to exist in the - demo model from the [Tutorial](/tutorial.md). These literal forward - slashes can be escaped by doubling the character: + In rare cases, the node name itself might contain a forward slash, such as + the dataset `sweep/solution` that happens to exist in the demo model from + the [Tutorial](/tutorial.md). These literal forward slashes can be escaped + by doubling the character: ```python >>> node = model/'datasets/sweep//solution' >>> node.name() @@ -86,14 +85,13 @@ class Node: Node('datasets') ``` - If the node refers to an existing model feature, then the instance - wraps the corresponding Java object, which could belong to a variety - of classes, but would necessarily implement the - [`com.comsol.model.ModelEntity`][1] interface. That Java object - can be accessed directly via the `.java` property. The full Comsol - functionality is thus available if needed. The convenience function - [`mph.inspect()`](#inspect) is provided for introspection of the - Java object in an interactive session. 
+ If the node refers to an existing model feature, then the instance wraps + the corresponding Java object, which could belong to a variety of classes, + but would necessarily implement the [`com.comsol.model.ModelEntity`][1] + interface. That Java object can be accessed directly via the `.java` + property. The full Comsol functionality is thus available if needed. The + convenience function [`mph.inspect()`](#inspect) is provided for + introspection of the Java object in an interactive session. [1]: https://doc.comsol.com/6.0/doc/com.comsol.help.comsol/api\ /com/comsol/model/ModelEntity.html @@ -206,10 +204,10 @@ def java(self) -> JClass | None: """ Java object this node maps to, if any. - Note that this is a property, not an attribute. Internally, - it is a function that performs a top-down search of the model - tree in order to resolve the node reference. So it introduces - a certain overhead every time it is accessed. + Note that this is a property, not an attribute. Internally, it is a + function that performs a top-down search of the model tree in order to + resolve the node reference. So it introduces a certain overhead every + time it is accessed. """ if self.is_root(): return self.model.java @@ -237,10 +235,10 @@ def java(self) -> JClass | None: def java_if_exists(self) -> JClass: # Returns `self.java` if the node exists, raises an error otherwise. # - # This helper function was introduced to reduce code repetition - # in the methods that follow. We should probably just straight up - # raise the error when `self.java` is accessed. However, that - # might break user code, so can only be done in a major release. + # This helper function was introduced to reduce code repetition in the + # methods that follow. We should probably just straight up raise the + # error when `self.java` is accessed. However, that might break user + # code, so can only be done in a major release. java = self.java if not java: error = f'Node "{self}" does not exist in model tree.' 
@@ -265,10 +263,9 @@ def type(self) -> str | None:
"""
Returns the node's feature type.

- This a something like `'Block'` for "a right-angled solid or
- surface block in 3D". Refer to the Comsol documentation for
- details. Feature types are displayed in the Comsol GUI at the
- top of the `Settings` tab.
+ This is something like `'Block'` for "a right-angled solid or surface
+ block in 3D". Refer to the Comsol documentation for details. Feature
+ types are displayed in the Comsol GUI at the top of the `Settings` tab.
"""
java = self.java
return str(java.getType()) if hasattr(java, 'getType') else None
@@ -324,14 +321,14 @@ def problems(self) -> list[dict[str, str | Node]]:
"""
Returns problems reported by the node and its descendants.

- The problems are returned as a list of dictionaries, each with
- an entry for `'message'` (the warning or error message),
- `'category'` (either `'warning'` or `'error'`), `'node'` (either
- this one or a node beneath it in the model tree), and
- `'selection'` (an empty string if not applicable).
+ The problems are returned as a list of dictionaries, each with an entry
+ for `'message'` (the warning or error message), `'category'` (either
+ `'warning'` or `'error'`), `'node'` (either this one or a node beneath
+ it in the model tree), and `'selection'` (an empty string if not
+ applicable).

- Calling this method on the root node returns all warnings and
- errors in geometry, mesh, and solver sequences.
+ Calling this method on the root node returns all warnings and errors in
+ geometry, mesh, and solver sequences.
"""
java = self.java
stack = []
@@ -411,8 +408,8 @@ def property(self, name, value=None):
"""
Returns or changes the value of the named property.

- If no `value` is given, returns the value of property `name`.
- Otherwise sets the property to the given value.
+ If no `value` is given, returns the value of property `name`. Otherwise
+ sets the property to the given value.
""" java = self.java_if_exists() if value is None: @@ -429,8 +426,8 @@ def properties(self) -> dict[ """ Returns names and values of all node properties as a dictionary. - In the Comsol GUI, properties are displayed in the Settings tab - of the model node (not to be confused with the Properties tab). + In the Comsol GUI, properties are displayed in the Settings tab of the + model node (not to be confused with the Properties tab). """ java = self.java_if_exists() if not hasattr(java, 'properties'): @@ -446,18 +443,18 @@ def select(self, """ Assigns `entity` as the node's selection. - `entity` can either be another node representing a selection - feature, in which case a "named" selection is created. Or it - can be a list/array of integers denoting domain, boundary, - edge, or point numbers (depending on which of those the selection - requires), producing a "manual" selection. It may also be `'all'` - to select everything or `None` to clear the selection. - - Raises `NotImplementedError` if the node (that this method is - called on) is a geometry node. These may be supported in a - future release. Meanwhile, access their Java methods directly. - Raises `TypeError` if the node does not have a selection and - is not itself an "explicit" selection. + `entity` can either be another node representing a selection feature, + in which case a "named" selection is created. Or it can be a list/array + of integers denoting domain, boundary, edge, or point numbers + (depending on which of those the selection requires), producing a + "manual" selection. It may also be `'all'` to select everything or + `None` to clear the selection. + + Raises `NotImplementedError` if the node (that this method is called + on) is a geometry node. These may be supported in a future release. + Meanwhile, access their Java methods directly. Raises `TypeError` if + the node does not have a selection and is not itself an "explicit" + selection. 
""" java = self.java_if_exists() if isinstance(java, JClass('com.comsol.model.GeomFeature')): @@ -499,16 +496,15 @@ def selection(self) -> Node | NDArray[int32] | None: """ Returns the entity or entities the node has selected. - If it is a "named" selection, the corresponding selection node - is returned. If it is a "manual" selection, an array of domain, - boundary, edge, or point numbers is returned (depending on - which of those the selection holds). `None` is returned if - nothing is selected. + If it is a "named" selection, the corresponding selection node is + returned. If it is a "manual" selection, an array of domain, boundary, + edge, or point numbers is returned (depending on which of those the + selection holds). `None` is returned if nothing is selected. - Raises `NotImplementedError` if the node is a geometry node. - These may be supported in a future release. Meanwhile, access - their Java methods directly. Raises `TypeError` if the node - does not have a selection and is not itself a selection. + Raises `NotImplementedError` if the node is a geometry node. These may + be supported in a future release. Meanwhile, access their Java methods + directly. Raises `TypeError` if the node does not have a selection and + is not itself a selection. """ java = self.java_if_exists() if isinstance(java, JClass('com.comsol.model.GeomFeature')): @@ -547,11 +543,10 @@ def toggle(self, """ Enables or disables the node. - If `action` is `'flip'` (the default), it enables the feature - in the model tree if it is currently disabled or disables it - if enabled. Pass `'enable'` or `'on'` to enable the feature - regardless of its current state. Pass `'disable'` or `'off'` - to disable it. + If `action` is `'flip'` (the default), it enables the feature in the + model tree if it is currently disabled or disables it if enabled. Pass + `'enable'` or `'on'` to enable the feature regardless of its current + state. Pass `'disable'` or `'off'` to disable it. 
""" java = self.java_if_exists() if action == 'flip': @@ -574,9 +569,8 @@ def import_(self, file: Path | str): """ Imports external data from the given `file`. - Note the trailing underscore in the method name. It is needed - so that the Python parser does not treat the name as an - `import` statement. + Note the trailing underscore in the method name. It is needed so that + the Python parser does not treat the name as an `import` statement. """ file = Path(file) if not file.exists(): @@ -900,9 +894,9 @@ def tree(node: Node | Model, max_depth: int = None): """ Displays the model tree. - This is a convenience function to visualize, in an interactive - Python session, the branch of the model tree underneath a given - [`node`](#Node). It produces console output such as this: + This is a convenience function to visualize, in an interactive Python + session, the branch of the model tree underneath a given [`node`](#Node). + It produces console output such as this: ```console >>> mph.tree(model/'physics') physics @@ -922,16 +916,15 @@ def tree(node: Node | Model, max_depth: int = None): Specify `max_depth` to possibly limit the number of lower branches. - Often the node would refer to the model's root in order to inspect - the entire model tree. A [`Model`](#Model) object is therefore also - accepted as a value for `node`. + Often the node would refer to the model's root in order to inspect the + entire model tree. A [`Model`](#Model) object is therefore also accepted + as a value for `node`. - Note that this function performs poorly in client–server mode, the - default on Linux and macOS, especially for complex models. The - client–server communication introduces inefficiencies that do not - occur in stand-alone mode, the default on Windows, where the model - tree, i.e. the hierarchy of related Java objects, can be traversed - reasonably fast. 
+ Note that this function performs poorly in client–server mode, the default + on Linux and macOS, especially for complex models. The client–server + communication introduces inefficiencies that do not occur in stand-alone + mode, the default on Windows, where the model tree, i.e. the hierarchy of + related Java objects, can be traversed reasonably fast. """ def traverse(node: Node, levels: list[bool], max_depth: int | None): @@ -955,16 +948,15 @@ def inspect(java: JClass | Node | Model): """ Inspects a Java node object. - This is a convenience function to facilitate exploring Comsol's - Java API in an interactive Python session. It expects a Java - node object, such as the one returned by the `.java` property - of an existing node reference, which would implement the - [`com.comsol.model.ModelEntity`][1] interface. + This is a convenience function to facilitate exploring Comsol's Java API in + an interactive Python session. It expects a Java node object, such as the + one returned by the `.java` property of an existing node reference, which + would implement the [`com.comsol.model.ModelEntity`][1] interface. - Like any object, it could also be inspected with Python's built-in - `dir` command. This function here outputs a "pretty-fied" version - of that. It displays (prints to the console) the methods implemented - by the Java node as well as its properties, if any are defined. + Like any object, it could also be inspected with Python's built-in `dir` + command. This function here outputs a "pretty-fied" version of that. It + displays (prints to the console) the methods implemented by the Java node + as well as its properties, if any are defined. ```console >>> mph.inspect((model/'studies').java) @@ -995,10 +987,9 @@ def inspect(java: JClass | Node | Model): uniquetag ``` - The node's name, tag, and documentation reference marker are - listed first. 
These access methods and a few others, which are - common to all objects, are suppressed in the method list further - down, for the sake of clarity. + The node's name, tag, and documentation reference marker are listed first. + These access methods and a few others, which are common to all objects, are + suppressed in the method list further down, for the sake of clarity. [1]: https://doc.comsol.com/6.0/doc/com.comsol.help.comsol/api\ /com/comsol/model/ModelEntity.html diff --git a/mph/server.py b/mph/server.py index 9625f8c..e0b2593 100644 --- a/mph/server.py +++ b/mph/server.py @@ -26,10 +26,10 @@ class Server: """ Manages a Comsol server process. - Instances of this class start and eventually stop Comsol servers - running on the local machine. Clients, either running on the same - machine or elsewhere on the network, can then connect to the - server at the network port it exposes for that purpose. + Instances of this class start and eventually stop Comsol servers running on + the local machine. Clients, either running on the same machine or elsewhere + on the network, can then connect to the server at the network port it + exposes for that purpose. Example usage: ```python @@ -39,36 +39,34 @@ class Server: server.stop() ``` - The number of processor `cores` the server makes use of may be - restricted. If no number is given, all cores are used by default. + The number of processor `cores` the server makes use of may be restricted. + If no number is given, all cores are used by default. - A specific `version` of the Comsol back-end can be specified if - several are installed on the machine, for example `version='6.0'`. - Otherwise the latest version is used. + A specific `version` of the Comsol back-end can be specified if several are + installed on the machine, for example `version='6.0'`. Otherwise the latest + version is used. 
The server can be instructed to use a specific network `port` for
- communication with clients by passing the number of a free port
- explicitly. If `port=None`, the default, the server will try to
- use port 2036 or, in case it is blocked by another server already
- running, will try subsequent numbers until it finds a free port.
- This is also Comsol's default behavior. It is however not robust
- and may lead to start-up failures if multiple servers are spinning
- up at the same time. Pass `port=0` to work around this issue. The
- server will then select a random free port, which will almost always
- avoid collisions.
-
- If `multi` is `False` or `'off'` or `None` (the default), then
- the server will shut down as soon as the first connected clients
- disconnects itself. If it is `True` or `'on'`, the server process
- will stay alive and accept multiple client connections.
-
- A `timeout` can be set for the server start-up. The default is 60
- seconds. `TimeoutError` is raised if the server failed to start
- within that period.
+ communication with clients by passing the number of a free port explicitly.
+ If `port=None`, the default, the server will try to use port 2036 or, in
+ case it is blocked by another server already running, will try subsequent
+ numbers until it finds a free port. This is also Comsol's default behavior.
+ It is however not robust and may lead to start-up failures if multiple
+ servers are spinning up at the same time. Pass `port=0` to work around this
+ issue. The server will then select a random free port, which will almost
+ always avoid collisions.
+
+ If `multi` is `False` or `'off'` or `None` (the default), then the server
+ will shut down as soon as the first connected client disconnects itself.
+ If it is `True` or `'on'`, the server process will stay alive and accept
+ multiple client connections.
+
+ A `timeout` can be set for the server start-up. The default is 60 seconds.
+ `TimeoutError` is raised if the server failed to start within that period. A list of extra command-line `arguments` can be specified. They are - appended to the arguments passed by default when starting the - server process, and would thus override them in case of duplicates. + appended to the arguments passed by default when starting the server + process, and would thus override them in case of duplicates. """ def __init__(self, @@ -128,11 +126,10 @@ def __init__(self, raise TimeoutError(error) # Bail out if server exited with an error. - # We don't use `process.returncode` here, as we would like to, - # because on Linux the server executable exits with code 0, - # indicating no error, even when an error has occurred. - # We assume that the last line in the server's output is the - # actual error message. + # We don't use `process.returncode` here, as we would like to, because + # on Linux the server executable exits with code 0, indicating no + # error, even when an error has occurred. We assume that the last line + # in the server's output is the actual error message. if port is None: error = f'Starting server failed: {lines[-1]}' log.error(error) diff --git a/mph/session.py b/mph/session.py index 7bfe0c0..bae9a8f 100755 --- a/mph/session.py +++ b/mph/session.py @@ -50,31 +50,28 @@ def start( client.remove(model) ``` - Depending on the platform, this may either be a stand-alone client - (on Windows) or a thin client connected to a server running locally - (on Linux and macOS). The reason for this disparity is that, while - stand-alone clients are more lightweight and start up much faster, - support for this mode of operation is limited on Unix-like operating - systems, and thus not the default. Find more details in section + Depending on the platform, this may either be a stand-alone client (on + Windows) or a thin client connected to a server running locally (on Linux + and macOS). 
The reason for this disparity is that, while stand-alone + clients are more lightweight and start up much faster, support for this + mode of operation is limited on Unix-like operating systems, and thus not + the default. Find more details in section "[](/limitations.md#platform-differences)". Returns a [`Client`](#Client) instance. Only one client can be - instantiated at a time. Subsequent calls to `start()` will return - the client instance created in the first call. In order to work - around this limitation of the Comsol API, separate Python processes - have to be started. Refer to section - "[](/demonstrations.md#multiple-processes)" for guidance. - - The number of `cores` (threads) the Comsol instance uses can be - restricted by specifying a number. Otherwise all available cores - will be used. - - A specific Comsol `version` can be selected if several are - installed, for example `version='6.0'`. Otherwise the latest - version is used. - - The server `port` can be specified if client–server mode is used. - If omitted, the server chooses a random free port. + instantiated at a time. Subsequent calls to `start()` will return the + client instance created in the first call. In order to work around this + limitation of the Comsol API, separate Python processes have to be started. + Refer to section "[](/demonstrations.md#multiple-processes)" for guidance. + + The number of `cores` (threads) the Comsol instance uses can be restricted + by specifying a number. Otherwise all available cores will be used. + + A specific Comsol `version` can be selected if several are installed, for + example `version='6.0'`. Otherwise the latest version is used. + + The server `port` can be specified if client–server mode is used. If + omitted, the server chooses a random free port. """ global client, server, thread @@ -146,12 +143,12 @@ def cleanup(): """ Cleans up resources at the end of the Python session. - This function is not part of the public API. 
It runs automatically - at the end of the Python session and is not intended to be called - directly from application code. + This function is not part of the public API. It runs automatically at the + end of the Python session and is not intended to be called directly from + application code. - Stops the local server instance possibly created by `start()` and - shuts down the Java Virtual Machine hosting the client instance. + Stops the local server instance possibly created by `start()` and shuts + down the Java Virtual Machine hosting the client instance. """ if client and client.port: try: @@ -165,12 +162,12 @@ def cleanup(): sys.stdout.flush() sys.stderr.flush() # Only deactivate fault handler on Windows, just like we do in - # `Client.__init__()`. pyTest seems to turn them back on right - # before entering the exit sequence. On Linux, we do get the - # occasional segmentation fault when running tests, just as - # pyTest exits. But disabling the fault handler doesn't help, - # so let's not touch it. It does seem to have some effect on - # Windows, but even there the benefit is fairly unclear. + # `Client.__init__()`. pyTest seems to turn them back on right before + # entering the exit sequence. On Linux, we do get the occasional + # segmentation fault when running tests, just as pyTest exits. But + # disabling the fault handler doesn't help, so let's not touch it. It + # does seem to have some effect on Windows, but even there the benefit + # is fairly unclear. if system == 'Windows' and faulthandler.is_enabled(): log.debug('Turning off Python fault handlers.') faulthandler.disable() diff --git a/tests/ReadMe.md b/tests/ReadMe.md index b1ac1fc..84a5459 100644 --- a/tests/ReadMe.md +++ b/tests/ReadMe.md @@ -1,13 +1,12 @@ ## Test suite -The scripts here, along with some fixtures, constitute the test suite. -They are run in the intended order by the helper scripts `test.py` and -`coverage.py` in the `tools` folder. 
+The scripts here, along with some fixtures, constitute the test suite. They are +run in the intended order by the helper scripts `test.py` and `coverage.py` in +the `tools` folder. -Note that when running those scripts from the project folder, i.e. the -parent folder of this one here, then they will test what's inside the -`mph` folder, i.e. the current source code. If run from anywhere else, -they would test whatever `import mph` finds, which may be an installed -version of MPh. This behavior is intentional, so that new code can be -tested without touching the installed version, even without a separate -virtual environment. +Note that when running those scripts from the project folder, i.e. the parent +folder of this one here, then they will test what's inside the `mph` folder, +i.e. the current source code. If run from anywhere else, they would test +whatever `import mph` finds, which may be an installed version of MPh. This +behavior is intentional, so that new code can be tested without touching the +installed version, even without a separate virtual environment. diff --git a/tests/test_node.py b/tests/test_node.py index 1d2452c..d063d72 100644 --- a/tests/test_node.py +++ b/tests/test_node.py @@ -275,19 +275,19 @@ def rewrite_properties(node): # side. For example, the property "solvertype" had the (string) value # value "none" before, but has "foo" now. So we skip the "rewriting" # for those few exceptions. - # Long-term, this entire recursive test may have to be removed. - # It's not overly important anyway. In case assignment does not work - # for certain node properties, and for reasons we don't control, - # we can let the user deal with the problem. - # The purpose of this test, back when it was created, was to make - # sure that reading and writing works for all involved data types, - # since we're casting those from Python to Java and vice versa. - # We need to be able to handle strings, numbers, lists, etc. Those - # are covered by `test_property()`. 
This test here only checks - # that the former covers every type we may encounter in the wild. - # Namely, in case Comsol adds a data type that we're not handling - # yet. If certain values don't work, then that's secondary, if not - # irrelevant, since it's out of scope. + # Long-term, this entire recursive test may have to be removed. It's + # not overly important anyway. In case assignment does not work for + # certain node properties, and for reasons we don't control, we can let + # the user deal with the problem. + # The purpose of this test, back when it was created, was to make sure + # that reading and writing works for all involved data types, since + # we're casting those from Python to Java and vice versa. We need to be + # able to handle strings, numbers, lists, etc. Those are covered by + # `test_property()`. This test here only checks that the former covers + # every type we may encounter in the wild. Namely, in case Comsol adds + # a data type that we're not handling yet. If certain values don't + # work, then that's secondary, if not irrelevant, since it's out of + # scope for us. if ( node.name() in ('stationary', 'time-dependent') and name in ( diff --git a/tools/ReadMe.md b/tools/ReadMe.md index 5aec00d..9890a81 100644 --- a/tools/ReadMe.md +++ b/tools/ReadMe.md @@ -7,19 +7,18 @@ for details. ### Running tests -MPh can be used and tested from source, provided NumPy, JPype, and pyTest -are already installed. That is, the following runs the test suite for what -is currently in the `main` branch: +MPh can be used and tested from source, provided NumPy, JPype, and pyTest are +already installed. 
That is, the following runs the test suite for what is +currently in the `main` branch: ``` ❯ git clone https://github.com/MPh-py/MPh.git ❯ cd MPh ❯ python tools/test.py --log ``` -This works because when you are in the project folder (named `MPh`), -then `import mph` will find the subfolder `mph` and run the code from -there, possibly ignoring a different MPh version installed in the -Python environment. +This works because when you are in the project folder (named `MPh`), then +`import mph` will find the subfolder `mph` and run the code from there, +possibly ignoring a different MPh version installed in the Python environment. Note that just calling `pytest` will fail as the test suite starts a Comsol client, and hence the Java VM, multiple times, which JPype does not support. diff --git a/tools/codecov.py b/tools/codecov.py index df0fee9..16f3dec 100644 --- a/tools/codecov.py +++ b/tools/codecov.py @@ -1,10 +1,9 @@ """ Uploads the coverage report to CodeCov. -The script expects the CodeCov uploader to be installed locally. On -Windows, for example, `codecov.exe` would have to be on the search path. -It also expects the CodeCov upload token for this project to be set as -an environment variable. +The script expects the CodeCov uploader to be installed locally. On Windows, +for example, `codecov.exe` would have to be on the search path. It also expects +the CodeCov upload token for this project to be set as an environment variable. CodeCov does not accept Coverage.py's standard report format, i.e. the `.coverage` file. It must be converted to XML format beforehand. diff --git a/tools/coverage.py b/tools/coverage.py index b08d681..9194323 100644 --- a/tools/coverage.py +++ b/tools/coverage.py @@ -1,16 +1,16 @@ """ Measures code coverage by test suite. -We cannot just run pyTest on the entire test suite (in the `tests` folder -of the repo) because the individual scripts there all start a client in -their respective setup routine. 
That is, they all start the Java VM, -which will fail once pyTest gets to the second script in the sequence. -Instead, we run pyTest for each test group separately, with the coverage -plug-in enabled, and thus generate the coverage report incrementally. - -We also render the coverage report (in `.coverage`) as static HTML for -easy inspection. This is helpful during development. Find it in the -`build/coverage` folder. +We cannot just run pyTest on the entire test suite (in the `tests` folder of +the repo) because the individual scripts there all start a client in their +respective setup routine. That is, they all start the Java VM, which will fail +once pyTest gets to the second script in the sequence. Instead, we run pyTest +for each test group separately, with the coverage plug-in enabled, and thus +generate the coverage report incrementally. + +We also render the coverage report (in `.coverage`) as static HTML for easy +inspection. This is helpful during development. Find it in the `build/coverage` +folder. The coverage report may be uploaded to the online service CodeCov. This is usually only done for a new release, but could also happen on each commit. diff --git a/tools/test.py b/tools/test.py index 1460b12..aa4fade 100644 --- a/tools/test.py +++ b/tools/test.py @@ -1,26 +1,25 @@ """ Runs all tests in the intended order. -Each test script (in the `tests` folder) contains a group of tests. -These scripts must be run in separate processes as most of them start -and stop the Java virtual machine, which can only be done once per -process. This is why simply calling pyTest (with `python -m pytest` -in the root folder) will not work. +Each test script (in the `tests` folder) contains a group of tests. These +scripts must be run in separate processes as most of them start and stop the +Java virtual machine, which can only be done once per process. This is why +simply calling pyTest (with `python -m pytest` in the root folder) will not +work. 
-This script here runs each test group in a new subprocess. It also -imposes a logical order: from the tests covering the most basic -functionality to the high-level abstractions. +This script here runs each test group in a new subprocess. It also imposes a +logical order: from the tests covering the most basic functionality to the +high-level abstractions. -Here, as opposed to the similar script `coverage.py`, we don't actually -run the tests through pyTest. Rather, we run the scripts directly so -that the output is less verbose. Note, however, that pyTest still needs -to be installed as some of the test fixtures require it. +Here, as opposed to the similar script `coverage.py`, we don't actually run the +tests through pyTest. Rather, we run the scripts directly so that the output is +less verbose. Note, however, that pyTest still needs to be installed as some of +the test fixtures require it. -The verbosity can be increased by passing `--log` as a command-line -argument. This will display the log messages produced by MPh as the -tests are running. You can also pass the name of a test group to run -only that one. For example, passing "model" will only run the tests -defined in `test_model.py`. +The verbosity can be increased by passing `--log` as a command-line argument. +This will display the log messages produced by MPh as the tests are running. +You can also pass the name of a test group to run only that one. For example, +passing "model" will only run the tests defined in `test_model.py`. """ from subprocess import run From 2a2a5ab7456cdc0d1395d75eac3b549f7a603cc9 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Mon, 20 Oct 2025 18:48:47 +0200 Subject: [PATCH 02/14] Updated look-up tables of `modules()` methods for Comsol 6.3. The Comsol 6.3 documentation also lists 'CADREADER' as an alias for the look-up key `CADIMPORT` for the "CAD Import Module", but according to my tests it's not actually a valid key, i.e. 
`client.java.hasProduct('CADREADER')` raises "Invalid product name." --- mph/client.py | 4 +++- mph/model.py | 5 +++-- tests/test_client.py | 5 +++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/mph/client.py b/mph/client.py index 50dbbd6..d582ff8 100755 --- a/mph/client.py +++ b/mph/client.py @@ -19,7 +19,7 @@ # The following look-up table is used by the `modules()` method. It is based on -# the table on page 41 of Comsol 6.0's Programming Reference Manual, with the +# the table on page 42 of Comsol 6.3's Programming Reference Manual, with the # two columns swapped. It thus maps vendor strings to product names (add-on # modules), except that we also shorten the names somewhat (drop "Module" # everywhere) and leave out the pointless trademark symbols. The vendor strings @@ -37,6 +37,7 @@ 'CORROSION': 'Corrosion', 'DESIGN': 'Design', 'ECADIMPORT': 'ECAD Import', + 'ELECTRICDISCHARGE': 'Electric Discharge', 'ELECTROCHEMISTRY': 'Electrochemistry', 'ELECTRODEPOSITION': 'Electrodeposition', 'FATIGUE': 'Fatigue', @@ -75,6 +76,7 @@ 'WAVEOPTICS': 'Wave Optics', } + log = getLogger(__package__) diff --git a/mph/model.py b/mph/model.py index ef624cb..98b29ab 100644 --- a/mph/model.py +++ b/mph/model.py @@ -22,8 +22,8 @@ # same sanitized names used in the look-up table in the `clients` module. So it # essentially drops the Unicode trademark symbols as well as the redundant # "Module". The strings returned by `getUsedProducts()` seem to correspond -# exactly to the product names in the left column of the table on page 41 of -# Comsol 6.0's Programming Reference Manual. +# exactly to the product names in the left column of the table on page 42 of +# Comsol 6.3's Programming Reference Manual. 
modules = { 'AC/DC Module': 'AC/DC', 'Acoustics Module': 'Acoustics', @@ -38,6 +38,7 @@ 'Corrosion Module': 'Corrosion', 'Design Module': 'Design', 'ECAD Import Module': 'ECAD Import', + 'Electric Discharge Module': 'Electric Discharge', 'Electrochemistry Module': 'Electrochemistry', 'Electrodeposition Module': 'Electrodeposition', 'Fatigue Module': 'Fatigue', diff --git a/tests/test_client.py b/tests/test_client.py index a8f064f..a83788d 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -8,6 +8,7 @@ from pytest import raises from pathlib import Path +from packaging import version client: Client @@ -88,7 +89,10 @@ def test_files(): def test_modules(): + Comsol62_or_older = (version.parse(client.version) < version.parse('6.3')) for key in mph.client.modules: + if key == 'ELECTRICDISCHARGE' and Comsol62_or_older: + continue assert client.java.hasProduct(key) in (True, False) for value in mph.client.modules.values(): assert value in mph.model.modules.values() @@ -169,6 +173,7 @@ def test_connect(): test_models() test_names() test_files() + test_modules() test_caching() test_remove() test_clear() From aa9184e3cc05f17dd49c73cd40abab160a40996a Mon Sep 17 00:00:00 2001 From: John Hennig Date: Mon, 20 Oct 2025 18:53:25 +0200 Subject: [PATCH 03/14] Removed vague promises of features "in a future release". --- mph/node.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/mph/node.py b/mph/node.py index 9b44718..b182b51 100644 --- a/mph/node.py +++ b/mph/node.py @@ -451,10 +451,9 @@ def select(self, `None` to clear the selection. Raises `NotImplementedError` if the node (that this method is called - on) is a geometry node. These may be supported in a future release. - Meanwhile, access their Java methods directly. Raises `TypeError` if - the node does not have a selection and is not itself an "explicit" - selection. + on) is a geometry node. Access their Java methods directly. 
Raises + `TypeError` if the node does not have a selection and is not itself an + "explicit" selection. """ java = self.java_if_exists() if isinstance(java, JClass('com.comsol.model.GeomFeature')): @@ -501,10 +500,9 @@ def selection(self) -> Node | NDArray[int32] | None: edge, or point numbers is returned (depending on which of those the selection holds). `None` is returned if nothing is selected. - Raises `NotImplementedError` if the node is a geometry node. These may - be supported in a future release. Meanwhile, access their Java methods - directly. Raises `TypeError` if the node does not have a selection and - is not itself a selection. + Raises `NotImplementedError` if the node is a geometry node. Access + their Java methods directly. Raises `TypeError` if the node does not + have a selection and is not itself a selection. """ java = self.java_if_exists() if isinstance(java, JClass('com.comsol.model.GeomFeature')): From f1f98ea1bd94590f2ff05d132f1b225d00746d70 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Mon, 20 Oct 2025 22:07:40 +0200 Subject: [PATCH 04/14] Relaxed Sphinx requirement to `>8` (from `>8.2`) to make `uv sync` work. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I don't remember why I required `Sphinx>= 8.2` in the first place, but it may have had something to do with rendering the type annotations in the API documentation. Which I've given up on… for now. More importantly, though, it stops `uv sync` from resolving the dependencies, as Sphinx 8.2+ does not support Python 3.10. But Sphinx 8.0 does. 
--- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 005e177..fbea1c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ test = [ 'basedPyright >= 1.29', ] docs = [ - 'Sphinx >= 8.2', + 'Sphinx >= 8', 'MyST-parser >= 1', 'Furo >= 2024', ] From 17d4685ab87dc0d58eb1094050013e5367c5ee9d Mon Sep 17 00:00:00 2001 From: John Hennig Date: Mon, 20 Oct 2025 22:15:06 +0200 Subject: [PATCH 05/14] Use UV as the build backend (instead of Flit). --- pyproject.toml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fbea1c3..3807cba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,14 +49,15 @@ Documentation = 'https://mph.readthedocs.io' Source = 'https://github.com/MPh-py/MPh' -# Build backend: Flit +# Build backend: UV [build-system] -requires = ['flit_core >= 3.11'] -build-backend = 'flit_core.buildapi' +requires = ['uv_build >= 0.8'] +build-backend = 'uv_build' -[tool.flit.module] -name = 'mph' +[tool.uv.build-backend] +module-name = 'mph' +module-root = '.' # Code linter: Ruff From 15acad211aea3dd73887ea5d462b7ca3bddf3f39 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Fri, 24 Oct 2025 20:09:18 +0200 Subject: [PATCH 06/14] Moved all coverage test artifacts to `build/coverage` folder. --- .gitignore | 2 -- pyproject.toml | 7 +++++++ tools/codecov.py | 2 +- tools/coverage.py | 2 +- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 178f4b3..ba85036 100644 --- a/.gitignore +++ b/.gitignore @@ -6,8 +6,6 @@ uv.lock # Test artifacts **/.pytest_cache tests/MPh.ini -.coverage -coverage.xml # OS cruft Thumbs.db diff --git a/pyproject.toml b/pyproject.toml index 3807cba..bdee752 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -161,6 +161,7 @@ testpaths = ['tests'] [tool.coverage.run] source = ['mph/'] omit = ['mph/discovery.py'] # Ignore platform-dependent discovery mechanism. 
+data_file = 'build/coverage/.coverage' relative_files = true [tool.coverage.report] @@ -168,3 +169,9 @@ exclude_lines = [ 'pragma: no cover', 'def location', # Ignore platform-dependent configuration search. ] + +[tool.coverage.html] +directory = "build/coverage" + +[tool.coverage.xml] +output = "build/coverage/coverage.xml" diff --git a/tools/codecov.py b/tools/codecov.py index 16f3dec..3dd9702 100644 --- a/tools/codecov.py +++ b/tools/codecov.py @@ -21,6 +21,6 @@ root = Path(__file__).parent.parent run(['coverage', 'xml'], cwd=root, check=True) run( - ['codecov', '--file', 'coverage.xml', '--token', token], + ['codecov', '--file', 'build/coverage/coverage.xml', '--token', token], cwd=root, check=True, ) diff --git a/tools/coverage.py b/tools/coverage.py index 9194323..7b8f50f 100644 --- a/tools/coverage.py +++ b/tools/coverage.py @@ -35,7 +35,7 @@ environ['PYTHONPATH'] = str(root) # Report code coverage one by one for each test group. -report = root/'.coverage' +report = root/'coverage'/'.coverage' if report.exists(): report.unlink() for group in groups: From 5128726a1d849b8c484fa8106e2f70aa10a2eb4e Mon Sep 17 00:00:00 2001 From: John Hennig Date: Fri, 24 Oct 2025 22:00:41 +0200 Subject: [PATCH 07/14] Recommend using UV for project management and local development. 
--- pyproject.toml | 5 ++-- tests/ReadMe.md | 11 ++----- tools/ReadMe.md | 75 +++++++++++++++++++++-------------------------- tools/codecov.py | 16 ++++++---- tools/coverage.py | 54 +++++++++++++++++++--------------- tools/docs.py | 7 +++-- tools/lint.py | 8 ++++- tools/test.py | 67 ++++++++++++++++++++++-------------------- tools/types.py | 2 +- 9 files changed, 128 insertions(+), 117 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bdee752..342b67a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,10 +29,11 @@ dependencies = [ [dependency-groups] test = [ - 'pyTest >= 8', - 'pyTest-cov >= 6', 'Ruff >= 0.11', 'basedPyright >= 1.29', + 'pyTest >= 8', + 'pyTest-cov >= 6', + 'codecov >= 2', ] docs = [ 'Sphinx >= 8', diff --git a/tests/ReadMe.md b/tests/ReadMe.md index 84a5459..1169699 100644 --- a/tests/ReadMe.md +++ b/tests/ReadMe.md @@ -1,12 +1,5 @@ ## Test suite The scripts here, along with some fixtures, constitute the test suite. They are -run in the intended order by the helper scripts `test.py` and `coverage.py` in -the `tools` folder. - -Note that when running those scripts from the project folder, i.e. the parent -folder of this one here, then they will test what's inside the `mph` folder, -i.e. the current source code. If run from anywhere else, they would test -whatever `import mph` finds, which may be an installed version of MPh. This -behavior is intentional, so that new code can be tested without touching the -installed version, even without a separate virtual environment. +run in the intended order by the helper script `test.py` in the `tools` folder, +typically with `uv run tools/test.py --log`. diff --git a/tools/ReadMe.md b/tools/ReadMe.md index 9890a81..8aed019 100644 --- a/tools/ReadMe.md +++ b/tools/ReadMe.md @@ -1,67 +1,58 @@ ## Developer tools These are simple helper scripts to run the various development tools, such -as pyTest, Flit, and Sphinx. See the doc-strings of the individual scripts -for details. 
+as the test suite, the type checker, code linter, and documentation builder. +See the doc-strings of the individual scripts for details. -### Running tests - -MPh can be used and tested from source, provided NumPy, JPype, and pyTest are -already installed. That is, the following runs the test suite for what is -currently in the `main` branch: -``` -❯ git clone https://github.com/MPh-py/MPh.git -❯ cd MPh -❯ python tools/test.py --log -``` - -This works because when you are in the project folder (named `MPh`), then -`import mph` will find the subfolder `mph` and run the code from there, -possibly ignoring a different MPh version installed in the Python environment. +### Local development -Note that just calling `pytest` will fail as the test suite starts a Comsol -client, and hence the Java VM, multiple times, which JPype does not support. -The `test.py` script works around that by starting a new subprocess each time. +To use any of the dev tools, you should install the project locally from +source. It is recommended to use [UV] to manage the project, though installing +it via [Pip] remains possible as well. +Install UV globally on your machine, for example with `winget install +astral-sh.uv` on Windows, `curl -LsSf https://astral.sh/uv/install.sh | sh` +on Linux, and `brew install uv` on macOS. Then `git clone` this repository +and run `uv sync` in the project root folder. It will create a virtual +environment in the `.venv` folder with all dependencies installed in it, as +defined in `pyproject.toml`. -### Local development +Run any of the dev tools via the helper scripts in the `tools` folder. For +example `uv run tools/lint.py` to lint the code (same as `uv run ruff check`), +`uv run tools/types.py` to check types, etc. 
-If you also want to build the documentation locally, or render the -code-coverage report, or build the wheel, it's best to create a dedicated -virtual environment: -``` -❯ python -m venv venv -❯ venv/Scripts/activate # Windows -❯ source venv/bin/activate # Linux/macOS -❯ pip install --group dev --editable . -``` +You may also install into an existing virtual environment or even the global +Python environment with `uv pip install --editable .`. The `--editable` flag +makes it so that all code changes take immediate effect without re-installing +the package. -This installs MPh and all its development dependencies inside that new -environment, in the newly created `venv` sub-folder. The `dev` dependency group -is defined in `pyproject.toml`. The `--editable` flag makes it so that all code -changes take immediate effect without re-installing the package. +When using Pip, follow the standard workflow: Create a virtual Python +environment `python -m venv .venv`, activate it, and install the project in it +with `pip install --editable .` +[UV]: https://docs.astral.sh/uv +[Pip]: https://pip.pypa.io ### Releasing a new version - Bump version number in `pyproject.toml`. - Add release notes to `docs/releases.md`. - Add dedicated commit for the version bump. -- Tag commit with version number, e.g. `git tag v1.2.0` +- Tag commit with version number, e.g. 
`git tag v1.3.0` - Force `stable` branch to latest commit: `git branch -f stable` -- Same for the current documentation branch: `git branch -f 1.2` -- Run code linter: `python tools/lint.py` -- Test docs build: `python tools/docs.py` -- Test wheel build: `python tools/wheel.py` -- Run code coverage: `python tools/coverage.py` +- Same for the current documentation branch: `git branch -f 1.3` +- Run code linter: `uv run tools/lint.py` +- Test docs build: `uv run tools/docs.py` +- Test wheel build: `uv run tools/wheel.py` +- Run code coverage: `uv run tools/coverage.py` - Push to GitHub: ``` git push origin main git push --tags git push origin stable -git push origin 1.2 +git push origin 1.3 ``` -- Upload coverage report: `python tools/codecov.py` +- Upload coverage report: `uv run tools/codecov.py` - Create new release on GitHub and add release notes. -- Publish to PyPI: `python tools/publish.py` diff --git a/tools/codecov.py b/tools/codecov.py index 3dd9702..2e4d7d2 100644 --- a/tools/codecov.py +++ b/tools/codecov.py @@ -1,12 +1,11 @@ """ Uploads the coverage report to CodeCov. -The script expects the CodeCov uploader to be installed locally. On Windows, -for example, `codecov.exe` would have to be on the search path. It also expects -the CodeCov upload token for this project to be set as an environment variable. +The script expects the CodeCov upload token for this project to be set as an +environment variable. CodeCov does not accept Coverage.py's standard report format, i.e. the -`.coverage` file. It must be converted to XML format beforehand. +`.coverage` file, which is why it must be converted to XML for the upload. 
""" from subprocess import run @@ -19,8 +18,13 @@ raise RuntimeError('CodeCov upload token not set in environment.') root = Path(__file__).parent.parent -run(['coverage', 'xml'], cwd=root, check=True) +run(['uv', 'run', '--no-sync', 'coverage', 'xml'], cwd=root, check=True) run( - ['codecov', '--file', 'build/coverage/coverage.xml', '--token', token], + [ + 'uv', 'run', '--no-sync', + 'codecov', + '--file', 'build/coverage/coverage.xml', + '--token', token, + ], cwd=root, check=True, ) diff --git a/tools/coverage.py b/tools/coverage.py index 7b8f50f..3d6e7e5 100644 --- a/tools/coverage.py +++ b/tools/coverage.py @@ -2,48 +2,56 @@ Measures code coverage by test suite. We cannot just run pyTest on the entire test suite (in the `tests` folder of -the repo) because the individual scripts there all start a client in their -respective setup routine. That is, they all start the Java VM, which will fail -once pyTest gets to the second script in the sequence. Instead, we run pyTest -for each test group separately, with the coverage plug-in enabled, and thus -generate the coverage report incrementally. +the repo) because many of the individual test scripts there start a Comsol +client in their respective setup routine. That is, they start the Java VM, +which will fail once pyTest gets to the second script in the sequence because +JPype doesn't allow that within the same Python process. -We also render the coverage report (in `.coverage`) as static HTML for easy -inspection. This is helpful during development. Find it in the `build/coverage` -folder. +Instead, we run pyTest for each test group separately, with the coverage +plug-in enabled, and thus generate the coverage report incrementally. + +We also render the coverage report as static HTML for easy inspection. This is +helpful during development. Find it in the `build/coverage` folder. The coverage report may be uploaded to the online service CodeCov. 
This is usually only done for a new release, but could also happen on each commit. -There's a separate script, `codecov.py`, to automate that. +There's a separate script, `codecov.py`, to take care of that whenever needed. """ from subprocess import run from pathlib import Path -from sys import executable as python -from os import environ, pathsep # Define order of test groups. -groups = ['config', 'discovery', 'server', 'session', 'standalone', 'client', - 'multi', 'node', 'model', 'exit'] - -# Run MPh in source tree, not a possibly different version installed elsewhere. -root = Path(__file__).parent.parent -if 'PYTHONPATH' in environ: - environ['PYTHONPATH'] = str(root) + pathsep + environ['PYTHONPATH'] -else: - environ['PYTHONPATH'] = str(root) +groups = [ + 'config', 'discovery', + 'server', 'session', 'standalone', 'client', 'multi', + 'node', 'model', 'exit', +] # Report code coverage one by one for each test group. +root = Path(__file__).parent.parent report = root/'coverage'/'.coverage' if report.exists(): report.unlink() for group in groups: - run([python, '-m', 'pytest', '--cov', '--cov-append', - f'tests/test_{group}.py'], cwd=root) + run( + [ + 'uv', 'run', '--no-sync', + 'pytest', '--cov', '--cov-append', + f'tests/test_{group}.py', + ], + cwd=root, +) # Render coverage report locally. 
print('Exporting coverage report as HTML.') folder = root/'build'/'coverage' folder.mkdir(exist_ok=True, parents=True) -run(['coverage', 'html', f'--directory={folder}'], cwd=root, check=True) +run( + [ + 'uv', 'run', '--no-sync', + 'coverage', 'html', f'--directory={folder}', + ], + cwd=root, check=True, +) diff --git a/tools/docs.py b/tools/docs.py index d2a7bbb..add5fab 100644 --- a/tools/docs.py +++ b/tools/docs.py @@ -9,8 +9,11 @@ target = root/'build'/'docs' process = run( - ['sphinx-build', '--fail-on-warning', 'docs', 'build/docs'], - cwd=root + [ + 'uv', 'run', '--no-sync', + 'sphinx-build', '--fail-on-warning', 'docs', 'build/docs', + ], + cwd=root, ) if process.returncode: raise RuntimeError('Error while rendering documentation.') diff --git a/tools/lint.py b/tools/lint.py index 3f877aa..1ddf57e 100644 --- a/tools/lint.py +++ b/tools/lint.py @@ -6,4 +6,10 @@ root = Path(__file__).parent.parent -run(['ruff', 'check'], cwd=root, check=True) +run( + [ + 'uv', 'run', '--no-sync', + 'ruff', 'check', + ], + cwd=root, check=True, +) diff --git a/tools/test.py b/tools/test.py index aa4fade..7b7a73d 100644 --- a/tools/test.py +++ b/tools/test.py @@ -4,8 +4,7 @@ Each test script (in the `tests` folder) contains a group of tests. These scripts must be run in separate processes as most of them start and stop the Java virtual machine, which can only be done once per process. This is why -simply calling pyTest (with `python -m pytest` in the root folder) will not -work. +simply calling pyTest (with `uv run pytest` in the root folder) will not work. This script here runs each test group in a new subprocess. It also imposes a logical order: from the tests covering the most basic functionality to the @@ -13,8 +12,8 @@ Here, as opposed to the similar script `coverage.py`, we don't actually run the tests through pyTest. Rather, we run the scripts directly so that the output is -less verbose. 
Note, however, that pyTest still needs to be installed as some of -the test fixtures require it. +less verbose. Note, however, that pyTest still needs to be installed as we use +some of its test fixtures. The verbosity can be increased by passing `--log` as a command-line argument. This will display the log messages produced by MPh as the tests are running. @@ -26,42 +25,45 @@ from pathlib import Path from timeit import default_timer as now from argparse import ArgumentParser -from sys import executable as python from sys import exit -from os import environ, pathsep # Define order of test groups. -groups = ['config', 'discovery', 'server', 'session', 'standalone', 'client', - 'multi', 'node', 'model', 'exit'] +groups = [ + 'config', 'discovery', + 'server', 'session', 'standalone', 'client', 'multi', + 'node', 'model', 'exit', +] # Determine path of project root folder. here = Path(__file__).parent root = here.parent -# Run MPh in project folder, not a possibly different installed version. -if 'PYTHONPATH' in environ: - environ['PYTHONPATH'] = str(root) + pathsep + environ['PYTHONPATH'] -else: - environ['PYTHONPATH'] = str(root) - # Parse command-line arguments. 
-parser = ArgumentParser(prog='test.py', - description='Runs the MPh test suite.', - add_help=False, - allow_abbrev=False) -parser.add_argument('--help', - help='Show this help message.', - action='help') -parser.add_argument('--log', - help='Display log output.', - action='store_true') -parser.add_argument('--groups', - help='List all test groups.', - action='store_true') -parser.add_argument('group', - help='Run only this group of tests.', - nargs='?') +parser = ArgumentParser( + prog='test.py', + description='Runs the MPh test suite.', + add_help=False, allow_abbrev=False, +) +parser.add_argument( + '--help', + help='Show this help message.', + action='help', +) +parser.add_argument( + '--log', + help='Display log output.', + action='store_true', +) +parser.add_argument( + '--groups', + help='List all test groups.', + action='store_true', +) +parser.add_argument( + 'group', + help='Run only this group of tests.', + nargs='?') arguments = parser.parse_args() if arguments.groups: for group in groups: @@ -84,7 +86,10 @@ print() print(f'Running test group "{group}".') t0 = now() - process = run([python, f'test_{group}.py'] + options, cwd=root/'tests') + process = run( + ['uv', 'run', '--no-sync', f'test_{group}.py'] + options, + cwd=root/'tests', + ) if process.returncode == 0: print(f'Passed in {now()-t0:.0f} s.') else: diff --git a/tools/types.py b/tools/types.py index 9f47c4c..c4df0a7 100644 --- a/tools/types.py +++ b/tools/types.py @@ -6,4 +6,4 @@ root = Path(__file__).parent.parent -run(['basedpyright'], cwd=root, check=True) +run(['uv', 'run', '--no-sync', 'basedpyright'], cwd=root, check=True) From 1f9331fd38e5e991faecaa707a524309e89199af Mon Sep 17 00:00:00 2001 From: John Hennig Date: Sat, 25 Oct 2025 13:03:39 +0200 Subject: [PATCH 08/14] Made output of log messages the default when running test suite. 
Before, we had to run `tools/test.py --log` to have it display the debug-level log messages produced by MPh, which is helpful when trouble-shooting problems reported by users. This behavior is now the default. The detailed log can be suppressed by passing `--quiet` to get the old behavior. It does make it easier to see if all tests have passed at a glance, and how long it all took. --- tests/ReadMe.md | 2 +- tests/fixtures.py | 4 ++-- tools/test.py | 28 ++++++++++++++-------------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/ReadMe.md b/tests/ReadMe.md index 1169699..8f89901 100644 --- a/tests/ReadMe.md +++ b/tests/ReadMe.md @@ -2,4 +2,4 @@ The scripts here, along with some fixtures, constitute the test suite. They are run in the intended order by the helper script `test.py` in the `tools` folder, -typically with `uv run tools/test.py --log`. +typically with `uv run tools/test.py`. diff --git a/tests/fixtures.py b/tests/fixtures.py index 93bbbd8..da2eb30 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -73,8 +73,8 @@ def timed_records(*args, **kwargs): def setup_logging(): - """Sets up logging to console if `--log` command-line argument present.""" - if '--log' not in sys.argv[1:]: + """Sets up logging to console unless `--quiet` passed on command line.""" + if '--quiet' in sys.argv[1:]: return logging.setLogRecordFactory(timed_records) logging.basicConfig( diff --git a/tools/test.py b/tools/test.py index 7b7a73d..53b53d0 100644 --- a/tools/test.py +++ b/tools/test.py @@ -10,15 +10,13 @@ logical order: from the tests covering the most basic functionality to the high-level abstractions. -Here, as opposed to the similar script `coverage.py`, we don't actually run the -tests through pyTest. Rather, we run the scripts directly so that the output is -less verbose. Note, however, that pyTest still needs to be installed as we use -some of its test fixtures. - -The verbosity can be increased by passing `--log` as a command-line argument. 
-This will display the log messages produced by MPh as the tests are running. -You can also pass the name of a test group to run only that one. For example, -passing "model" will only run the tests defined in `test_model.py`. +As opposed to the similar script `coverage.py`, we don't actually run the tests +through pyTest. Rather, we run the scripts directly so that the output is less +verbose. You can further reduce the verbosity by passing `--quiet` as a +command-line argument. This will suppress the log messages produced by MPh as +the tests are running. You may also pass the name of a test group to run only +that particular one. For example, passing "model" will only run the tests +defined in `test_model.py`. """ from subprocess import run @@ -51,8 +49,8 @@ action='help', ) parser.add_argument( - '--log', - help='Display log output.', + '--quiet', + help='Suppress log output.', action='store_true', ) parser.add_argument( @@ -62,7 +60,7 @@ ) parser.add_argument( 'group', - help='Run only this group of tests.', + help='Run only this group of tests. If not given, run all tests.', nargs='?') arguments = parser.parse_args() if arguments.groups: @@ -76,9 +74,11 @@ if group.endswith('.py'): group = group[:-3] groups = [group] + +# Collect optional arguments to be passed to all test scripts. options = [] -if arguments.log: - options.append('--log') +if arguments.quiet: + options.append('--quiet') # Run each test group in new process. for group in groups: From fc2f520b1ab964ad3580528aa3408e20a372cd0e Mon Sep 17 00:00:00 2001 From: John Hennig Date: Sat, 25 Oct 2025 18:41:05 +0200 Subject: [PATCH 09/14] Create test artifacts in temporary folder instead of `tests` folder. We shouldn't assume the `tests` folder is writeable. So now we use a temporary folder assigned by the operating system whenever outputting test artifacts. 
--- .gitignore | 7 +- tests/fixtures.py | 6 ++ tests/test_config.py | 34 +++++--- tests/test_model.py | 186 ++++++++++++++++++------------------------- 4 files changed, 110 insertions(+), 123 deletions(-) diff --git a/.gitignore b/.gitignore index ba85036..2e38c18 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,12 @@ # Build artifacts build/ -**/__pycache__ +__pycache__/ uv.lock # Test artifacts -**/.pytest_cache -tests/MPh.ini +.pytest_cache/ +.ruff_cache/ +*.mph.lock # OS cruft Thumbs.db diff --git a/tests/fixtures.py b/tests/fixtures.py index da2eb30..36c92b4 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -2,8 +2,14 @@ import logging import warnings +import tempfile import io import sys +from pathlib import Path + + +def temp_dir() -> Path: + return Path(tempfile.mkdtemp(prefix='MPh_tests_')) class logging_disabled: diff --git a/tests/test_config.py b/tests/test_config.py index 4d33d3f..782efa5 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,10 +2,23 @@ import mph +from fixtures import temp_dir from fixtures import logging_disabled from fixtures import setup_logging from pytest import raises -from pathlib import Path + +from pathlib import Path +from logging import getLogger + + +tmpdir: Path + + +def setup_module(): + global tmpdir + tmpdir = temp_dir() + log = getLogger(__name__) + log.debug(f'Temporary folder is "{tmpdir}".') def test_option(): @@ -27,33 +40,32 @@ def test_location(): def test_save(): - file = Path(__file__).parent/'MPh.ini' + file = tmpdir/'MPh.ini' mph.config.save(file) assert file.exists() def test_load(): - options = mph.option().copy() - for (key, value) in options.items(): + defaults = mph.option().copy() + file = tmpdir/'defaults.ini' + mph.config.save(file) + for (key, value) in defaults.items(): if isinstance(value, bool): mph.option(key, not value) elif isinstance(value, (int, float)): mph.option(key, value - 1) else: - mph.option(key, value + '(modified)') - for (key, value) in options.items(): + 
mph.option(key, value + ' (modified)') + for (key, value) in defaults.items(): assert mph.option(key) != value - file = Path(__file__).parent/'MPh.ini' - assert file.exists() mph.config.load(file) - for (key, value) in options.items(): + for (key, value) in defaults.items(): assert mph.option(key) == value - file.unlink() - assert not file.exists() if __name__ == '__main__': setup_logging() + setup_module() test_option() test_location() test_save() diff --git a/tests/test_model.py b/tests/test_model.py index c8a739c..7a5d4ee 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -4,6 +4,7 @@ from mph import Client, Model import models +from fixtures import temp_dir from fixtures import logging_disabled from fixtures import setup_logging @@ -11,15 +12,20 @@ from pytest import raises from pathlib import Path from platform import system +from logging import getLogger client: Client model: Model empty: Model +tmpdir: Path def setup_module(): - global client, model, empty + global client, model, empty, tmpdir + tmpdir = temp_dir() + log = getLogger(__name__) + log.debug(f'Temporary folder is "{tmpdir}".') client = mph.start() model = models.capacitor() empty = client.create('empty') @@ -27,30 +33,12 @@ def setup_module(): def teardown_module(): client.clear() - here = Path(__file__).resolve().parent - files = (Path('capacitor.mph'), Path('empty.java'), - here/'capacitor.mph', here/'model.mph', - here/'model.java', here/'model.m', here/'model.vba', - here/'data.txt', here/'data.vtu', - here/'image.png', - here/'mesh.mphbin', here/'mesh.mphtxt', - here/'animation.gif', here/'animation.swf', - here/'animation.avi', here/'animation.webm', - here/'frame1.png', here/'frame2.png', here/'frame3.png') - for file in files: - if file.exists(): - file.unlink() class Derived(Model): pass -######################################## -# Tests # -######################################## - - def test_init(): derived = Derived(model) assert derived.java == model.java @@ -554,64 
+542,59 @@ def test_import(): def test_export(): - here = Path(__file__).resolve().parent # Test export of text data. - assert not (here/'data.txt').exists() - model.export('data', here/'data.txt') - assert (here/'data.txt').exists() - (here/'data.txt').unlink() - assert not (here/'data.txt').exists() + assert not (tmpdir/'data.txt').exists() + model.export('data', tmpdir/'data.txt') + assert (tmpdir/'data.txt').exists() + (tmpdir/'data.txt').unlink() + assert not (tmpdir/'data.txt').exists() model.export('data') - assert (here/'data.txt').exists() - (here/'data.txt').unlink() - assert not (here/'data.txt').exists() + assert (tmpdir/'data.txt').exists() + (tmpdir/'data.txt').unlink() + assert not (tmpdir/'data.txt').exists() model.export(model/'exports'/'data') - assert (here/'data.txt').exists() - (here/'data.txt').unlink() - assert not (here/'data.txt').exists() + assert (tmpdir/'data.txt').exists() + (tmpdir/'data.txt').unlink() + assert not (tmpdir/'data.txt').exists() model.property('exports/data', 'exporttype', 'text') - model.export('data', here/'data.txt') - assert (here/'data.txt').exists() - (here/'data.txt').unlink() + model.export('data', tmpdir/'data.txt') + assert (tmpdir/'data.txt').exists() # Test export of VTK data. - assert not (here/'data.vtu').exists() + assert not (tmpdir/'data.vtu').exists() model.property('exports/data', 'exporttype', 'vtu') - model.export('data', here/'data.vtu') - assert (here/'data.vtu').exists() - (here/'data.vtu').unlink() + model.export('data', tmpdir/'data.vtu') + assert (tmpdir/'data.vtu').exists() # Test export of images. - assert not (here/'image.png').exists() - model.export('image', here/'image.png') - assert (here/'image.png').exists() - (here/'image.png').unlink() - assert not (here/'image.png').exists() + assert not (tmpdir/'image.png').exists() + model.export('image', tmpdir/'image.png') + assert (tmpdir/'image.png').exists() # Test running all exports at once. 
+ (tmpdir/'data.vtu').unlink() + (tmpdir/'image.png').unlink() + assert not (tmpdir/'data.vtu').exists() + assert not (tmpdir/'image.png').exists() model.export() - assert (here/'data.vtu').exists() - assert (here/'image.png').exists() - (here/'data.vtu').unlink() - (here/'image.png').unlink() + assert (tmpdir/'data.vtu').exists() + assert (tmpdir/'image.png').exists() # Test export of meshes. mesh = (model/'exports').create('Mesh', name='mesh') mesh.java.set('filename', 'mesh') - assert not (here/'mesh.mphbin').exists() - model.export('mesh', here/'mesh.mphbin') - assert (here/'mesh.mphbin').exists() - (here/'mesh.mphbin').unlink() - assert not (here/'mesh.mphtxt').exists() - model.export('mesh', here/'mesh.mphtxt') - assert (here/'mesh.mphtxt').exists() - (here/'mesh.mphtxt').unlink() + assert not (tmpdir/'mesh.mphbin').exists() + model.export('mesh', tmpdir/'mesh.mphbin') + assert (tmpdir/'mesh.mphbin').exists() + (tmpdir/'mesh.mphbin').unlink() + assert not (tmpdir/'mesh.mphtxt').exists() + model.export('mesh', tmpdir/'mesh.mphtxt') + assert (tmpdir/'mesh.mphtxt').exists() mesh.remove() # Test export of GIF animations. animation = (model/'exports').create('Animation', name='animation') animation.property('plotgroup', model/'plots'/'time-dependent field') animation.property('looplevelinput', 'manual') animation.property('looplevel', [1, 2, 3]) - assert not (here/'animation.gif').exists() - model.export(animation, here/'animation.gif') - assert (here/'animation.gif').exists() - (here/'animation.gif').unlink() + assert not (tmpdir/'animation.gif').exists() + model.export(animation, tmpdir/'animation.gif') + assert (tmpdir/'animation.gif').exists() animation.remove() # Test export of AVI movies (which Comsol only supports on Windows). 
if system() == 'Windows': @@ -619,36 +602,31 @@ def test_export(): animation.property('plotgroup', model/'plots'/'time-dependent field') animation.property('looplevelinput', 'manual') animation.property('looplevel', [1, 2, 3]) - assert not (here/'animation.avi').exists() - model.export(animation, here/'animation.avi') - assert (here/'animation.avi').exists() - (here/'animation.avi').unlink() + assert not (tmpdir/'animation.avi').exists() + model.export(animation, tmpdir/'animation.avi') + assert (tmpdir/'animation.avi').exists() animation.remove() # Test export of WebM movies. animation = (model/'exports').create('Animation', name='animation') animation.property('plotgroup', model/'plots'/'time-dependent field') animation.property('looplevelinput', 'manual') animation.property('looplevel', [1, 2, 3]) - assert not (here/'animation.webm').exists() - model.export(animation, here/'animation.webm') - assert (here/'animation.webm').exists() - (here/'animation.webm').unlink() + assert not (tmpdir/'animation.webm').exists() + model.export(animation, tmpdir/'animation.webm') + assert (tmpdir/'animation.webm').exists() animation.remove() # Test export of image sequences. 
animation = (model/'exports').create('Animation', name='animation') animation.property('plotgroup', model/'plots'/'time-dependent field') animation.property('looplevelinput', 'manual') animation.property('looplevel', [1, 2, 3]) - assert not (here/'frame1.png').exists() - assert not (here/'frame2.png').exists() - assert not (here/'frame3.png').exists() - model.export(animation, here/'frame.png') - assert (here/'frame1.png').exists() - assert (here/'frame2.png').exists() - assert (here/'frame3.png').exists() - (here/'frame1.png').unlink() - (here/'frame2.png').unlink() - (here/'frame3.png').unlink() + assert not (tmpdir/'frame1.png').exists() + assert not (tmpdir/'frame2.png').exists() + assert not (tmpdir/'frame3.png').exists() + model.export(animation, tmpdir/'frame.png') + assert (tmpdir/'frame1.png').exists() + assert (tmpdir/'frame2.png').exists() + assert (tmpdir/'frame3.png').exists() animation.remove() # Test error conditions. with logging_disabled(): @@ -656,7 +634,7 @@ def test_export(): model.export('non-existing') animation = (model/'exports').create('Animation', name='animation') with raises(ValueError): - model.export(animation, here/'animation.invalid') + model.export(animation, tmpdir/'animation.invalid') animation.remove() with raises(TypeError): model.export(model/'functions'/'step', file='irrelevant.txt') @@ -673,41 +651,31 @@ def test_reset(): def test_save(): - here = Path(__file__).resolve().parent - model.save() - empty.save(format='java') - assert Path(f'{model}.mph').exists() - assert Path(f'{empty}.java').exists() - Path(f'{empty}.java').unlink() - model.save(here) - model.save(here, format='java') - assert (here/f'{model}.mph').exists() - assert (here/f'{model}.java').exists() - (here/f'{model}.java').unlink() - model.save(here/'model.mph') - model.save() - assert (here/'model.mph').read_text(errors='ignore').startswith('PK') - model.save(here/'model.java') - assert (here/'model.java').exists() - assert 'public static void main' in 
(here/'model.java').read_text() - (here/'model.java').unlink() - assert not (here/'model.java').exists() + model.save(tmpdir) + assert (tmpdir/f'{model}.mph').exists() + model.save(tmpdir/'model.mph') + assert (tmpdir/'model.mph').exists() + assert (tmpdir/'model.mph').read_text(errors='ignore').startswith('PK') + model.save(tmpdir, format='java') + assert (tmpdir/f'{model}.java').exists() + model.save(tmpdir/'model.java') + assert (tmpdir/'model.java').exists() + assert 'public static void main' in (tmpdir/'model.java').read_text() + (tmpdir/'model.java').unlink() + assert not (tmpdir/'model.java').exists() model.save(format='java') - assert (here/'model.java').exists() - (here/'model.java').unlink() - model.save(here/'model.m') - assert (here/'model.m').exists() - assert 'function out = model' in (here/'model.m').read_text() - (here/'model.m').unlink() - model.save(here/'model.vba') - assert (here/'model.vba').exists() - assert 'Sub run()' in (here/'model.vba').read_text() - (here/'model.vba').unlink() + assert (tmpdir/'model.java').exists() + model.save(tmpdir/'model.m') + assert (tmpdir/'model.m').exists() + assert 'function out = model' in (tmpdir/'model.m').read_text() + model.save(tmpdir/'model.vba') + assert (tmpdir/'model.vba').exists() + assert 'Sub run()' in (tmpdir/'model.vba').read_text() with logging_disabled(): with raises(ValueError): - model.save('model.invalid') + model.save(tmpdir/'model.invalid') with raises(ValueError): - model.save('model.mph', format='invalid') + model.save(tmpdir/'model.mph', format='invalid') def test_problems(): From cf179d157b26e454baba2313e64e84fdda5ac288 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Sat, 25 Oct 2025 19:29:39 +0200 Subject: [PATCH 10/14] Added Ruff-specific linter rules. Also reordered the rule sets in `pyproject.toml`, starting with the few single-letter rule sets, followed by the multiple-letter rule sets in alphabetical order. 
Only the `RUF` rule set was added, with a small subset of them set to be ignored. --- mph/discovery.py | 6 +++--- mph/node.py | 14 +++++++------- pyproject.toml | 16 ++++++++++------ tools/test.py | 2 +- 4 files changed, 21 insertions(+), 17 deletions(-) diff --git a/mph/discovery.py b/mph/discovery.py index d3bf9a2..0e475a3 100644 --- a/mph/discovery.py +++ b/mph/discovery.py @@ -202,13 +202,13 @@ def search_disk(architecture: str) -> list[Path]: Path.home() / 'Applications', ] else: - raise ValueError('Unexpected value "{system}" for "system".') + raise ValueError(f'Unexpected value "{system}" for "system".') # Look for Comsol executables at those locations. folders = [item for location in locations if location.is_dir() for item in location.iterdir() - if item.is_dir() and re.match('(?i)comsol', item.name)] + if item.is_dir() and re.match(r'(?i)comsol', item.name)] for folder in folders: log.debug(f'Checking candidate folder "{folder}".') @@ -374,7 +374,7 @@ def find_backends() -> list[Backend]: # Get version information from Comsol server. 
command: list[Path | str] - command = server + ['--version'] + command = [*server, '--version'] try: arguments = dict( # noqa: C408 (unnecessary `dict()` call) check=True, timeout=15, diff --git a/mph/node.py b/mph/node.py index b182b51..4b4ecfd 100644 --- a/mph/node.py +++ b/mph/node.py @@ -11,7 +11,7 @@ from functools import lru_cache from logging import getLogger -from typing import TYPE_CHECKING, overload, Literal +from typing import TYPE_CHECKING, overload, Literal, ClassVar from collections.abc import Iterator, Sequence from numpy.typing import ArrayLike, NDArray from numpy import int32 @@ -100,7 +100,7 @@ class Node: model: Model """Model object this node refers to.""" - groups: dict[str, str] = { + groups: ClassVar[dict[str, str]] = { 'parameters': 'self.model.java.param().group()', 'functions': 'self.model.java.func()', 'components': 'self.model.java.component()', @@ -125,7 +125,7 @@ class Node: } """Mapping of the built-in groups to corresponding Java objects.""" - alias: dict[str, str] = { + alias: ClassVar[dict[str, str]] = { 'parameter': 'parameters', 'function': 'functions', 'component': 'components', @@ -379,7 +379,7 @@ def rename(self, name: str): java = self.java if java: java.label(name) - self.path = self.path[:-1] + (name,) + self.path = (*self.path[:-1], name) def retag(self, tag: str): """Assigns a new tag to the node.""" @@ -627,7 +627,7 @@ def create(self, break else: type = '?' - pattern = tag_pattern(feature_path(self) + [type]) + pattern = tag_pattern([*feature_path(self), type]) if pattern.endswith('*'): tag = container.uniquetag(pattern[:-1]) elif pattern in container.tags(): @@ -734,7 +734,7 @@ def feature_path(node: Node | None) -> list[str]: type = node.type() if not type: type = '?' 
- return feature_path(node.parent()) + [type] + return [*feature_path(node.parent()), type] def tag_pattern(feature_path: Sequence[str]): @@ -934,7 +934,7 @@ def traverse(node: Node, levels: list[bool], max_depth: int | None): children = node.children() last = len(children) - 1 for (index, child) in enumerate(children): - traverse(child, levels + [index == last], max_depth) + traverse(child, [*levels, index == last], max_depth) if not isinstance(node, Node): # Assume node is actually a model object and traverse from root. diff --git a/pyproject.toml b/pyproject.toml index 342b67a..e1c20f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,19 +88,19 @@ include = [ 'E', # code style errors 'W', # code style warnings 'B', # possible bugs + 'Q', # quotation marks + 'ARG', # unused function arguments 'C4', # correct comprehensions + 'ERA', # commented-out code 'FA', # future annotations 'PIE', # miscellaneous lints - 'ARG', # unused function arguments - 'RSE', # raised exceptions - 'Q', # quotation marks - 'SIM', # simplifiable code 'PT', # pyTest style 'PTH', # pathlib use mandatory - 'ERA', # commented-out code + 'RSE', # raised exceptions + 'RUF', # ruff-specific rules + 'SIM', # simplifiable code 'T10', # debugger calls 'UP', # upgradable code - 'RUF100', # unnecessary `noqa` directives ] ignore = [ 'E201', # whitespace after ( @@ -121,6 +121,10 @@ include = [ 'PT012', # `pytest.raises()` with multiple statements 'PT013', # incorrect pytest import 'PTH201', # Never use `Path('.')`. + 'RUF001', # string with en-dash + 'RUF002', # docstring with en-dash + 'RUF013', # no implicit optional types by assigning `None` + 'RUF036', # `None` not at end of type annotation 'UP024', # Replace `IOError` with `OSError`. 
] diff --git a/tools/test.py b/tools/test.py index 53b53d0..51e95b9 100644 --- a/tools/test.py +++ b/tools/test.py @@ -87,7 +87,7 @@ print(f'Running test group "{group}".') t0 = now() process = run( - ['uv', 'run', '--no-sync', f'test_{group}.py'] + options, + ['uv', 'run', '--no-sync', f'test_{group}.py', *options], cwd=root/'tests', ) if process.returncode == 0: From d70a3ff665fc64e678a1d9cb1ffd36896b842548 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Sat, 25 Oct 2025 20:37:40 +0200 Subject: [PATCH 11/14] Set up CI to check code quality and publish new releases. --- .github/workflows/check_commit.yml | 43 +++++++++++++++++++++ .github/workflows/publish_release.yml | 54 +++++++++++++++++++++++++++ 2 files changed, 97 insertions(+) create mode 100644 .github/workflows/check_commit.yml create mode 100644 .github/workflows/publish_release.yml diff --git a/.github/workflows/check_commit.yml b/.github/workflows/check_commit.yml new file mode 100644 index 0000000..ea1433d --- /dev/null +++ b/.github/workflows/check_commit.yml @@ -0,0 +1,43 @@ +# Check code quality of latest commit. +name: Check commit + +on: [push, pull_request, workflow_dispatch] + +jobs: + + test: + runs-on: ubuntu-latest + strategy: + matrix: + python: + - "3.10" + - "3.11" + - "3.12" + - "3.13" + steps: + + - name: Check out code. + uses: actions/checkout@v5 + + - name: Install Python. + uses: actions/setup-python@v6 + with: + python-version: ${{ matrix.python }} + + - name: Install UV. + uses: astral-sh/setup-uv@v7 + + - name: Install package. + run: uv sync + + - name: Lint code. + run: uv run ruff check + + - name: Check types. + run: uv run basedpyright + + - name: Run tests. + run: uv run tools/test.py config + # We only run the very small part of the test suite that doesn't + # require Comsol to be installed. This essentially just tests that + # the library can be successfully imported, but not much more. 
diff --git a/.github/workflows/publish_release.yml b/.github/workflows/publish_release.yml new file mode 100644 index 0000000..5668150 --- /dev/null +++ b/.github/workflows/publish_release.yml @@ -0,0 +1,54 @@ +# Publish new GitHub release on PyPI. +name: Publish release + +on: [workflow_dispatch] + +jobs: + + build: + name: Build wheel + runs-on: ubuntu-latest + steps: + + - name: Check out code. + uses: actions/checkout@v5 + + - name: Install Python. + uses: actions/setup-python@v6 + with: + python-version: "3.13" + + - name: Install UV. + uses: astral-sh/setup-uv@v7 + + - name: Build wheel. + run: uv build --wheel --out-dir build/wheel + + - name: Store wheel. + uses: actions/upload-artifact@v4 + with: + name: python-wheel + path: build/wheel + + + publish: + name: Publish to PyPI + needs: + - build + if: startsWith(github.ref, 'refs/tags/') # Commit must be tagged. + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/MPh + permissions: + id-token: write + steps: + + - name: Download wheel. + uses: actions/download-artifact@v5 + with: + name: python-wheel + path: build/wheel + + - name: Publish to PyPI. + uses: pypa/gh-action-pypi-publish@release/v1 From 1597f48b0c06c1f4e356876c0f4b2340557e8c20 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Wed, 29 Oct 2025 00:41:24 +0100 Subject: [PATCH 12/14] Renamed dev scripts in `tools` folder. Specifically, the `types.py` script would occasionally cause issues as it shadows `types` module from the standard library. 
--- .github/workflows/check_commit.yml | 2 +- tests/ReadMe.md | 4 ++-- tools/ReadMe.md | 27 +++++++++++----------- tools/{wheel.py => build_wheel.py} | 0 tools/{types.py => check_types.py} | 0 tools/{clean.py => clean_repo.py} | 0 tools/{lint.py => lint_code.py} | 0 tools/{coverage.py => measure_coverage.py} | 0 tools/publish.py | 11 --------- tools/{docs.py => render_docs.py} | 0 tools/{codecov.py => report_coverage.py} | 0 tools/{test.py => run_tests.py} | 2 +- 12 files changed, 18 insertions(+), 28 deletions(-) rename tools/{wheel.py => build_wheel.py} (100%) rename tools/{types.py => check_types.py} (100%) rename tools/{clean.py => clean_repo.py} (100%) rename tools/{lint.py => lint_code.py} (100%) rename tools/{coverage.py => measure_coverage.py} (100%) delete mode 100644 tools/publish.py rename tools/{docs.py => render_docs.py} (100%) rename tools/{codecov.py => report_coverage.py} (100%) rename tools/{test.py => run_tests.py} (95%) diff --git a/.github/workflows/check_commit.yml b/.github/workflows/check_commit.yml index ea1433d..a17ce67 100644 --- a/.github/workflows/check_commit.yml +++ b/.github/workflows/check_commit.yml @@ -37,7 +37,7 @@ jobs: run: uv run basedpyright - name: Run tests. - run: uv run tools/test.py config + run: uv run tools/run_tests.py config # We only run the very small part of the test suite that doesn't # require Comsol to be installed. This essentially just tests that # the library can be successfully imported, but not much more. diff --git a/tests/ReadMe.md b/tests/ReadMe.md index 8f89901..e6989e5 100644 --- a/tests/ReadMe.md +++ b/tests/ReadMe.md @@ -1,5 +1,5 @@ ## Test suite The scripts here, along with some fixtures, constitute the test suite. They are -run in the intended order by the helper script `test.py` in the `tools` folder, -typically with `uv run tools/test.py`. +run in the intended order by the helper script `run_tests.py` in the `tools` +folder, typically with `uv run tools/run_tests.py`. 
diff --git a/tools/ReadMe.md b/tools/ReadMe.md index 8aed019..ab283d4 100644 --- a/tools/ReadMe.md +++ b/tools/ReadMe.md @@ -19,17 +19,18 @@ environment in the `.venv` folder with all dependencies installed in it, as defined in `pyproject.toml`. Run any of the dev tools via the helper scripts in the `tools` folder. For -example `uv run tools/lint.py` to lint the code (same as `uv run ruff check`), -`uv run tools/types.py` to check types, etc. +example `uv run tools/lint_code.py` to lint the code for quality issues (same +as `uv run ruff check`), `uv run tools/check_types.py` to check type +annotations, etc. -You may also install into an existing virtual environment or even the global -Python environment with `uv pip install --editable .`. The `--editable` flag -makes it so that all code changes take immediate effect without re-installing -the package. +Alternatively, you may also install from source into an existing virtual +environment or even the global Python environment with `uv pip install --group +dev --editable .`. The `--editable` flag makes it so that all code changes take +immediate effect without re-installing the package. When using Pip, follow the standard workflow: Create a virtual Python environment `python -m venv .venv`, activate it, and install the project in it -with `pip install --editable .` +with `pip install --group dev --editable .` [UV]: https://docs.astral.sh/uv [Pip]: https://pip.pypa.io @@ -42,10 +43,10 @@ with `pip install --editable .` - Tag commit with version number, e.g. 
`git tag v1.3.0`
 - Force `stable` branch to latest commit: `git branch -f stable`
 - Same for the current documentation branch: `git branch -f 1.3`
-- Run code linter: `uv run tools/lint.py`
-- Test docs build: `un run tools/docs.py`
-- Test wheel build: `uv run tools/wheel.py`
-- Run code coverage: `uv run tools/coverage.py`
+- Run code linter: `uv run tools/lint_code.py`
+- Test docs build: `uv run tools/render_docs.py`
+- Test wheel build: `uv run tools/build_wheel.py`
+- Run code coverage: `uv run tools/measure_coverage.py`
 - Push to GitHub:
 ```
 git push origin main
@@ -53,6 +54,6 @@ git push --tags
 git push origin stable
 git push origin 1.3
 ```
-- Upload coverage report: `uv run tools/codecov.py`
+- Upload coverage report: `uv run tools/report_coverage.py`
 - Create new release on GitHub and add release notes.
-- Publish to PyPI: `uv run tools/publish.py`
+- Publish to PyPI via GitHub Action.
diff --git a/tools/wheel.py b/tools/build_wheel.py
similarity index 100%
rename from tools/wheel.py
rename to tools/build_wheel.py
diff --git a/tools/types.py b/tools/check_types.py
similarity index 100%
rename from tools/types.py
rename to tools/check_types.py
diff --git a/tools/clean.py b/tools/clean_repo.py
similarity index 100%
rename from tools/clean.py
rename to tools/clean_repo.py
diff --git a/tools/lint.py b/tools/lint_code.py
similarity index 100%
rename from tools/lint.py
rename to tools/lint_code.py
diff --git a/tools/coverage.py b/tools/measure_coverage.py
similarity index 100%
rename from tools/coverage.py
rename to tools/measure_coverage.py
diff --git a/tools/publish.py b/tools/publish.py
deleted file mode 100644
index fdcbaae..0000000
--- a/tools/publish.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""Publishes the package on PyPI."""
-
-from subprocess import run
-from pathlib import Path
-
-
-root = Path(__file__).parent.parent
-
-process = run(['uv', 'publish', 'build/wheel/*.whl'], cwd=root)
-if process.returncode:
-    raise RuntimeError('Error while publishing to 
PyPI.') diff --git a/tools/docs.py b/tools/render_docs.py similarity index 100% rename from tools/docs.py rename to tools/render_docs.py diff --git a/tools/codecov.py b/tools/report_coverage.py similarity index 100% rename from tools/codecov.py rename to tools/report_coverage.py diff --git a/tools/test.py b/tools/run_tests.py similarity index 95% rename from tools/test.py rename to tools/run_tests.py index 51e95b9..1f45292 100644 --- a/tools/test.py +++ b/tools/run_tests.py @@ -39,7 +39,7 @@ # Parse command-line arguments. parser = ArgumentParser( - prog='test.py', + prog=Path(__file__).name, description='Runs the MPh test suite.', add_help=False, allow_abbrev=False, ) From 262f660e87488ce34e33e123a6973a2de88aab62 Mon Sep 17 00:00:00 2001 From: John Hennig Date: Wed, 29 Oct 2025 00:55:56 +0100 Subject: [PATCH 13/14] Documented that only Comsol 6.0 and newer is "expected to work". Comsol 5.5 and 5.6 no longer work out of the box as the latest JPype version (1.6) doesn't support Java 8, which these Comsol versions ship with. --- docs/installation.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 258f53b..d17d3d1 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -22,7 +22,7 @@ Python, as is the default, to match Comsol's platform architecture. ## Comsol -Comsol, obviously, you need to license and install yourself. [Versions] 5.6 and +Comsol, obviously, you need to license and install yourself. [Versions] 6.0 and newer are expected to work. Up to version 6.3, they have been successfully tested. A separate Java run-time environment is *not* required as Comsol ships with one already built in. @@ -46,7 +46,12 @@ point to the corresponding Comsol folder and give the link a name that starts with `comsol`. ```{note} -For most users who already have Comsol installed, MPh will work out of the box. 
+For most users who already have a recent Comsol version installed, MPh will +work out of the box. + +Comsol 5.5 and 5.6 may work if you downgrade JPype: `pip install "jpype1<1.6"`. +Newer JPype versions no longer support Java 8, which these older Comsol +versions ship with. ``` From 39f8186897c16dc94a1cb3912b68b0d2d394c0ac Mon Sep 17 00:00:00 2001 From: John Hennig Date: Wed, 29 Oct 2025 17:01:42 +0100 Subject: [PATCH 14/14] Also log Python and NumPy (not just JPype) version. This makes bug reporting clearer in the case where users paste the console output of the test suite, specifically the "discovery" part. --- mph/client.py | 2 +- mph/discovery.py | 9 +++++++++ mph/model.py | 1 + mph/node.py | 1 + mph/session.py | 1 + 5 files changed, 13 insertions(+), 1 deletion(-) diff --git a/mph/client.py b/mph/client.py index d582ff8..dcadf74 100755 --- a/mph/client.py +++ b/mph/client.py @@ -8,6 +8,7 @@ import jpype import jpype.imports # noqa: F401 (imported, but not used) + import os import faulthandler from pathlib import Path @@ -182,7 +183,6 @@ def __init__(self, standalone = host and not port # Start the Java virtual machine. - log.debug(f'JPype version is {jpype.__version__}.') log.info('Starting Java virtual machine.') root = backend['root'] args = [str(backend['jvm'])] diff --git a/mph/discovery.py b/mph/discovery.py index 0e475a3..ce0118f 100644 --- a/mph/discovery.py +++ b/mph/discovery.py @@ -24,8 +24,12 @@ from __future__ import annotations +from numpy import __version__ as numpy_version +from jpype import __version__ as jpype_version + import platform import subprocess +import sys import re from pathlib import Path from functools import lru_cache @@ -278,6 +282,11 @@ def find_backends() -> list[Backend]: # Detect platform architecture. arch = detect_architecture() + # Log relevant software versions for easier bug reporting. 
+ log.debug(f'Python version is "{sys.version}".') + log.debug(f'NumPy version is {numpy_version}.') + log.debug(f'JPype version is {jpype_version}.') + # Search system for Comsol executables. if system == 'Windows': executables = search_registry(arch) diff --git a/mph/model.py b/mph/model.py index 98b29ab..a301184 100644 --- a/mph/model.py +++ b/mph/model.py @@ -6,6 +6,7 @@ from numpy import array, ndarray from numpy import integer + from pathlib import Path from re import match from logging import getLogger diff --git a/mph/node.py b/mph/node.py index 4b4ecfd..12e2293 100644 --- a/mph/node.py +++ b/mph/node.py @@ -4,6 +4,7 @@ from jpype import JBoolean, JInt, JDouble, JString, JArray, JClass from numpy import array, ndarray, integer + from pathlib import Path from re import split from json import load as json_load diff --git a/mph/session.py b/mph/session.py index bae9a8f..ad1381a 100755 --- a/mph/session.py +++ b/mph/session.py @@ -7,6 +7,7 @@ from .config import option import jpype + import atexit import sys import platform