Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion documentation/source/usage/examples.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ some types of output.

Renders of these notebooks can be found under the same sub-heading as this page. We would encourage users to run the notebooks themselves once they have read through the renders as this will help build experience with running and visualising PROCESS.

Please note that when running the examples, it is important to have PROCESS installed in editable mode (`pip install -e .`) so that the example data files can be located. Without an editable install, most examples will not run.


### Notebooks in VS Code

Expand Down Expand Up @@ -39,4 +41,4 @@ A web browser will open and the notebook can be run from there. If you're using
Another way of running the PROCESS example notebooks is to use Binder. This uses a JupyterHub server to host the contents of
PROCESS, allowing the examples to be run via a web browser and without installation on your computer. You can click
[here](https://mybinder.org/v2/gh/ukaea/PROCESS/HEAD?urlpath=%2Fdoc%2Ftree%2Fexamples%2F) to try this out.
The Binder may take some time to load, but once loaded you will be in the `examples` folder and can select example notebooks to run in your web browser.
The Binder may take some time to load, but once loaded you will be in the `examples` folder and can select example notebooks to run in your web browser. Simply right-click the notebook you wish to run (a `.ex.py` file) and select `Open with > Notebook`.
5 changes: 5 additions & 0 deletions examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,13 @@ Please make sure the extra example dependencies are installed
pip install -e .'[examples]'
```

It is important that the install is editable; otherwise, the examples will be unable to find the data they require to run.

## Running notebooks

### Notebooks in Binder
Navigate to [our Binder](https://mybinder.org/v2/gh/ukaea/PROCESS/HEAD?urlpath=%2Fdoc%2Ftree%2Fexamples%2F); this should open a Binder session in the `examples` directory. Right-click on the `.ex.py` notebook you wish to open and select `Open with > Notebook`.

### Notebooks in VS Code

The recommended way to run notebooks is in VS Code; this has the additional advantage of being able to debug notebooks. Simply open the `.ex.py` file in VS Code and click run in interactive mode to view and run it. You may be required to select a notebook kernel on first run; be sure to select the virtual environment where PROCESS is installed (e.g. `~/PROCESS/.venv`).
Expand Down
2 changes: 1 addition & 1 deletion examples/data/run_process.conf
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
WDIR = .

* original IN.DAT name (should not be called IN.DAT!)
ORIGINAL_IN_DAT = large_tokamak_varyrun_IN.DAT
ORIGINAL_IN_DAT = large_tokamak_IN.DAT

* Max no. iterations
NITER = 30
Expand Down
73 changes: 0 additions & 73 deletions examples/functions_for_examples.py

This file was deleted.

18 changes: 10 additions & 8 deletions examples/introduction.ex.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,23 +38,27 @@
# %load_ext autoreload
# %autoreload 2

import shutil
import tempfile
from pathlib import Path

from functions_for_examples import copy_to_temp_dir

from process.main import SingleRun
from process.repository import get_process_root

# Define project root dir; this is using the current working directory
PROJ_DIR = Path.cwd().parent

# Define input file name relative to project dir, then copy to temp dir
script_dir = Path("__file__").parent.resolve()
input_rel = script_dir / "data/large_tokamak_IN.DAT"
data_dir = get_process_root() / "../examples/data/"
input_file = data_dir / "large_tokamak_IN.DAT"

temp_dir, temp_input_path, temp_dir_path = copy_to_temp_dir(input_rel, PROJ_DIR)
# Copy the file to avoid polluting the project directory with example files
temp_dir = tempfile.TemporaryDirectory()
input_path = Path(temp_dir.name) / "large_tokamak_IN.DAT"
shutil.copy(input_file, input_path)

# Run process on an input file in a temporary directory
single_run = SingleRun(temp_input_path.as_posix())
single_run = SingleRun(input_path.as_posix())
single_run.run()

# %% [markdown]
Expand Down Expand Up @@ -90,8 +94,6 @@
# %%
from process.io import mfile_to_csv

data_dir = Path("data")

# mfile_to_csv requires two inputs:
# - path to the MFILE
# - .json containing the variable names to include in the csv file
Expand Down
5 changes: 2 additions & 3 deletions examples/plot_solutions.ex.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,21 +38,20 @@
# %load_ext autoreload
# %autoreload 2

from pathlib import Path

from process.io.plot_solutions import (
RunMetadata,
plot_mfile_solutions,
plot_mfile_solutions_constraints,
)
from process.repository import get_process_root

# %% [markdown]
# ## Plot single solution
#
# Plot a single solution, showing optimisation parameters normalised to their initial values.

# %%
data_dir = Path("data")
data_dir = data_dir = get_process_root() / "../examples/data/"
runs_metadata = [
RunMetadata(data_dir / "large_tokamak_1_MFILE.DAT", "large tokamak 1"),
]
Expand Down
5 changes: 2 additions & 3 deletions examples/scan.ex.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,11 +33,10 @@
# - `sweep`: array of values for the scanned variable to take; one for each run. Should be of length `isweep`

# %% slideshow={"slide_type": "subslide"}
from pathlib import Path

from process.main import SingleRun
from process.repository import get_process_root

data_dir = Path("data")
data_dir = get_process_root() / "../examples/data/"
input_name = data_dir / "scan_example_file_IN.DAT"
# Perform a SingleRun on a scan-enabled input file
single_run = SingleRun(str(input_name), solver="vmcon_bounded")
Expand Down
5 changes: 4 additions & 1 deletion examples/single_model_evaluation.ex.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,10 @@
# In order to initialise all variables in Process with their values at a given point (design parameter vector), run an evaluation input file (one with no optimisation) to initialise values in all models. The "large tokamak" regression test solution is used here.

# %%
single_run = SingleRun("data/large_tokamak_eval_IN.DAT")
from process.repository import get_process_root

data_dir = get_process_root() / "../examples/data/"
single_run = SingleRun((data_dir / "large_tokamak_eval_IN.DAT").as_posix())
single_run.run()


Expand Down
37 changes: 21 additions & 16 deletions examples/vary_run_example.ex.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,25 +75,27 @@
# %autoreload 2

import os
import shutil
import tempfile
from pathlib import Path
from shutil import copy

from functions_for_examples import copy_to_temp_dir, get_initial_values

from process.io.mfile_utils import get_mfile_initial_ixc_values
from process.main import VaryRun
from process.repository import get_process_root

# Define project root dir; when running a notebook, the cwd is the dir the notebook is in
PROJ_DIR = Path.cwd().parent

# Path to .conf file
script_dir = Path("__file__").parent.resolve()
conf_file = script_dir / "data/run_process.conf"
temp_dir, temp_input_path, temp_dir_path = copy_to_temp_dir(conf_file, PROJ_DIR)
# Path to files
data_dir = get_process_root() / "../examples/data/"
conf_file = data_dir / "run_process.conf"
input_file = data_dir / "large_tokamak_varyrun_IN.DAT"

# .conf file relies on a separate input file too; copy this as well
# TODO This double input file requirement needs to be removed
input_file = script_dir / "data/large_tokamak_varyrun_IN.DAT"
copy(PROJ_DIR / input_file, temp_dir.name)
temp_dir = tempfile.TemporaryDirectory()
input_path = Path(temp_dir.name) / "large_tokamak_IN.DAT"
conf_path = Path(temp_dir.name) / "run_process.conf"
shutil.copy(input_file, input_path)
shutil.copy(conf_file, conf_path)


# VaryRun uses process_config.py, which changes the current working directory
Expand All @@ -104,19 +106,22 @@
# TODO Remove the os.chdir() from VaryRun
cwd = Path.cwd()

vary_run = VaryRun(temp_input_path.as_posix())
vary_run = VaryRun(conf_path.as_posix())
vary_run.run()
os.chdir(cwd)


# Get the initial values from the original input file
iteration_variable_names, original_iteration_variable_values = get_initial_values(
input_file
iteration_variable_names, original_iteration_variable_values = (
get_mfile_initial_ixc_values(input_file)
)

# Get the initial values from the new input file produced by VaryRun
path_to_new_input = (temp_dir_path / "IN.DAT").as_posix()
_, updated_iteration_variable_values = get_initial_values(path_to_new_input)
# VaryRun always produces a file called IN.DAT in the same directory
# as the conf file
_, updated_iteration_variable_values = get_mfile_initial_ixc_values(
Path(temp_dir.name) / "IN.DAT"
)

# %% [markdown]
# ## Compare iteration variable values
Expand Down
4 changes: 4 additions & 0 deletions postBuild
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash

# install examples dependencies on binder
pip install -e '.[examples]'
43 changes: 43 additions & 0 deletions process/io/mfile_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
import re
from pathlib import Path

import process.data_structure as data_structure
import process.iteration_variables as iteration_variables
from process.main import SingleRun


def get_mfile_initial_ixc_values(file_path: Path) -> tuple[list[str], list]:
    """Initialise an input file and return the initial iteration variable values.

    Despite the "mfile" in this function's name, ``file_path`` is a PROCESS
    input file (IN.DAT): it is passed to ``SingleRun``, which parses the input
    file to populate the global data structure.

    Parameters
    ----------
    file_path :
        The path to the input file (IN.DAT) to get the initial iteration
        variable values from.

    Returns
    -------
    A pair ``(names, values)``: the iteration variable names (as registered in
    ``ITERATION_VARIABLES``) and their corresponding initial values, in the
    same order.

    Notes
    -----
    This method initialises a SingleRun. At present, this involves mutating the global
    data structure so it is not safe to run this method during a PROCESS run.
    """
    # Parse the input file; this populates the global numerics data (nvar, ixc)
    SingleRun(file_path.as_posix())
    iteration_variables.load_iteration_variables()

    iteration_variable_names: list[str] = []
    iteration_variable_values: list = []

    for i in range(data_structure.numerics.nvar):
        ivar = data_structure.numerics.ixc[i].item()

        itv = iteration_variables.ITERATION_VARIABLES[ivar]

        iteration_variable_names.append(itv.name)
        # Names of the form "name(3)" refer to element 3 (1-based, Fortran
        # convention) of an array variable; index into the Python array with
        # a 0-based index.
        if array := re.match(r"(\w+)\(([0-9]+)\)", itv.name):
            var_name, index = array.groups()
            iteration_variable_values.append(
                getattr(itv.module, var_name)[int(index) - 1]
            )
        else:
            iteration_variable_values.append(getattr(itv.module, itv.name))

    return iteration_variable_names, iteration_variable_values
16 changes: 16 additions & 0 deletions process/repository.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
"""Contains method related to the PROCESS repository/installation"""

from pathlib import Path

import process

_PROCESS_ROOT = Path(process.__file__).resolve().parent.as_posix()


def get_process_root() -> Path:
"""Returns the root directory of PROCESS.

E.g. '/home/user/process'
"""

return Path(_PROCESS_ROOT)
33 changes: 27 additions & 6 deletions tests/examples/test_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,15 @@ def test_introductory_examples(examples_temp_data):
:type examples_temp_data: Path
"""
example_notebook_location = _get_location(examples_temp_data, "introduction")
with testbook(example_notebook_location, execute=True, timeout=600):
# Check csv file is created

with (
testbook(example_notebook_location, execute=False, timeout=600) as tb,
tb.patch(
"process.repository._PROCESS_ROOT",
new=example_notebook_location.parent.resolve().as_posix(),
),
):
tb.execute()
assert os.path.exists(examples_temp_data / "data/large_tokamak_1_MFILE.csv")

# Read in the csv file created by test and check it contains positive floats
Expand Down Expand Up @@ -84,7 +91,15 @@ def test_scan(examples_temp_data):
:type examples_temp_data: Path
"""
scan_notebook_location = _get_location(examples_temp_data, "scan")
with testbook(scan_notebook_location, execute=True, timeout=1200):

with (
testbook(scan_notebook_location, execute=False, timeout=1200) as tb,
tb.patch(
"process.repository._PROCESS_ROOT",
new=scan_notebook_location.parent.resolve().as_posix(),
),
):
tb.execute()
# Run entire scan.ex.py notebook and assert an MFILE is created
assert os.path.exists(examples_temp_data / "data/scan_example_file_MFILE.DAT")

Expand All @@ -97,6 +112,12 @@ def test_no_assertion_solutions(name, examples_temp_data):

:param examples_temp_data: temporary dir containing examples files
"""
plot_solutions_notebook_location = _get_location(examples_temp_data, name)
with testbook(plot_solutions_notebook_location, execute=True, timeout=600):
pass
notebook_location = _get_location(examples_temp_data, name)
with (
testbook(notebook_location, execute=False, timeout=600) as tb,
tb.patch(
"process.repository._PROCESS_ROOT",
new=notebook_location.parent.resolve().as_posix(),
),
):
tb.execute()
Loading