4 changes: 2 additions & 2 deletions docs/src/tutorials/advancedfitting/multi_contrast.ipynb
@@ -358,8 +358,8 @@
"d83acmw.head_layer.area_per_molecule_parameter.enabled = True\n",
"d83acmw.tail_layer.area_per_molecule_parameter.enabled = True\n",
"\n",
"d70d2o.constain_multiple_contrast(d13d2o)\n",
"d83acmw.constain_multiple_contrast(d70d2o)"
"d70d2o.constrain_multiple_contrast(d13d2o)\n",
"d83acmw.constrain_multiple_contrast(d70d2o)"
]
},
{
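For orientation, a minimal sketch of the pattern the corrected cell uses, assuming the contrast objects d13d2o, d70d2o and d83acmw are already built as in the rest of that tutorial: enable the shared parameter on each contrast, then chain each contrast to the previous one with constrain_multiple_contrast.

# Sketch only: share the area-per-molecule value across the three contrasts.
d83acmw.head_layer.area_per_molecule_parameter.enabled = True
d83acmw.tail_layer.area_per_molecule_parameter.enabled = True
d70d2o.constrain_multiple_contrast(d13d2o)   # d70-D2O follows d13-D2O
d83acmw.constrain_multiple_contrast(d70d2o)  # d83-ACMW follows d70-D2O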
69 changes: 64 additions & 5 deletions docs/src/tutorials/simulation/resolution_functions.ipynb
@@ -46,6 +46,7 @@
"from easyreflectometry.model import Model\n",
"from easyreflectometry.model import LinearSpline\n",
"from easyreflectometry.model import PercentageFwhm\n",
"from easyreflectometry.model import Pointwise\n",
"from easyreflectometry.sample import Layer\n",
"from easyreflectometry.sample import Material\n",
"from easyreflectometry.sample import Multilayer\n",
@@ -115,6 +116,16 @@
"dict_reference['10'] = load(file_path_10)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e5f65ed7",
"metadata": {},
"outputs": [],
"source": [
"dict_reference['0']"
]
},
{
"cell_type": "markdown",
"id": "1ab3a164-62c8-4bd3-b0d8-e6f22c83dc74",
@@ -251,9 +262,15 @@
"id": "defd6dd5-c618-4af6-a5c7-17532207f0a0",
"metadata": {},
"source": [
"## Resolution functions\n",
"\n",
"We now define the different resoultion functions. "
"## Resolution functions "
]
},
{
"cell_type": "markdown",
"id": "c9d903db",
"metadata": {},
"source": [
"We can now define the different resoultion functions. "
]
},
{
@@ -376,11 +393,53 @@
"plt.yscale('log')\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "43881642",
"metadata": {},
"outputs": [],
"source": [
"key = '1'\n",
"reference_coords = dict_reference[key]['coords']['Qz_0'].values\n",
"reference_variances = dict_reference[key]['coords']['Qz_0'].variances\n",
"reference_data = dict_reference[key]['data']['R_0'].values\n",
"model_coords = np.linspace(\n",
" start=min(reference_coords),\n",
" stop=max(reference_coords),\n",
" num=1000,\n",
")\n",
"\n",
"model.resolution_function = resolution_function_dict[key]\n",
"model_data = model.interface().reflectity_profile(\n",
" model_coords,\n",
" model.unique_name,\n",
")\n",
"plt.plot(model_coords, model_data, 'k-', label=f'Variable', linewidth=5)\n",
"data_points = []\n",
"data_points.append(reference_coords) # Qz\n",
"data_points.append(reference_data) # R\n",
"data_points.append(reference_variances) # sQz\n",
"model.resolution_function = Pointwise(q_data_points=data_points)\n",
"model_data = model.interface().reflectity_profile(\n",
" model_coords,\n",
" model.unique_name,\n",
")\n",
"plt.plot(model_coords, model_data, 'r-', label=f'Pointwise')\n",
"\n",
"ax = plt.gca()\n",
"ax.set_xlim([-0.01, 0.45])\n",
"ax.set_ylim([1e-10, 2.5])\n",
"plt.legend()\n",
"plt.yscale('log')\n",
"plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "easyref",
"display_name": "erl",
"language": "python",
"name": "python3"
},
@@ -394,7 +453,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.9"
"version": "3.12.10"
}
},
"nbformat": 4,
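As a rough companion to the new notebook cell above, a sketch of the intended flow for attaching a point-by-point resolution function to the model; it assumes dict_reference was filled by load() earlier in the notebook and that the first dataset is keyed 'Qz_0'/'R_0'.

# Sketch: build the [Qz, R, sQz] triplet Pointwise expects and attach it to the model.
qz = dict_reference['1']['coords']['Qz_0'].values
r = dict_reference['1']['data']['R_0'].values
sqz = dict_reference['1']['coords']['Qz_0'].variances  # Qz variances; Pointwise applies the square root
model.resolution_function = Pointwise(q_data_points=[qz, r, sqz])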
2 changes: 2 additions & 0 deletions src/easyreflectometry/data/__init__.py
@@ -2,10 +2,12 @@
from .data_store import ProjectData
from .measurement import load
from .measurement import load_as_dataset
from .measurement import merge_datagroups

__all__ = [
"load",
"load_as_dataset",
"merge_datagroups",
"ProjectData",
"DataSet1D",
]
53 changes: 46 additions & 7 deletions src/easyreflectometry/data/measurement.py
@@ -1,5 +1,6 @@
__author__ = 'github.com/arm61'

import os
from typing import TextIO
from typing import Union

@@ -25,11 +26,16 @@ def load(fname: Union[TextIO, str]) -> sc.DataGroup:
def load_as_dataset(fname: Union[TextIO, str]) -> DataSet1D:
"""Load data from an ORSO .ort file as a DataSet1D."""
data_group = load(fname)
basename = os.path.splitext(os.path.basename(fname))[0]
data_name = 'R_' + basename
coords_name = 'Qz_' + basename
coords_name = list(data_group['coords'].keys())[0] if coords_name not in data_group['coords'] else coords_name
data_name = list(data_group['data'].keys())[0] if data_name not in data_group['data'] else data_name
return DataSet1D(
x=data_group['coords']['Qz_0'].values,
y=data_group['data']['R_0'].values,
ye=data_group['data']['R_0'].variances,
xe=data_group['coords']['Qz_0'].variances,
x=data_group['coords'][coords_name].values,
y=data_group['data'][data_name].values,
ye=data_group['data'][data_name].variances,
xe=data_group['coords'][coords_name].variances,
)


@@ -86,6 +92,8 @@ def _load_txt(fname: Union[TextIO, str]) -> sc.DataGroup:
if ',' in first_line:
delimiter = ','

basename = os.path.splitext(os.path.basename(fname))[0]

try:
# First load only the data to check column count
data = np.loadtxt(fname, delimiter=delimiter, comments='#')
@@ -110,13 +118,44 @@
# Re-raise with more descriptive message
raise ValueError(f"Failed to load data from {fname}: {str(error)}") from error

data = {'R_0': sc.array(dims=['Qz_0'], values=y, variances=np.square(e))}
data_name = 'R_' + basename
coords_name = 'Qz_' + basename
data = {data_name: sc.array(dims=[coords_name], values=y, variances=np.square(e))}
coords = {
data['R_0'].dims[0]: sc.array(
dims=['Qz_0'],
data[data_name].dims[0]: sc.array(
dims=[coords_name],
values=x,
variances=np.square(xe),
unit=sc.Unit('1/angstrom'),
)
}
return sc.DataGroup(data=data, coords=coords)

def merge_datagroups(*data_groups: sc.DataGroup) -> sc.DataGroup:
    """Merge multiple DataGroups into a single DataGroup."""
    merged_data = {}
    merged_coords = {}
    merged_attrs = {}

    for group in data_groups:
        for key, value in group['data'].items():
            if key not in merged_data:
                merged_data[key] = value
            else:
                merged_data[key] = sc.concat([merged_data[key], value], dim=value.dims[0])

        for key, value in group['coords'].items():
            if key not in merged_coords:
                merged_coords[key] = value
            else:
                merged_coords[key] = sc.concat([merged_coords[key], value], dim=value.dims[0])

        if 'attrs' not in group:
            continue
        for key, value in group['attrs'].items():
            if key not in merged_attrs:
                merged_attrs[key] = value
            else:
                merged_attrs[key] = {**merged_attrs[key], **value}

    return sc.DataGroup(data=merged_data, coords=merged_coords, attrs=merged_attrs)
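A possible usage sketch for the new helper (the file names are hypothetical): load two measurements and combine them into a single DataGroup; entries with distinct keys sit side by side, while entries sharing a key are concatenated along their dimension.

# Sketch only: merge two loaded measurements into one DataGroup.
from easyreflectometry.data import load, merge_datagroups

group_a = load('measurement_a.ort')  # hypothetical file
group_b = load('measurement_b.ort')  # hypothetical file
merged = merge_datagroups(group_a, group_b)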
3 changes: 2 additions & 1 deletion src/easyreflectometry/fitting.py
@@ -50,7 +50,8 @@ def fit(self, data: sc.DataGroup, id: int = 0) -> sc.DataGroup:
)
sld_profile = self.easy_science_multi_fitter._fit_objects[i].interface.sld_profile(self._models[i].unique_name)
new_data[f'SLD_{id}'] = sc.array(dims=[f'z_{id}'], values=sld_profile[1] * 1e-6, unit=sc.Unit('1/angstrom') ** 2)
new_data['attrs'][f'R_{id}_model'] = {'model': sc.scalar(self._models[i].as_dict())}
if 'attrs' in new_data:
new_data['attrs'][f'R_{id}_model'] = {'model': sc.scalar(self._models[i].as_dict())}
new_data['coords'][f'z_{id}'] = sc.array(
dims=[f'z_{id}'], values=sld_profile[0], unit=(1 / new_data['coords'][f'Qz_{id}'].unit).unit
)
2 changes: 2 additions & 0 deletions src/easyreflectometry/model/__init__.py
@@ -2,11 +2,13 @@
from .model_collection import ModelCollection
from .resolution_functions import LinearSpline
from .resolution_functions import PercentageFwhm
from .resolution_functions import Pointwise
from .resolution_functions import ResolutionFunction

__all__ = (
"LinearSpline",
"PercentageFwhm",
"Pointwise",
"ResolutionFunction",
"Model",
"ModelCollection",
7 changes: 4 additions & 3 deletions src/easyreflectometry/model/model.py
@@ -42,6 +42,7 @@
},
}

COLORS = ["#0173B2", "#DE8F05", "#029E73", "#D55E00", "#CC78BC", "#CA9161", "#FBAFE4", "#949494", "#ECE133", "#56B4E9"]

class Model(BaseObj):
"""Model is the class that represents the experiment.
@@ -60,8 +61,8 @@ def __init__(
scale: Union[Parameter, Number, None] = None,
background: Union[Parameter, Number, None] = None,
resolution_function: Union[ResolutionFunction, None] = None,
name: str = 'EasyModel',
color: str = 'black',
name: str = 'Model',
color: str = COLORS[0],
unique_name: Optional[str] = None,
interface=None,
):
@@ -70,7 +71,7 @@
:param sample: The sample being modelled.
:param scale: Scaling factor of profile.
:param background: Linear background magnitude.
:param name: Name of the model, defaults to 'EasyModel'.
:param name: Name of the model, defaults to 'Model'.
:param resolution_function: Resolution function, defaults to PercentageFwhm.
:param interface: Calculator interface, defaults to `None`.

6 changes: 4 additions & 2 deletions src/easyreflectometry/model/model_collection.py
@@ -4,6 +4,7 @@
from typing import Optional
from typing import Tuple

from easyreflectometry.model.model import COLORS
from easyreflectometry.sample.collections.base_collection import BaseCollection

from .model import Model
@@ -18,7 +19,7 @@ class ModelCollection(BaseCollection):
def __init__(
self,
*models: Tuple[Model],
name: str = 'EasyModels',
name: str = 'Models',
interface=None,
unique_name: Optional[str] = None,
populate_if_none: bool = True,
@@ -41,7 +42,8 @@ def add_model(self, model: Optional[Model] = None):
:param model: Model to add.
"""
if model is None:
model = Model(name='EasyModel added', interface=self.interface)
color = COLORS[len(self) % len(COLORS)]
model = Model(name='Model', interface=self.interface, color=color)
self.append(model)

def duplicate_model(self, index: int):
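To illustrate the new defaults, a small sketch: models added without an explicit Model instance now cycle through the shared COLORS palette, so each model gets a distinct plot color.

# Sketch: successive add_model() calls pick successive palette entries.
from easyreflectometry.model import ModelCollection

models = ModelCollection(populate_if_none=False)
models.add_model()  # named 'Model', color COLORS[0] ("#0173B2")
models.add_model()  # named 'Model', color COLORS[1] ("#DE8F05")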
59 changes: 59 additions & 0 deletions src/easyreflectometry/model/resolution_functions.py
@@ -30,6 +30,8 @@ def from_dict(cls, data: dict) -> ResolutionFunction:
return PercentageFwhm(data['constant'])
if data['smearing'] == 'LinearSpline':
return LinearSpline(data['q_data_points'], data['fwhm_values'])
if data['smearing'] == 'Pointwise':
return Pointwise([data['q_data_points'], data['R_data_points'], data['sQz_data_points']])
raise ValueError('Unknown resolution function type')


@@ -60,3 +62,60 @@ def as_dict(
self, skip: Optional[List[str]] = None
) -> dict[str, str]: # skip is kept for consistency of the as_dict signature
return {'smearing': 'LinearSpline', 'q_data_points': list(self.q_data_points), 'fwhm_values': list(self.fwhm_values)}

# Pointwise smearing function: uses the measured (Qz, R, sQz) points directly.
class Pointwise(ResolutionFunction):
    def __init__(self, q_data_points: list[np.ndarray]):
        # q_data_points holds three arrays: Qz values, reflectivity R and the Qz variances.
        self.q_data_points = q_data_points
        self.q = None

    def smearing(self, q: Union[np.ndarray, float, None] = None) -> np.ndarray:
        Qz = self.q_data_points[0]
        R = self.q_data_points[1]
        sQz = self.q_data_points[2]
        if q is None:
            q = self.q_data_points[0]
        self.q = q
        sQzs = np.sqrt(sQz)  # variances -> standard deviations
        if isinstance(Qz, float):
            Qz = np.array(Qz)

        return self.apply_smooth_smearing(Qz, R, sQzs)

    def as_dict(
        self, skip: Optional[List[str]] = None
    ) -> dict[str, str]:  # skip is kept for consistency of the as_dict signature
        return {
            'smearing': 'Pointwise',
            'q_data_points': list(self.q_data_points[0]),
            'R_data_points': list(self.q_data_points[1]),
            'sQz_data_points': list(self.q_data_points[2]),
        }

    def gaussian_smearing(self, qt, Qz, R, sQz):
        # Gaussian weights of the measured points around the target point qt.
        weights = np.exp(-0.5 * ((qt - Qz) / sQz) ** 2)
        if np.sum(weights) == 0 or not np.isfinite(np.sum(weights)):
            return np.sum(R)
        weights /= sQz * np.sqrt(2 * np.pi)
        return np.sum(R * weights) / np.sum(weights)

    def apply_smooth_smearing(self, Qz, R, sQzs):
        """Apply smooth resolution smearing by weighting with a Gaussian kernel."""
        if not isinstance(Qz, np.ndarray):
            Qz = np.array(Qz)
        if not isinstance(R, np.ndarray):
            R = np.array(R)
        R_smeared = np.zeros_like(self.q)

        for i, qt in enumerate(self.q):
            R_smeared[i] = self.gaussian_smearing(qt, Qz, R, sQzs)

        return R_smeared
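Finally, a self-contained sketch of the new resolution function with made-up numbers: construct it from measured points, evaluate the smeared reflectivity on an arbitrary grid, and round-trip it through the dictionary form consumed by ResolutionFunction.from_dict.

import numpy as np
from easyreflectometry.model import Pointwise, ResolutionFunction

# Made-up data: Qz points, reflectivity and Qz variances.
qz = np.linspace(0.01, 0.3, 50)
r = np.exp(-20 * qz)
sqz = np.full_like(qz, 1e-6)

resolution = Pointwise(q_data_points=[qz, r, sqz])
smeared = resolution.smearing(np.linspace(0.01, 0.3, 200))  # Gaussian-weighted R on a new grid
restored = ResolutionFunction.from_dict(resolution.as_dict())  # rebuilds an equivalent Pointwise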