Skip to content

Commit 3d9262e

Browse files
Merge pull request #125 from easyscience/develop
Release: merge develop into master
2 parents 9d6ca85 + a61581a commit 3d9262e

12 files changed

Lines changed: 580 additions & 82 deletions

File tree

.github/workflows/coverage.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ jobs:
8686
verbose: true
8787
token: ${{ secrets.CODECOV_TOKEN }}
8888

89-
# Job 2: Run integration tests with coverage and upload to Codecov
89+
# Job 3: Run integration tests with coverage and upload to Codecov
9090
integration-tests-coverage:
9191
runs-on: ubuntu-latest
9292

pyproject.toml

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ classifiers = [
2222
]
2323
requires-python = '>=3.11,<3.14'
2424
dependencies = [
25+
'essdiffraction', # ESS Diffraction library
2526
'numpy', # Numerical computing library
2627
'colorama', # Color terminal output
2728
'tabulate', # Pretty-print tabular data for terminal output
@@ -190,7 +191,13 @@ fail_under = 65 # Temporarily reduce to allow gradual improvement
190191

191192
[tool.ruff]
192193
# Temporarily exclude some directories until we have improved the code quality there
193-
exclude = ['tests', 'tmp']
194+
#exclude = ['tests', 'tmp']
195+
exclude = [
196+
'tmp',
197+
'tests/unit',
198+
'tests/integration/fitting',
199+
'tests/integration/scipp-analysis/tmp',
200+
]
194201
indent-width = 4
195202
line-length = 99
196203
# Enable new rules that are not yet stable, like DOC
@@ -264,7 +271,8 @@ ban-relative-imports = 'all'
264271
force-single-line = true
265272

266273
[tool.ruff.lint.per-file-ignores]
267-
'*test_*.py' = ['S101'] # allow asserts in test files
274+
'*test_*.py' = ['S101'] # allow asserts in test files
275+
'conftest.py' = ['S101'] # allow asserts in test files
268276
# Vendored jupyter_dark_detect: keep as-is from upstream for easy updates
269277
# https://github.com/OpenMined/jupyter-dark-detect/tree/main/jupyter_dark_detect
270278
'src/easydiffraction/utils/_vendored/jupyter_dark_detect/*' = [

src/easydiffraction/analysis/calculators/cryspy.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -361,8 +361,8 @@ def _convert_experiment_to_cryspy_cif(
361361
cif_lines.append(f'{engine_key_name} {attr_obj.value}')
362362

363363
x_data = experiment.data.x
364-
twotheta_min = float(x_data.min())
365-
twotheta_max = float(x_data.max())
364+
twotheta_min = f'{np.round(x_data.min(), 5):.5f}' # float(x_data.min())
365+
twotheta_max = f'{np.round(x_data.max(), 5):.5f}' # float(x_data.max())
366366
cif_lines.append('')
367367
if expt_type.beam_mode.value == BeamModeEnum.CONSTANT_WAVELENGTH:
368368
cif_lines.append(f'_range_2theta_min {twotheta_min}')

src/easydiffraction/experiments/categories/data/bragg_pd.py

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ def __init__(self, **kwargs):
7676
description='Standard uncertainty of the measured intensity at this data point.',
7777
value_spec=AttributeSpec(
7878
type_=DataTypes.NUMERIC,
79-
default=0.0,
79+
default=1.0,
8080
content_validator=RangeValidator(ge=0),
8181
),
8282
cif_handler=CifHandler(
@@ -321,7 +321,22 @@ def meas(self) -> np.ndarray:
321321

322322
@property
323323
def meas_su(self) -> np.ndarray:
324-
return np.fromiter((p.intensity_meas_su.value for p in self._calc_items), dtype=float)
324+
# TODO: The following is a temporary workaround to handle zero
325+
# or near-zero uncertainties in the data, when data is loaded
326+
# from CIF files. This is necessary because zero uncertainties
327+
# cause fitting algorithms to fail.
328+
# The current implementation is inefficient.
329+
# In the future, we should extend the functionality of
330+
# the NumericDescriptor to automatically replace the value
331+
# outside of the valid range (`content_validator`) with a
332+
# default value (`default`), when the value is set.
333+
# BraggPdExperiment._load_ascii_data_to_experiment() handles
334+
# this for ASCII data, but we also need to handle CIF data and
335+
# come up with a consistent approach for both data sources.
336+
original = np.fromiter((p.intensity_meas_su.value for p in self._calc_items), dtype=float)
337+
# Replace values smaller than 0.0001 with 1.0
338+
modified = np.where(original < 0.0001, 1.0, original)
339+
return modified
325340

326341
@property
327342
def calc(self) -> np.ndarray:

src/easydiffraction/utils/utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def _fetch_data_index() -> dict:
7474
_validate_url(index_url)
7575

7676
# macOS: sha256sum index.json
77-
index_hash = 'sha256:e78f5dd2f229ea83bfeb606502da602fc0b07136889877d3ab601694625dd3d7'
77+
index_hash = 'sha256:9aceaf51d298992058c80903283c9a83543329a063692d49b7aaee1156e76884'
7878
destination_dirname = 'easydiffraction'
7979
destination_fname = 'data-index.json'
8080
cache_dir = pooch.os_cache(destination_dirname)
Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
# SPDX-License-Identifier: BSD-3-Clause
2+
# Copyright (c) 2026 DMSC
3+
"""Shared fixtures for DREAM scipp-analysis integration tests.
4+
5+
This module provides pytest fixtures for downloading and parsing
6+
reduced diffraction data from the DREAM instrument in CIF format.
7+
"""
8+
9+
from pathlib import Path
10+
11+
import gemmi
12+
import pytest
13+
from pooch import retrieve
14+
15+
# Remote CIF file URL (regenerated nightly by scipp reduction pipeline)
16+
CIF_URL = 'https://pub-6c25ef91903d4301a3338bd53b370098.r2.dev/dream_reduced.cif'
17+
18+
# Expected datablock name in the CIF file
19+
DATABLOCK_NAME = 'reduced_tof'
20+
21+
22+
@pytest.fixture(scope='module')
23+
def cif_path(
24+
tmp_path_factory: pytest.TempPathFactory,
25+
) -> str:
26+
"""Download CIF file fresh each test session and return its path.
27+
28+
Uses tmp_path_factory to avoid pooch caching, ensuring the latest
29+
version of the nightly-regenerated CIF file is always used.
30+
"""
31+
tmp_dir = tmp_path_factory.mktemp('dream_data')
32+
return retrieve(url=CIF_URL, known_hash=None, path=tmp_dir)
33+
34+
35+
@pytest.fixture(scope='module')
36+
def cif_content(
37+
cif_path: str,
38+
) -> str:
39+
"""Read the CIF file content as text."""
40+
return Path(cif_path).read_text()
41+
42+
43+
@pytest.fixture(scope='module')
44+
def cif_document(
45+
cif_path: str,
46+
) -> gemmi.cif.Document:
47+
"""Read the CIF file with gemmi and return the document."""
48+
return gemmi.cif.read(cif_path)
49+
50+
51+
@pytest.fixture(scope='module')
52+
def cif_block(
53+
cif_document: gemmi.cif.Document,
54+
) -> gemmi.cif.Block:
55+
"""Return the 'reduced_tof' data block from the CIF document."""
56+
block = cif_document.find_block(DATABLOCK_NAME)
57+
assert block is not None, (
58+
f'Expected CIF datablock {DATABLOCK_NAME!r} was not found in the document.'
59+
)
60+
return block
Lines changed: 213 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,213 @@
1+
# SPDX-License-Identifier: BSD-3-Clause
2+
# Copyright (c) 2026 DMSC
3+
"""Tests for analyzing reduced diffraction data using easydiffraction.
4+
5+
These tests verify the complete workflow:
6+
1. Define project
7+
2. Add sample model manually defined
8+
3. Modify experiment CIF file
9+
4. Add experiment from modified CIF file
10+
5. Modify default experiment configuration
11+
6. Select parameters to be fitted
12+
7. Do fitting
13+
"""
14+
15+
from pathlib import Path
16+
17+
import pytest
18+
19+
import easydiffraction as ed
20+
21+
# CIF experiment type tags required by easydiffraction to identify
22+
# the experiment configuration (powder TOF neutron diffraction)
23+
EXPT_TYPE_TAGS = {
24+
'_expt_type.sample_form': 'powder',
25+
'_expt_type.beam_mode': 'time-of-flight',
26+
'_expt_type.radiation_probe': 'neutron',
27+
'_expt_type.scattering_type': 'bragg',
28+
}
29+
30+
31+
@pytest.fixture(scope='module')
32+
def prepared_cif_path(
33+
cif_path: str,
34+
tmp_path_factory: pytest.TempPathFactory,
35+
) -> str:
36+
"""Prepare CIF file with experiment type tags for
37+
easydiffraction.
38+
"""
39+
with Path(cif_path).open() as f:
40+
content = f.read()
41+
42+
# Add experiment type tags if missing
43+
for tag, value in EXPT_TYPE_TAGS.items():
44+
if tag not in content:
45+
content += f'\n{tag} {value}'
46+
47+
# Write to temp file
48+
tmp_dir = tmp_path_factory.mktemp('dream_data')
49+
prepared_path = tmp_dir / 'dream_reduced_prepared.cif'
50+
prepared_path.write_text(content)
51+
52+
return str(prepared_path)
53+
54+
55+
@pytest.fixture(scope='module')
56+
def project_with_data(
57+
prepared_cif_path: str,
58+
) -> ed.Project:
59+
"""Create project with sample model, experiment data, and
60+
configuration.
61+
62+
1. Define project
63+
2. Add sample model manually defined
64+
3. Modify experiment CIF file
65+
4. Add experiment from modified CIF file
66+
5. Modify default experiment configuration
67+
"""
68+
# Step 1: Define Project
69+
project = ed.Project()
70+
71+
# Step 2: Define Sample Model manually
72+
project.sample_models.add(name='si')
73+
sample_model = project.sample_models['si']
74+
75+
sample_model.space_group.name_h_m = 'F d -3 m'
76+
sample_model.space_group.it_coordinate_system_code = '1'
77+
78+
sample_model.cell.length_a = 5.43146
79+
80+
sample_model.atom_sites.add(
81+
label='Si',
82+
type_symbol='Si',
83+
fract_x=0.125,
84+
fract_y=0.125,
85+
fract_z=0.125,
86+
wyckoff_letter='c',
87+
b_iso=1.1,
88+
)
89+
90+
# Step 3: Add experiment from modified CIF file
91+
project.experiments.add(cif_path=prepared_cif_path)
92+
experiment = project.experiments['reduced_tof']
93+
94+
# Step 4: Configure experiment
95+
# Link phase
96+
experiment.linked_phases.add(id='si', scale=0.8)
97+
98+
# Instrument setup
99+
experiment.instrument.setup_twotheta_bank = 90.0
100+
experiment.instrument.calib_d_to_tof_linear = 18630.0
101+
102+
# Peak profile parameters
103+
experiment.peak.broad_gauss_sigma_0 = 48500.0
104+
experiment.peak.broad_gauss_sigma_1 = 3000.0
105+
experiment.peak.broad_gauss_sigma_2 = 0.0
106+
experiment.peak.broad_mix_beta_0 = 0.05
107+
experiment.peak.broad_mix_beta_1 = 0.0
108+
experiment.peak.asym_alpha_0 = 0.0
109+
experiment.peak.asym_alpha_1 = 0.26
110+
111+
# Excluded regions
112+
experiment.excluded_regions.add(id='1', start=0, end=10000)
113+
experiment.excluded_regions.add(id='2', start=70000, end=200000)
114+
115+
# Background points
116+
background_points = [
117+
('2', 10000, 0.01),
118+
('3', 14000, 0.2),
119+
('4', 21000, 0.7),
120+
('5', 27500, 0.6),
121+
('6', 40000, 0.3),
122+
('7', 50000, 0.6),
123+
('8', 61000, 0.7),
124+
('9', 70000, 0.6),
125+
]
126+
for id_, x, y in background_points:
127+
experiment.background.add(id=id_, x=x, y=y)
128+
129+
return project
130+
131+
132+
@pytest.fixture(scope='module')
133+
def fitted_project(
134+
project_with_data: ed.Project,
135+
) -> ed.Project:
136+
"""Perform fit and return project with results.
137+
138+
6. Select parameters to be fitted
139+
7. Do fitting
140+
"""
141+
project = project_with_data
142+
sample_model = project.sample_models['si']
143+
experiment = project.experiments['reduced_tof']
144+
145+
# Step 5: Select parameters to be fitted
146+
# Set free parameters for sample model
147+
sample_model.atom_sites['Si'].b_iso.free = True
148+
149+
# Set free parameters for experiment
150+
experiment.linked_phases['si'].scale.free = True
151+
experiment.instrument.calib_d_to_tof_linear.free = True
152+
153+
experiment.peak.broad_gauss_sigma_0.free = True
154+
experiment.peak.broad_gauss_sigma_1.free = True
155+
experiment.peak.broad_mix_beta_0.free = True
156+
157+
# Set free parameters for background
158+
for point in experiment.background:
159+
point.y.free = True
160+
161+
# Step 6: Do fitting
162+
project.analysis.fit()
163+
164+
return project
165+
166+
167+
# Test: Data Loading
168+
169+
170+
def test_analyze_reduced_data__load_cif(
171+
project_with_data: ed.Project,
172+
) -> None:
173+
"""Verify CIF data loads into project correctly."""
174+
assert 'reduced_tof' in project_with_data.experiments.names
175+
176+
177+
def test_analyze_reduced_data__data_size(
178+
project_with_data: ed.Project,
179+
) -> None:
180+
"""Verify loaded data has expected size."""
181+
experiment = project_with_data.experiments['reduced_tof']
182+
# Data should have substantial number of points
183+
assert experiment.data.x.size > 100
184+
185+
186+
# Test: Configuration
187+
188+
189+
def test_analyze_reduced_data__phase_linked(
190+
project_with_data: ed.Project,
191+
) -> None:
192+
"""Verify phase is correctly linked to experiment."""
193+
experiment = project_with_data.experiments['reduced_tof']
194+
assert 'si' in experiment.linked_phases.names
195+
196+
197+
def test_analyze_reduced_data__background_set(
198+
project_with_data: ed.Project,
199+
) -> None:
200+
"""Verify background points are configured."""
201+
experiment = project_with_data.experiments['reduced_tof']
202+
assert len(experiment.background.names) >= 5
203+
204+
205+
# Test: Fitting
206+
207+
208+
def test_analyze_reduced_data__fit_quality(
209+
fitted_project: ed.Project,
210+
) -> None:
211+
"""Verify fit quality is reasonable (chi-square value)."""
212+
chi_square = fitted_project.analysis.fit_results.reduced_chi_square
213+
assert chi_square == pytest.approx(16.0, abs=0.1)

0 commit comments

Comments
 (0)