experiments

collections

background

ChebyshevPolynomialBackground

Bases: BackgroundBase

Source code in src/easydiffraction/experiments/collections/background.py
class ChebyshevPolynomialBackground(BackgroundBase):
    _description: str = 'Chebyshev polynomial background'

    @property
    def _child_class(self) -> Type[PolynomialTerm]:
        return PolynomialTerm

    def calculate(self, x_data: np.ndarray) -> np.ndarray:
        """Evaluate polynomial background over x_data."""
        if not self._items:
            print(warning('No background points found. Setting background to zero.'))
            return np.zeros_like(x_data)

        u = (x_data - x_data.min()) / (x_data.max() - x_data.min()) * 2 - 1  # scale to [-1, 1]
        coefs = [term.coef.value for term in self._items.values()]
        y_data = chebval(u, coefs)
        return y_data

    def show(self) -> None:
        columns_headers: List[str] = ['Order', 'Coefficient']
        columns_alignment = ['left', 'left']
        columns_data: List[List[Union[int, float]]] = []
        for term in self._items.values():
            order = term.order.value
            coef = term.coef.value
            columns_data.append([order, coef])

        print(paragraph('Chebyshev polynomial background terms'))
        render_table(
            columns_headers=columns_headers,
            columns_alignment=columns_alignment,
            columns_data=columns_data,
        )
calculate(x_data)

Evaluate polynomial background over x_data.

Source code in src/easydiffraction/experiments/collections/background.py
def calculate(self, x_data: np.ndarray) -> np.ndarray:
    """Evaluate polynomial background over x_data."""
    if not self._items:
        print(warning('No background points found. Setting background to zero.'))
        return np.zeros_like(x_data)

    u = (x_data - x_data.min()) / (x_data.max() - x_data.min()) * 2 - 1  # scale to [-1, 1]
    coefs = [term.coef.value for term in self._items.values()]
    y_data = chebval(u, coefs)
    return y_data
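
For orientation, here is a minimal standalone sketch of the evaluation that calculate performs: the scan grid is rescaled to [-1, 1] and passed to numpy.polynomial.chebyshev.chebval together with the term coefficients. The coefficient values below are hypothetical; in the library they come from the collection's PolynomialTerm items.

import numpy as np
from numpy.polynomial.chebyshev import chebval

x_data = np.linspace(10.0, 140.0, 6)   # e.g. a 2-theta scan grid
coefs = [100.0, -20.0, 5.0]            # hypothetical Chebyshev coefficients

# Same rescaling as ChebyshevPolynomialBackground.calculate: map x_data to [-1, 1]
u = (x_data - x_data.min()) / (x_data.max() - x_data.min()) * 2 - 1
background = chebval(u, coefs)
print(background)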

LineSegmentBackground

Bases: BackgroundBase

Source code in src/easydiffraction/experiments/collections/background.py
class LineSegmentBackground(BackgroundBase):
    _description: str = 'Linear interpolation between points'

    @property
    def _child_class(self) -> Type[Point]:
        return Point

    def calculate(self, x_data: np.ndarray) -> np.ndarray:
        """Interpolate background points over x_data."""
        if not self._items:
            print(warning('No background points found. Setting background to zero.'))
            return np.zeros_like(x_data)

        background_x = np.array([point.x.value for point in self._items.values()])
        background_y = np.array([point.y.value for point in self._items.values()])
        interp_func = interp1d(
            background_x,
            background_y,
            kind='linear',
            bounds_error=False,
            fill_value=(
                background_y[0],
                background_y[-1],
            ),
        )
        y_data = interp_func(x_data)
        return y_data

    def show(self) -> None:
        columns_headers: List[str] = ['X', 'Intensity']
        columns_alignment = ['left', 'left']
        columns_data: List[List[float]] = []
        for point in self._items.values():
            x = point.x.value
            y = point.y.value
            columns_data.append([x, y])

        print(paragraph('Line-segment background points'))
        render_table(
            columns_headers=columns_headers,
            columns_alignment=columns_alignment,
            columns_data=columns_data,
        )
calculate(x_data)

Interpolate background points over x_data.

Source code in src/easydiffraction/experiments/collections/background.py
def calculate(self, x_data: np.ndarray) -> np.ndarray:
    """Interpolate background points over x_data."""
    if not self._items:
        print(warning('No background points found. Setting background to zero.'))
        return np.zeros_like(x_data)

    background_x = np.array([point.x.value for point in self._items.values()])
    background_y = np.array([point.y.value for point in self._items.values()])
    interp_func = interp1d(
        background_x,
        background_y,
        kind='linear',
        bounds_error=False,
        fill_value=(
            background_y[0],
            background_y[-1],
        ),
    )
    y_data = interp_func(x_data)
    return y_data
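
As a minimal standalone sketch of what calculate does here, the anchor points below (hypothetical values) are linearly interpolated with scipy.interpolate.interp1d, extrapolating with the first and last background values outside the covered range.

import numpy as np
from scipy.interpolate import interp1d

# Hypothetical background anchor points
background_x = np.array([10.0, 60.0, 120.0])
background_y = np.array([200.0, 150.0, 180.0])

# Same call as LineSegmentBackground.calculate: linear interpolation with
# constant extrapolation to the first/last y value outside the point range
interp_func = interp1d(
    background_x,
    background_y,
    kind='linear',
    bounds_error=False,
    fill_value=(background_y[0], background_y[-1]),
)
x_data = np.linspace(5.0, 130.0, 6)
print(interp_func(x_data))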

excluded_regions

ExcludedRegions

Bases: Collection

Collection of ExcludedRegion instances.

Source code in src/easydiffraction/experiments/collections/excluded_regions.py
class ExcludedRegions(Collection):
    """
    Collection of ExcludedRegion instances.
    """

    @property
    def _type(self) -> str:
        return 'category'  # datablock or category

    @property
    def _child_class(self) -> Type[ExcludedRegion]:
        return ExcludedRegion

    def on_item_added(self, item: ExcludedRegion) -> None:
        """
        Mark excluded points in the experiment pattern when a new region is added.
        """
        datastore = self._parent.datastore

        # Boolean mask for points within the new excluded region
        in_region = (datastore.full_x >= item.start.value) & (datastore.full_x <= item.end.value)

        # Update the exclusion mask
        datastore.excluded[in_region] = True

        # Update the excluded points in the datastore
        datastore.x = datastore.full_x[~datastore.excluded]
        datastore.meas = datastore.full_meas[~datastore.excluded]
        datastore.meas_su = datastore.full_meas_su[~datastore.excluded]

    def show(self) -> None:
        # TODO: Consider moving this to the base class
        #  to avoid code duplication with implementations in Background, etc.
        #  Consider using parameter names as column headers
        columns_headers: List[str] = ['start', 'end']
        columns_alignment = ['left', 'left']
        columns_data: List[List[float]] = []
        for region in self._items.values():
            start = region.start.value
            end = region.end.value
            columns_data.append([start, end])

        print(paragraph('Excluded regions'))
        render_table(
            columns_headers=columns_headers,
            columns_alignment=columns_alignment,
            columns_data=columns_data,
        )
on_item_added(item)

Mark excluded points in the experiment pattern when a new region is added.

Source code in src/easydiffraction/experiments/collections/excluded_regions.py
def on_item_added(self, item: ExcludedRegion) -> None:
    """
    Mark excluded points in the experiment pattern when a new region is added.
    """
    datastore = self._parent.datastore

    # Boolean mask for points within the new excluded region
    in_region = (datastore.full_x >= item.start.value) & (datastore.full_x <= item.end.value)

    # Update the exclusion mask
    datastore.excluded[in_region] = True

    # Update the excluded points in the datastore
    datastore.x = datastore.full_x[~datastore.excluded]
    datastore.meas = datastore.full_meas[~datastore.excluded]
    datastore.meas_su = datastore.full_meas_su[~datastore.excluded]
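
The masking above can be illustrated with a small self-contained sketch (hypothetical pattern and region limits); it applies the same boolean mask that on_item_added builds and keeps only the non-excluded points for fitting.

import numpy as np

# Hypothetical pattern and one excluded region [start, end]
full_x = np.linspace(10.0, 20.0, 11)
full_meas = np.ones_like(full_x)
start, end = 13.0, 16.0

excluded = np.zeros_like(full_x, dtype=bool)
in_region = (full_x >= start) & (full_x <= end)   # same mask as on_item_added
excluded[in_region] = True

# Points actually used for fitting exclude the masked region
x = full_x[~excluded]
meas = full_meas[~excluded]
print(x)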

linked_phases

LinkedPhases

Bases: Collection

Collection of LinkedPhase instances.

Source code in src/easydiffraction/experiments/collections/linked_phases.py
class LinkedPhases(Collection):
    """
    Collection of LinkedPhase instances.
    """

    @property
    def _type(self) -> str:
        return 'category'  # datablock or category

    @property
    def _child_class(self) -> Type[LinkedPhase]:
        return LinkedPhase

datastore

BaseDatastore

Base class for all data stores.

Attributes

meas : Optional[np.ndarray]
    Measured intensities.
meas_su : Optional[np.ndarray]
    Standard uncertainties of measured intensities.
excluded : Optional[np.ndarray]
    Flags for excluded points.
_calc : Optional[np.ndarray]
    Stored calculated intensities.

Source code in src/easydiffraction/experiments/datastore.py
class BaseDatastore:
    """
    Base class for all data stores.

    Attributes
    ----------
    meas : Optional[np.ndarray]
        Measured intensities.
    meas_su : Optional[np.ndarray]
        Standard uncertainties of measured intensities.
    excluded : Optional[np.ndarray]
        Flags for excluded points.
    _calc : Optional[np.ndarray]
        Stored calculated intensities.
    """

    def __init__(self) -> None:
        self.meas: Optional[np.ndarray] = None
        self.meas_su: Optional[np.ndarray] = None
        self.excluded: Optional[np.ndarray] = None
        self._calc: Optional[np.ndarray] = None

    @property
    def calc(self) -> Optional[np.ndarray]:
        """Access calculated intensities. Should be updated via external calculation.

        Returns:
            Optional[np.ndarray]: Calculated intensities array or None if not set.
        """
        return self._calc

    @calc.setter
    @enforce_type
    def calc(self, values: np.ndarray) -> None:
        """Set calculated intensities (from Analysis.calculate_pattern()).

        Args:
            values (np.ndarray): Array of calculated intensities.
        """
        self._calc = values

    @abstractmethod
    def _cif_mapping(self) -> dict[str, str]:
        """
        Must be implemented in subclasses to return a mapping from attribute
        names to CIF tags.

        Returns:
            dict[str, str]: Mapping from attribute names to CIF tags.
        """
        pass

    def as_cif(self, max_points: Optional[int] = None) -> str:
        """
        Generate a CIF-formatted string representing the datastore data.

        Args:
            max_points (Optional[int]): Maximum number of points to include
            from start and end.
            If the total points exceed twice this number, data in the middle
            is truncated with '...'.

        Returns:
            str: CIF-formatted string of the data. Empty string if no data available.
        """
        cif_lines = ['loop_']

        # Add CIF tags from mapping
        mapping = self._cif_mapping()
        for cif_key in mapping.values():
            cif_lines.append(cif_key)

        # Collect data arrays according to mapping keys
        data_arrays = []
        for attr_name in mapping.keys():
            attr_array = getattr(self, attr_name, None)
            if attr_array is None:
                data_arrays.append(np.array([]))
            else:
                data_arrays.append(attr_array)

        # Return empty string if no data
        if not data_arrays or not data_arrays[0].size:
            return ''

        # Determine number of points in the first data array
        n_points = len(data_arrays[0])

        # Function to format a single row of data
        def _format_row(i: int) -> str:
            return ' '.join(str(data_arrays[j][i]) for j in range(len(data_arrays)))

        # Add data lines, applying max_points truncation if needed
        if max_points is not None and n_points > 2 * max_points:
            for i in range(max_points):
                cif_lines.append(_format_row(i))
            cif_lines.append('...')
            for i in range(-max_points, 0):
                cif_lines.append(_format_row(i))
        else:
            for i in range(n_points):
                cif_lines.append(_format_row(i))

        cif_str = '\n'.join(cif_lines)

        return cif_str

as_cif(max_points=None)

Generate a CIF-formatted string representing the datastore data.

Parameters:

max_points : Optional[int], default None
    Maximum number of points to include from the start and end. If the total
    number of points exceeds twice this value, the data in the middle is
    truncated with '...'.

Returns:

str
    CIF-formatted string of the data. Empty string if no data available.

Source code in src/easydiffraction/experiments/datastore.py
def as_cif(self, max_points: Optional[int] = None) -> str:
    """
    Generate a CIF-formatted string representing the datastore data.

    Args:
        max_points (Optional[int]): Maximum number of points to include
        from start and end.
        If the total points exceed twice this number, data in the middle
        is truncated with '...'.

    Returns:
        str: CIF-formatted string of the data. Empty string if no data available.
    """
    cif_lines = ['loop_']

    # Add CIF tags from mapping
    mapping = self._cif_mapping()
    for cif_key in mapping.values():
        cif_lines.append(cif_key)

    # Collect data arrays according to mapping keys
    data_arrays = []
    for attr_name in mapping.keys():
        attr_array = getattr(self, attr_name, None)
        if attr_array is None:
            data_arrays.append(np.array([]))
        else:
            data_arrays.append(attr_array)

    # Return empty string if no data
    if not data_arrays or not data_arrays[0].size:
        return ''

    # Determine number of points in the first data array
    n_points = len(data_arrays[0])

    # Function to format a single row of data
    def _format_row(i: int) -> str:
        return ' '.join(str(data_arrays[j][i]) for j in range(len(data_arrays)))

    # Add data lines, applying max_points truncation if needed
    if max_points is not None and n_points > 2 * max_points:
        for i in range(max_points):
            cif_lines.append(_format_row(i))
        cif_lines.append('...')
        for i in range(-max_points, 0):
            cif_lines.append(_format_row(i))
    else:
        for i in range(n_points):
            cif_lines.append(_format_row(i))

    cif_str = '\n'.join(cif_lines)

    return cif_str
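
A small sketch of the truncation rule, using hypothetical row data and the constant-wavelength CIF tags shown further down this page: with max_points=2 and more than four rows, only the first and last two rows are kept around a '...' marker.

# Hypothetical data: 7 rows, max_points=2 -> keep the first 2 and last 2 rows
n_points, max_points = 7, 2
rows = [f'{10.0 + i} {100 + i}' for i in range(n_points)]

cif_lines = ['loop_', '_pd_meas.2theta_scan', '_pd_meas.intensity_total']
if max_points is not None and n_points > 2 * max_points:
    cif_lines += rows[:max_points] + ['...'] + rows[-max_points:]
else:
    cif_lines += rows
print('\n'.join(cif_lines))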

calc property writable

Access calculated intensities. Should be updated via external calculation.

Returns:

Optional[np.ndarray]
    Calculated intensities array, or None if not set.

DatastoreFactory

Source code in src/easydiffraction/experiments/datastore.py
class DatastoreFactory:
    _supported = {
        'powder': PowderDatastore,
        'single crystal': SingleCrystalDatastore,
    }

    @classmethod
    def create(
        cls,
        sample_form: str = SampleFormEnum.default(),
        beam_mode: str = BeamModeEnum.default(),
    ) -> BaseDatastore:
        """
        Create and return a datastore object for the given sample form.

        Args:
            sample_form (str): Sample form type, e.g. 'powder' or 'single crystal'.
            beam_mode (str): Beam mode for powder sample form.

        Returns:
            BaseDatastore: Instance of a datastore class corresponding to sample form.
        """
        supported_sample_forms = list(cls._supported.keys())
        if sample_form not in supported_sample_forms:
            raise ValueError(f"Unsupported sample form: '{sample_form}'.\nSupported sample forms: {supported_sample_forms}")

        supported_beam_modes = ['time-of-flight', 'constant wavelength']
        if beam_mode not in supported_beam_modes:
            raise ValueError(f"Unsupported beam mode: '{beam_mode}'.\nSupported beam modes: {supported_beam_modes}")

        datastore_class = cls._supported[sample_form]
        if sample_form == 'powder':
            datastore_obj = datastore_class(beam_mode=beam_mode)
        else:
            datastore_obj = datastore_class()

        return datastore_obj

create(sample_form=SampleFormEnum.default(), beam_mode=BeamModeEnum.default()) classmethod

Create and return a datastore object for the given sample form.

Parameters:

sample_form : str, default SampleFormEnum.default()
    Sample form type, e.g. 'powder' or 'single crystal'.
beam_mode : str, default BeamModeEnum.default()
    Beam mode for powder sample form.

Returns:

BaseDatastore
    Instance of a datastore class corresponding to the sample form.

Source code in src/easydiffraction/experiments/datastore.py
@classmethod
def create(
    cls,
    sample_form: str = SampleFormEnum.default(),
    beam_mode: str = BeamModeEnum.default(),
) -> BaseDatastore:
    """
    Create and return a datastore object for the given sample form.

    Args:
        sample_form (str): Sample form type, e.g. 'powder' or 'single crystal'.
        beam_mode (str): Beam mode for powder sample form.

    Returns:
        BaseDatastore: Instance of a datastore class corresponding to sample form.
    """
    supported_sample_forms = list(cls._supported.keys())
    if sample_form not in supported_sample_forms:
        raise ValueError(f"Unsupported sample form: '{sample_form}'.\nSupported sample forms: {supported_sample_forms}")

    supported_beam_modes = ['time-of-flight', 'constant wavelength']
    if beam_mode not in supported_beam_modes:
        raise ValueError(f"Unsupported beam mode: '{beam_mode}'.\nSupported beam modes: {supported_beam_modes}")

    datastore_class = cls._supported[sample_form]
    if sample_form == 'powder':
        datastore_obj = datastore_class(beam_mode=beam_mode)
    else:
        datastore_obj = datastore_class()

    return datastore_obj
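
A minimal usage sketch, assuming the factory is importable from easydiffraction.experiments.datastore (the module path shown above): supported sample forms are 'powder' and 'single crystal', and powder datastores also take one of the listed beam modes.

from easydiffraction.experiments.datastore import DatastoreFactory  # assumed import path

# Powder datastore for a constant-wavelength experiment
datastore = DatastoreFactory.create(
    sample_form='powder',
    beam_mode='constant wavelength',
)

# Unsupported values raise ValueError listing the supported options
try:
    DatastoreFactory.create(sample_form='thin film')
except ValueError as exc:
    print(exc)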

PowderDatastore

Bases: BaseDatastore

Class for powder diffraction data.

Attributes

x : Optional[np.ndarray]
    Scan variable (e.g. 2θ or time-of-flight values).
d : Optional[np.ndarray]
    d-spacing values.
bkg : Optional[np.ndarray]
    Background values.

Source code in src/easydiffraction/experiments/datastore.py
class PowderDatastore(BaseDatastore):
    """Class for powder diffraction data.

    Attributes
    ----------
    x : Optional[np.ndarray]
        Scan variable (e.g. 2θ or time-of-flight values).
    d : Optional[np.ndarray]
        d-spacing values.
    bkg : Optional[np.ndarray]
        Background values.
    """

    def __init__(self, beam_mode: BeamModeEnum = BeamModeEnum.default()) -> None:
        """
        Initialize PowderDatastore.

        Args:
            beam_mode (str): Beam mode, e.g. 'time-of-flight' or 'constant wavelength'.
        """
        super().__init__()
        self.beam_mode = beam_mode
        self.x: Optional[np.ndarray] = None
        self.d: Optional[np.ndarray] = None
        self.bkg: Optional[np.ndarray] = None

    def _cif_mapping(self) -> dict[str, str]:
        """
        Return mapping from attribute names to CIF tags based on beam mode.

        Returns:
            dict[str, str]: Mapping dictionary.
        """
        # TODO: Decide where to have validation for beam_mode,
        #  here or in Experiment class or somewhere else.
        return {
            'time-of-flight': {
                'x': '_pd_meas.time_of_flight',
                'meas': '_pd_meas.intensity_total',
                'meas_su': '_pd_meas.intensity_total_su',
            },
            'constant wavelength': {
                'x': '_pd_meas.2theta_scan',
                'meas': '_pd_meas.intensity_total',
                'meas_su': '_pd_meas.intensity_total_su',
            },
        }[self.beam_mode]

__init__(beam_mode=BeamModeEnum.default())

Initialize PowderDatastore.

Parameters:

beam_mode : str, default BeamModeEnum.default()
    Beam mode, e.g. 'time-of-flight' or 'constant wavelength'.
Source code in src/easydiffraction/experiments/datastore.py
def __init__(self, beam_mode: BeamModeEnum = BeamModeEnum.default()) -> None:
    """
    Initialize PowderDatastore.

    Args:
        beam_mode (str): Beam mode, e.g. 'time-of-flight' or 'constant wavelength'.
    """
    super().__init__()
    self.beam_mode = beam_mode
    self.x: Optional[np.ndarray] = None
    self.d: Optional[np.ndarray] = None
    self.bkg: Optional[np.ndarray] = None

SingleCrystalDatastore

Bases: BaseDatastore

Class for single crystal diffraction data.

Attributes

sin_theta_over_lambda : Optional[np.ndarray]
    sin(θ)/λ values.
index_h : Optional[np.ndarray]
    Miller index h.
index_k : Optional[np.ndarray]
    Miller index k.
index_l : Optional[np.ndarray]
    Miller index l.

Source code in src/easydiffraction/experiments/datastore.py
class SingleCrystalDatastore(BaseDatastore):
    """Class for single crystal diffraction data.

    Attributes
    ----------
    sin_theta_over_lambda : Optional[np.ndarray]
        sin(θ)/λ values.
    index_h : Optional[np.ndarray]
        Miller index h.
    index_k : Optional[np.ndarray]
        Miller index k.
    index_l : Optional[np.ndarray]
        Miller index l.
    """

    def __init__(self) -> None:
        """
        Initialize SingleCrystalDatastore.
        """
        super().__init__()
        self.sin_theta_over_lambda: Optional[np.ndarray] = None
        self.index_h: Optional[np.ndarray] = None
        self.index_k: Optional[np.ndarray] = None
        self.index_l: Optional[np.ndarray] = None

    def _cif_mapping(self) -> dict[str, str]:
        """
        Return mapping from attribute names to CIF tags for single crystal data.

        Returns:
            dict[str, str]: Mapping dictionary.
        """
        return {
            'index_h': '_refln.index_h',
            'index_k': '_refln.index_k',
            'index_l': '_refln.index_l',
            'meas': '_refln.intensity_meas',
            'meas_su': '_refln.intensity_meas_su',
        }

__init__()

Initialize SingleCrystalDatastore.

Source code in src/easydiffraction/experiments/datastore.py
def __init__(self) -> None:
    """
    Initialize SingleCrystalDatastore.
    """
    super().__init__()
    self.sin_theta_over_lambda: Optional[np.ndarray] = None
    self.index_h: Optional[np.ndarray] = None
    self.index_k: Optional[np.ndarray] = None
    self.index_l: Optional[np.ndarray] = None

experiment

BaseExperiment

Bases: Datablock

Base class for all experiments with only core attributes. Wraps experiment type, instrument and datastore.

Source code in src/easydiffraction/experiments/experiment.py
class BaseExperiment(Datablock):
    """
    Base class for all experiments with only core attributes.
    Wraps experiment type, instrument and datastore.
    """

    # TODO: Find better name for the attribute 'type'.
    #  1. It shadows the built-in type() function.
    #  2. It is not very clear what it refers to.
    def __init__(self, name: str, type: ExperimentType):
        self.name = name
        self.type = type
        self.datastore = DatastoreFactory.create(
            sample_form=self.type.sample_form.value,
            beam_mode=self.type.beam_mode.value,
        )

    # ---------------------------
    # Name (ID) of the experiment
    # ---------------------------

    @property
    def name(self):
        return self._name

    @name.setter
    @enforce_type
    def name(self, new_name: str):
        self._name = new_name

    # ---------------
    # Experiment type
    # ---------------

    @property
    def type(self):
        return self._type

    @type.setter
    @enforce_type
    def type(self, new_experiment_type: ExperimentType):
        self._type = new_experiment_type

    # ----------------
    # Misc. Need to be sorted
    # ----------------

    def as_cif(
        self,
        max_points: Optional[int] = None,
    ) -> str:
        """
        Export the sample model to CIF format.
        Returns:
            str: CIF string representation of the experiment.
        """
        # Data block header
        cif_lines: List[str] = [f'data_{self.name}']

        # Experiment type
        cif_lines += ['', self.type.as_cif()]

        # Instrument setup and calibration
        if hasattr(self, 'instrument'):
            cif_lines += ['', self.instrument.as_cif()]

        # Peak profile, broadening and asymmetry
        if hasattr(self, 'peak'):
            cif_lines += ['', self.peak.as_cif()]

        # Phase scale factors for powder experiments
        if hasattr(self, 'linked_phases') and self.linked_phases._items:
            cif_lines += ['', self.linked_phases.as_cif()]

        # Crystal scale factor for single crystal experiments
        if hasattr(self, 'linked_crystal'):
            cif_lines += ['', self.linked_crystal.as_cif()]

        # Background points
        if hasattr(self, 'background') and self.background._items:
            cif_lines += ['', self.background.as_cif()]

        # Excluded regions
        if hasattr(self, 'excluded_regions') and self.excluded_regions._items:
            cif_lines += ['', self.excluded_regions.as_cif()]

        # Measured data
        if hasattr(self, 'datastore'):
            cif_lines += ['', self.datastore.as_cif(max_points=max_points)]

        return '\n'.join(cif_lines)

    def show_as_cif(self) -> None:
        cif_text: str = self.as_cif(max_points=5)
        paragraph_title: str = paragraph(f"Experiment 🔬 '{self.name}' as cif")
        render_cif(cif_text, paragraph_title)

    @abstractmethod
    def _load_ascii_data_to_experiment(self, data_path: str) -> None:
        pass

as_cif(max_points=None)

Export the sample model to CIF format.

Returns:

str
    CIF string representation of the experiment.

Source code in src/easydiffraction/experiments/experiment.py
def as_cif(
    self,
    max_points: Optional[int] = None,
) -> str:
    """
    Export the sample model to CIF format.
    Returns:
        str: CIF string representation of the experiment.
    """
    # Data block header
    cif_lines: List[str] = [f'data_{self.name}']

    # Experiment type
    cif_lines += ['', self.type.as_cif()]

    # Instrument setup and calibration
    if hasattr(self, 'instrument'):
        cif_lines += ['', self.instrument.as_cif()]

    # Peak profile, broadening and asymmetry
    if hasattr(self, 'peak'):
        cif_lines += ['', self.peak.as_cif()]

    # Phase scale factors for powder experiments
    if hasattr(self, 'linked_phases') and self.linked_phases._items:
        cif_lines += ['', self.linked_phases.as_cif()]

    # Crystal scale factor for single crystal experiments
    if hasattr(self, 'linked_crystal'):
        cif_lines += ['', self.linked_crystal.as_cif()]

    # Background points
    if hasattr(self, 'background') and self.background._items:
        cif_lines += ['', self.background.as_cif()]

    # Excluded regions
    if hasattr(self, 'excluded_regions') and self.excluded_regions._items:
        cif_lines += ['', self.excluded_regions.as_cif()]

    # Measured data
    if hasattr(self, 'datastore'):
        cif_lines += ['', self.datastore.as_cif(max_points=max_points)]

    return '\n'.join(cif_lines)

BasePowderExperiment

Bases: BaseExperiment

Base class for all powder experiments.

Source code in src/easydiffraction/experiments/experiment.py
class BasePowderExperiment(BaseExperiment):
    """
    Base class for all powder experiments.
    """

    def __init__(
        self,
        name: str,
        type: ExperimentType,
    ) -> None:
        super().__init__(name=name, type=type)

        self._peak_profile_type: str = PeakProfileTypeEnum.default(
            self.type.scattering_type.value,
            self.type.beam_mode.value,
        ).value
        self.peak = PeakFactory.create(
            scattering_type=self.type.scattering_type.value,
            beam_mode=self.type.beam_mode.value,
            profile_type=self._peak_profile_type,
        )

        self.linked_phases: LinkedPhases = LinkedPhases()
        self.excluded_regions: ExcludedRegions = ExcludedRegions(parent=self)

    @abstractmethod
    def _load_ascii_data_to_experiment(self, data_path: str) -> None:
        pass

    @property
    def peak_profile_type(self):
        return self._peak_profile_type

    @peak_profile_type.setter
    def peak_profile_type(self, new_type: str):
        if new_type not in PeakFactory._supported[self.type.scattering_type.value][self.type.beam_mode.value]:
            supported_types = list(PeakFactory._supported[self.type.scattering_type.value][self.type.beam_mode.value].keys())
            print(warning(f"Unsupported peak profile '{new_type}'"))
            print(f'Supported peak profiles: {supported_types}')
            print("For more information, use 'show_supported_peak_profile_types()'")
            return
        self.peak = PeakFactory.create(
            scattering_type=self.type.scattering_type.value,
            beam_mode=self.type.beam_mode.value,
            profile_type=new_type,
        )
        self._peak_profile_type = new_type
        print(paragraph(f"Peak profile type for experiment '{self.name}' changed to"))
        print(new_type)

    def show_supported_peak_profile_types(self):
        columns_headers = ['Peak profile type', 'Description']
        columns_alignment = ['left', 'left']
        columns_data = []

        scattering_type = self.type.scattering_type.value
        beam_mode = self.type.beam_mode.value

        for profile_type in PeakFactory._supported[scattering_type][beam_mode].keys():
            columns_data.append([profile_type.value, profile_type.description()])

        print(paragraph('Supported peak profile types'))
        render_table(
            columns_headers=columns_headers,
            columns_alignment=columns_alignment,
            columns_data=columns_data,
        )

    def show_current_peak_profile_type(self):
        print(paragraph('Current peak profile type'))
        print(self.peak_profile_type)
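
A short usage sketch of the peak-profile helpers, assuming Experiment is importable from easydiffraction.experiments.experiment and that these defaults produce a powder experiment; the profile name assigned at the end is deliberately invalid, to show that the setter only warns and keeps the current profile.

from easydiffraction.experiments.experiment import Experiment  # assumed import path

expt = Experiment(name='npd', sample_form='powder', beam_mode='constant wavelength')

expt.show_supported_peak_profile_types()
expt.show_current_peak_profile_type()

# An unsupported name only prints a warning; the current profile type is kept
expt.peak_profile_type = 'not-a-real-profile'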

Experiment(**kwargs)

User-facing API for creating an experiment. Accepts keyword arguments and delegates validation and creation to ExperimentFactory.

Source code in src/easydiffraction/experiments/experiment.py
def Experiment(**kwargs):
    """
    User-facing API for creating an experiment. Accepts keyword arguments and delegates
    validation and creation to ExperimentFactory.
    """
    return ExperimentFactory.create(**kwargs)
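
Two hedged usage sketches based on the valid argument sets listed under ExperimentFactory below: one experiment created from an ASCII data file (hypothetical path), and one defined without measured data. The import path is assumed from the module location shown above.

from easydiffraction.experiments.experiment import Experiment  # assumed import path

# Experiment with measured data loaded from an ASCII file (hypothetical path)
expt = Experiment(
    name='npd',
    data_path='measured.xye',
    sample_form='powder',
    beam_mode='constant wavelength',
)

# Experiment defined without measured data
empty_expt = Experiment(name='simulation')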

ExperimentFactory

Creates Experiment instances with only relevant attributes.

Source code in src/easydiffraction/experiments/experiment.py
class ExperimentFactory:
    """Creates Experiment instances with only relevant attributes."""

    _valid_arg_sets = [
        {
            'required': ['cif_path'],
            'optional': [],
        },
        {
            'required': ['cif_str'],
            'optional': [],
        },
        {
            'required': [
                'name',
                'data_path',
            ],
            'optional': [
                'sample_form',
                'beam_mode',
                'radiation_probe',
                'scattering_type',
            ],
        },
        {
            'required': ['name'],
            'optional': [
                'sample_form',
                'beam_mode',
                'radiation_probe',
                'scattering_type',
            ],
        },
    ]

    _supported = {
        ScatteringTypeEnum.BRAGG: {
            SampleFormEnum.POWDER: PowderExperiment,
            SampleFormEnum.SINGLE_CRYSTAL: SingleCrystalExperiment,
        },
        ScatteringTypeEnum.TOTAL: {
            SampleFormEnum.POWDER: PairDistributionFunctionExperiment,
        },
    }

    @classmethod
    def create(cls, **kwargs):
        """
        Main factory method for creating an experiment instance.
        Validates argument combinations and dispatches to the appropriate creation method.
        Raises ValueError if arguments are invalid or no valid dispatch is found.
        """
        # Check for valid argument combinations
        user_args = [k for k, v in kwargs.items() if v is not None]
        if not cls.is_valid_args(user_args):
            raise ValueError(f'Invalid argument combination: {user_args}')

        # Validate enum arguments if provided
        if 'sample_form' in kwargs:
            SampleFormEnum(kwargs['sample_form'])
        if 'beam_mode' in kwargs:
            BeamModeEnum(kwargs['beam_mode'])
        if 'radiation_probe' in kwargs:
            RadiationProbeEnum(kwargs['radiation_probe'])
        if 'scattering_type' in kwargs:
            ScatteringTypeEnum(kwargs['scattering_type'])

        # Dispatch to the appropriate creation method
        if 'cif_path' in kwargs:
            return cls._create_from_cif_path(kwargs)
        elif 'cif_str' in kwargs:
            return cls._create_from_cif_str(kwargs)
        elif 'data_path' in kwargs:
            return cls._create_from_data_path(kwargs)
        elif 'name' in kwargs:
            return cls._create_without_data(kwargs)

    @staticmethod
    def _create_from_cif_path(cif_path):
        """
        Create an experiment from a CIF file path.
        Not yet implemented.
        """
        # TODO: Implement CIF file loading logic
        raise NotImplementedError('CIF file loading not implemented yet.')

    @staticmethod
    def _create_from_cif_str(cif_str):
        """
        Create an experiment from a CIF string.
        Not yet implemented.
        """
        # TODO: Implement CIF string loading logic
        raise NotImplementedError('CIF string loading not implemented yet.')

    @classmethod
    def _create_from_data_path(cls, kwargs):
        """
        Create an experiment from a raw data ASCII file.
        Loads the experiment and attaches measured data from the specified file.
        """
        expt_type = cls._make_experiment_type(kwargs)
        scattering_type = expt_type.scattering_type.value
        sample_form = expt_type.sample_form.value
        expt_class = cls._supported[scattering_type][sample_form]
        expt_name = kwargs['name']
        expt_obj = expt_class(name=expt_name, type=expt_type)
        data_path = kwargs['data_path']
        expt_obj._load_ascii_data_to_experiment(data_path)
        return expt_obj

    @classmethod
    def _create_without_data(cls, kwargs):
        """
        Create an experiment without measured data.
        Returns an experiment instance with only metadata and configuration.
        """
        expt_type = cls._make_experiment_type(kwargs)
        scattering_type = expt_type.scattering_type.value
        sample_form = expt_type.sample_form.value
        expt_class = cls._supported[scattering_type][sample_form]
        expt_name = kwargs['name']
        expt_obj = expt_class(name=expt_name, type=expt_type)
        return expt_obj

    @classmethod
    def _make_experiment_type(cls, kwargs):
        """
        Helper to construct an ExperimentType from keyword arguments, using defaults as needed.
        """
        return ExperimentType(
            sample_form=kwargs.get('sample_form', SampleFormEnum.default()),
            beam_mode=kwargs.get('beam_mode', BeamModeEnum.default()),
            radiation_probe=kwargs.get('radiation_probe', RadiationProbeEnum.default()),
            scattering_type=kwargs.get('scattering_type', ScatteringTypeEnum.default()),
        )

    @staticmethod
    def is_valid_args(user_args):
        """
        Validate user argument set against allowed combinations.
        Returns True if the argument set matches any valid combination, else False.
        """
        user_arg_set = set(user_args)
        for arg_set in ExperimentFactory._valid_arg_sets:
            required = set(arg_set['required'])
            optional = set(arg_set['optional'])
            # Must have all required, and only required+optional
            if required.issubset(user_arg_set) and user_arg_set <= (required | optional):
                return True
        return False

create(**kwargs) classmethod

Main factory method for creating an experiment instance. Validates argument combinations and dispatches to the appropriate creation method. Raises ValueError if arguments are invalid or no valid dispatch is found.

Source code in src/easydiffraction/experiments/experiment.py
@classmethod
def create(cls, **kwargs):
    """
    Main factory method for creating an experiment instance.
    Validates argument combinations and dispatches to the appropriate creation method.
    Raises ValueError if arguments are invalid or no valid dispatch is found.
    """
    # Check for valid argument combinations
    user_args = [k for k, v in kwargs.items() if v is not None]
    if not cls.is_valid_args(user_args):
        raise ValueError(f'Invalid argument combination: {user_args}')

    # Validate enum arguments if provided
    if 'sample_form' in kwargs:
        SampleFormEnum(kwargs['sample_form'])
    if 'beam_mode' in kwargs:
        BeamModeEnum(kwargs['beam_mode'])
    if 'radiation_probe' in kwargs:
        RadiationProbeEnum(kwargs['radiation_probe'])
    if 'scattering_type' in kwargs:
        ScatteringTypeEnum(kwargs['scattering_type'])

    # Dispatch to the appropriate creation method
    if 'cif_path' in kwargs:
        return cls._create_from_cif_path(kwargs)
    elif 'cif_str' in kwargs:
        return cls._create_from_cif_str(kwargs)
    elif 'data_path' in kwargs:
        return cls._create_from_data_path(kwargs)
    elif 'name' in kwargs:
        return cls._create_without_data(kwargs)

is_valid_args(user_args) staticmethod

Validate user argument set against allowed combinations. Returns True if the argument set matches any valid combination, else False.

Source code in src/easydiffraction/experiments/experiment.py
@staticmethod
def is_valid_args(user_args):
    """
    Validate user argument set against allowed combinations.
    Returns True if the argument set matches any valid combination, else False.
    """
    user_arg_set = set(user_args)
    for arg_set in ExperimentFactory._valid_arg_sets:
        required = set(arg_set['required'])
        optional = set(arg_set['optional'])
        # Must have all required, and only required+optional
        if required.issubset(user_arg_set) and user_arg_set <= (required | optional):
            return True
    return False
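
A quick sketch of the validation rule: an argument set is accepted only if it contains all required keys of one combination and nothing outside that combination's required and optional keys (import path assumed).

from easydiffraction.experiments.experiment import ExperimentFactory  # assumed import path

# 'name' + 'data_path' plus an optional key is a valid combination
print(ExperimentFactory.is_valid_args(['name', 'data_path', 'beam_mode']))  # True

# 'cif_path' cannot be combined with 'data_path'
print(ExperimentFactory.is_valid_args(['cif_path', 'data_path']))  # False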

PairDistributionFunctionExperiment

Bases: BasePowderExperiment

PDF experiment class with specific attributes.

Source code in src/easydiffraction/experiments/experiment.py
class PairDistributionFunctionExperiment(BasePowderExperiment):
    """PDF experiment class with specific attributes."""

    def __init__(
        self,
        name: str,
        type: ExperimentType,
    ):
        super().__init__(name=name, type=type)

    def _load_ascii_data_to_experiment(self, data_path):
        """
        Loads x, y, sy values from an ASCII data file into the experiment.

        The file must be structured as:
            x  y  sy
        """
        try:
            from diffpy.utils.parsers.loaddata import loadData
        except ImportError:
            raise ImportError('diffpy module not found.')
        try:
            data = loadData(data_path)
        except Exception as e:
            raise IOError(f'Failed to read data from {data_path}: {e}')

        if data.shape[1] < 2:
            raise ValueError('Data file must have at least two columns: x and y.')

        default_sy = 0.03
        if data.shape[1] < 3:
            print(f'Warning: No uncertainty (sy) column provided. Defaulting to {default_sy}.')

        # Extract x, y, and sy data
        x = data[:, 0]
        # We should also add sx = data[:, 2] to capture the e.s.d. of x. It
        # might be useful in future.
        y = data[:, 1]
        # Using sqrt isn’t appropriate here, as the y-scale isn’t raw counts
        # and includes both positive and negative values. For now, set the
        # e.s.d. to a fixed value of 0.03 if it’s not included in the measured
        # data file. We should improve this later.
        # sy = data[:, 3] if data.shape[1] > 2 else np.sqrt(y)
        sy = data[:, 2] if data.shape[1] > 2 else np.full_like(y, fill_value=default_sy)

        # Attach the data to the experiment's datastore
        self.datastore.x = x
        self.datastore.meas = y
        self.datastore.meas_su = sy

        print(paragraph('Data loaded successfully'))
        print(f"Experiment 🔬 '{self.name}'. Number of data points: {len(x)}")

PowderExperiment

Bases: InstrumentMixin, BasePowderExperiment

Powder experiment class with specific attributes. Wraps background, peak profile, and linked phases.

Source code in src/easydiffraction/experiments/experiment.py
class PowderExperiment(
    InstrumentMixin,
    BasePowderExperiment,
):
    """
    Powder experiment class with specific attributes.
    Wraps background, peak profile, and linked phases.
    """

    def __init__(
        self,
        name: str,
        type: ExperimentType,
    ) -> None:
        super().__init__(name=name, type=type)

        self._background_type: BackgroundTypeEnum = BackgroundTypeEnum.default()
        self.background = BackgroundFactory.create(background_type=self.background_type)

    # -------------
    # Measured data
    # -------------

    def _load_ascii_data_to_experiment(self, data_path: str) -> None:
        """
        Loads x, y, sy values from an ASCII data file into the experiment.

        The file must be structured as:
            x  y  sy
        """
        try:
            data = np.loadtxt(data_path)
        except Exception as e:
            raise IOError(f'Failed to read data from {data_path}: {e}')

        if data.shape[1] < 2:
            raise ValueError('Data file must have at least two columns: x and y.')

        if data.shape[1] < 3:
            print('Warning: No uncertainty (sy) column provided. Defaulting to sqrt(y).')

        # Extract x, y data
        x: np.ndarray = data[:, 0]
        y: np.ndarray = data[:, 1]

        # Round x to 4 decimal places
        # TODO: This is needed for CrysPy, as otherwise it fails to match
        #  the size of the data arrays.
        x = np.round(x, 4)

        # Determine sy from column 3 if available, otherwise use sqrt(y)
        sy: np.ndarray = data[:, 2] if data.shape[1] > 2 else np.sqrt(y)

        # Replace values smaller than 0.0001 with 1.0
        # TODO: This is needed for minimization algorithms that fail with
        #  very small or zero uncertainties.
        sy = np.where(sy < 0.0001, 1.0, sy)

        # Attach the data to the experiment's datastore

        # The full pattern data
        self.datastore.full_x = x
        self.datastore.full_meas = y
        self.datastore.full_meas_su = sy

        # The pattern data used for fitting (without excluded points)
        # This is the same as full_x, full_meas, full_meas_su by default
        self.datastore.x = x
        self.datastore.meas = y
        self.datastore.meas_su = sy

        # Excluded mask
        # No excluded points by default
        self.datastore.excluded = np.full(x.shape, fill_value=False, dtype=bool)

        print(paragraph('Data loaded successfully'))
        print(f"Experiment 🔬 '{self.name}'. Number of data points: {len(x)}")

    @property
    def background_type(self):
        return self._background_type

    @background_type.setter
    def background_type(self, new_type):
        if new_type not in BackgroundFactory._supported:
            supported_types = list(BackgroundFactory._supported.keys())
            print(warning(f"Unknown background type '{new_type}'"))
            print(f'Supported background types: {supported_types}')
            print("For more information, use 'show_supported_background_types()'")
            return
        self.background = BackgroundFactory.create(new_type)
        self._background_type = new_type
        print(paragraph(f"Background type for experiment '{self.name}' changed to"))
        print(new_type)

    def show_supported_background_types(self):
        columns_headers = ['Background type', 'Description']
        columns_alignment = ['left', 'left']
        columns_data = []
        for bt, cls in BackgroundFactory._supported.items():
            columns_data.append([bt.value, bt.description()])

        print(paragraph('Supported background types'))
        render_table(
            columns_headers=columns_headers,
            columns_alignment=columns_alignment,
            columns_data=columns_data,
        )

    def show_current_background_type(self):
        print(paragraph('Current background type'))
        print(self.background_type)
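
The uncertainty fallback used by _load_ascii_data_to_experiment above can be shown with a small numpy-only sketch on hypothetical values: when no third column is present, sy defaults to sqrt(y), and very small uncertainties are replaced by 1.0 so that minimization algorithms do not fail on zero or near-zero values.

import numpy as np

# Hypothetical two-column measurement (x, y) without an uncertainty column
y = np.array([120.0, 0.0, 95.0, 3.0e-5, 200.0])

sy = np.sqrt(y)                      # default when no sy column is provided
sy = np.where(sy < 0.0001, 1.0, sy)  # guard against zero / tiny uncertainties
print(sy)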

SingleCrystalExperiment

Bases: BaseExperiment

Single crystal experiment class with specific attributes.

Source code in src/easydiffraction/experiments/experiment.py
class SingleCrystalExperiment(BaseExperiment):
    """Single crystal experiment class with specific attributes."""

    def __init__(
        self,
        name: str,
        type: ExperimentType,
    ) -> None:
        super().__init__(name=name, type=type)
        self.linked_crystal = None

    def show_meas_chart(self) -> None:
        print('Showing measured data chart is not implemented yet.')

experiments

Experiments

Bases: Collection

Collection manager for multiple Experiment instances.

Source code in src/easydiffraction/experiments/experiments.py
class Experiments(Collection):
    """
    Collection manager for multiple Experiment instances.
    """

    @property
    def _child_class(self):
        return BaseExperiment

    def __init__(self) -> None:
        super().__init__()
        self._experiments: Dict[str, BaseExperiment] = self._items  # Alias for legacy support

    def add(self, experiment: BaseExperiment):
        """
        Add a pre-built experiment instance.
        """
        self._add_prebuilt_experiment(experiment)

    def add_from_cif_path(self, cif_path: str):
        """
        Add a new experiment from a CIF file path.
        """
        experiment = Experiment(cif_path=cif_path)
        self._add_prebuilt_experiment(experiment)

    def add_from_cif_str(self, cif_str: str):
        """
        Add a new experiment from CIF file content (string).
        """
        experiment = Experiment(cif_str=cif_str)
        self._add_prebuilt_experiment(experiment)

    def add_from_data_path(
        self,
        name: str,
        data_path: str,
        sample_form: str = SampleFormEnum.default().value,
        beam_mode: str = BeamModeEnum.default().value,
        radiation_probe: str = RadiationProbeEnum.default().value,
        scattering_type: str = ScatteringTypeEnum.default().value,
    ):
        """
        Add a new experiment from a data file path.
        """
        experiment = Experiment(
            name=name,
            data_path=data_path,
            sample_form=sample_form,
            beam_mode=beam_mode,
            radiation_probe=radiation_probe,
            scattering_type=scattering_type,
        )
        self._add_prebuilt_experiment(experiment)

    def add_without_data(
        self,
        name: str,
        sample_form: str = SampleFormEnum.default().value,
        beam_mode: str = BeamModeEnum.default().value,
        radiation_probe: str = RadiationProbeEnum.default().value,
        scattering_type: str = ScatteringTypeEnum.default().value,
    ):
        """
        Add a new experiment without any data file.
        """
        experiment = Experiment(
            name=name,
            sample_form=sample_form,
            beam_mode=beam_mode,
            radiation_probe=radiation_probe,
            scattering_type=scattering_type,
        )
        self._add_prebuilt_experiment(experiment)

    @enforce_type
    def _add_prebuilt_experiment(self, experiment: BaseExperiment):
        self._experiments[experiment.name] = experiment

    def remove(self, experiment_id: str) -> None:
        if experiment_id in self._experiments:
            del self._experiments[experiment_id]

    def show_names(self) -> None:
        print(paragraph('Defined experiments' + ' 🔬'))
        print(self.ids)

    @property
    def ids(self) -> List[str]:
        return list(self._experiments.keys())

    def show_params(self) -> None:
        for exp in self._experiments.values():
            print(exp)

    def as_cif(self) -> str:
        return '\n\n'.join([exp.as_cif() for exp in self._experiments.values()])
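
A minimal sketch of working with the collection, assuming the import paths below match the module locations shown on this page; the experiment names and settings are hypothetical.

from easydiffraction.experiments.experiments import Experiments  # assumed import path
from easydiffraction.experiments.experiment import Experiment    # assumed import path

experiments = Experiments()

# Add an experiment defined without measured data
experiments.add_without_data(name='npd', sample_form='powder', beam_mode='constant wavelength')

# Or add a pre-built experiment instance
experiments.add(Experiment(name='simulation'))

experiments.show_names()
print(experiments.ids)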

add(experiment)

Add a pre-built experiment instance.

Source code in src/easydiffraction/experiments/experiments.py
def add(self, experiment: BaseExperiment):
    """
    Add a pre-built experiment instance.
    """
    self._add_prebuilt_experiment(experiment)

add_from_cif_path(cif_path)

Add a new experiment from a CIF file path.

Source code in src/easydiffraction/experiments/experiments.py
def add_from_cif_path(self, cif_path: str):
    """
    Add a new experiment from a CIF file path.
    """
    experiment = Experiment(cif_path=cif_path)
    self._add_prebuilt_experiment(experiment)

add_from_cif_str(cif_str)

Add a new experiment from CIF file content (string).

Source code in src/easydiffraction/experiments/experiments.py
def add_from_cif_str(self, cif_str: str):
    """
    Add a new experiment from CIF file content (string).
    """
    experiment = Experiment(cif_str=cif_str)
    self._add_prebuilt_experiment(experiment)

add_from_data_path(name, data_path, sample_form=SampleFormEnum.default().value, beam_mode=BeamModeEnum.default().value, radiation_probe=RadiationProbeEnum.default().value, scattering_type=ScatteringTypeEnum.default().value)

Add a new experiment from a data file path.

Source code in src/easydiffraction/experiments/experiments.py
def add_from_data_path(
    self,
    name: str,
    data_path: str,
    sample_form: str = SampleFormEnum.default().value,
    beam_mode: str = BeamModeEnum.default().value,
    radiation_probe: str = RadiationProbeEnum.default().value,
    scattering_type: str = ScatteringTypeEnum.default().value,
):
    """
    Add a new experiment from a data file path.
    """
    experiment = Experiment(
        name=name,
        data_path=data_path,
        sample_form=sample_form,
        beam_mode=beam_mode,
        radiation_probe=radiation_probe,
        scattering_type=scattering_type,
    )
    self._add_prebuilt_experiment(experiment)

add_without_data(name, sample_form=SampleFormEnum.default().value, beam_mode=BeamModeEnum.default().value, radiation_probe=RadiationProbeEnum.default().value, scattering_type=ScatteringTypeEnum.default().value)

Add a new experiment without any data file.

Source code in src/easydiffraction/experiments/experiments.py
def add_without_data(
    self,
    name: str,
    sample_form: str = SampleFormEnum.default().value,
    beam_mode: str = BeamModeEnum.default().value,
    radiation_probe: str = RadiationProbeEnum.default().value,
    scattering_type: str = ScatteringTypeEnum.default().value,
):
    """
    Add a new experiment without any data file.
    """
    experiment = Experiment(
        name=name,
        sample_form=sample_form,
        beam_mode=beam_mode,
        radiation_probe=radiation_probe,
        scattering_type=scattering_type,
    )
    self._add_prebuilt_experiment(experiment)