Skip to content

Base

Bases: ABC

Base Interface for LUME-compatible code.

Parameters

input_file : str, optional — The input file to be used, by default None.
initial_particles : dict, optional — Initial Particle metadata to be used, by default None.
verbose : bool, optional — Whether or not to produce verbose output, by default False.
timeout : float, optional — The timeout in seconds to be used, by default None.

Source code in lume/base.py
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
class Base(ABC):
    """
    Base Interface for LUME-compatible code.

    Parameters
    ----------
    input_file : str, optional
        The input file to be used, by default None
    initial_particles : dict, optional
        Initial Particle metadata to be used, by default None
    verbose : bool, optional
        Whether or not to produce verbose output, by default False
    timeout : float, optional
        The timeout in seconds to be used, by default None
    """

    def __init__(
        self,
        input_file=None,
        *,
        initial_particles=None,
        verbose=False,
        timeout=None,
        **kwargs,  # absorbs extra config keys (e.g. from from_yaml); ignored here
    ):
        self._input_file = input_file
        self._initial_particles = initial_particles
        self._input = None
        self._output = None

        # Execution
        self._timeout = timeout

        # Logging
        self._verbose = verbose

        # State flags tracked across the configure/run lifecycle
        self._configured = False
        self._finished = False
        self._error = False

    @property
    def input(self):
        """
        Input data as a dictionary
        """
        return self._input

    @input.setter
    def input(self, input):
        self._input = input

    @property
    def output(self):
        """
        require openPMD standard, in the future we can add more methods
        for libs such as pandas Dataframes, xarray DataArrays and Dask Arrays.
        """
        return self._output

    @output.setter
    def output(self, output):
        self._output = output

    @property
    def initial_particles(self):
        """
        Initial Particles
        """
        return self._initial_particles

    @initial_particles.setter
    def initial_particles(self, initial_particles):
        self._initial_particles = initial_particles

    @abstractmethod
    def configure(self):
        """
        Configure and set up for run.
        """
        raise NotImplementedError

    @abstractmethod
    def run(self):
        """
        Execute the code.
        """
        raise NotImplementedError

    @property
    def verbose(self):
        """
        Read or configure the verbose flag.
        """
        return self._verbose

    @verbose.setter
    def verbose(self, verbose):
        self._verbose = verbose

    @property
    def timeout(self):
        """
        Read or configure the timeout in seconds.
        """
        return self._timeout

    @timeout.setter
    def timeout(self, timeout):
        self._timeout = timeout

    @property
    def configured(self):
        """
        Get or set the configured flag state.
        """
        return self._configured

    @configured.setter
    def configured(self, configured):
        self._configured = configured

    @property
    def finished(self):
        """
        Get or set the finished flag state.
        """
        return self._finished

    @finished.setter
    def finished(self, finished):
        self._finished = finished

    @property
    def error(self):
        """
        Get or set the error flag state.
        """
        return self._error

    @error.setter
    def error(self, error):
        self._error = error

    @property
    def input_file(self):
        """
        Get or set the input file to be processed.
        """
        return self._input_file

    @input_file.setter
    def input_file(self, input_file):
        """dictionary with parameters?"""
        self._input_file = input_file

    def fingerprint(self):
        """
        Data fingerprint (hash) using the input parameters.

        Returns
        -------
        fingerprint : str
            The hash for this object based on the input parameters.
        """
        return tools.fingerprint(self.input)

    def copy(self):
        """
        Returns a deep copy of this object.

        If a tempdir is being used, will clear this and deconfigure.
        """
        other = copy.deepcopy(self)
        other.reset()
        return other

    def reset(self):
        """
        Reset this object to its initial state.
        """
        # No-op by default; subclasses override to clear temporary state.
        pass

    def vprint(self, *args, **kwargs):
        # Verbose print: forwards to print() only when the verbose flag is set
        if self._verbose:
            print(*args, **kwargs)

    @classmethod
    def from_yaml(cls, yaml_file, parse_input=False):
        """
        Returns an object instantiated from a YAML config file

        Will load initial_particles from an h5 file.

        Parameters
        ----------
        yaml_file : str
            Path to a YAML config file, or a raw YAML string.
        parse_input : bool, optional
            Whether to parse the configured input file with ``cls.input_parser``
            and merge the result into the config, by default False.
        """
        # Try file
        if os.path.exists(tools.full_path(yaml_file)):
            yaml_file = tools.full_path(yaml_file)
            # Context manager ensures the file handle is closed (was leaked before)
            with open(yaml_file) as fh:
                config = yaml.safe_load(fh)

            if "input_file" in config:
                # Check that the input file is absolute path...
                # require absolute/ relative to working dir for model input file
                f = os.path.expandvars(config["input_file"])
                if not os.path.isabs(f):
                    # Get the yaml file root
                    root, _ = os.path.split(tools.full_path(yaml_file))
                    config["input_file"] = os.path.join(root, f)

                # Here, we update the config with the input_file contents
                # provided that the input_parser method has been implemented on the subclass
                if parse_input:
                    parsed_input = cls.input_parser(config["input_file"])
                    config.update(parsed_input)

        else:
            # Try raw string
            config = yaml.safe_load(yaml_file)
            if parse_input and "input_file" in config:
                parsed_input = cls.input_parser(config["input_file"])
                config.update(parsed_input)

        # Form ParticleGroup from file
        if "initial_particles" in config:
            f = config["initial_particles"]
            if not os.path.isabs(f):
                # NOTE(review): when yaml_file was a raw YAML string this still
                # resolves relative to full_path(yaml_file) — confirm intended.
                root, _ = os.path.split(tools.full_path(yaml_file))
                f = os.path.join(root, f)
            config["initial_particles"] = ParticleGroup(f)

        return cls(**config)

    def to_hdf5(self, filename: str) -> None:
        """Serialize an object to an hdf5 file.

        Parameters
        ----------
        filename: str

        """
        serializer = HDF5Serializer()
        serializer.serialize(filename, self)

    @classmethod
    def from_hdf5(cls, filename: str) -> "Base":
        """Load an object from an hdf5 file.

        Parameters
        ----------
        filename: str

        """
        serializer = HDF5Serializer()
        return serializer.deserialize(filename)

    @abstractmethod
    def archive(self, h5=None):
        """
        Dump inputs and outputs into HDF5 file.

        Parameters
        ----------
        h5 : str or h5py.File
            The filename or handle to HDF5 file in which to write the information.
            If not informed, a new file is generated.

        Returns
        -------
        h5 : h5py.File
            Handle to the HDF5 file.
        """
        raise NotImplementedError

    @abstractmethod
    def load_archive(self, h5, configure=True):
        """
        Loads input and output from archived h5 file.

        Parameters
        ----------
        h5 : str or h5py.File
            The filename or handle on h5py.File from which to load input and output data
        configure : bool, optional
            Whether or not to invoke the configure method after loading, by default True
        """
        raise NotImplementedError

configured property writable

Get or set the configured flag state.

error property writable

Get or set the error flag state.

finished property writable

Get or set the finished flag state.

initial_particles property writable

Initial Particles

input property writable

Input data as a dictionary

input_file property writable

Get or set the input file to be processed.

output property writable

require openPMD standard, in the future we can add more methods for libs such as pandas Dataframes, xarray DataArrays and Dask Arrays.

timeout property writable

Read or configure the timeout in seconds.

verbose property writable

Read or configure the verbose flag.

archive(h5=None) abstractmethod

Dump inputs and outputs into HDF5 file.

Parameters

h5 : str or h5py.File The filename or handle to HDF5 file in which to write the information. If not informed, a new file is generated.

Returns

h5 : h5py.File Handle to the HDF5 file.

Source code in lume/base.py
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
@abstractmethod
def archive(self, h5=None):
    """
    Write this object's inputs and outputs into an HDF5 file.

    Parameters
    ----------
    h5 : str or h5py.File
        Filename or open handle of the HDF5 file to write to.
        When omitted, a new file is generated.

    Returns
    -------
    h5 : h5py.File
        Handle to the HDF5 file that was written.
    """
    raise NotImplementedError

configure() abstractmethod

Configure and set up for run.

Source code in lume/base.py
91
92
93
94
95
96
@abstractmethod
def configure(self):
    """
    Prepare this object so that run() can be executed.
    """
    raise NotImplementedError

copy()

Returns a deep copy of this object.

If a tempdir is being used, will clear this and deconfigure.

Source code in lume/base.py
183
184
185
186
187
188
189
190
191
def copy(self):
    """
    Return a deep copy of this object.

    If a tempdir is being used, will clear this and deconfigure.
    """
    duplicate = copy.deepcopy(self)
    duplicate.reset()
    return duplicate

fingerprint()

Data fingerprint (hash) using the input parameters.

Returns

fingerprint : str The hash for this object based on the input parameters.

Source code in lume/base.py
172
173
174
175
176
177
178
179
180
181
def fingerprint(self):
    """
    Data fingerprint (hash) of this object's input parameters.

    Returns
    -------
    fingerprint : str
        Hash computed from the current input parameters.
    """
    return tools.fingerprint(self.input)

from_hdf5(filename) classmethod

Load an object from an hdf5 file.

Parameters

filename: str

Source code in lume/base.py
260
261
262
263
264
265
266
267
268
269
270
@classmethod
def from_hdf5(cls, filename: str) -> "Base":
    """Load an object from an hdf5 file.

    Parameters
    ----------
    filename : str
        Path of the hdf5 file to deserialize.

    """
    serializer = HDF5Serializer()
    return serializer.deserialize(filename)

from_yaml(yaml_file, parse_input=False) classmethod

Returns an object instantiated from a YAML config file

Will load initial_particles from an h5 file.

Source code in lume/base.py
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
@classmethod
def from_yaml(cls, yaml_file, parse_input=False):
    """
    Returns an object instantiated from a YAML config file

    Will load initial_particles from an h5 file.

    Parameters
    ----------
    yaml_file : str
        Path to a YAML config file, or a raw YAML string.
    parse_input : bool, optional
        Whether to parse the configured input file with ``cls.input_parser``
        and merge the result into the config, by default False.
    """
    # Try file
    if os.path.exists(tools.full_path(yaml_file)):
        yaml_file = tools.full_path(yaml_file)
        # Context manager ensures the file handle is closed (was leaked before)
        with open(yaml_file) as fh:
            config = yaml.safe_load(fh)

        if "input_file" in config:
            # Check that the input file is absolute path...
            # require absolute/ relative to working dir for model input file
            f = os.path.expandvars(config["input_file"])
            if not os.path.isabs(f):
                # Get the yaml file root
                root, _ = os.path.split(tools.full_path(yaml_file))
                config["input_file"] = os.path.join(root, f)

            # Here, we update the config with the input_file contents
            # provided that the input_parser method has been implemented on the subclass
            if parse_input:
                parsed_input = cls.input_parser(config["input_file"])
                config.update(parsed_input)

    else:
        # Try raw string
        config = yaml.safe_load(yaml_file)
        if parse_input and "input_file" in config:
            parsed_input = cls.input_parser(config["input_file"])
            config.update(parsed_input)

    # Form ParticleGroup from file
    if "initial_particles" in config:
        f = config["initial_particles"]
        if not os.path.isabs(f):
            # NOTE(review): when yaml_file was a raw YAML string this still
            # resolves relative to full_path(yaml_file) — confirm intended.
            root, _ = os.path.split(tools.full_path(yaml_file))
            f = os.path.join(root, f)
        config["initial_particles"] = ParticleGroup(f)

    return cls(**config)

load_archive(h5, configure=True) abstractmethod

Loads input and output from archived h5 file.

Parameters

h5 : str or h5py.File The filename or handle on h5py.File from which to load input and output data configure : bool, optional Whether or not to invoke the configure method after loading, by default True

Source code in lume/base.py
290
291
292
293
294
295
296
297
298
299
300
301
302
@abstractmethod
def load_archive(self, h5, configure=True):
    """
    Load input and output from an archived h5 file.

    Parameters
    ----------
    h5 : str or h5py.File
        The filename or handle on h5py.File from which to load input and output data
    configure : bool, optional
        Whether or not to invoke the configure method after loading, by default True
    """
    raise NotImplementedError

reset()

Reset this object to its initial state.

Source code in lume/base.py
193
194
195
196
197
def reset(self):
    """
    Restore this object to its initial state.

    The base implementation is a no-op; subclasses override as needed.
    """
    return None

run() abstractmethod

Execute the code.

Source code in lume/base.py
 98
 99
100
101
102
103
@abstractmethod
def run(self):
    """
    Execute the code.
    """
    raise NotImplementedError

to_hdf5(filename)

Serialize an object to an hdf5 file.

Parameters

filename: str

Source code in lume/base.py
249
250
251
252
253
254
255
256
257
258
def to_hdf5(self, filename: str) -> None:
    """Serialize an object to an hdf5 file.

    Parameters
    ----------
    filename: str

    """
    HDF5Serializer().serialize(filename, self)