diff --git a/pyproject.toml b/pyproject.toml index 375e859348a339af74739f24119e708aba06e268..48d6207e939bc8df5eacc012a722a9b90a2758d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ classifiers = [ dependencies = [ "packaging", "lazy-loader", - "qutech-util >= 2025.03.1", + "qutech-util >= 2025.05.1", "numpy", "scipy", "matplotlib >= 3.7", @@ -87,7 +87,7 @@ features = [ ] [tool.hatch.envs.tests.scripts] run = [ - "python -m pytest --doctest-modules --junitxml=./pytest_report.xml" + "python -m pytest --doctest-modules --junitxml=./pytest_report.xml src/ tests/" ] [tool.hatch.envs.doc] diff --git a/src/python_spectrometer/_plot_manager.py b/src/python_spectrometer/_plot_manager.py index 97865a8f284a567c557231df697b2d696ef7fe7f..2fd33ef1b763b25bc069f0b2073a41e1fa79d358 100644 --- a/src/python_spectrometer/_plot_manager.py +++ b/src/python_spectrometer/_plot_manager.py @@ -1,24 +1,44 @@ """This module defines the PlotManager helper class.""" import contextlib import os +import sys import warnings import weakref -from itertools import compress from typing import (Dict, Any, Optional, Mapping, Tuple, ContextManager, Iterable, Union, List, - Literal) + Literal, TypeVar, Callable) import matplotlib.pyplot as plt import numpy as np +from cycler import Cycler, cycler from matplotlib import gridspec, scale +from qutil.functools import wraps +from qutil.itertools import compress from qutil.misc import filter_warnings from qutil.plotting import assert_interactive_figure from scipy import integrate, signal +if sys.version_info >= (3, 10): + from typing import ParamSpec +else: + from typing_extensions import ParamSpec + +_T = TypeVar('_T') +_P = ParamSpec('_P') _keyT = Union[int, str, Tuple[int, str]] _styleT = Union[str, os.PathLike, dict] _styleT = Union[None, _styleT, List[_styleT]] +def with_plot_context(meth: Callable[_P, _T]) -> Callable[_P, _T]: + @wraps(meth) + def wrapped(*args: _P.args, **kwargs: _P.kwargs) -> _T: + self, *args = args + with self.plot_context: + return meth(self, *args, **kwargs) + + return wrapped + + class PlotManager: __instances = weakref.WeakSet() @@ -32,11 +52,11 @@ class PlotManager: plot_negative_frequencies: bool = True, plot_absolute_frequencies: bool = True, plot_amplitude: bool = True, plot_density: bool = True, plot_cumulative_normalized: bool = False, plot_style: _styleT = 'fast', - plot_dB_scale: bool = False, threaded_acquisition: bool = True, prop_cycle=None, - raw_unit: str = 'V', processed_unit: Optional[str] = None, - uses_windowed_estimator: bool = True, complex_data: Optional[bool] = False, - figure_kw: Optional[Mapping] = None, subplot_kw: Optional[Mapping] = None, - gridspec_kw: Optional[Mapping] = None, legend_kw: Optional[Mapping] = None): + plot_dB_scale: bool = False, prop_cycle=None, raw_unit: str = 'V', + processed_unit: Optional[str] = None, uses_windowed_estimator: bool = True, + complex_data: Optional[bool] = False, figure_kw: Optional[Mapping] = None, + subplot_kw: Optional[Mapping] = None, gridspec_kw: Optional[Mapping] = None, + legend_kw: Optional[Mapping] = None): """A helper class that manages plotting spectrometer data.""" self._data = data @@ -49,15 +69,15 @@ class PlotManager: self._plot_amplitude = plot_amplitude self._plot_density = plot_density self._plot_cumulative_normalized = plot_cumulative_normalized - self._plot_style = plot_style self._plot_dB_scale = plot_dB_scale - self._threaded_acquisition = threaded_acquisition self._processed_unit = processed_unit if processed_unit is not None else raw_unit + 
self._prop_cycle = prop_cycle + # Use the setter to validate the style + self._plot_style = None # For dB scale plots, default to the first spectrum acquired. self._reference_spectrum: Optional[_keyT] = None - self.prop_cycle = prop_cycle or plt.rcParams['axes.prop_cycle'] self.raw_unit = raw_unit self.uses_windowed_estimator = uses_windowed_estimator self._complex_data = complex_data @@ -77,6 +97,9 @@ class PlotManager: if self.subplot_kw.pop('sharex', None) is not None: warnings.warn('sharex in subplot_kw not negotiable, dropping', UserWarning) + # Set this last because it needs access to lines etc + self.plot_style = plot_style + # Keep track of instances that are alive for figure counting self.__instances.add(self) # TODO: this somehow never executes @@ -90,10 +113,13 @@ class PlotManager: return self._fig is not None and plt.fignum_exists(self.figure_kw['num']) @property + @with_plot_context def fig(self): """The figure hosting the plots.""" if self.is_fig_open(): return self._fig + elif plt.fignum_exists(num := self.figure_kw['num']): + plt.figure(num).clear() try: self._fig = plt.figure(**self.figure_kw) @@ -118,8 +144,8 @@ class PlotManager: self.destroy_axes() self.update_line_attrs(self.plots_to_draw, self.lines_to_draw, self.shown, stale=True) - # If the window is closed, remove the figure from the cache so that it can be recreated and - # stop the timer to delete any remaining callbacks + # If the window is closed, remove the figure from the cache so that it can be + # recreated and stop the timer to delete any remaining callbacks self._fig.canvas.mpl_connect('close_event', on_close) self.setup_figure() @@ -162,6 +188,23 @@ class PlotManager: def plots_to_draw(self) -> Tuple[str, ...]: return tuple(compress(self.PLOT_TYPES, [True, self.plot_cumulative, self.plot_timetrace])) + @property + @with_plot_context + def prop_cycle(self) -> Cycler: + """The matplotlib property cycle. + + Set using either a :class:`cycler.Cycler` instance or a key-val + mapping of properties. + """ + return self._prop_cycle or plt.rcParams['axes.prop_cycle'] + + @prop_cycle.setter + def prop_cycle(self, val: Union[Mapping, Cycler]): + if isinstance(val, Cycler): + self._prop_cycle = cycler(val) + else: + self._prop_cycle = cycler(**val) + @property def plot_context(self) -> ContextManager: if self.plot_style is not None: @@ -186,7 +229,20 @@ class PlotManager: @property def plot_cumulative(self) -> bool: - """If the cumulative (integrated) PSD or spectrum is plotted on a subplot.""" + r"""If the cumulative (integrated) PSD or spectrum is plotted on + a subplot. + + The cumulative PSD is given by + + .. math:: + \mathrm{RMS}_S(f)^2 = \int_{f_\mathrm{min}}^f\mathrm{d}f^\prime\,S(f^\prime) + + with :math:`\mathrm{RMS}_S(f)` the root-mean-square of + :math:`S(f^\prime)` up to frequency :math:`f^\prime`. + + If :attr:`plot_amplitude` is true, plot the square root of this, + i.e., :math:`\mathrm{RMS}_S(f)`. 
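+
+ For instance, for white noise with constant density
+ :math:`S(f) = S_0`, the cumulative power grows linearly with
+ bandwidth: :math:`\mathrm{RMS}_S(f)^2 = S_0 (f - f_\mathrm{min})`.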
+ """ return self._plot_cumulative @plot_cumulative.setter @@ -309,6 +365,14 @@ class PlotManager: @plot_style.setter def plot_style(self, val: _styleT): if val != self._plot_style: + try: + with plt.style.context(val): + pass + except OSError as err: + warnings.warn(f'Ignoring style {val} due to the following error:\n{err}', + UserWarning, stacklevel=2) + return + self._plot_style = val self.destroy_axes() self.update_line_attrs(self.plots_to_draw, self.lines_to_draw, stale=True) @@ -331,17 +395,6 @@ class PlotManager: if self.is_fig_open(): self.setup_figure() - @property - def threaded_acquisition(self) -> bool: - """Acquire data in a separate thread.""" - return self._threaded_acquisition - - @threaded_acquisition.setter - def threaded_acquisition(self, val: bool): - val = bool(val) - if val != self._threaded_acquisition: - self._threaded_acquisition = val - @property def reference_spectrum(self) -> Optional[Tuple[int, str]]: """Spectrum taken as a reference for the dB scale. @@ -382,71 +435,50 @@ class PlotManager: if 'timetrace_processed' in d) return complex_raw_data or complex_processed_data - def main_plot(self, key, line_type): - x, y = self.get_freq_data(key, line_type, self.plot_dB_scale) - - d = self.lines[key]['main'][line_type] + @with_plot_context + def plot_or_set_props(self, x, y, key, plot_type, line_type): + d = self.lines[key][plot_type][line_type] if line := d['line']: line.set_data(x, y) line.set_color(self.line_props(key[0], d)['color']) line.set_alpha(self.line_props(key[0], d)['alpha']) line.set_zorder(self.line_props(key[0], d)['zorder']) else: - line, = self.axes['main'][line_type].plot(x, y, **self.line_props(key[0], d)) - self.update_line_attrs(['main'], [line_type], [key], stale=False, line=line) + line, = self.axes[plot_type][line_type].plot(x, y, **self.line_props(key[0], d)) + self.update_line_attrs([plot_type], [line_type], [key], stale=False, line=line) + @with_plot_context + def main_plot(self, key, line_type): + x, y = self.get_freq_data(key, line_type, self.plot_dB_scale) + self.plot_or_set_props(x, y, key, 'main', line_type) + + @with_plot_context def cumulative_plot(self, key, line_type): # y is the power irrespective of whether self.plot_amplitude is True or not. # This means that if the latter is True, this plot shows the cumulative RMS, # and if it's False the cumulative MS (mean square, variance). 
- x, y = self.get_freq_data(key, line_type, dB=False, cumulative=True) - - x_min, x_max = self.axes['cumulative'][line_type].get_xlim() - mask = (x_min <= x) & (x <= x_max) - x = x[..., mask] - y = y[..., mask] - y = integrate.cumulative_trapezoid(y, x, initial=0, axis=-1) - if self.plot_amplitude: - y = np.sqrt(y) - if self.plot_cumulative_normalized: - y = (y - y.min()) / y.ptp() - - d = self.lines[key]['cumulative'][line_type] - if line := d['line']: - line.set_data(x, y) - line.set_color(self.line_props(key[0], d)['color']) - line.set_alpha(self.line_props(key[0], d)['alpha']) - line.set_zorder(self.line_props(key[0], d)['zorder']) - else: - line, = self.axes['cumulative'][line_type].plot(x, y, **self.line_props(key[0], d)) - self.update_line_attrs(['cumulative'], [line_type], [key], stale=False, line=line) + x, y = self.get_integrated_data(key, line_type, self.plot_dB_scale) + self.plot_or_set_props(x, y, key, 'cumulative', line_type) + @with_plot_context def time_plot(self, key, line_type): y = self._data[key][f'timetrace_{line_type}'][-1] if np.iscomplexobj(y): y = np.abs(y) x = np.arange(y.size) / self._data[key]['settings']['fs'] + self.plot_or_set_props(x, y, key, 'time', line_type) - d = self.lines[key]['time'][line_type] - if line := d['line']: - line.set_data(x, y) - line.set_color(self.line_props(key[0], d)['color']) - line.set_alpha(self.line_props(key[0], d)['alpha']) - line.set_zorder(self.line_props(key[0], d)['zorder']) - else: - line, = self.axes['time'][line_type].plot(x, y, **self.line_props(key[0], d)) - self.update_line_attrs(['time'], [line_type], [key], stale=False, line=line) - + @with_plot_context def setup_figure(self): gs = gridspec.GridSpec(2 + self.plot_cumulative + self.plot_timetrace, 1, figure=self.fig, **self.gridspec_kw) - with self.plot_context: - self.setup_main_axes(gs) - self.setup_cumulative_axes(gs) - self.setup_time_axes(gs) - self.destroy_unused_axes() - self.update_figure() + self.setup_main_axes(gs) + self.setup_cumulative_axes(gs) + self.setup_time_axes(gs) + self.destroy_unused_axes() + self.update_figure() + @with_plot_context def setup_main_axes(self, gs: gridspec.GridSpec): if self.axes['main']['processed'] is None: self.axes['main']['processed'] = self.fig.add_subplot(gs[:2], **self.subplot_kw) @@ -458,8 +490,7 @@ class PlotManager: self.axes['main']['processed'].set_ylabel( _ax_label(self.plot_amplitude, False, self.plot_dB_scale, self.reference_spectrum) + _ax_unit(self.plot_amplitude, self.plot_density, False, - self.plot_cumulative_normalized, self.plot_dB_scale, - 'dB' if self.plot_dB_scale else self.processed_unit) + self.plot_cumulative_normalized, self.plot_dB_scale, self.processed_unit) ) self.axes['main']['processed'].xaxis.set_tick_params(which="both", labelbottom=not self.plot_cumulative) @@ -471,11 +502,11 @@ class PlotManager: self.axes['main']['raw'].set_ylabel( _ax_label(self.plot_amplitude, False, self.plot_dB_scale, self.reference_spectrum) + _ax_unit(self.plot_amplitude, self.plot_density, False, - self.plot_cumulative_normalized, self.plot_dB_scale, - 'dB' if self.plot_dB_scale else self.raw_unit) + self.plot_cumulative_normalized, self.plot_dB_scale, self.raw_unit) ) self.set_subplotspec('main', gs[:2]) + @with_plot_context def setup_cumulative_axes(self, gs: gridspec.GridSpec): if self.plot_cumulative: if self.axes['cumulative']['processed'] is None: @@ -489,7 +520,8 @@ class PlotManager: self.axes['cumulative']['processed'].set_ylabel( _ax_label(self.plot_amplitude, True, self.plot_dB_scale, 
self.reference_spectrum) + _ax_unit(self.plot_amplitude, self.plot_density, True, - self.plot_cumulative_normalized, False, self.processed_unit) + self.plot_cumulative_normalized, self.plot_dB_scale, + self.processed_unit) ) if self.plot_raw: if self.axes['cumulative']['raw'] is None: @@ -499,10 +531,11 @@ class PlotManager: _ax_label(self.plot_amplitude, True, self.plot_dB_scale, self.reference_spectrum) + _ax_unit(self.plot_amplitude, self.plot_density, True, - self.plot_cumulative_normalized, False, self.raw_unit) + self.plot_cumulative_normalized, self.plot_dB_scale, self.raw_unit) ) self.set_subplotspec('cumulative', gs[2]) + @with_plot_context def setup_time_axes(self, gs: gridspec.GridSpec): if self.plot_timetrace: if self.axes['time']['processed'] is None: @@ -527,8 +560,8 @@ class PlotManager: try: self.axes[plot][line].remove() self.axes[plot][line] = None - except AttributeError: - # Ax None + except (AttributeError, NotImplementedError): + # Ax None / Artist dead continue def destroy_unused_axes(self): @@ -552,6 +585,7 @@ class PlotManager: # Line None continue + @with_plot_context def update_figure(self): # Flush out all idle events, necessary for some reason in sequential mode self.fig.canvas.flush_events() @@ -585,6 +619,7 @@ class PlotManager: self.fig.canvas.draw_idle() self.fig.canvas.flush_events() + @with_plot_context def update_lines(self): for key in self.shown: for plot in self.plots_to_draw: @@ -607,6 +642,7 @@ class PlotManager: for line in self.lines_to_draw: self.axes[plot][line].set_subplotspec(gs) + @with_plot_context def set_xlims(self): # Frequency-axis plots right = max(( @@ -644,6 +680,7 @@ class PlotManager: self.axes['time']['processed'].relim(visible_only=True) self.axes['time']['processed'].autoscale(enable=True, axis='x', tight=True) + @with_plot_context def set_ylims(self): if not self.shown: return @@ -659,8 +696,8 @@ class PlotManager: ydata = self.lines[key][plot][line]['line'].get_ydata()[ (left <= xdata) & (xdata <= right) ] - top = max(top, ydata.max()) - bottom = min(bottom, ydata.min()) + top = max(top, np.nanmax(ydata)) + bottom = min(bottom, np.nanmin(ydata)) # Transform to correct scale transform = self.axes[plot][line].transScale top, bottom = transform.transform([(1, top), @@ -675,14 +712,10 @@ class PlotManager: # If bottom = top self.axes[plot][line].set_ylim(bottom, top) + @with_plot_context def set_xscales(self): - if ( - # If daq returns complex data, the spectrum will have negative freqs - self.complex_data - and self.plot_negative_frequencies - or self.plot_raw - and self.axes['main']['processed'].get_xscale() == 'log' - ): + if self.complex_data and self.plot_negative_frequencies: + # If daq returns complex data, the spectrum will have negative freqs if self.axes['main']['processed'].get_xscale() == 'log': # matplotlib>=3.6 has asinh scale for log plots with negative values self.axes['main']['processed'].set_xscale(_asinh_scale_maybe()) @@ -718,8 +751,8 @@ class PlotManager: def drop_lines(self, key: _keyT): del self.lines[key] - def get_freq_data(self, key, line_type, dB, reference=False, - cumulative=False) -> Tuple[np.ndarray, np.ndarray]: + def get_freq_data(self, key, line_type, dB) -> Tuple[np.ndarray, np.ndarray]: + y = np.mean(np.atleast_2d(self._data[key][f'S_{line_type}']), axis=0) x = self._data[key][f'f_{line_type}'].copy() if self.plot_absolute_frequencies: x += self._data[key]['settings'].get('freq', 0) @@ -729,71 +762,107 @@ class PlotManager: ) nperseg = self._data[key]['settings']['nperseg'] fs = 
self._data[key]['settings']['fs'] + if isinstance(window, str) or isinstance(window, tuple): + window = signal.get_window(window, nperseg) + else: + window = np.asarray(window) - y = np.mean(np.atleast_2d(self._data[key][f'S_{line_type}']), axis=0) - if not self.plot_density or dB: - # Need to calculate dB using the spectrum, not the density - if isinstance(window, str) or isinstance(window, tuple): - window = signal.get_window(window, nperseg) - else: - window = np.asarray(window) + if not self.plot_density: y *= fs * (window ** 2).sum() / window.sum() ** 2 - if self.plot_amplitude and not cumulative: + if self.plot_amplitude: + # If db, this correctly accounts for the factor two in the definition below y **= 0.5 + if dB: + # It does not matter whether we plot the density or the spectrum because the factor + # between them cancels out + x0, y0 = self.get_freq_data(self.reference_spectrum, line_type, dB=False) + y = self.to_dB(x0, y0, x, y, key) - if dB and not reference: - _, y0 = self.get_freq_data(self.reference_spectrum, line_type, dB=True, reference=True) - with np.errstate(divide='ignore', invalid='ignore'): - try: - y = 10 * np.log10(y / y0) - except ValueError as error: - raise RuntimeError(f'dB scale requested but data for key {key} does not have ' - 'the same shape as reference data with key ' - f'{self.reference_spectrum}. Select a different reference ' - 'using Spectrometer.set_reference_spectrum() or adapt your ' - 'acquisition parameters') from error - if self.plot_density: - y /= fs * (window ** 2).sum() / window.sum() ** 2 + return x, y + + def get_integrated_data(self, key, line_type, dB=False): + # Should not integrate over dB but do the conversion afterward + x, y = self.get_freq_data(key, line_type, dB=False) + if self.plot_amplitude: + # Need to integrate over power always... + y **= 2.0 + x_min, x_max = self.axes['cumulative'][line_type].get_xlim() + mask = (x_min <= x) & (x <= x_max) + x = x[..., mask] + y = y[..., mask] + y = integrate.cumulative_trapezoid(y, x, initial=0, axis=-1) + if self.plot_amplitude: + # and then convert back if desired. + y **= 0.5 + if dB: + x0, y0 = self.get_integrated_data(self.reference_spectrum, line_type, dB=False) + y = self.to_dB(x0, y0, x, y, key) + if self.plot_cumulative_normalized: + warnings.warn('plot_cumulative_normalized does not make sense together with ' + 'plot_dB_scale. Ignoring.', UserWarning, stacklevel=3) + elif self.plot_cumulative_normalized: + y = (y - y.min()) / np.ptp(y) return x, y + def to_dB(self, x0, y0, x, y, key): + with np.errstate(divide='ignore', invalid='ignore'): + try: + y = 10 * np.log10(y / y0) + except ValueError as error: + raise RuntimeError(f'dB scale requested but data for key {key} does not have ' + 'the same shape as reference data with key ' + f'{self.reference_spectrum}. Select a different reference ' + 'using Spectrometer.set_reference_spectrum() or adapt your ' + 'acquisition parameters') from error + else: + # infs are from zero-division, nans from 0/0. The latter occurs for the first point + # in integrated spectra, so set manually to log(1)=0. Infs we mask with nans. 
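+ # For example, 10*log10(0/0) gives nan and is set to 0 dB, while
+ # 10*log10(y/0) gives inf for y > 0 and is masked as nan.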
+ y[np.isnan(y)] = 0 + y[np.isinf(y)] = np.nan + + assert np.allclose(x, x0) + return y + def _ax_unit(amplitude: bool, density: bool, integrated: bool, cumulative_normalized: bool, dB: bool, unit: str) -> str: if integrated and cumulative_normalized: return ' (a.u.)' + elif dB: + return ' (dB)' + elif not amplitude: + unit = unit + '$^2$' + + labels = { + # (amplitude, density, integrated) + (False, False, False): f' ({unit})', + (False, False, True): f' ({unit}' + r'$\mathrm{Hz}$)', + (False, True, False): f' ({unit}' + r'$/\mathrm{Hz}$)', + (False, True, True): f' ({unit})', + (True, False, False): f' ({unit})', + (True, False, True): f' ({unit}' + r'$\sqrt{\mathrm{Hz}}$)', + (True, True, False): f' ({unit}' + r'$/\sqrt{\mathrm{Hz}}$)', + (True, True, True): f' ({unit})', + } + return labels[(amplitude, density, integrated)] + + +def _ax_label(amplitude: bool, integrated: bool, dB: bool, reference: Union[_keyT, None]) -> str: + labels = { + (False, False, False): '$S(f)$', + (False, True, False): r'$\sqrt{S(f)}$', + (False, False, True): r'$\mathrm{RMS}_S(f)^2$', + (False, True, True): r'$\mathrm{RMS}_S(f)$' + } if dB: - unit = 'dB' - power = '$^2$' if not amplitude and not dB else '' - hz_mul = 'Hz' if integrated and not density else '' - if density and not integrated: - return ' ({unit}{power}{hz_mul}{hz_div})'.format( - unit=unit, - power=power, - hz_mul=hz_mul, - hz_div=r'/$\sqrt{\mathrm{Hz}}$' if amplitude and density else r'/$\mathrm{Hz}$' - ) - return ' ({unit}{power}{hz_mul})'.format( - unit=unit, - power=power, - hz_mul=hz_mul, - ) - - -def _ax_label(amplitude: bool, integrated: bool, dB: bool, reference: _keyT) -> str: - if not dB: - return '{a}{b}S{c}(f{d}){e}'.format( - a=r'$\sqrt{{' if amplitude else '$', - b=r'\int_0^f\mathrm{{d}}f^\prime ' if integrated else '', - c='^2' if integrated and amplitude else '', - d=r'^\prime' if integrated else '', - e='}}$' if amplitude else '$' - ) - return '{a}{b} relative to index {c}'.format( - a='integrated ' if integrated else '', - b='amplitude' if amplitude else 'power', - c=reference[0] - ).capitalize() + labels |= { + (True, False, False): f'Pow. rel. to key {reference[0]}', + (True, True, False): f'Amp. rel. to key {reference[0]}', + (True, False, True): f'Int. pow. rel. to key {reference[0]}', + (True, True, True): f'Int. amp. rel. 
to key {reference[0]}' + } + return labels[(dB, amplitude, integrated)] def _asinh_scale_maybe() -> Literal['asinh', 'linear']: diff --git a/src/python_spectrometer/core.py b/src/python_spectrometer/core.py index b0d4703f6c10a23bd4cab9c660e5ecbc6296fffc..2a950b3c34480a8478abd2f4767fb815705b76b7 100644 --- a/src/python_spectrometer/core.py +++ b/src/python_spectrometer/core.py @@ -3,6 +3,7 @@ import inspect import os import platform import shelve +import sys import warnings from datetime import datetime from pathlib import Path @@ -21,6 +22,7 @@ from matplotlib.figure import Figure from matplotlib.legend import Legend from qutil import io, misc from qutil.functools import cached_property, chain, partial +from qutil.io import AsyncDatasaver from qutil.itertools import count from qutil.plotting import is_using_mpl_gui_backend from qutil.plotting import live_view @@ -49,6 +51,15 @@ def _forward_property(cls: type, member: str, attr: str): return property(getter, setter, doc=getattr(cls, attr).__doc__) +class _Unpickler(dill.Unpickler): + PATHTYPE = type(Path()) + + def find_class(self, module, name): + if module.startswith("pathlib") and name.endswith("Path"): + return self.PATHTYPE + return super().find_class(module, name) + + class FrequencyLiveView(live_view.IncrementalLiveView2D): """Spectrum live view. See :meth:`.Spectrometer.live_view`.""" @@ -124,12 +135,21 @@ class Spectrometer: Plot the cumulative data given by .. math:: - \int_{f_\mathrm{min}}^f\mathrm{d}f^\prime S(f^\prime) + \mathrm{RMS}_S(f)^2 = \int_{f_\mathrm{min}}^f\mathrm{d} + f^\prime\,S(f^\prime) + + with :math:`\mathrm{RMS}_S(f)` the root-mean-square of the PSD + :math:`S(f^\prime)` up to frequency :math:`f^\prime` on a new + subplot. If :attr:`plot_density` is False, the spectrum instead + of the PSD is used, but note that this does not make a lot of + sense. - on a new subplot. :math:`S(f)` is whatever is plotted in the - main plot and therefore depends on :attr:`plot_density` and - :attr:`plot_amplitude`. Can also be toggled dynamically by - setting :attr:`plot_cumulative`. + If :attr:`plot_dB_scale` is True, the log-ratio of + :math:`\mathrm{RMS}_S(f)` with that of the reference data is + plotted. + + Can also be toggled dynamically by setting + :attr:`plot_cumulative`. plot_negative_frequencies : bool, default True Plot negative frequencies for two-sided spectra (in case the time-series data is complex). For ``matplotlib >= 3.6`` an @@ -197,6 +217,10 @@ class Spectrometer: threaded_acquisition : bool, default True Acquire data in a separate thread. This keeps the plot window responsive while acquisition is running. + blocking_acquisition : bool, default False + Block the interpreter while acquisition is running. This might + prevent concurrency errors when running a measurement script + that performs multiple acquisitions or plot actions. prop_cycle : cycler.Cycler A property cycler for styling the plotted lines. 
play_sound : bool, default False @@ -374,7 +398,7 @@ class Spectrometer: _to_expose = ('fig', 'ax', 'ax_raw', 'leg', 'plot_raw', 'plot_timetrace', 'plot_cumulative', 'plot_negative_frequencies', 'plot_absolute_frequencies', 'plot_amplitude', 'plot_density', 'plot_cumulative_normalized', 'plot_style', 'plot_dB_scale', - 'threaded_acquisition', 'reference_spectrum', 'processed_unit') + 'prop_cycle', 'reference_spectrum', 'processed_unit') # type checkers fig: Figure @@ -410,7 +434,7 @@ class Spectrometer: plot_update_mode: Optional[Literal['fast', 'always', 'never']] = None, plot_dB_scale: bool = False, play_sound: bool = False, audio_amplitude_normalization: Union[Literal["single_max"], float] = "single_max", - threaded_acquisition: bool = True, + threaded_acquisition: bool = True, blocking_acquisition: bool = False, purge_raw_data: bool = False, prop_cycle=None, savepath: _pathT = None, relative_paths: bool = True, compress: bool = True, raw_unit: str = 'V', processed_unit: Optional[str] = None, figure_kw: Optional[Mapping] = None, @@ -420,6 +444,8 @@ class Spectrometer: self._data: Dict[Tuple[int, str], Dict] = {} self._savepath: Optional[Path] = None self._acquiring = False + self._stop_event = Event() + self._datasaver = AsyncDatasaver('dill', compress) self.daq = daq self.procfn = chain(*procfn) if np.iterable(procfn) else chain(procfn or Id) @@ -427,13 +453,14 @@ class Spectrometer: if savepath is None: savepath = Path.home() / 'python_spectrometer' / datetime.now().strftime('%Y-%m-%d') self.savepath = savepath - self.compress = compress if plot_update_mode is not None: warnings.warn('plot_update_mode is deprecated and has no effect', DeprecationWarning) if purge_raw_data: warnings.warn('Enabling purge raw data might break some plotting features!', UserWarning) self.purge_raw_data = purge_raw_data + self.threaded_acquisition = threaded_acquisition + self.blocking_acquisition = blocking_acquisition if psd_estimator is None: psd_estimator = {} @@ -455,10 +482,9 @@ class Spectrometer: plot_cumulative, plot_negative_frequencies, plot_absolute_frequencies, plot_amplitude, plot_density, plot_cumulative_normalized, - plot_style, plot_dB_scale, threaded_acquisition, - prop_cycle, raw_unit, processed_unit, - uses_windowed_estimator, complex_data, figure_kw, - subplot_kw, gridspec_kw, legend_kw) + plot_style, plot_dB_scale, prop_cycle, raw_unit, + processed_unit, uses_windowed_estimator, complex_data, + figure_kw, subplot_kw, gridspec_kw, legend_kw) self._audio_amplitude_normalization = audio_amplitude_normalization self._play_sound = play_sound @@ -611,13 +637,9 @@ class Spectrometer: keys = self.keys() return ' - ' + '\n - '.join((str(key) for key in sorted(self.keys()) if key in keys)) - @mock.patch.multiple('pickle', Unpickler=dill.Unpickler, Pickler=dill.Pickler) - def _savefn(self, file: _pathT, **kwargs): - file = io.check_path_length(self._resolve_path(file)) - if self.compress: - np.savez_compressed(str(file), **_to_native_types(kwargs)) - else: - np.savez(str(file), **_to_native_types(kwargs)) + def _save(self, file: _pathT, **kwargs): + self._datasaver(io.check_path_length(self._resolve_path(file)), + **_to_native_types(kwargs)) @classmethod def _make_kwargs_compatible(cls, kwargs: Dict[str, Any]) -> Dict[str, Any]: @@ -688,7 +710,7 @@ class Spectrometer: self._data[key]['S_processed'] = np.mean(self._data[key]['S_processed'], axis=0)[None] self._data[key].update(measurement_metadata=metadata) - self._savefn(self._data[key]['filepath'], **self._data[key]) + 
self._save(self._data[key]['filepath'], **self._data[key]) def _take_threaded(self, progress: bool, key: _keyT, n_avg: int, **settings): """Acquire data in a separate thread. @@ -730,7 +752,7 @@ class Spectrometer: iterator = self.daq.acquire(n_avg=n_avg, **settings) for i in progressbar(count(), disable=not progress, total=n_avg, desc=f'Acquiring {n_avg} spectra with key {key}'): - if stop_flag.is_set(): + if self._stop_event.is_set(): print('Acquisition interrupted.') break try: @@ -747,13 +769,13 @@ class Spectrometer: self._acquiring = False def on_close(event): - stop_flag.set() + self._stop_event.set() self._acquiring = False INTERACTIVE = is_using_mpl_gui_backend(self.fig) + self._stop_event.clear() queue = Queue() - stop_flag = Event() thread = Thread(target=acquire, daemon=True) thread.start() @@ -830,6 +852,8 @@ class Spectrometer: self._take_threaded(progress, key, **settings) else: self._take_sequential(progress, key, **settings) + if self.blocking_acquisition: + self.block_until_ready() take.__doc__ = (take.__doc__.replace(8*' ', '') + '\n\nDAQ Parameters' @@ -1171,8 +1195,8 @@ class Spectrometer: live_view_kw = {} live_view_kw.setdefault('blocking_queue', True) - live_view_kw.setdefault('autoscale', 'c') live_view_kw.setdefault('autoscale_interval_ms', None) + live_view_kw.setdefault('style', self.plot_style) live_view_kw.setdefault( 'plot_legend', 'upper right' if np.issubdtype(self.daq.DTYPE, np.complexfloating) else False @@ -1181,15 +1205,16 @@ class Spectrometer: fixed_kw = dict( plot_line=True, xlim=xlim, ylim=(0, (max_rows - 1) * T), xlabel='$f$', ylabel='$t$', clabel='$S(f)$', - units={'x': 'Hz', 'y': 's', 'c': self.processed_unit + r'$/\sqrt{{Hz}}$'}, - img_kw=dict(norm=colors.LogNorm(vmin=0.1, vmax=10), cmap='Blues') + units={'x': 'Hz', 'y': 's', 'c': self.processed_unit + r'$/\sqrt{{\mathrm{{Hz}}}}$'}, + img_kw=dict(norm=colors.LogNorm(vmin=0.1, vmax=10)) ) - freq_kw = {'xscale': freq_xscale} | live_view_kw | fixed_kw - if any(freq_kw[key] != val for key, val in live_view_kw.items()): + freq_kw = {'autoscale': 'c', 'xscale': freq_xscale, 'img_kw': {'cmap': 'Blues'}} + freq_kw = _merge_recursive(freq_kw, _merge_recursive(live_view_kw, fixed_kw)) + if not _dict_is_subset(live_view_kw, freq_kw): warnings.warn('Overrode some keyword arguments for FrequencyLiveView', UserWarning) # The view(s) get data from these queues and subsequently put them into their own - stop_event = Event() + self._stop_event.clear() get_queues = [LifoQueue(maxsize=int(live_view_kw['blocking_queue']))] views = [get_live_view(FrequencyLiveView, put_frequency_data, get_queue=get_queues[0], **freq_kw)] @@ -1198,8 +1223,9 @@ class Spectrometer: fixed_kw = dict(xlim=(0, T), xlabel='$t$', ylabel='Amplitude', n_lines=2 if np.issubdtype(self.daq.DTYPE, np.complexfloating) else 1, units={'x': 's', 'y': self.processed_unit}) - time_kw = live_view_kw | fixed_kw - if any(time_kw[key] != val for key, val in live_view_kw.items()): + time_kw = {'autoscale': 'y'} + time_kw = _merge_recursive(time_kw, _merge_recursive(live_view_kw, fixed_kw)) + if not _dict_is_subset(live_view_kw, time_kw): warnings.warn('Overrode some keyword arguments for TimeLiveView', UserWarning) get_queues.append(LifoQueue(maxsize=int(live_view_kw['blocking_queue']))) @@ -1210,23 +1236,24 @@ class Spectrometer: # thread (and stop the other view) watcher_threads = [ Thread(target=monitor_event, - args=(views[0].stop_event, stop_event, views[1].stop_event, + args=(views[0].stop_event, self._stop_event, views[1].stop_event, # Need to add the 
proxy's close_event for headless tests views[1].close_event if in_process else None), daemon=True), Thread(target=monitor_event, - args=(views[1].stop_event, stop_event, views[0].stop_event, + args=(views[1].stop_event, self._stop_event, views[0].stop_event, views[0].close_event if in_process else None), daemon=True), ] else: watcher_threads = [ - Thread(target=monitor_event, args=(views[0].stop_event, stop_event), daemon=True) + Thread(target=monitor_event, args=(views[0].stop_event, self._stop_event), + daemon=True) ] # The feeder thread actually performs the data acquisition and distributes it into the # get_queues until stop_event is set. - feeder_thread = Thread(target=acquire_and_feed, args=[stop_event] + get_queues, + feeder_thread = Thread(target=acquire_and_feed, args=[self._stop_event] + get_queues, daemon=True) # Finally, start all the threads @@ -1261,6 +1288,10 @@ class Spectrometer: while self.acquiring and not exceeded: self.fig.canvas.start_event_loop(self._plot_manager.TIMER_INTERVAL * 1e-3) + def abort_acquisition(self): + """Abort the current acquisition.""" + self._stop_event.set() + def reprocess_data(self, *comment_or_index: _keyT, save: Literal[False, True, 'overwrite'] = False, @@ -1286,19 +1317,27 @@ class Spectrometer: Updated keyword argument settings for data processing using :attr:`procfn` or :attr:`fourier_procfn`. Previous settings are used for those not provided here. + Note that, in offline mode (without a ``DAQ`` instance + attached), settings are only validated with the bare + :class:`daq_settings.DAQSettings` class which might not + consider all constraints. """ try: for key in self._parse_keys(*self._unravel_coi(*comment_or_index)): data = self._data[key] + if self.daq is not None: + settings = self.daq.DAQSettings(data['settings'] | new_settings) + else: + settings = daq_settings.DAQSettings(data['settings'] | new_settings) data.update(self._process_data(self._data[key]['timetrace_raw'], - **{**data['settings'], **new_settings})) + **settings.to_consistent_dict())) if save: if save == 'overwrite': data['filepath'] = io.query_overwrite(data['filepath']) else: data['filepath'] = self._get_new_file(comment=data['comment']) - self._savefn(data['filepath'], **data) + self._datasaver(data['filepath'], **data) self._data[key] = data self._plot_manager.update_line_attrs(self._plot_manager.plots_to_draw, @@ -1403,10 +1442,8 @@ class Spectrometer: data['filepath'] = newfile if relative_paths else savepath / newfile newfile = io.query_overwrite(io.check_path_length(savepath / newfile)) - if compress: - np.savez_compressed(savepath / newfile, **_to_native_types(data)) - else: - np.savez(savepath / newfile, **_to_native_types(data)) + with AsyncDatasaver('dill', compress) as datasaver: + datasaver.save_sync(savepath / newfile, **_to_native_types(data)) if newfile == oldfile: # Already 'deleted' (overwrote) the old file @@ -1429,7 +1466,7 @@ class Spectrometer: return file.relative_to(self.savepath) return file - @mock.patch.multiple('shelve', Unpickler=dill.Unpickler, Pickler=dill.Pickler) + @mock.patch.multiple('shelve', Unpickler=_Unpickler, Pickler=dill.Pickler) def serialize_to_disk(self, file: Optional[_pathT] = None, protocol: int = -1, verbose: bool = False): """Serialize the Spectrometer object to disk. 
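The `_Unpickler` patched into `shelve` above redirects every pickled `pathlib` path class to the current platform's concrete type, so shelves written on one OS load on another. A self-contained sketch of the same mechanism (hypothetical names and payload, standard library only):

import io
import pathlib
import pickle

class PortableUnpickler(pickle.Unpickler):
    # type(Path()) is WindowsPath on Windows and PosixPath elsewhere.
    PATHTYPE = type(pathlib.Path())

    def find_class(self, module, name):
        # Covers both 'pathlib' and the 'pathlib._local' module of
        # newer Python versions.
        if module.startswith('pathlib') and name.endswith('Path'):
            return self.PATHTYPE
        return super().find_class(module, name)

payload = pickle.dumps(pathlib.Path('data') / 'spectrum.npz')
obj = PortableUnpickler(io.BytesIO(payload)).load()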
@@ -1458,7 +1495,7 @@ class Spectrometer: 'plot_raw', 'plot_timetrace', 'plot_cumulative', 'plot_negative_frequencies', 'plot_absolute_frequencies', 'plot_amplitude', 'plot_density', 'plot_cumulative_normalized', - 'plot_style', 'plot_update_mode', 'plot_dB_scale', 'compress'] + 'plot_style', 'plot_update_mode', 'plot_dB_scale'] plot_manager_attrs = ['reference_spectrum', 'prop_cycle', 'raw_unit', 'processed_unit'] with shelve.open(str(file), protocol=protocol) as db: # Constructor args @@ -1478,9 +1515,10 @@ class Spectrometer: print(f'Wrote object data to {file}') @classmethod - @mock.patch.multiple('shelve', Unpickler=dill.Unpickler, Pickler=dill.Pickler) + @mock.patch.multiple('shelve', Unpickler=_Unpickler, Pickler=dill.Pickler) def recall_from_disk(cls, file: _pathT, daq: Optional[DAQ] = None, *, - reprocess_data: bool = False, **new_settings): + reprocess_data: bool = False, savepath: Optional[_pathT] = None, + **new_settings): """Restore a Spectrometer object from disk. Parameters @@ -1496,6 +1534,8 @@ class Spectrometer: reprocess_data : bool Redo the processing steps using this object's :attr:`procfn` and :attr:`psd_estimator`. Default: False. + savepath : str | Path + Overrides the savepath where data files are found. See Also -------- @@ -1518,6 +1558,8 @@ class Spectrometer: for key, val in db.items(): kwargs[key] = val + if savepath is not None: + kwargs['savepath'] = io.to_global_path(savepath) if not (runfile := kwargs.pop('runfile')).is_absolute(): runfile = kwargs['savepath'] / runfile spectrum_files = np.array(io.to_global_path(runfile).read_text().split('\n')) @@ -1540,7 +1582,12 @@ class Spectrometer: except FileNotFoundError: print(f'Could not retrieve file {file}. Skipping.') - spectrometer.set_reference_spectrum(reference_spectrum) + try: + spectrometer.set_reference_spectrum(reference_spectrum) + except KeyError: + warnings.warn('Could not set reference spectrum. Setting to key 0.', RuntimeWarning) + spectrometer.set_reference_spectrum(0) + # Show all at once to save drawing time spectrometer.show(*keys) return spectrometer @@ -1577,6 +1624,8 @@ class Spectrometer: key = (self._index, data['comment']) self._data[key] = data + # Make sure the figure is live + self._plot_manager.update_figure() self._plot_manager.add_new_line_entry(key) if show: self.show(key, color=color) @@ -1626,7 +1675,18 @@ def _load_spectrum(file: _pathT) -> Dict[str, Any]: def __exit__(self, exc_type, exc_val, exc_tb): delattr(io, 'JanewayWindowsPath') - with np.load(file, allow_pickle=True) as fp, monkey_patched_io(): + # Patch modules for data saved before move to separate package + renamed_modules = {'qutil.measurement.spectrometer.daq.settings': daq_settings} + PATHTYPE = type(Path()) + + with ( + mock.patch.dict(sys.modules, renamed_modules), + mock.patch.multiple('pathlib', WindowsPath=PATHTYPE, PosixPath=PATHTYPE), + mock.patch.multiple('pathlib._local', WindowsPath=PATHTYPE, PosixPath=PATHTYPE, + create=True), # backwards compatibility + np.load(file, allow_pickle=True) as fp, + monkey_patched_io() + ): data = {} for key, val in fp.items(): try: @@ -1698,6 +1758,78 @@ def _from_native_types(data: Dict[str, Any]) -> Dict[str, Any]: return data +def _merge_recursive(orig: dict, upd: dict, inplace: bool = False) -> dict: + """Recursively update the original dictionary 'orig' with the + values from 'upd'. + + If both dictionaries have a value for the same key and those values + are also dictionaries, the function is called recursively on those + nested dictionaries. 
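+ Non-dict values from ``upd`` replace those in ``orig``; keys
+ missing from ``upd`` are left untouched.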
+ + Parameters + ---------- + orig : + The dictionary that is updated. + upd : + The dictionary whose values are used for updating. + inplace : + Merge or update. + + Returns + ------- + dict : + The updated original dictionary. + """ + if not inplace: + orig = copy.copy(orig) + + for key, upd_value in upd.items(): + orig_value = orig.get(key) + # If both the existing value and the update value are dictionaries, + # merge them recursively. + if isinstance(orig_value, dict) and isinstance(upd_value, dict): + # Since `orig[key]` is already part of the copied version + # if not inplace, we can update it in place. + _merge_recursive(orig_value, upd_value, inplace=True) + else: + # Otherwise, replace or add the value from `upd` into `orig`. + orig[key] = upd_value + return orig + + +def _dict_is_subset(source: dict, target: dict) -> bool: + """ + Checks recursively whether the nested dict *source* is a subset of + *target*. + + Parameters + ---------- + source : + The dictionary whose keys must be present. + target : + The dictionary to check for the required keys. + + """ + for key, source_value in source.items(): + # Check if the key exists in the target dictionary. + if key not in target: + return False + + target_value = target[key] + # If both values are dictionaries, check recursively. + if isinstance(source_value, dict): + if not isinstance(target_value, dict): + return False + if not _dict_is_subset(source_value, target_value): + return False + else: + # For non-dict values, check equality. + if source_value != target_value: + return False + + return True + + class ReadonlyError(Exception): """Indicates a :class:`Spectrometer` object is read-only.""" pass diff --git a/src/python_spectrometer/daq/settings.py b/src/python_spectrometer/daq/settings.py index d74d508ed111582231d7dea19a7f4e190665da5a..c945577052235c4d38e3d890c819b33528b41226 100644 --- a/src/python_spectrometer/daq/settings.py +++ b/src/python_spectrometer/daq/settings.py @@ -466,16 +466,26 @@ class DAQSettings(dict): fs = self._domain_df.next_smallest(fs / self['nperseg']) * self['nperseg'] if not self._isclose(fs, fs_prev): self['df'] = self._domain_df.next_closest(fs_prev / self['nperseg']) - self._make_compatible_nperseg(ceil(fs_prev / self['df'])) + self._make_compatible_nperseg(ceil(self._domain_df.round(fs_prev / self['df']))) return self._to_allowed_fs(fs_prev) # Constraints on fs itself - if not isinf(df) and not (fs / df) % 1: + if not isinf(df) and not self._domain_fs.round(fs / df) % 1: # fs might be due to ceil-ing when inferring nperseg. Use next_closest fs = self._domain_fs.next_closest(fs) else: fs = self._domain_fs.next_largest(fs) if not self._isclose(fs, fs_prev): self._make_compatible_fs(fs) + return fs + # Finally, as a last resort test if the parameters match. 
If not, try to adjust nperseg + if not isinf(df) and 'nperseg' in self: + fs = df / self['nperseg'] + if not self._isclose(fs, fs_prev): + self['fs'] = self._domain_fs.next_closest(fs_prev) + self._make_compatible_nperseg( + self._domain_nperseg.next_closest(self['fs'] / df) + ) + return self['fs'] return fs @@ -490,7 +500,7 @@ class DAQSettings(dict): if not self._isclose(df, df_prev): self['fs'] = self._domain_fs.next_closest(df_prev * self['nperseg']) # Use df instead of df_prev here because we preferentially adjust df over fs or nperseg - self._make_compatible_nperseg(ceil(self['fs'] / df)) + self._make_compatible_nperseg(ceil(self._domain_fs.round(self['fs'] / df))) return self._to_allowed_df(df) if not isinf(fs := self.get('fs', self.get('f_max', inf) * 2)): # Constraints on nperseg constrain df @@ -503,6 +513,16 @@ class DAQSettings(dict): df = self._domain_df.next_smallest(df) if not self._isclose(df, df_prev): self._make_compatible_df(df) + return df + # Finally, as a last resort test if the parameters match. If not, try to adjust nperseg + if not isinf(fs) and 'nperseg' in self: + df = fs / self['nperseg'] + if not self._isclose(df, df_prev): + self['df'] = self._domain_df.next_closest(df_prev) + self._make_compatible_nperseg( + self._domain_nperseg.next_closest(fs / self['df']) + ) + return self['df'] return df @@ -522,14 +542,14 @@ class DAQSettings(dict): df = self.get('df', self.get('f_min', inf)) if not isinf(df): # Constraints on fs constrain nperseg through df/f_min - nperseg = ceil(self._domain_fs.next_largest(df * nperseg) / df) + nperseg = ceil(self._domain_df.round(self._domain_fs.next_largest(df * nperseg) / df)) if nperseg != nperseg_prev: self['fs'] = self._domain_fs.next_largest(fs if not isinf(fs) else df * nperseg_prev) self._make_compatible_df(self['fs'] / nperseg_prev) return self._to_allowed_nperseg(nperseg_prev) if not isinf(fs): # Constraints on df constrain nperseg through fs/f_max - nperseg = ceil(fs / self._domain_df.next_smallest(fs / nperseg)) + nperseg = ceil(self._domain_fs.round(fs / self._domain_df.next_smallest(fs / nperseg))) if nperseg != nperseg_prev: self['df'] = self._domain_df.next_closest(df if not isinf(df) else fs * nperseg_prev) self._make_compatible_fs(self['df'] * nperseg_prev) @@ -538,6 +558,14 @@ class DAQSettings(dict): nperseg = self._domain_nperseg.next_largest(nperseg) if nperseg != nperseg_prev: self._make_compatible_nperseg(nperseg) + return nperseg + # Finally, as a last resort test if the parameters match. If not, try to adjust df + if not isinf(df) and not isinf(fs): + nperseg = ceil(self._domain_fs.round(fs / df)) + if nperseg != nperseg_prev: + self['nperseg'] = self._domain_nperseg.next_closest(nperseg_prev) + self._make_compatible_df(self._domain_df.next_closest(self['fs'] / self['nperseg'])) + return self['nperseg'] return nperseg @@ -630,7 +658,7 @@ class DAQSettings(dict): def _infer_nperseg(self, default: bool = False) -> int | None: # user-set fs or df take precedence over noverlap if 'fs' in self and 'df' in self: - return ceil(self['fs'] / self['df']) + return ceil(self._domain_fs.round(self['fs'] / self['df'])) if 'n_pts' in self: if 'noverlap' in self and 'n_seg' in self: return int((self['n_pts'] + (self['n_seg'] - 1) * self['noverlap']) @@ -648,7 +676,7 @@ class DAQSettings(dict): if not isinf(df) and not isinf(fs): # In principle this should be self._domain_nperseg.next_largest(), but that recurses # infinitely. So we do what we can. 
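# The _domain_fs.round(...) wrapped around fs / df below guards ceil()
# against floating-point noise: plain ceil(1000.0000000000002) yields
# 1001 rather than the intended 1000.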
- return min(ceil(fs / df), self._upper_bound_nperseg()) + return min(ceil(self._domain_fs.round(fs / df)), self._upper_bound_nperseg()) return None def _infer_noverlap(self, default: bool = False) -> int | None: @@ -775,7 +803,7 @@ class DAQSettings(dict): return self['nperseg'] if (nperseg := self._infer_nperseg()) is not None: return nperseg - return self.setdefault('nperseg', ceil(self.fs / self.df)) + return self.setdefault('nperseg', ceil(self._domain_fs.round(self.fs / self.df))) @interdependent_daq_property def noverlap(self) -> int: diff --git a/src/python_spectrometer/daq/simulator.py b/src/python_spectrometer/daq/simulator.py index 8e7915a88d750b393a5e97ab19d423f1aa22b442..c27baa60a256885856e6246d1f01cb0315e0757b 100644 --- a/src/python_spectrometer/daq/simulator.py +++ b/src/python_spectrometer/daq/simulator.py @@ -194,14 +194,14 @@ class DemodulatorQoptColoredNoise(QoptColoredNoise): """ # Don't highpass filter - settings = copy.deepcopy(settings) + settings = copy.copy(settings) settings.pop('f_min', None) return real_space.RC_filter(signal * IQ, **settings) @with_delay - def acquire(self, *, n_avg: int, freq: float = 50, filter_order: int = 1, - modulate_signal: bool = False, - filter_method: Literal['forward', 'forward-backward'] = 'forward', + def acquire(self, *, n_avg: int, freq: float = 50, modulate_signal: bool = False, + filter_order: int = 1, + filter_method: Literal['forward', 'forward-backward'] = 'forward-backward', **settings) -> AcquisitionGenerator[DTYPE]: r"""Simulate demodulated noisy data. @@ -217,8 +217,6 @@ class DemodulatorQoptColoredNoise(QoptColoredNoise): Number of outer averages. freq : Modulation frequency. - filter_order : - RC filter order used to filter the demodulated signal. modulate_signal : Add the simulated noise to the modulation signal to mimic noise picked up by a Lock-In signal travelling through some @@ -237,6 +235,8 @@ class DemodulatorQoptColoredNoise(QoptColoredNoise): with :math:`s(t)` the output signal and :math:`\delta(t)` the noise. + filter_order : + RC filter order used to filter the demodulated signal. filter_method : See :func:`~qutil:qutil.signal_processing.real_space.RC_filter`. 
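The changed `filter_method` default, 'forward-backward', corresponds to zero-phase filtering. A minimal sketch of the distinction using SciPy primitives (not the qutil `RC_filter` implementation):

import numpy as np
from scipy import signal

b, a = signal.butter(1, 0.1)  # first-order low-pass, akin to an RC stage
x = np.random.default_rng(0).standard_normal(1000)

y_forward = signal.lfilter(b, a, x)      # causal filtering, introduces phase lag
y_zerophase = signal.filtfilt(b, a, x)   # forward-backward pass cancels the lag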
diff --git a/src/python_spectrometer/daq/zurich_instruments.py b/src/python_spectrometer/daq/zurich_instruments.py index 64d900573c81900095b4737d58a41cd8fdbe0b04..c7c057a23d085d3580af53cf150d521ab4ab4a08 100644 --- a/src/python_spectrometer/daq/zurich_instruments.py +++ b/src/python_spectrometer/daq/zurich_instruments.py @@ -58,7 +58,7 @@ import sys import time import warnings from abc import ABC -from typing import Any, Dict, Mapping, Optional, Type, Union +from typing import Any, Dict, Mapping, Optional, Type import numpy as np from packaging import version diff --git a/tests/test_daq_settings.py b/tests/test_daq_settings.py index 4cfcb14cd7e4e9810b21a416dd6f52b33fe3de37..8f5b0cd3b023e5c3739ebdf7a5ea6b463a7b2ea5 100644 --- a/tests/test_daq_settings.py +++ b/tests/test_daq_settings.py @@ -39,10 +39,14 @@ def test_daq_settings_warnings(): s = DAQSettings(nperseg=1000, df=0.999) s.fs = 1000 - with pytest.warns(UserWarning, match='Need to change fs from 1001 to 1000.'): + with pytest.warns(UserWarning, match='Need to change df from 1 to 1.001'): s = DAQSettings(fs=1001, df=1) s.nperseg = 1000 + with pytest.warns(UserWarning, match='Need to change nperseg from 1000 to 1001'): + s = DAQSettings(fs=1001, nperseg=1000) + s.df = 1 + def test_daq_settings_exceptions(): @@ -134,6 +138,10 @@ def test_mfli_daq_settings(mock_zi_daq): t = mock_zi_daq.DAQSettings(**s.to_consistent_dict()) t.to_consistent_dict() + s = mock_zi_daq.DAQSettings(f_min=1e1, f_max=1e5) + t = mock_zi_daq.DAQSettings(s.to_consistent_dict()) + t.to_consistent_dict() + def test_reproducibility(): """Test if there are no exceptions or infinite recursions for a diff --git a/tests/test_serialization.py b/tests/test_serialization.py index 595b9376842a1a886cc246d3cce39ee3b05952fd..ae664a3fb8878a43e844286868c0071188c6c1e1 100644 --- a/tests/test_serialization.py +++ b/tests/test_serialization.py @@ -2,6 +2,7 @@ import os import pathlib import random import string +import time from tempfile import mkdtemp import pytest @@ -79,7 +80,8 @@ def spectrometer(monkeypatch, relative_paths: bool, threaded_acquisition: bool): @pytest.fixture -def serialized(spectrometer: Spectrometer): +def serialized(done_saving: Spectrometer): + spectrometer = done_saving stem = ''.join(random.choices(string.ascii_letters, k=10)) try: @@ -96,8 +98,18 @@ def serialized(spectrometer: Spectrometer): remove_file_if_exists(spectrometer.savepath / f'{stem}{ext}') -def test_saving(spectrometer: Spectrometer, relative_paths: bool): +@pytest.fixture +def done_saving(spectrometer: Spectrometer): + while spectrometer._datasaver.jobs: + time.sleep(0.1) + return spectrometer + + +def test_saving(done_saving: Spectrometer, relative_paths: bool): + spectrometer = done_saving + assert spectrometer.savepath.exists() + for file in spectrometer.files: if relative_paths: assert os.path.exists(spectrometer.savepath / file) @@ -105,16 +117,14 @@ def test_saving(spectrometer: Spectrometer, relative_paths: bool): assert os.path.exists(file) -def test_serialization(spectrometer: Spectrometer): - spectrometer.serialize_to_disk('blub') - +def test_serialization(spectrometer: Spectrometer, serialized: pathlib.Path): exts = ['_files.txt'] - if (spectrometer.savepath / 'blub').is_file(): - assert os.path.exists(spectrometer.savepath / 'blub') + if serialized.is_file(): + assert os.path.exists(serialized) else: exts.extend(['.bak', '.dat', '.dir']) for ext in exts: - assert os.path.exists(spectrometer.savepath / f'blub{ext}') + assert os.path.exists(serialized.with_name(serialized.name + 
ext))


def test_deserialization(serialized: pathlib.Path):
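For reference, the behavior of the `_merge_recursive` and `_dict_is_subset` helpers added in core.py above, illustrated with made-up keyword-argument dicts:

base = {'img_kw': {'cmap': 'Blues'}, 'autoscale': 'c'}
upd = {'img_kw': {'interpolation': 'none'}}

merged = _merge_recursive(base, upd)
# -> {'img_kw': {'cmap': 'Blues', 'interpolation': 'none'}, 'autoscale': 'c'}
assert _dict_is_subset(upd, merged)
assert not _dict_is_subset({'img_kw': {'cmap': 'Reds'}}, merged)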