2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -7,7 +7,7 @@ repos:
- id: check-yaml
- id: check-toml
- id: check-ast
- id: check-docstring-first
# - id: check-docstring-first # let variables have docstrings
- id: check-merge-conflict
- id: check-added-large-files
- id: mixed-line-ending
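Note: this hook is disabled because these modules use Sphinx-style attribute docstrings, i.e. a bare string literal placed immediately after an assignment, which check-docstring-first reports as a misplaced module docstring. A minimal sketch of the pattern (not part of this diff; names are illustrative):

"""Module docstring still comes first, as usual."""

TIMEOUT_S = 10.0
"""Attribute docstring: picked up by Sphinx autodoc, flagged by check-docstring-first."""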
22 changes: 22 additions & 0 deletions src/instrument/devices/aps_source.py
@@ -0,0 +1,22 @@
"""
APS only: connect with facility information
===========================================

.. autosummary::
~aps
"""

import logging

logger = logging.getLogger(__name__)
logger.info(__file__)

import apstools.devices # noqa: E402

aps = apstools.devices.ApsMachineParametersDevice(name="aps")
"""
Information from the APS Storage Ring, including current.

.. seealso:: `apstools.devices.ApsMachineParametersDevice
<https://bcda-aps.github.io/apstools/latest/api/_devices.html#apstools.devices.aps_machine.ApsMachineParametersDevice>`_
"""
10 changes: 6 additions & 4 deletions src/instrument/devices/tetramm_picoammeter.py
@@ -55,9 +55,11 @@ def __init__(self, *args, port_name="TetrAMM", **kwargs):
self.current4.mean_value.kind = "hinted"

def trigger(self):
'''
Tetramms are operated in continuous mode. Will just accept the current value.
'''
"""
TetrAMMs are operated in continuous mode.

The subsequent '.read()' will return the current values.
"""
if self._staged != Staged.yes:
raise RuntimeError(
"This detector is not ready to trigger."
@@ -67,7 +69,7 @@ def trigger(self):
self._status = self._status_type(self)
self._acquisition_signal.put(1, wait=False)
self.generate_datum(self._image_name, ttime.time(), {})
self._status.set_finished()
self._status.set_finished() # <-- done immediately
return self._status


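The trigger() change above follows a common ophyd pattern for detectors that acquire continuously: trigger() returns a status object that is marked finished immediately, and the subsequent read() picks up whatever the detector last computed. A minimal sketch of the same idea on a plain Device (illustrative only, not the TetrAMM class from this diff):

from ophyd import Component, Device, EpicsSignalRO
from ophyd.status import DeviceStatus


class ContinuousReadout(Device):
    """Toy detector that is always acquiring; trigger is effectively a no-op."""

    mean_value = Component(EpicsSignalRO, "MeanValue_RBV", kind="hinted")

    def trigger(self):
        status = DeviceStatus(self)
        status.set_finished()  # nothing to start; data is already streaming
        return status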
92 changes: 92 additions & 0 deletions src/instrument/devices/xpcs_support.py
@@ -0,0 +1,92 @@
"""
XPCS plans use some ophyd Signals that are not EPICS PVs.

.. autosummary::
~xpcs_dm
"""

import logging
import pathlib

from apstools.utils import dm_api_ds
from bluesky import plan_stubs as bps
from ophyd import Component
from ophyd import Device
from ophyd import Signal

logger = logging.getLogger(__name__)
logger.info(__file__)

from ..initialize_bs_tools import RE # noqa: E402


class XPCS_Plan_Signals_Device(Device):
"""
Signals and methods used in XPCS Data Acquisition Plans with DM workflows.

.. rubric:: Signals
.. autosummary::
~experiment_name
~header
~index

.. rubric:: Methods
.. autosummary::
~data_path
~filename_base
~increment_index
~reset_index

.. rubric:: Properties
.. autosummary::
~header_index_str
"""

experiment_name = Component(Signal, value="")
"""Name of the current APS Data Management experiment."""

header = Component(Signal, value=RE.md.get("xpcs_header", "A001"))
"""Identify a set of related measurements, such as "B123"."""

index = Component(Signal, value=RE.md.get("xpcs_index", 0))
"""Increments at the start of each data acquisition plan."""

def data_path(self, title: str, nframes: int = 0):
"""Return _this_ DM experiment's data directory."""
experiment = dm_api_ds().getExperimentByName(self.experiment_name.get())
path = pathlib.Path(experiment["dataDirectory"])
filepath = path / self.filename_base(title, nframes)
# return f"{experiment['dataDirectory']}/{self.filename_base(title, nframes)}"
return filepath

def filename_base(self, title: str, nframes: int = 0):
"""Return the base part of the filename."""
return f"{self.header_index_str}_{title}-{nframes:05d}"

def full_filename(self, title: str, suffix: str = ".hdf", nframes: int = 0):
"""Return the full filename."""
base = self.filename_base(title, nframes)
path = self.data_path(title, nframes)
return path / f"{base}{suffix}"

@property
def header_index_str(self):
"""Return a formatted string with the header and index."""
return f"{self.header.get()}_{self.index.get():03d}"

def increment_index(self):
"""stub: Increment the index for the next run."""
yield from bps.mvr(self.index, 1)

def reset_index(self, index: int = 0):
"""
stub: (Re)set the 'index'. Default=0.

Data directory and file names are defined by the 'header' and
the 'index'.
"""
yield from bps.mv(self.index, index)


xpcs_dm = XPCS_Plan_Signals_Device(name="xpcs_dm") # TODO: pick a better name
"""Signals used in XPCS Data Acquisition Plans with DM workflows."""
5 changes: 3 additions & 2 deletions src/instrument/plans/__init__.py
@@ -1,5 +1,5 @@
"""
Custom Plan Definitions & Instatiations
Custom Plans
"""

# flake8: noqa
@@ -9,6 +9,7 @@
from .demo_sim_1d import demo_sim_1d

## beamline specific plans
from .bdp_demo import xpcs_bdp_demo_plan, xpcs_setup_user
# from .bdp_demo import xpcs_bdp_demo_plan, xpcs_setup_user
from .each_session import xpcs_setup_user
from .mesh_plans import xpcs_mesh
from .select_sample_env import select_sample_env
5 changes: 3 additions & 2 deletions src/instrument/plans/bdp_demo.py
@@ -303,9 +303,10 @@ def xpcs_bdp_demo_plan(
# instrument metadata (expected by nxwriter)
# values from pete7.hdf
# TODO: set from actual instrument values
absolute_cross_section_scale=1,
bcx=0,
absolute_cross_section_scale=1, # ok as-is
bcx=0, # used by analysis?
bcy=0,
#! good as hard constants now, might be replaced by QMAP
ccdx=1,
ccdx0=1,
ccdy=1,
63 changes: 63 additions & 0 deletions src/instrument/plans/each_session.py
@@ -0,0 +1,63 @@
"""
Plans to be run for each session or experiment.

.. autosummary::
~xpcs_setup_user
"""

import logging

from apstools.utils import dm_isDaqActive
from apstools.utils import dm_start_daq
from apstools.utils import validate_experiment_dataDirectory
from bluesky import plan_stubs as bps

logger = logging.getLogger(__name__)
logger.info(__file__)

from ..devices.xpcs_support import xpcs_dm # noqa: E402
from ..initialize_bs_tools import RE # noqa: E402
from .ad_setup_plans import write_if_new # noqa: E402


def xpcs_setup_user(dm_experiment_name: str, index: int = -1):
"""
Configure bluesky session for this user and DM experiment.

PARAMETERS

dm_experiment_name *str*:
Name of active APS Data Management experiment for this
data acquisition.
index *int*:
Sequence number of XPCS data acquisition.

.. hint:: Set ``index=-1`` to continue with current
'xpcs_index' value.
"""
validate_experiment_dataDirectory(dm_experiment_name)
yield from bps.mv(xpcs_dm.experiment_name, dm_experiment_name)

if index >= 0:
yield from write_if_new(xpcs_dm.index, index)
RE.md["xpcs_index"] = xpcs_dm.index.get()

# Needed when data acquisition (Bluesky, EPICS, ...) writes to Voyager.
# Full path to directory where new data will be written.
# XPCS new data is written to APS Voyager storage (path
# starting with ``/gdata/``). Use "@voyager" in this case.
# DM sees this and knows not to copy from voyager to voyager.
data_directory = "@voyager"

# Check DM DAQ is running for this experiment, if not then start it.
if not dm_isDaqActive(dm_experiment_name):
# Need another DAQ if also writing to a different directory (off voyager).
# A single DAQ can be used to cover any subdirectories.
# Anything in them will be uploaded.
msg = (
f"Starting DM DAQ: experiment {dm_experiment_name!r}"
f" in data directory {data_directory!r}."
)
logger.info(msg)
print(msg) # Was not showing up in the logs.
dm_start_daq(dm_experiment_name, data_directory)
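A hedged usage sketch for the start of a session (not part of this diff; the experiment name is illustrative and must already exist in APS Data Management with a valid dataDirectory):

# In the bluesky session, after the instrument package is imported:
RE(xpcs_setup_user("my-dm-experiment", index=0))

# Later sessions can keep the stored counter (index=-1 is the default):
# RE(xpcs_setup_user("my-dm-experiment"))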