Merge pull request #47 from AbbyGi/beam-stats-ophyd-objects
BeamStatisticsReport with granular ophyd objects
mrakitin authored Nov 8, 2021
2 parents 114384d + 9b6ed79 commit 55755da
Showing 10 changed files with 309 additions and 88 deletions.
26 changes: 26 additions & 0 deletions .github/workflows/flake8.yml
@@ -0,0 +1,26 @@
name: Check Code Style

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    steps:

      - uses: actions/checkout@v2

      - uses: actions/setup-python@v2

      - name: Install flake8
        shell: bash -l {0}
        run: |
          set -vxeuo pipefail
          python -m pip install --upgrade pip
          python -m pip install flake8
          python -m pip list
      - name: Run flake8
        shell: bash -l {0}
        run: flake8
8 changes: 4 additions & 4 deletions examples/prepare_det_env.py
@@ -1,8 +1,8 @@
import datetime
import json  # noqa F401

-import bluesky.plans as bp
-import bluesky.plan_stubs as bps
+import bluesky.plans as bp  # noqa F401
+import bluesky.plan_stubs as bps  # noqa F401
import databroker
import matplotlib.pyplot as plt
import numpy as np  # noqa F401
@@ -27,10 +27,10 @@

RE.subscribe(db.insert)
db.reg.register_handler('srw', SRWFileHandler, overwrite=True)
-# db.reg.register_handler('shadow', ShadowFileHandler, overwrite=True)
+db.reg.register_handler('shadow', ShadowFileHandler, overwrite=True)
db.reg.register_handler('SIREPO_FLYER', SRWFileHandler, overwrite=True)

plt.ion()

-root_dir = '/tmp/srw_det_data'
+root_dir = '/tmp/sirepo-bluesky-data'
_ = make_dir_tree(datetime.datetime.now().year, base_path=root_dir)
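
With both the 'srw' and 'shadow' handlers registered, files written by either simulation backend can be loaded back through the databroker catalog. A minimal retrieval sketch, assuming the environment above has already recorded a run (the detector field name "w9_image" is hypothetical):

hdr = db[-1]                          # most recent run in the catalog
images = list(hdr.data("w9_image"))   # the registered handler reads the file from disk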
1 change: 1 addition & 0 deletions requirements-dev.txt
@@ -1,6 +1,7 @@
# These are required for developing the package (running the tests, building
# the documentation) but not necessarily required for _using_ it.
black
+dictdiffer
flake8
isort
pytest
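
dictdiffer is a development-only addition; a plausible use (an assumption, not stated in the diff) is comparing nested Sirepo model dictionaries in tests. A small illustration with made-up dicts:

import dictdiffer

old = {"models": {"aperture": {"horizontalSize": 1.0}}}
new = {"models": {"aperture": {"horizontalSize": 2.0}}}
print(list(dictdiffer.diff(old, new)))
# [('change', 'models.aperture.horizontalSize', (1.0, 2.0))]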
2 changes: 1 addition & 1 deletion requirements.txt
@@ -7,7 +7,7 @@
numconv
numpy
ophyd
pymongo
-pyqt5>=5.9,<5.12
+pyqt5>=5.9
requests
scikit-image
# These requirements should be installed from the 'nsls2forge' or 'conda-forge' conda channel.
2 changes: 1 addition & 1 deletion sirepo_bluesky/__init__.py
@@ -30,4 +30,4 @@ def describe(self):
                dtype="array",
            )
        )
-        return resource_document_data
\ No newline at end of file
+        return resource_document_data
112 changes: 90 additions & 22 deletions sirepo_bluesky/sirepo_ophyd.py
@@ -1,6 +1,8 @@
import copy
import datetime
+import hashlib
import json
+import logging
from collections import deque
from pathlib import Path

@@ -12,6 +14,13 @@

from . import ExternalFileReference
from .srw_handler import read_srw_file
+from .shadow_handler import read_shadow_file
+
+logger = logging.getLogger("sirepo-bluesky")
+# Note: the following handler could be created/added to the logger on the client side:
+# import sys
+# stream_handler = logging.StreamHandler(sys.stdout)
+# logger.addHandler(stream_handler)

RESERVED_OPHYD_TO_SIREPO_ATTRS = {"position": "element_position"}  # ophyd <-> sirepo
RESERVED_SIREPO_TO_OPHYD_ATTRS = {
@@ -28,7 +37,7 @@ def __init__(self, sirepo_dict, sirepo_param, *args, **kwargs):
        self._sirepo_param = RESERVED_SIREPO_TO_OPHYD_ATTRS[sirepo_param]

    def set(self, value, *, timeout=None, settle_time=None):
-        print(f"Setting value for {self.name} to {value}")
+        logger.debug(f"Setting value for {self.name} to {value}")
        self._sirepo_dict[self._sirepo_param] = value
        self._readback = value
        return NullStatus()
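
Since the prints are now routed through the module-level "sirepo-bluesky" logger, client code has to opt in to see them. A minimal sketch, following the note in the imports above:

import logging
import sys

logger = logging.getLogger("sirepo-bluesky")
logger.setLevel(logging.DEBUG)  # show the debug messages emitted by set()/trigger()
logger.addHandler(logging.StreamHandler(sys.stdout))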
@@ -37,29 +46,39 @@ def put(self, *args, **kwargs):
        self.set(*args, **kwargs).wait()


-class SirepoWatchpoint(Device):
+class DeviceWithJSONData(Device):
+    sirepo_data_json = Cpt(Signal, kind="normal", value="")
+    sirepo_data_hash = Cpt(Signal, kind="normal", value="")
+
+    def trigger(self, *args, **kwargs):
+        super().trigger(*args, **kwargs)
+
+        json_str = json.dumps(self.connection.data)
+        json_hash = hashlib.sha256(json_str.encode()).hexdigest()
+        self.sirepo_data_json.put(json_str)
+        self.sirepo_data_hash.put(json_hash)
+
+        return NullStatus()
+
+
+class SirepoWatchpoint(DeviceWithJSONData):
    image = Cpt(ExternalFileReference, kind="normal")
    shape = Cpt(Signal)
    mean = Cpt(Signal, kind="hinted")
    photon_energy = Cpt(Signal, kind="normal")
    horizontal_extent = Cpt(Signal)
    vertical_extent = Cpt(Signal)
-    sirepo_json = Cpt(Signal, kind="normal", value="")

    def __init__(
        self,
        *args,
        sirepo_server="http://localhost:8000",
-        root_dir="/tmp/srw_det_data",
+        root_dir="/tmp/sirepo-bluesky-data",
        assets_dir=None,
        result_file=None,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)

        self.sirepo_server = sirepo_server

        self._root_dir = root_dir
        self._assets_dir = assets_dir
        self._result_file = result_file
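
The two signals on DeviceWithJSONData give every reading a provenance fingerprint: sirepo_data_json stores the full Sirepo model verbatim, and sirepo_data_hash makes it cheap to check whether two runs used identical inputs. A standalone sketch of the same scheme (the model dict is made up):

import hashlib
import json

model = {"models": {"aperture": {"horizontalSize": 1.0}}}
json_str = json.dumps(model)
print(hashlib.sha256(json_str.encode()).hexdigest())  # identical models -> identical hash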
@@ -68,22 +87,22 @@ def __init__(
        self._resource_document = None
        self._datum_factory = None

-        self._ndim = 2
+        sim_type = self.connection.data["simulationType"]
+        allowed_sim_types = ("srw", "shadow")
+        if sim_type not in allowed_sim_types:
+            raise RuntimeError(f"Unknown simulation type: {sim_type}\n"
+                               f"Allowed simulation types: {allowed_sim_types}")

    def trigger(self, *args, **kwargs):
-        super().trigger(*args, **kwargs)
-        print(f"Custom trigger for {self.name}")
-
-        if self._assets_dir is None:
-            date = datetime.datetime.now()
-            self._assets_dir = date.strftime("%Y/%m/%d")
+        logger.debug(f"Custom trigger for {self.name}")

-        if self._result_file is None:
-            self._result_file = f"{new_uid()}.dat"
+        date = datetime.datetime.now()
+        self._assets_dir = date.strftime("%Y/%m/%d")
+        self._result_file = f"{new_uid()}.dat"

        self._resource_document, self._datum_factory, _ = compose_resource(
            start={"uid": "needed for compose_resource() but will be discarded"},
-            spec="srw",
+            spec=self.connection.data["simulationType"],
            root=self._root_dir,
            resource_path=str(Path(self._assets_dir) / Path(self._result_file)),
            resource_kwargs={},
@@ -105,7 +124,16 @@
        with open(sim_result_file, "wb") as f:
            f.write(datafile)

-        self._resource_document["resource_kwargs"]["ndim"] = self._ndim
+        conn_data = self.connection.data
+        sim_type = conn_data["simulationType"]
+        if sim_type == "srw":
+            ndim = 2  # this will always be a report with 2D data.
+            ret = read_srw_file(sim_result_file, ndim=ndim)
+            self._resource_document["resource_kwargs"]["ndim"] = ndim
+        elif sim_type == "shadow":
+            nbins = conn_data['models'][conn_data['report']]['histogramBins']
+            ret = read_shadow_file(sim_result_file, histogram_bins=nbins)
+            self._resource_document["resource_kwargs"]["histogram_bins"] = nbins

        def update_components(_data):
            self.shape.put(_data["shape"])
@@ -114,19 +142,22 @@ def update_components(_data):
            self.horizontal_extent.put(_data["horizontal_extent"])
            self.vertical_extent.put(_data["vertical_extent"])

-        ret = read_srw_file(sim_result_file, ndim=self._ndim)
        update_components(ret)

        datum_document = self._datum_factory(datum_kwargs={})
        self._asset_docs_cache.append(("datum", datum_document))

        self.image.put(datum_document["datum_id"])

-        self.sirepo_json.put(json.dumps(self.connection.data))
-
        self._resource_document = None
        self._datum_factory = None

+        logger.debug(f"\nReport for {self.name}: "
+                     f"{self.connection.data['report']}\n")
+
+        # We call the trigger on super at the end to update the sirepo_data_json
+        # and the corresponding hash after the simulation is run.
+        super().trigger(*args, **kwargs)
        return NullStatus()

    def describe(self):
@@ -145,12 +176,49 @@ def collect_asset_docs(self):
            yield item


+class BeamStatisticsReport(DeviceWithJSONData):
+    # NOTE: TES aperture changes don't seem to change the beam statistics
+    # report graph on the website?
+
+    report = Cpt(Signal, value={}, kind="normal")
+
+    def __init__(self, connection, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.connection = connection
+
+    def trigger(self, *args, **kwargs):
+        logger.debug(f"Custom trigger for {self.name}")
+
+        self.connection.data['report'] = 'beamStatisticsReport'
+
+        self.connection.run_simulation()
+
+        datafile = self.connection.get_datafile()
+        self.report.put(json.dumps(json.loads(datafile.decode())))
+
+        logger.debug(f"\nReport for {self.name}: "
+                     f"{self.connection.data['report']}\n")
+
+        # We call the trigger on super at the end to update the sirepo_data_json
+        # and the corresponding hash after the simulation is run.
+        super().trigger(*args, **kwargs)
+        return NullStatus()
+
+    def stage(self):
+        super().stage()
+        self.report.put({})
+
+    def unstage(self):
+        super().unstage()
+        self.report.put({})
+
+
def create_classes(sirepo_data, connection, create_objects=True):
    classes = {}
    objects = {}
    data = copy.deepcopy(sirepo_data)
    for i, el in enumerate(data["models"]["beamline"]):
-        print(f"Processing {el}...")
+        logger.debug(f"Processing {el}...")
        for ophyd_key, sirepo_key in RESERVED_OPHYD_TO_SIREPO_ATTRS.items():
            # We have to rename the reserved attribute names. Example error
            # from ophyd:
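
Taken together, the pieces above are used roughly as follows. A hedged end-to-end sketch (not part of this diff): it assumes a Sirepo server on localhost with an existing 'shadow' simulation, and the simulation id "00000002" and the device name "aperture" are hypothetical:

from sirepo_bluesky.sirepo_bluesky import SirepoBluesky
from sirepo_bluesky.sirepo_ophyd import BeamStatisticsReport, create_classes

connection = SirepoBluesky("http://localhost:8000")
data, schema = connection.auth("shadow", "00000002")

classes, objects = create_classes(data, connection=connection)
globals().update(**objects)  # exposes beamline elements, e.g. `aperture`

bsr = BeamStatisticsReport(name="bsr", connection=connection)
# In a bluesky session:
#   RE(bp.scan([bsr], aperture.horizontalSize, 0.5, 2.5, 5))
# Each reading then carries `report` plus the `sirepo_data_json`/`sirepo_data_hash` pair.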
[The remaining 4 changed files did not load and are not shown.]
