Example #1
def bt(home_dir):
    # start a beamtime
    pi = "Billinge "
    saf_num = 300000
    wavelength = xpd_wavelength
    experimenters = [["van der Banerjee", "S0ham", 1], ["Terban ", " Max", 2]]
    # copying example longterm config file
    os.makedirs(glbl_dict["xpdconfig"], exist_ok=True)
    pytest_dir = rs_fn("xpdacq", "tests/")
    config = "XPD_beamline_config.yml"
    configsrc = os.path.join(pytest_dir, config)
    shutil.copyfile(configsrc, glbl_dict["blconfig_path"])
    assert os.path.isfile(glbl_dict["blconfig_path"])
    bt = _start_beamtime(pi,
                         saf_num,
                         experimenters,
                         wavelength=wavelength,
                         test=True)
    # spreadsheet
    xlf = "300000_sample.xlsx"
    src = os.path.join(pytest_dir, xlf)
    shutil.copyfile(src, os.path.join(glbl_dict["import_dir"], xlf))
    import_sample_info(saf_num, bt)
    yield bt
    # when we are done with the glbl, delete the folders.
    shutil.rmtree(glbl_dict["home"])
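
A minimal usage sketch (hypothetical test name; pytest injects the fixture). The normalized "Billinge" value is grounded in the assertions of Example #34 below:

def test_bt_metadata(bt):
    # hypothetical consumer of the `bt` fixture above
    # _start_beamtime normalizes the trailing space in "Billinge "
    assert bt.get("bt_piLast") == "Billinge"
    assert bt.get("bt_wavelength") == xpd_wavelength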
Example #2
def test_count_with_calib():
    if "shutter" not in xpd_configuration:
        xpd_configuration["shutter"] = shctl1
    poni_file = rs_fn("xpdacq", "tests/Ni_poni_file.poni")
    md = xbt.load_calibration_md(poni_file)
    plan = xbt.count_with_calib([pe1c], num=2, delay=3, calibration_md=md)
    summarize_plan(plan)  # dry-run: print each message without executing the plan
Example #3
def test_nomad_parser():
    fp = rs_fn('shed_sidewinder', 'data/nomad')
    c = 0  # flag: flipped once at least one document is parsed
    for n, d in parse(fp):
        assert n in {'start', 'descriptor', 'event', 'stop'}
        assert isinstance(d, dict)
        c = 1
    assert c == 1
Example #4
def load_schemas(service_name, schema_path, schema_names, debug=False):
    schemas = {}
    fn = '{}/{{}}'.format(schema_path)

    for name, filename in schema_names.items():
        with open(rs_fn(service_name, resource_name=fn.format(filename))) as fin:
            if debug:
                print('loading schema {0} for service: {1}'.format(filename, service_name))
            schemas[name] = ujson.load(fin)

    return schemas
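
A hedged usage sketch for load_schemas; the service name and schema map below are hypothetical:

# Assumes a package `myservice` shipping a resource file schemas/configuration.json.
schemas = load_schemas('myservice', 'schemas',
                       {'configuration': 'configuration.json'},
                       debug=True)
print(sorted(schemas))  # -> ['configuration']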
Example #5
    def test_import_sample_info_core_function(self):
        # no bt, default argument will fail
        self.assertRaises(TypeError, lambda: _import_sample_info(bt=None))
        # make bt but no spreadsheet
        pytest_dir = rs_fn("xpdacq", "tests/")
        config = "XPD_beamline_config.yml"
        configsrc = os.path.join(pytest_dir, config)
        shutil.copyfile(configsrc, os.path.join(self.config_dir, config))
        self.bt = _start_beamtime(
            self.PI_name,
            self.saf_num,
            self.experimenters,
            wavelength=self.wavelength,
            test=True,
        )
        # expect FileNotFoundError as no spreadsheet
        xlf = "300000_sample.xlsx"
        self.assertFalse(os.path.isfile(os.path.join(glbl["import_dir"], xlf)))
        self.assertRaises(FileNotFoundError,
                          lambda: _import_sample_info(bt=self.bt))
        # copy spreadsheet
        xlf = "300000_sample.xlsx"
        src = os.path.join(self.pkg_rs, xlf)
        shutil.copyfile(src, os.path.join(glbl["import_dir"], xlf))
        # problematic ones
        xlf2 = "999999_sample.xlsx"
        src = os.path.join(os.path.dirname(__file__), xlf2)
        shutil.copyfile(src, os.path.join(glbl["import_dir"], xlf2))
        # test with ordinary import
        # expect to pass with explicit argument
        _import_sample_info(300000, self.bt)
        # check imported sample metadata
        for sample in self.bt.samples.values():
            # Sample is a ChainMap with self.maps[1] == bt
            self.assertEqual(sample.maps[1], self.bt)

        # expect ValueError with inconsistent SAF_num between bt and input
        self.bt["bt_safN"] = str(300179)
        self.assertTrue(os.path.isfile(os.path.join(glbl["import_dir"], xlf)))
        self.assertRaises(ValueError,
                          lambda: _import_sample_info(300000, self.bt))

        # expect TypeError with incorrect beamtime
        self.assertRaises(TypeError, lambda: _import_sample_info(bt=set()))
        # error when validating the md
        self.bt["bt_safN"] = str(999999)
        self.assertRaises(
            RuntimeError,
            lambda: _import_sample_info(999999, self.bt, validate_only=True),
        )
        # test the get_md method
        sample_obj_list = list(self.bt.samples.values())
        for i, el in enumerate(sample_obj_list):
            self.assertEqual(dict(el), self.bt.samples.get_md(i))
Example #6
 def setUp(self):
     self.base_dir = Path(glbl["base"])
     self.home_dir = Path(glbl["home_dir"])
     self.config_dir = Path(glbl["xpdconfig"])
     self.PI_name = "Billinge "
     # must be 300000 for proper load of config yaml => don't change
     self.saf_num = 300000
     self.wavelength = 0.1812
     self.experimenters = [
         ("van der Banerjee", "S0ham", 1),
         ("Terban ", " Max", 2),
     ]
     # make xpdUser dir. That is required for simulation
     if self.home_dir.is_dir():
         shutil.rmtree(self.home_dir)
     self.home_dir.mkdir()
     # set simulation objects
     db = databroker.v1.temp()
     configure_device(
         area_det=pe1c,
         temp_controller=cs700,
         shutter=shctl1,
         db=db,
         filter_bank=fb,
     )
     if self.config_dir.is_dir():
         shutil.rmtree(self.config_dir)
     self.config_dir.mkdir()
     pytest_dir = Path(rs_fn("xpdacq", "tests/"))
     config = "XPD_beamline_config.yml"
     configsrc = pytest_dir.joinpath(config)
     shutil.copyfile(configsrc, glbl["blconfig_path"])
     self.bt = _start_beamtime(
         self.PI_name,
         self.saf_num,
         self.experimenters,
         wavelength=self.wavelength,
         test=True,
     )
     xlf = "300000_sample.xlsx"
     src = os.path.join(os.path.dirname(__file__), xlf)
     shutil.copyfile(src, os.path.join(glbl["import_dir"], xlf))
     import_sample_info(self.saf_num, self.bt)
     self.xrun = CustomizedRunEngine({})
     self.xrun.beamtime = self.bt
     # link metadatastore: insert all documents into the databroker
     self.xrun.subscribe(db.v1.insert, "all")
     # grab init_exp_hash_uid
     self.init_exp_hash_uid = glbl["exp_hash_uid"]
Example #7
 def setUp(self):
     self.base_dir = glbl["base"]
     self.home_dir = os.path.join(self.base_dir, "xpdUser")
     self.config_dir = os.path.join(self.base_dir, "xpdConfig")
     self.PI_name = "Billinge "
     self.saf_num = 300000  # must be 300000 => don't change
     self.wavelength = 0.1812
     self.experimenters = [
         ("van der Banerjee", "S0ham", 1),
         ("Terban ", " Max", 2),
     ]
     self.pkg_rs = rs_fn("xpdacq", "examples/")
     # make xpdUser dir. That is required for simulation
     os.makedirs(self.home_dir, exist_ok=True)
     os.makedirs(self.config_dir, exist_ok=True)
Example #8
 def test_blocking_beamtime(self):
     os.mkdir(self.home_dir)
     # copying example longterm config file
     pytest_dir = rs_fn("xpdacq", "tests/")
     config = "XPD_beamline_config.yml"
     configsrc = os.path.join(pytest_dir, config)
     shutil.copyfile(configsrc, os.path.join(self.config_dir, config))
     # test if start_beamtime properly modifies the state
     _start_beamtime(self.PI_name, self.saf_num, test=True)
     assert glbl['_active_beamtime']
     # test if it blocks after beamtime
     glbl['_active_beamtime'] = False
     self.assertRaises(
         xpdAcqError,
         lambda: _start_beamtime(self.PI_name, self.saf_num, test=True))
     # restore
     glbl['_active_beamtime'] = True
Example #9
def test_load_calibrant(fresh_xrun, bt):
    xrun = fresh_xrun
    xrun.beamtime = bt
    # pyfai factory
    for k, calibrant_obj in ALL_CALIBRANTS.items():
        # lightweight callback; bind loop variables as default arguments
        # so the closure does not rely on late binding
        def check_eq(name, doc, k=k, calibrant_obj=calibrant_obj):
            assert calibrant_obj.dSpacing == doc["dSpacing"]
            assert k == doc["sample_name"]

        t = xrun.subscribe(check_eq, "start")
        # execute
        run_calibration(calibrant=k,
                        phase_info=k,
                        RE_instance=xrun,
                        wait_for_cal=False)
        # clean
        xrun.unsubscribe(t)
    # invalid calibrant
    with pytest.raises(xpdAcqException):
        run_calibration(calibrant="pyFAI",
                        phase_info="buggy",
                        RE_instance=xrun,
                        wait_for_cal=False)
    # filepath
    pytest_dir = rs_fn("xpdacq", "tests/")
    src = os.path.join(pytest_dir, "Ni24.D")
    dst_base = os.path.abspath(str(uuid.uuid4()))
    os.makedirs(dst_base)
    fn = str(uuid.uuid4())
    dst = os.path.join(dst_base, fn + ".D")
    shutil.copy(src, dst)
    c = Calibrant(filename=dst)

    def check_eq(name, doc):
        assert c.dSpacing == doc["dSpacing"]
        assert dst == doc["sample_name"]

    t = xrun.subscribe(check_eq, "start")
    # execute
    run_calibration(calibrant=dst,
                    phase_info="buggy",
                    RE_instance=xrun,
                    wait_for_cal=False)
    # clean
    xrun.unsubscribe(t)
Example #10
def load_configuration(name):
    """
    Load configuration data from a cascading series of locations.

    The precedence order is (highest priority last):

    1. The conda environment
       - CONDA_ENV/etc/{name}.yaml (if CONDA_ETC_ is defined for the env)
    2. The shipped version
    3. At the system level
       - /etc/{name}.yml
    4. In the user's home directory
       - ~/.config/{name}.yml

    where
        {name} is xpdan

    Parameters
    ----------
    name : str
        The expected base-name of the configuration files

    Returns
    -------
    conf : dict
        Dictionary keyed on ``fields`` with the values extracted
    """
    filenames = [
        os.path.join(rs_fn('xpdan', 'config/xpdan.yml')),
        os.path.join('/etc', name + '.yml'),
        os.path.join(os.path.expanduser('~'), '.config', name + '.yml'),
    ]

    if 'CONDA_ETC_' in os.environ:
        filenames.insert(0, os.path.join(
            os.environ['CONDA_ETC_'], name + '.yml'))

    config = {}
    for filename in filenames:
        if os.path.isfile(filename):
            with open(filename) as f:
                config.update(yaml.safe_load(f))  # safe_load: yaml.load without a Loader is an error on modern PyYAML
            logger.debug("Using glbl specified in config file. \n%r",
                         config)
    return config
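
A usage sketch: because later files in the cascade override earlier ones, a key set in ~/.config/xpdan.yml wins over the shipped default. The 'base_dir' key is illustrative, not guaranteed:

conf = load_configuration('xpdan')
print(conf.get('base_dir'))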
Example #11
 def setUp(self):
     self.base_dir = glbl["base"]
     self.home_dir = os.path.join(self.base_dir, "xpdUser")
     self.config_dir = os.path.join(self.base_dir, "xpdConfig")
     self.PI_name = "Billinge "
     # must be 30079 for proper load of config yaml => don't change
     self.saf_num = 30079
     self.wavelength = 0.1812
     self.experimenters = [
         ("van der Banerjee", "S0ham", 1),
         ("Terban ", " Max", 2),
     ]
     # make xpdUser dir. That is required for simulation
     os.makedirs(self.home_dir, exist_ok=True)
     os.makedirs(self.config_dir, exist_ok=True)
     # set simulation objects
     configure_device(
         db=db,
         shutter=shctl1,
         area_det=pe1c,
         temp_controller=cs700,
         filter_bank=fb,
     )
     pytest_dir = rs_fn("xpdacq", "tests/")
     config = "XPD_beamline_config.yml"
     configsrc = os.path.join(pytest_dir, config)
     shutil.copyfile(configsrc, os.path.join(glbl["xpdconfig"], config))
     assert os.path.isfile(os.path.join(glbl["xpdconfig"], config))
     self.bt = _start_beamtime(
         self.PI_name,
         self.saf_num,
         self.experimenters,
         wavelength=self.wavelength,
         test=True,
     )
     xlf = "300000_sample.xlsx"
     src = os.path.join(pytest_dir, xlf)
     shutil.copyfile(src, os.path.join(glbl["import_dir"], xlf))
Example #12
import os
import shutil
import tempfile

import yaml
from databroker import Broker
try:
    db = Broker.named('xpd')
except NameError:
    from xpdsim import db
import logging
from pkg_resources import resource_filename as rs_fn

logger = logging.getLogger(__name__)
pytest_dir = rs_fn('xpdan', 'tests')


def load_configuration(name):
    """
    Load configuration data from a cascading series of locations.

    The precedence order is (highest priority last):

    1. The conda environment
       - CONDA_ENV/etc/{name}.yaml (if CONDA_ETC_ is defined for the env)
    2. The shipped version
    3. At the system level
       - /etc/{name}.yml
    4. In the user's home directory
       - ~/.config/{name}.yml
Example #13
import tornado.web
from pkg_resources import resource_filename as rs_fn
import ujson
import pymongo
import uuid
import time as ttime
from ..exceptions import ConfTrakException

SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {'configuration': 'configuration.json'}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    try:
        with open(rs_fn('conftrak', resource_name=fn.format(filename))) as fin:
            schemas[name] = ujson.load(fin)
    except FileNotFoundError:
        raise ConfTrakException('Schema file not found or does not exist')


def _compose_err_msg(code, status, m_str=''):
    fmsg = str(status) + str(m_str)
    return tornado.web.HTTPError(status_code=code, reason=fmsg)


def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
        Handler for incoming request to collection
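
The two-stage str.format seen in this and the following examples first bakes the schema directory into the template, leaving one placeholder per filename; a small self-contained demonstration:

fn = '{}/{{}}'.format('schemas')  # doubled braces escape to a literal '{}'
assert fn == 'schemas/{}'
assert fn.format('configuration.json') == 'schemas/configuration.json'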
Example #14
import uuid
import time as ttime

class AmostraException(Exception):
    pass


SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {'sample': 'sample.json',
                'request': 'request.json',
                'container': 'container.json'}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    try:
        with open(rs_fn('amostra',
                        resource_name=fn.format(filename))) as fin:
            schemas[name] = ujson.load(fin)
    except FileNotFoundError:
        raise AmostraException('Schema file not found or does not exist')


def compose_err_msg(code, status, m_str=''):
    fmsg = str(status) + str(m_str)
    return tornado.web.HTTPError(status_code=code, reason=fmsg)


def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
Example #15
from pkg_resources import resource_filename as rs_fn
import ujson
import pymongo.cursor

SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {
    'analysis_header': 'analysis_header.json',
    'analysis_tail': 'analysis_tail.json',
    'data_reference': 'data_reference.json',
    'bulk_data_reference': 'bulk_data_reference.json',
    'data_reference_header': 'data_reference_header.json'
}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('analysisstore',
                    resource_name=fn.format(filename))) as fin:
        schemas[name] = ujson.load(fin)


def _compose_err_msg(code, status, m_str=''):
    reason = status + str(m_str)
    return tornado.web.HTTPError(code, reason=reason)


def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
        Handler for incoming request to collection
Example #16
]

ANALYSIS_IN_SCHEMA_NAMES = {
    DocumentNames.start: 'schemas/analysis_in/run_start.json',
    DocumentNames.stop: 'schemas/analysis_in/run_stop.json',
    DocumentNames.event: 'schemas/analysis_in/event.json',
    DocumentNames.event_page: 'schemas/analysis_in/event_page.json',
    DocumentNames.descriptor: 'schemas/analysis_in/event_descriptor.json',
    DocumentNames.datum: 'schemas/analysis_in/datum.json',
    DocumentNames.datum_page: 'schemas/analysis_in/datum_page.json',
    DocumentNames.resource: 'schemas/analysis_in/resource.json'
}

analysis_in_schemas = {}
for doc_name, filename in ANALYSIS_IN_SCHEMA_NAMES.items():
    with Path(rs_fn('pdfstream', filename)).open("r") as fin:
        analysis_in_schemas[doc_name] = json.load(fin)

ANALYSIS_OUT_SCHEMA_NAMES = {
    DocumentNames.start: 'schemas/analysis_out/run_start.json',
    DocumentNames.stop: 'schemas/analysis_out/run_stop.json',
    DocumentNames.event: 'schemas/analysis_out/event.json',
    DocumentNames.event_page: 'schemas/analysis_out/event_page.json',
    DocumentNames.descriptor: 'schemas/analysis_out/event_descriptor.json',
    DocumentNames.datum: 'schemas/analysis_out/datum.json',
    DocumentNames.datum_page: 'schemas/analysis_out/datum_page.json',
    DocumentNames.resource: 'schemas/analysis_out/resource.json'
}

analysis_out_schemas = {}
for doc_name, filename in ANALYSIS_OUT_SCHEMA_NAMES.items():
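
A hedged sketch of putting the loaded schemas to use, assuming the third-party jsonschema package is available:

import jsonschema

def validate_incoming(doc_name, doc):
    # doc_name is a DocumentNames member keying the dict built above;
    # raises jsonschema.ValidationError if doc does not conform
    jsonschema.validate(doc, analysis_in_schemas[doc_name])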
Example #17
import yaml
import shutil
import subprocess
from time import strftime

from IPython import get_ipython
from pkg_resources import resource_filename as rs_fn

from .beamtime import *
from .tools import _graceful_exit
from .xpdacq_conf import glbl_dict, _load_beamline_config

# list of exposure times for the pre-populated ScanPlans created inside
# _start_beamtime
EXPO_LIST = [5, 0.1, 1, 10, 30, 60]
DATA_DIR = rs_fn("xpdacq", "data/")


def _start_beamtime(PI_last,
                    saf_num,
                    experimenters=[],  # caution: mutable default argument
                    wavelength=None,
                    test=False):
    """function for start a beamtime"""
    home_dir = glbl_dict["home"]
    if not os.path.exists(home_dir):
        raise RuntimeError(
            "WARNING: fundamental directory {} does not "
            "exist.\nPlease contact beamline staff immediately".format(
                home_dir))
Example #18
import json
from enum import Enum
from pkg_resources import resource_filename as rs_fn


__all__ = ['DocumentNames', 'schemas']


class DocumentNames(Enum):
    stop = 'stop'
    start = 'start'
    descriptor = 'descriptor'
    event = 'event'
    bulk_events = 'bulk_events'


SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {DocumentNames.start: 'schemas/run_start.json',
                DocumentNames.stop: 'schemas/run_stop.json',
                DocumentNames.event: 'schemas/event.json',
                DocumentNames.bulk_events: 'schemas/bulk_events.json',
                DocumentNames.descriptor: 'schemas/event_descriptor.json'}
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('event_model', filename)) as fin:
        schemas[name] = json.load(fin)

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
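
A minimal lookup against the schemas dict built above; the 'title' field is assumed to be present in the shipped JSON schemas:

from event_model import DocumentNames, schemas

print(schemas[DocumentNames.start].get('title'))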
Example #19
from xpdacq.xpdacq import (
    _validate_dark,
    CustomizedRunEngine,
    _auto_load_calibration_file,
    set_beamdump_suspender,
)
from xpdacq.simulation import pe1c, cs700, shctl1, db, fb
import ophyd
from bluesky import Msg
import bluesky.examples as be
from bluesky.callbacks import collector

from pkg_resources import resource_filename as rs_fn
from xpdsim import dexela

pytest_dir = rs_fn("xpdacq", "tests/")


class xrunTest(unittest.TestCase):
    def setUp(self):
        self.base_dir = glbl["base"]
        self.home_dir = glbl["home_dir"]
        self.config_dir = glbl["xpdconfig"]
        self.PI_name = "Billinge "
        # must be 300000 for proper load of config yaml => don't change
        self.saf_num = 300000
        self.wavelength = 0.1812
        self.experimenters = [
            ("van der Banerjee", "S0ham", 1),
            ("Terban ", " Max", 2),
        ]
Example #20
if parse_version(yaml.__version__) > parse_version("3.13"):
    yaml_loader = partial(yaml.full_load)
else:
    yaml_loader = partial(yaml.load)

if os.name == "nt":
    _user_conf = os.path.join(os.environ["APPDATA"], "acq")
    CONFIG_SEARCH_PATH = (_user_conf, )
else:
    _user_conf = os.path.join(os.path.expanduser("~"), ".config", "acq")
    _local_etc = os.path.join(os.path.dirname(os.path.dirname(sys.executable)),
                              "etc", "acq")
    _system_etc = os.path.join("/", "etc", "acq")
    CONFIG_SEARCH_PATH = (_user_conf, _local_etc, _system_etc)

sim_config_path = rs_fn("xpdconf", "examples/sim.yaml")
sim_db_config_path = rs_fn("xpdconf", "examples/sim_db.yaml")


def lookup_config():
    """Copyright (c) 2014-2017 Brookhaven Science Associates, Brookhaven
    National Laboratory"""
    tried = []
    d = None
    for path in CONFIG_SEARCH_PATH:
        tried.append(path)
        config_path = Path(path)
        for filename in sorted(
                itertools.chain(config_path.glob("*.yaml"),
                                config_path.glob("*.yml"))):
            if (filename and os.path.isfile(os.path.join(path, filename))
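
A quick sketch showing that yaml_loader, defined at the top of this example, behaves like a plain load under either PyYAML version:

import io

# full_load (PyYAML > 3.13) and the legacy load both resolve standard tags
assert yaml_loader(io.StringIO("a: 1")) == {"a": 1}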
Example #21
class AmostraException(Exception):
    pass


SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {
    'sample': 'sample.json',
    'request': 'request.json',
    'container': 'container.json'
}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    try:
        with open(rs_fn('amostra', resource_name=fn.format(filename))) as fin:
            schemas[name] = ujson.load(fin)
    except FileNotFoundError:
        raise AmostraException('Schema file not found or does not exist')


def _compose_err_msg(code, status, m_str=''):
    fmsg = str(status) + str(m_str)
    return tornado.web.HTTPError(code, reason=fmsg)


def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
Example #22
import tornado.web
from pkg_resources import resource_filename as rs_fn
import ujson
import pymongo.cursor


SCHEMA_PATH = 'schema'
SCHEMA_NAMES = {'run_start': 'run_start.json',
                'run_stop': 'run_stop.json',
                'event': 'event.json',
                'bulk_events': 'bulk_events.json',
                'descriptor': 'event_descriptor.json'}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('metadataservice', resource_name=fn.format(filename))) as fin:
        schemas[name] = ujson.load(fin)


def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
        Handler for incoming request to collection

    Returns
    -------
    dict
        Unpacked query in dict format.
    """
    if isinstance(handler, tornado.web.RequestHandler):
Example #23
import yaml
from pkg_resources import resource_filename as rs_fn

from databroker import Broker

if os.name == 'nt':
    _user_conf = os.path.join(os.environ['APPDATA'], 'acq')
    CONFIG_SEARCH_PATH = (_user_conf, )
else:
    _user_conf = os.path.join(os.path.expanduser('~'), '.config', 'acq')
    _local_etc = os.path.join(os.path.dirname(os.path.dirname(sys.executable)),
                              'etc', 'acq')
    _system_etc = os.path.join('/', 'etc', 'acq')
    CONFIG_SEARCH_PATH = (_user_conf, _local_etc, _system_etc)

sim_config_path = rs_fn('xpdconf', 'examples/sim.yaml')


def lookup_config():
    """Copyright (c) 2014-2017 Brookhaven Science Associates, Brookhaven
    National Laboratory"""
    tried = []
    d = None
    for path in CONFIG_SEARCH_PATH:
        if os.path.exists(path):
            filenames = os.listdir(path)
        else:
            filenames = []
        filename = next(iter(filenames), None)
        tried.append(path)
        if filename and os.path.isfile(os.path.join(path, filename)):
Example #24
SCHEMA_NAMES = {
    DocumentNames.start: 'schemas/run_start.json',
    DocumentNames.stop: 'schemas/run_stop.json',
    DocumentNames.event: 'schemas/event.json',
    DocumentNames.event_page: 'schemas/event_page.json',
    DocumentNames.descriptor: 'schemas/event_descriptor.json',
    DocumentNames.datum: 'schemas/datum.json',
    DocumentNames.datum_page: 'schemas/datum_page.json',
    DocumentNames.resource: 'schemas/resource.json',
    # DEPRECATED:
    DocumentNames.bulk_events: 'schemas/bulk_events.json',
    DocumentNames.bulk_datum: 'schemas/bulk_datum.json'
}
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('event_model', filename)) as fin:
        schemas[name] = json.load(fin)

__version__ = get_versions()['version']
del get_versions

ComposeRunBundle = namedtuple(
    'ComposeRunBundle', 'start_doc compose_descriptor compose_resource '
    'compose_stop')
ComposeDescriptorBundle = namedtuple(
    'ComposeDescriptorBundle',
    'descriptor_doc compose_event compose_event_page')
ComposeResourceBundle = namedtuple(
    'ComposeResourceBundle', 'resource_doc compose_datum compose_datum_page')
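
An illustration, with entirely hypothetical values, of how ComposeRunBundle groups a start document with factories for the documents that follow it:

bundle = ComposeRunBundle(
    start_doc={'uid': 'hypothetical-uid', 'time': 0.0},
    compose_descriptor=None,  # placeholders; the real fields are callables
    compose_resource=None,
    compose_stop=None,
)
print(bundle.start_doc['uid'])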

Example #25
    stop = 'stop'
    start = 'start'
    descriptor = 'descriptor'
    event = 'event'
    bulk_events = 'bulk_events'

SCHEMA_PATH = 'schema'
SCHEMA_NAMES = {DocumentNames.start: 'run_start.json',
                DocumentNames.stop: 'run_stop.json',
                DocumentNames.event: 'event.json',
                DocumentNames.bulk_events: 'bulk_events.json',
                DocumentNames.descriptor: 'event_descriptor.json'}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('bluesky', fn.format(filename))) as fin:
        schemas[name] = json.load(fin)


loop = asyncio.get_event_loop()
loop.set_debug(True)


class Msg(namedtuple('Msg_base', ['command', 'obj', 'args', 'kwargs'])):
    __slots__ = ()

    def __new__(cls, command, obj=None, *args, **kwargs):
        return super(Msg, cls).__new__(cls, command, obj, args, kwargs)

    def __repr__(self):
        return '{}: ({}), {}, {}'.format(
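
A usage sketch for the Msg namedtuple above:

m = Msg('set', None, 5, wait=True)
# command='set', obj=None, args=(5,), kwargs={'wait': True}
print(m.command, m.args, m.kwargs)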
Example #26
 def test_end_beamtime(self):
     _required_info = ["bt_piLast", "bt_safN", "bt_uid"]
     # end_beamtime has been run
     os.makedirs(self.home_dir, exist_ok=True)
     self.assertRaises(FileNotFoundError, lambda: _end_beamtime())
     # entire trip. _start_beamtime to _end_beamtime
     # copying example longterm config file
     pytest_dir = rs_fn("xpdacq", "tests/")
     config = "XPD_beamline_config.yml"
     configsrc = os.path.join(pytest_dir, config)
     shutil.copyfile(configsrc, os.path.join(self.config_dir, config))
     self.bt = _start_beamtime(
         self.PI_name,
         self.saf_num,
         self.experimenters,
         wavelength=self.wavelength,
         test=True,
     )
     bt_path_src = os.path.join(glbl_dict["yaml_dir"], "bt_bt.yml")
     bt_path_dst = os.path.join(glbl_dict["import_dir"], "bt_bt.yml")
     # move out for now, no bt
     shutil.move(bt_path_src, bt_path_dst)
     self.assertTrue(os.path.isfile(bt_path_dst))
     self.assertFalse(os.path.isfile(bt_path_src))
     self.assertRaises(SystemExit, lambda: _load_bt(glbl_dict["yaml_dir"]))
     # move back and test archiving functionality
     shutil.move(bt_path_dst, bt_path_src)
     self.assertTrue(os.path.isfile(bt_path_src))
     self.assertFalse(os.path.isfile(bt_path_dst))
     pi_name = self.bt.get("bt_piLast")
     saf_num = self.bt.get("bt_safN")
     bt_uid = self.bt.get("bt_uid")
     archive_name = _load_bt_info(self.bt, _required_info)
     os.makedirs(glbl_dict['archive_dir'])
     assert os.path.isdir(glbl_dict['archive_dir'])
     archive_full_name, local_archive_name = _tar_user_data(archive_name)
     test_tar_name = '_'.join([pi_name, saf_num, bt_uid,
                               strftime('%Y-%m-%d-%H%M')])
     # is tar file name correct?
     self.assertEqual(archive_full_name,
                      os.path.join(glbl_dict['archive_dir'],
                                   test_tar_name))
     # are contents tarred correctly?
     # archive_test_dir = os.path.join(glbl_dict['home'], 'tar_test')
     content_list = os.listdir(archive_full_name)
     # is remote copy the same name as local archive?
     assert os.path.basename(local_archive_name) in content_list
     assert len(content_list) == 1
     # is every directory included
     full_fp_list = list(map(os.path.basename,
                             glbl_dict['allfolders']))
     exclude_fp_list = ['xpdUser', 'xpdConfig', 'yml',
                        'samples', 'scanplans']
     bkg_fp_list = [el for el in full_fp_list if el not in
                    exclude_fp_list]  # exclude top dirs
     remote_fp_list = os.listdir(os.path.join(archive_full_name,
                                              local_archive_name))
     # difference should be empty set
     assert not set(bkg_fp_list).difference(remote_fp_list)
     # hidden files should be excluded from the archive
     assert not list(glob.glob(archive_full_name + "**/.*"))
     # now test deleting directories
     _delete_local_archive(local_archive_name)
     self.assertFalse(os.path.isdir(local_archive_name))
Example #27
# See LICENSE.txt for license information.
#
##############################################################################

import os
import time as ttime
from itertools import chain

import bluesky.examples as be
import numpy as np
from cycler import cycler
from pims import ImageSequence
from pkg_resources import resource_filename as rs_fn
from bluesky.utils import new_uid

DATA_DIR = rs_fn('xpdsim', 'data/')


class PutGet:
    """basic class to have set/put method"""
    def __init__(self, numeric_val=1):
        self._val = numeric_val

    def put(self, val):
        """set value"""
        self._val = val
        return self._val

    def get(self):
        """read current value"""
        return self._val
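
Minimal usage of the PutGet helper above:

sig = PutGet(numeric_val=0)
sig.put(5)
assert sig.get() == 5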
Example #28
import os
import shutil
import tempfile

import yaml
from databroker import Broker
try:
    db = Broker.named('xpd')
except (NameError, FileNotFoundError):
    from xpdsim import db
import logging
from pkg_resources import resource_filename as rs_fn

logger = logging.getLogger(__name__)
pytest_dir = rs_fn('xpdan', 'tests')


def load_configuration(name):
    """
    Load configuration data from a cascading series of locations.

    The precedence order is (highest priority last):

    1. The conda environment
       - CONDA_ENV/etc/{name}.yaml (if CONDA_ETC_ is defined for the env)
    2. The shipped version
    3. At the system level
       - /etc/{name}.yml
    4. In the user's home directory
       - ~/.config/{name}.yml
Example #29
import os
import yaml
import pytest
from functools import partial
from pkg_resources import resource_filename as rs_fn
from pkg_resources import parse_version

EXP_DIR = rs_fn("xpdconf", "examples/")


def test_yaml_load():
    # test version
    if parse_version(yaml.__version__) > parse_version('3.13'):
        loader = partial(yaml.full_load)
    else:
        loader = partial(yaml.load)
    # assert no warning
    with pytest.warns(None) as record:
        fn = os.path.join(EXP_DIR, 'sim.yaml')
        with open(fn) as f:
            loader(f)
    assert not record
Example #30
from functools import partial

import numpy as np
from cycler import cycler
from ophyd.sim import NumpySeqHandler, SynSignalRO
from pkg_resources import resource_filename as rs_fn

from xpdsim.area_det import det_factory, nsls_ii_path, xpd_wavelength, \
    img_gen, build_image_cycle, DEXELA_IMG_SIZE, BLACKFLY_IMG_SIZE
from xpdsim.build_sim_db import build_sim_db
from xpdsim.movers import shctl1, cs700, fb

__version__ = '0.4.2'

pyfai_path = rs_fn("xpdsim", "data/pyfai/pyFAI_calib.yml")
pyfai_poni = rs_fn("xpdsim", "data/pyfai/test.poni")

image_file = rs_fn(
    "xpdsim",
    "data/XPD/ni/sub_20170626"
    "-150911_Ni_Tim_series_tseries_1_e910af_0250.tif",
)

sim_db_dir, db = build_sim_db()  # default is sqlite
db.reg.register_handler("NPY_SEQ", NumpySeqHandler)

# simple detector that outputs 5 by 5 noisy images
simple_pe1c = det_factory()
# detector with real images
xpd_pe1c = det_factory(build_image_cycle(nsls_ii_path),
                       data_key="pe1_image",
Example #31
def beamline_config_file():
    return rs_fn("xpdacq", "tests/XPD_beamline_config.yml")
Example #32
import tornado.web
from pkg_resources import resource_filename as rs_fn
import ujson
import pymongo.cursor


SCHEMA_PATH = 'schemas'
SCHEMA_NAMES = {'analysis_header': 'analysis_header.json',
                'analysis_tail': 'analysis_tail.json',
                'data_reference': 'data_reference.json',
                'bulk_data_reference': 'bulk_data_reference.json',
                'data_reference_header': 'data_reference_header.json'}
fn = '{}/{{}}'.format(SCHEMA_PATH)
schemas = {}
for name, filename in SCHEMA_NAMES.items():
    with open(rs_fn('analysisstore', resource_name=fn.format(filename))) as fin:
        schemas[name] = ujson.load(fin)

def _compose_err_msg(code, status, m_str=''):
    reason = status + str(m_str)
    return tornado.web.HTTPError(code, reason=reason)

def unpack_params(handler):
    """Unpacks the queries from the body of the header
    Parameters
    ----------
    handler: tornado.web.RequestHandler
        Handler for incoming request to collection

    Returns
    -------
    dict
Example #33
from pkg_resources import resource_filename as rs_fn

pyfai_path = rs_fn('xpdsim', 'data/pyfai/pyFAI_calib.yml')
Example #34
 def test_start_beamtime(self):
     # sanity check on the xpdUser directory.
     # First make sure the code works right when it doesn't exist.
     self.assertFalse(os.path.isdir(self.home_dir))
     self.assertRaises(
         RuntimeError, lambda: _start_beamtime(self.PI_name, self.saf_num)
     )
     # now make the proper thing: the xpdUser directory
     os.mkdir(self.home_dir)
     self.assertTrue(os.path.isdir(self.home_dir))
     # but put a file in it
     self.newfile = os.path.join(self.home_dir, "touched.txt")
     open(self.newfile, "a").close()
     self.assertTrue(os.path.isfile(self.newfile))
     self.assertRaises(
         FileExistsError,
         lambda: _start_beamtime(self.PI_name, self.saf_num),
     )
     os.remove(self.newfile)
     # do the same but with directories
     self.newdir = os.path.join(self.home_dir, "userJunk")
     os.mkdir(self.newdir)
     self.assertTrue(os.path.isdir(self.newdir))
     self.assertRaises(
         FileExistsError,
         lambda: _start_beamtime(self.PI_name, self.saf_num),
     )
     os.removedirs(self.newdir)
     # real doing:
     os.mkdir(self.home_dir)
     self.assertTrue(os.path.isdir(self.home_dir))
     # copying example longterm config file
     pytest_dir = rs_fn("xpdacq", "tests/")
     config = "XPD_beamline_config.yml"
     configsrc = os.path.join(pytest_dir, config)
     shutil.copyfile(configsrc, os.path.join(self.config_dir, config))
     self.bt = _start_beamtime(
         self.PI_name,
         self.saf_num,
         self.experimenters,
         wavelength=self.wavelength,
         test=True,
     )
     self.assertIsInstance(self.bt, Beamtime)
     # test normalized md
     self.assertEqual("Billinge", self.bt.get("bt_piLast"))
     self.assertEqual("123", self.bt.get("bt_safN"))
     self.assertEqual(self.experimenters, self.bt.get("bt_experimenters"))
     self.assertEqual(self.wavelength, self.bt.get("bt_wavelength"))
     self.assertEqual(os.getcwd(), self.home_dir)
     # test pre-populated ScanPlans
     self.assertEqual(len(self.bt.scanplans), len(EXPO_LIST))
     for sp, expect_arg in zip(list(self.bt.scanplans.values()), EXPO_LIST):
         self.assertEqual(sp["sp_args"], (expect_arg,))
     # test if yml files are saved properly
     for expo in EXPO_LIST:
         f_path = os.path.join(
             glbl_dict["scanplan_dir"], "ct_{}.yml".format(expo)
         )
         self.assertTrue(os.path.isfile(f_path))
     # test if it can be reloaded
     for current_sp in self.bt.scanplans.values():
         reload_sp = ScanPlan.from_yaml(current_sp.to_yaml())
         self.assertEqual(reload_sp, current_sp)
         self.assertFalse(id(reload_sp) == id(current_sp))
Example #35
import numpy as np
from cycler import cycler
from ophyd import sim, Device
from pkg_resources import resource_filename as rs_fn
from tifffile import imread

# extra config
XPD_SHUTTER_CONF = {"open": 60, "close": 0}
DATA_DIR_STEM = "xpdsim.data"
# image size
PE_IMG_SIZE = (2048, 2048)
DEXELA_IMG_SIZE = (3072, 3088)
BLACKFLY_IMG_SIZE = (20, 24)
# package filepath
nsls_ii_path = rs_fn(DATA_DIR_STEM + ".XPD", "ni")
xpd_wavelength = 0.1823
chess_path = rs_fn(DATA_DIR_STEM, "chess")


def build_image_cycle(path, key='pe1_image'):
    """Build image cycles, essentially generators with endless images

    Parameters
    ----------
    path: str
        Path to the files to be used as the base for the cycle.
    key: str, optional
        Key of the entire image sequence. Defaults to ``'pe1_image'``.

    Returns