Code Example #1
File: db.py Project: stufisher/flask-dataviewer
import os

import ispyb


def get_dc(id):
    # Locate the ISPyB configuration file via the environment
    conf_file = os.getenv('ISPYB_CONFIG_FILE')
    if conf_file is None:
        return

    with ispyb.open(conf_file) as i:
        mx_acquisition = i.mx_acquisition

        dc = mx_acquisition.get_data_collection(id)

        print(dc.file_template_full_python)
        print(dc.file_directory)
        return dc
Code Example #2
import logging
import os

import ispyb


def get_dc(id):
    conf_file = os.getenv('ISPYB_CONFIG_FILE')
    if conf_file is None:
        return

    with ispyb.open(conf_file) as i:
        mx_acquisition = i.mx_acquisition
        
        dc = mx_acquisition.get_data_collection(id)

        logging.getLogger('image-service').debug('DC File Template: {}'.format(dc.file_template_full_python))
        logging.getLogger('image-service').debug('DC File Directory: {}'.format(dc.file_directory))
        # print dc.file_template_full_python
        # print dc.file_directory
        return dc
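The debug calls above only emit anything if the 'image-service' logger has an effective DEBUG level and a handler attached. A minimal, stdlib-only setup that would make those messages visible might look like the following sketch (an assumption about application startup, not part of the original project):

import logging

# Hypothetical startup configuration: send all records, including the DEBUG
# messages from the 'image-service' logger used above, to stderr.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(name)s %(levelname)s %(message)s",
)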
Code Example #3
File: pipeline2ispyb.py Project: tschoonj/ispyb-api
#
# 2014-09-24
#
# Examples of how to use the modules in the ispyb package,
# demonstrating how to store data from the whole
# data acquisition and processing pipeline.
#

from __future__ import absolute_import, division, print_function

import sys
from datetime import datetime

import ispyb

with ispyb.open(sys.argv[1]) as conn:
    core = conn.core
    mxacquisition = conn.mx_acquisition
    mxprocessing = conn.mx_processing
    mxscreening = conn.mx_screening

    # Find the id for a given visit
    sessionid = core.retrieve_visit_id("cm14451-2")

    # Create a new data collection entry:
    params = mxacquisition.get_data_collection_group_params()
    params["parentid"] = sessionid
    # experimenttype must be one of the allowed values: None, 'SAD', 'SAD - Inverse Beam', 'OSC', 'Collect - Multiwedge', 'MAD', 'Helical', 'Multi-positional', 'Mesh',  'Burn', 'MAD - Inverse Beam', 'Screening'
    params["experimenttype"] = "OSC"
    params["starttime"] = datetime.strptime("2014-09-25 13:00:00",
                                            "%Y-%m-%d %H:%M:%S")
Code Example #4
import pytest
import ispyb

def test_ws_connection(testconfig_ws):
    with pytest.raises(NotImplementedError):
        with ispyb.open(testconfig_ws) as conn:
            assert conn
Code Example #5
from __future__ import print_function
import os
import sys

import ispyb
from ispyb.xmltools import mx_data_reduction_to_ispyb, xml_file_to_dict

if len(sys.argv) not in (3, 4):
    print("Usage:")
    print("%s conf_file xml_in_file" % sys.argv[0])
    print("%s conf_file xml_in_file xml_out_file" % sys.argv[0])
    sys.exit(1)

conf_file = sys.argv[1]

with ispyb.open(conf_file) as conn:
    mxprocessing = conn.mx_processing

    xml_file = sys.argv[2]
    xml_dir = os.path.split(xml_file)[0]
    # Find the data collection associated with this data reduction run
    try:
        with open(os.path.join(xml_dir, ".dc_id"), "r") as f:
            dc_id = int(f.read())
        print("Got DC ID %d from file system" % dc_id)
    except (IOError, ValueError):
        dc_id = None

    mx_data_reduction_dict = xml_file_to_dict(xml_file)
    (app_id, ap_id, scaling_id,
     integration_id) = mx_data_reduction_to_ispyb(mx_data_reduction_dict,
                                                  dc_id, mxprocessing)
Code Example #6
File: job.py Project: DiamondLightSource/ispyb-api
import os
import sys
from optparse import SUPPRESS_HELP, OptionGroup, OptionParser

import ispyb

try:
    import zocalo.configuration
except ImportError:
    zocalo = None


def main(cmd_args=sys.argv[1:]):
    parser = OptionParser(
        usage="ispyb.job [options] JOBID",
        description=
        "Command line tool to manipulate ISPyB processing table entries.",
    )

    recipe_dir = None
    if zocalo:
        zc = zocalo.configuration.from_file()
        zc.activate()
        if hasattr(zc, "storage"):
            recipe_dir = zc.storage.get("zocalo.recipe_directory")

    if recipe_dir and os.path.isdir(recipe_dir):
        available_recipes = sorted(
            r[6:-5] for r in os.listdir(recipe_dir)
            if r.startswith("ispyb-") and r.endswith(".json"))
    else:
        available_recipes = None

    parser.add_option("-?", action="help", help=SUPPRESS_HELP)
    parser.add_option(
        "-v",
        "--verbose",
        action="store_true",
        dest="verbose",
        default=False,
        help="show full job record",
    )

    group = OptionGroup(
        parser,
        "Processing job options",
        "These options can be used to create or modify a processing job.",
    )
    group.add_option(
        "--new",
        dest="new",
        action="store_true",
        default=False,
        help=
        "create a new processing job. If --new is specified you must not specify another JOBID",
    )
    group.add_option(
        "--dcid",
        dest="dcid",
        action="store",
        type="string",
        default=None,
        help=
        "set the primary data collection ID for the processing job (default: DCID of first sweep)",
    )
    group.add_option(
        "--display",
        dest="display",
        action="store",
        type="string",
        default=None,
        help="set the display name of the processing job",
    )
    group.add_option(
        "--comment",
        dest="comment",
        action="store",
        type="string",
        default=None,
        help="set a comment string for the processing job",
    )
    if available_recipes:
        group.add_option(
            "--recipe",
            dest="recipe",
            action="store",
            type="choice",
            default=None,
            choices=available_recipes,
            help=
            "set a recipe for the processing job. Recipe name must correspond to a filename "
            f"(plus ispyb- prefix and .json extension) in {recipe_dir}: {', '.join(available_recipes)}",
        )
    else:
        group.add_option(
            "--recipe",
            dest="recipe",
            action="store",
            type="string",
            default=None,
            help="set a recipe for the processing job",
        )
    group.add_option(
        "--source",
        dest="source",
        action="store",
        type="choice",
        default="user",
        choices=["user", "automatic"],
        help=
        "set whether the processing job was triggered by a 'user' (default) or by 'automatic' processing",
    )
    group.add_option(
        "--add-param",
        dest="parameters",
        action="append",
        type="string",
        default=[],
        metavar="KEY:VALUE",
        help="add a 'KEY:VALUE' pair string parameter to a processing job",
    )
    group.add_option(
        "--add-sweep",
        dest="sweeps",
        action="append",
        type="string",
        default=[],
        metavar="DCID:START:END",
        help=
        "add an image range from a sweep of any data collection ID to the processing job. "
        "If no sweep is defined all images from the primary data collection ID are used if the data collection ID can be inferred",
    )
    if zocalo:
        group.add_option(
            "--trigger",
            dest="trigger",
            action="store_true",
            default=False,
            help="start the processing job immediately after creation",
        )
        group.add_option(
            "--trigger-variable",
            dest="triggervariables",
            action="append",
            type="string",
            default=[],
            metavar="KEY:VALUE",
            help=
            "Set an additional variable for recipe evaluation when starting the processing job",
        )
    parser.add_option_group(group)

    group = OptionGroup(
        parser,
        "Processing program options",
        "These options can be used to create or update "
        "processing program entries belonging to a processing job.",
    )
    group.add_option(
        "-c",
        "--create",
        dest="create",
        action="store_true",
        default=False,
        help="create a new processing program entry for the JOBID",
    )
    group.add_option(
        "-u",
        "--update",
        dest="update",
        action="store",
        type="int",
        default=None,
        help="update an existing processing program entry",
    )
    parser.add_option_group(group)

    group = OptionGroup(
        parser,
        "Processing program attributes",
        "These options can be used when creating or updating "
        "processing program entries.",
    )
    group.add_option(
        "-p",
        "--program",
        dest="program",
        action="store",
        type="string",
        default=None,
        help="set a program name for processing entry",
    )
    group.add_option(
        "-l",
        "--cmdline",
        dest="cmdline",
        action="store",
        type="string",
        default=None,
        help="set full command line for processing entry",
    )
    group.add_option(
        "-e",
        "--environment",
        dest="environment",
        action="store",
        type="string",
        default=None,
        help="set an environment string for processing entry",
    )
    group.add_option(
        "-r",
        "--result",
        dest="result",
        action="store",
        type="choice",
        default=None,
        choices=["success", "failure"],
        help="set a job result: success, failure",
    )
    group.add_option(
        "-s",
        "--status",
        dest="status",
        action="store",
        type="string",
        default=None,
        help="set program status information",
    )
    group.add_option(
        "--start-time",
        dest="starttime",
        metavar="TIMESTAMP",
        action="store",
        type="string",
        default=None,
        help="set the program start time (default: now)",
    )
    group.add_option(
        "--update-time",
        dest="updatetime",
        metavar="TIMESTAMP",
        action="store",
        type="string",
        default=None,
        help="date the updated information (default: now)",
    )
    parser.add_option_group(group)
    (options, args) = parser.parse_args(cmd_args)

    if not args and not options.new:
        if cmd_args:
            print("No job ID specified\n")
        parser.print_help()
        sys.exit(0)
    if len(args) > 1:
        sys.exit("Only one job ID can be specified")
    if options.new and args:
        sys.exit("Can not create a new job ID when a job ID is specified")
    if options.new and options.update:
        sys.exit("Can not update a program when creating a new job ID")
    if zocalo and options.triggervariables and not options.trigger:
        sys.exit("--trigger-variable only makes sense with --trigger")

    i = ispyb.open()

    if options.new:
        jobid = create_processing_job(i, options)
    else:
        jobid = args[0]

    if options.create:
        i.mx_processing.upsert_program_ex(
            job_id=jobid,
            name=options.program,
            command=options.cmdline,
            environment=options.environment,
            time_start=options.starttime,
            time_update=options.updatetime,
            message=options.status,
            status={
                "success": 1,
                "failure": 0
            }.get(options.result),
        )

    elif options.update:
        i.mx_processing.upsert_program_ex(
            program_id=options.update,
            status={
                "success": 1,
                "failure": 0
            }.get(options.result),
            time_start=options.updatetime,
            time_update=options.updatetime,
            message=options.status,
        )

    rp = i.get_processing_job(jobid)
    try:
        rp.load()
    except ispyb.NoResult:
        print(f"Processing ID {jobid} not found")
        sys.exit(1)
    print(f"""Processing ID {rp.jobid}:

       Name: {rp.name}
     Recipe: {rp.recipe}
   Comments: {rp.comment}
 Primary DC: {rp.DCID}
    Defined: {rp.timestamp}""")

    if options.verbose:
        if rp.parameters:
            maxlen = max(max(map(len, dict(rp.parameters))), 11)
            print("\n Parameters:")
            print("\n".join("%%%ds: %%s" % maxlen % (p[0], p[1])
                            for p in sorted(rp.parameters)))

        if rp.sweeps:
            print("\n     Sweeps: ", end="")
            print(("\n" + " " * 13).join(
                f"DCID {sweep.DCID:7}  images{sweep.start:5} -{sweep.end:5}"
                for sweep in rp.sweeps))

    if rp.programs:
        print_format = "\nProgram #{0.app_id}: {0.name}, {0.status_text}"

        if options.verbose:
            print_format += "\n    Command: {0.command}"
            print_format += "\nEnvironment: {0.environment}"
            print_format += "\n    Defined: {0.time_defined}"
            print_format += "\n    Started: {0.time_start}"
            print_format += "\nLast Update: {0.time_update}"

        print_format += "\n  Last Info: {0.message}"

        for program in rp.programs:
            print(print_format.format(program))

            if options.verbose:
                try:
                    attachments = (i.mx_processing.
                                   retrieve_program_attachments_for_program_id(
                                       program.app_id))
                    for filetype in sorted(
                        {a["fileType"]
                         for a in attachments}):
                        for attachment in sorted(
                            (a for a in attachments
                             if a["fileType"] == filetype),
                                key=lambda a: a["fileName"],
                        ):
                            print(" {att[fileType]:>10s}: {att[fileName]}".
                                  format(att=attachment))
                except ispyb.NoResult:
                    pass
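Because main() takes its argument list as a parameter, the tool can be driven programmatically as well as from the shell. A hypothetical invocation using only options defined above (the DCID, sweep range and comment are placeholder values, and working ISPyB credentials are required since ispyb.open() is called inside):

main([
    "--new",
    "--dcid", "1234567",
    "--add-sweep", "1234567:1:3600",
    "--comment", "manual reprocessing",
])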
Code Example #7
File: factory.py Project: tacaswell/ispyb-api
import ispyb

def create_connection(conf_file):
    import warnings
    warnings.warn("deprecated, use ispyb.open()", DeprecationWarning)
    return ispyb.open(conf_file)
Code Example #8
import ispyb
# Connector class path assumed from the ispyb-api package layout
from ispyb.connector.mysqlsp.main import ISPyBMySQLSPConnector

def test_session_from_envvar(testconfig, monkeypatch):
    monkeypatch.setenv("ISPYB_CREDENTIALS", testconfig)
    conn = ispyb.open()
    assert isinstance(conn, ISPyBMySQLSPConnector)
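The test above suggests that ispyb.open() falls back to the ISPYB_CREDENTIALS environment variable when called without an argument. A minimal sketch of relying on that fallback outside a test (the credentials path is a placeholder, and the fallback behaviour is inferred from the test rather than from documentation):

import os

import ispyb

# Assumed behaviour: with no explicit path, ispyb.open() reads the
# configuration file named by ISPYB_CREDENTIALS.
os.environ["ISPYB_CREDENTIALS"] = "/path/to/credentials.cfg"
with ispyb.open() as conn:
    print(conn.mx_acquisition)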
Code Example #9
File: conftest.py Project: rjgildea/ispyb-api
import pytest
import ispyb

@pytest.fixture
def testdb(testconfig):
    """Return an ISPyB connection object for the test database configuration."""
    with ispyb.open(testconfig) as conn:
        yield conn
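A test that consumes this fixture receives the already-opened connection and never has to manage configuration or cleanup itself. A minimal illustrative test (the test body is an assumption, not taken from the project):

def test_connection_provides_mx_acquisition(testdb):
    # pytest injects the connection yielded by the testdb fixture above; the
    # mx_acquisition interface is the one used throughout these examples.
    assert testdb.mx_acquisition is not None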