def _copy_collection_by_id(collection_id, staging):
    """Copy one Collection document between the staging and production DBs.

    Loads the collection from the database selected by *staging*, then saves
    a copy of it into the opposite database.

    :param collection_id: id of the Collection to copy.
    :param staging: True to copy FROM staging TO production, False for the
        reverse direction.
    :returns: the DBRef of the newly saved copy.
    """
    setup(blconfig.get_database(staging=staging))

    src = Collection(collection_id)
    src.reload()
    # Rebind the model layer to the opposite database before saving.
    setup(blconfig.get_database(staging=not staging))
    newc = Collection()
    # Copy the attribute dict instead of aliasing it: assigning
    # src.__dict__ directly would make both objects share one dict, so
    # clearing newc._id below would also wipe _id on the source object.
    newc.__dict__ = dict(src.__dict__)
    newc._id = None  # force save() to insert a new document
    newc.save()
    return newc.dbref
def copy_all_collections_from_epn_to_staging(epn):
    """Copy every collection belonging to *epn* into the staging database.

    :param epn: the experiment (EPN) whose collections are copied.
    :returns: dict mapping each original collection id (as a string) to the
        id returned for its staging copy.
    """
    setup(blconfig.get_database(staging=False))
    from processingDB import VisitStats
    stats = VisitStats()
    all_coll, _all_proc = stats.get_epn_info(epn)
    # One staging copy per collection; remember old-id -> new-id.
    return {str(coll['_id']): copy_to_staging_by_id(coll['_id'])
            for coll in all_coll}
Exemplo n.º 3
0
    def process(self, **kwargs):
        """Write the template file for a collection.

        Resolves the target Collection either directly from
        ``kwargs['collection_id']`` or, failing that, via the Processing
        record for ``kwargs['dataset_id']``.  Sample metadata attributes
        that are absent on the collection fall back to defaults
        (None for instrument readings, the literal 'user input' for
        sample descriptions/sizes) before being passed to
        ``write_template_file``.
        """
        if kwargs['collection_id']:
            coll = Collection(kwargs['collection_id'])
        else:
            setup(blconfig.get_database())
            proc = Processing(kwargs['dataset_id'])
            coll = Collection(str(proc.collection_id.id))

        # getattr with a default is exactly equivalent to the original
        # seven try/except AttributeError blocks, minus the repetition.
        cryo_temp = getattr(coll, 'cryo_temperature', None)
        crystal_in_monochromator = getattr(coll, 'crystal_in_monochromator', None)
        sample_desc = getattr(coll, 'sample_desc', 'user input')
        sample_colour = getattr(coll, 'sample_colour', 'user input')
        sample_size_min = getattr(coll, 'sample_size_min', 'user input')
        sample_size_mid = getattr(coll, 'sample_size_mid', 'user input')
        sample_size_max = getattr(coll, 'sample_size_max', 'user input')

        write_template_file(self.project_dir, coll.beamline, coll.detector_type,
                            coll.energy, cryo_temp, crystal_in_monochromator,
                            sample_desc, sample_colour, sample_size_min,
                            sample_size_mid, sample_size_max)
Exemplo n.º 4
0
import logbook

from itertools import chain

from modules.base import ReturnOptions

# Application-wide logger: everything streams to stdout with local timestamps.
logger = logbook.Logger('MAIN')
logbook.StreamHandler(sys.stdout).push_application()
logbook.set_datetime_format("local")

# load modules
import pipelines
from beamline import variables as blconfig

from processing.models import setup, Dataset, Screening, Collection
# Bind the model layer to the staging or production database up front.
# NOTE(review): IS_STAGING is presumably defined earlier in this file -- confirm.
setup(blconfig.get_database(staging=IS_STAGING))

# Command line: exactly one of --collection_id / --dataset_id is required.
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--collection_id')
group.add_argument('--dataset_id')
parser.add_argument('--data_dir')
parser.add_argument('--output_dir')
parser.add_argument('--processing_dir',
                    help="Location of previously run processing directory")
parser.add_argument('--dry_run',
                    help='Show the pipeline without actually running it')
# add parser arguments from all classes inside the pipelines
for clz in set([obj.__class__
                for obj in chain(*pipelines.pipelines.values())]):
def _copy_processing_by_obj(processing, staging):
    """Save a copy of *processing* into the database selected by *staging*.

    :param processing: the source Processing object to duplicate.
    :param staging: passed through to ``blconfig.get_database`` to pick the
        destination database.
    :returns: the DBRef of the newly saved copy (matches the behaviour of
        ``_copy_collection_by_id``).
    """
    setup(blconfig.get_database(staging=staging))
    newp = Processing()
    # Bug fix: the original assigned the *object itself* to __dict__
    # (newp.__dict__ = processing).  Copy the source's attribute dict
    # instead -- and copy it, so clearing _id below cannot affect the
    # source object.
    newp.__dict__ = dict(processing.__dict__)
    newp._id = None  # force save() to insert a new document
    newp.save()
    return newp.dbref
Exemplo n.º 6
0
def get_retrigger_dir(dataset_id):
    """Return the auto-processing retrigger directory for a dataset.

    Looks up the dataset's parent collection to find the PI, then builds
    the path under the current EPN's home area.
    """
    setup(blconfig.get_database())
    dset = Dataset(dataset_id)
    owner = Collection(dset.collection_id.id).PI
    return "/data/%(EPN)s/home/%(PI)s/auto" % {"EPN": blconfig.EPN,
                                               "PI": owner}