Example #1
def main(path):
    datarecord_batch = []
    save_interval = 1000
    # read in the two columns of the meta sheet to a dict that defines a DataSet
    # TODO: Need a transaction, in case loading fails!
    logger.info('read metadata...')

#    book = xlrd.open_workbook(path)  # open the xls file; this call has many optional arguments (logging, etc.); see the xlrd docs
#    sheetname = 'Meta'
#    worksheet = book.sheet_by_name(sheetname)  # sheets can be pulled by name

    dataset = None
    try:
        metadata = read_metadata(path)
        try:
            extant_dataset = DataSet.objects.get(facility_id=metadata['facility_id'])
            logger.info(str(('extant_dataset', extant_dataset)))
            if extant_dataset:
                logger.warn(str(('deleting extant dataset for facility id: ', metadata['facility_id'])))
                extant_dataset.delete()
        except Exception as e:
            logger.info(str(('on trying to delete', e)))
#            raise e
        dataset = DataSet(**metadata)
        dataset.save()
        logger.info(str(('dataset created: ', dataset)))
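
The TODO above ("Need a transaction, in case loading fails!") can be handled with Django's transaction.atomic. Below is a minimal sketch, assuming the read_metadata and DataSet names from the excerpt are in scope; the wrapper name load_dataset_atomically is illustrative and not from the original code:

from django.db import transaction

# Hypothetical wrapper: run the whole load inside a transaction so a failure
# rolls back the partially created DataSet instead of leaving it in the database.
def load_dataset_atomically(path):
    with transaction.atomic():
        metadata = read_metadata(path)
        dataset = DataSet(**metadata)
        dataset.save()
        # ... column and data-record loading would go here; any exception
        # raised before the block exits rolls everything back
    return dataset
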
Example #2
    def build_schema(self):
        
        fields = get_detail_schema(
            DataSet(), 'dataset', lambda x: x.show_in_detail )

        fields['datapointFile'] = get_schema_fieldinformation(
            'datapoint_file','')
        fields['safVersion'] = get_schema_fieldinformation('saf_version','')
        fields['screeningFacility'] = get_schema_fieldinformation(
            'screening_facility','')

        # NOTE: 'schema' is not defined in this excerpt; the full source
        # presumably initializes it before this point.
        schema = {}
        schema['fields'] = OrderedDict(sorted(
            fields.items(), key=lambda x: x[0]))
        return schema
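
The OrderedDict(sorted(...)) idiom above only fixes the field order: the schema fields come out sorted alphabetically by key, so the generated schema is deterministic. A standalone illustration with placeholder values rather than the real field-information objects:

from collections import OrderedDict

# Placeholder field info; only the ordering behaviour is being illustrated.
fields = {
    'safVersion': 'saf_version field information',
    'datapointFile': 'datapoint_file field information',
    'screeningFacility': 'screening_facility field information',
}
ordered = OrderedDict(sorted(fields.items(), key=lambda x: x[0]))
print(list(ordered))
# ['datapointFile', 'safVersion', 'screeningFacility'] -- deterministic, key-sorted
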
Example #3
def main(path):
    datarecord_batch = []
    save_interval = 1000
    # read in the two columns of the meta sheet to a dict that defines a DataSet
    # TODO: Need a transaction, in case loading fails!
    logger.debug('read metadata...')

    dataset = None
    try:
        metadata = read_metadata(path)
        try:
            extant_dataset = DataSet.objects.get(
                facility_id=metadata['facility_id'])
            logger.debug(str(('extant_dataset', extant_dataset)))
            if extant_dataset:
                logger.warn(str(('deleting extant dataset for facility id: ', 
                                 metadata['facility_id'])))
                extant_dataset.delete()
        except Exception as e:
            logger.debug(str(('on trying to delete', e)))
        dataset = DataSet(**metadata)
        dataset.save()
        logger.debug(str(('dataset created: ', dataset)))
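
Examples #1 and #3 build every log message eagerly with str((...)), even when the log level is disabled. The standard logging module can defer the string formatting until a record is actually emitted by passing the values as arguments instead; a small, self-contained illustration (the helper name log_extant is hypothetical):

import logging

logger = logging.getLogger(__name__)

def log_extant(extant_dataset, facility_id):
    # The '%r' arguments are interpolated only if the record is actually
    # emitted, unlike str((...)), which always builds the string up front.
    logger.debug('extant_dataset: %r', extant_dataset)
    logger.warning('deleting extant dataset for facility id: %r', facility_id)
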
Example #4
    metadata = read_metadata(book.sheet_by_name('Meta'))
    try:
        extant_dataset = DataSet.objects.get(
            facility_id=metadata['facility_id'])
        if extant_dataset:
            logger.warn('deleting extant dataset for facility id: %r' %
                        metadata['facility_id'])
            extant_dataset.delete()
    except ObjectDoesNotExist:
        pass
    except Exception:
        logger.exception('on delete of extant dataset: %r' %
                         metadata['facility_id'])
        raise

    dataset = DataSet(**metadata)
    logger.info('dataset to save %s' % dataset)
    dataset.save()

    logger.debug('read data columns...')
    col_to_definitions = read_datacolumns(book)

    small_molecule_col = None
    col_to_dc_map = {}
    for i, dc_definition in enumerate(col_to_definitions):
        dc_definition['dataset'] = dataset
        if ('display_order' not in dc_definition
                or dc_definition['display_order'] is None):
            dc_definition['display_order'] = i
        datacolumn = DataColumn(**dc_definition)
        datacolumn.save()
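
The loop above falls back to the column's position when a definition carries no usable display_order. The same defaulting rule on plain dicts, with hypothetical column names:

# Hypothetical column definitions; only the defaulting rule is illustrated.
definitions = [
    {'name': 'compound', 'display_order': 2},
    {'name': 'ic50'},                          # no display_order given
    {'name': 'notes', 'display_order': None},  # explicit None
]

for i, dc_definition in enumerate(definitions):
    if dc_definition.get('display_order') is None:
        dc_definition['display_order'] = i     # fall back to column position

# 'compound' keeps 2, 'ic50' becomes 1, 'notes' becomes 2
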
Example #5
    metadata = read_metadata(book.sheet_by_name('Meta'))
    try:
        extant_dataset = DataSet.objects.get(facility_id=metadata['facility_id'])
        if extant_dataset:
            logger.warn(
                'deleting extant dataset for facility id: %r' 
                    % metadata['facility_id'] )
            extant_dataset.delete()
    except ObjectDoesNotExist:
        pass
    except Exception:
        logger.exception(
            'on delete of extant dataset: %r' % metadata['facility_id'])
        raise

    dataset = DataSet(**metadata)
    logger.info('dataset to save %s' % dataset)
    dataset.save()
    
    read_datacolumns_and_data(book, dataset)
    
    read_explicit_reagents(book, dataset)

    dataset.save()

def read_metadata(meta_sheet):

    properties = ('model_field', 'required', 'default', 'converter')
    field_definitions = {
        'Lead Screener First': 'lead_screener_firstname',
        'Lead Screener Last': 'lead_screener_lastname',