def _convert_request_to_dict(self, request):
    '''
    Utility method: transfer all values from GET, then POST to a dict.
    Note: uses 'getlist' to retrieve all values:
    - if a value is single valued, then unwrap from the list
    - downstream methods expecting a list value must deal with non-list
      single values
    Note: does not handle request.FILES
    '''
    DEBUG = logger.isEnabledFor(logging.DEBUG)
    params = {}
    if request is None:
        return params
    # GET params: Jquery Ajax sends array list params with a "[]" suffix;
    # strip the suffix and unwrap single-valued lists
    for key in request.GET.keys():
        values = request.GET.getlist(key)
        if DEBUG:
            logger.info('get key: %r, val: %r', key, values)
        if '[]' in key and key[-2:] == '[]':
            key = key[:-2]
        params[key] = values[0] if len(values) == 1 else values
    # POST params: same treatment; "[]" removed wherever it occurs
    for key in request.POST.keys():
        values = request.POST.getlist(key)
        if DEBUG:
            logger.info('post key: %r, val: %r', key, values)
        key = key.replace('[]','')
        params[key] = values[0] if len(values) == 1 else values
    # Known list params must stay lists even when single valued; cover both
    # the plain and the "[]"-suffixed spellings
    known_list_values = [
        'includes','exact_fields', 'order_by', 'visibilities',
        'other_screens']
    known_list_values.extend(['%s[]'%k for k in known_list_values])
    for key in known_list_values:
        if isinstance(params.get(key,[]), basestring):
            params[key] = [params[key]]
    # Parse known boolean params for convenience
    for key in (
            HTTP_PARAM_DATA_INTERCHANGE, HTTP_PARAM_RAW_LISTS,
            HTTP_PARAM_USE_VOCAB, HTTP_PARAM_USE_TITLES):
        params[key] = parse_val(params.get(key, False), key, 'boolean')
    if DEBUG:
        logger.info('params: %r', params)
    return params
def _convert_request_to_dict(self, request):
    '''
    Transfer all values from GET, then POST to a dict
    Note: uses 'getlist' to retrieve all values:
    - if a value is single valued, then unwrap from the list
    - downstream methods expecting a list value must deal with non-list
      single values

    Returns an empty dict when request is None.
    '''
    _dict = {}
    # FIX: guard against a None request (the sibling implementation of this
    # method already handles request=None; without the guard this raised
    # AttributeError on request.GET)
    if request is None:
        return _dict
    for key in request.GET.keys():
        val = request.GET.getlist(key)
        # Jquery Ajax will post array list params with a "[]" suffix - 20151015
        if '[]' in key and key[-2:] == '[]':
            key = key[:-2]
        if len(val) == 1:
            _dict[key] = val[0]
        else:
            _dict[key] = val
    for key in request.POST.keys():
        val = request.POST.getlist(key)
        # Jquery Ajax will post array list params with a "[]" suffix - 20151015
        key = key.replace('[]','')
        if len(val) == 1:
            _dict[key] = val[0]
        else:
            _dict[key] = val
    # check for single-valued known list values
    # Note: Jquery Ajax will post array list params with a "[]" suffix - 20151015
    known_list_values = [
        'includes','exact_fields', 'order_by', 'visibilities','other_screens',
        'includes[]', 'order_by[]','exact_fields[]', 'visibilities[]',
        'other_screens[]']
    for key in known_list_values:
        val = _dict.get(key,[])
        if isinstance(val, basestring):
            _dict[key] = [val]
    # Parse known boolean params for convenience
    http_boolean_params = [
        HTTP_PARAM_DATA_INTERCHANGE,HTTP_PARAM_RAW_LISTS,
        HTTP_PARAM_USE_VOCAB, HTTP_PARAM_USE_TITLES]
    for key in http_boolean_params:
        _dict[key] = parse_val(
            _dict.get(key, False),key, 'boolean')
    return _dict
def parse_columns(columns):
    '''
    Parse the Screen Result input file Data Columns sheet into valid API
    Data Columns input.

    @param columns iterable of dicts, one per data column, each containing a
        'data_worksheet_column' key plus the column attribute values
    Raises ValidationError if a data_worksheet_column is listed twice.
    '''
    # NOTE(review): parsed_cols/errors are built but never returned from this
    # block - confirm whether a trailing "return parsed_cols, errors" belongs
    # here (possibly truncated)
    parsed_cols = OrderedDict()
    errors = {}
    for i,column in enumerate(columns):
        parsed_col = {
            'is_derived': False,
            'is_follow_up_data': False,
            'ordinal': i
        }
        logger.debug('parsing column: %r', column['data_worksheet_column'])
        if column['data_worksheet_column'] in parsed_cols:
            raise ValidationError(
                key='data_worksheet_column',
                msg='%r is listed more than once'
                    % column['data_worksheet_column'])
        parsed_cols[column['data_worksheet_column']] = parsed_col
        for key,val in column.items():
            if key == 'is_follow_up_data':
                parsed_col[key] = ( val and val.lower() == 'follow up')
            elif key == 'data_type':
                val = default_converter(val)
                # handle validation errors in the api
                if val not in DATA_TYPE_VALUES:
                    # BUGFIX: use a separate name for the error label; the
                    # original reassigned "key", so parsed_col stored the
                    # value under e.g. 'A:data_type' instead of 'data_type'
                    error_key = '%s:%s' % (
                        column['data_worksheet_column'],'data_type')
                    errors[error_key] = (
                        'val: %r must be one of %r' % (val,DATA_TYPE_VALUES))
                parsed_col[key] = val
            elif key == 'assay_readout_type':
                parsed_col[key] = default_converter(val)
            else:
                if key == 'how_derived':
                    # BUGFIX: compare strings with !=, not identity
                    # ("is not ''" relies on interning and is unreliable)
                    parsed_col['is_derived'] = (
                        val is not None and val.strip() != '' )
                parsed_col[key] = val
        if parsed_col.get('decimal_places') is not None:
            try:
                # BUGFIX: label the error with 'decimal_places'; it was
                # mislabeled 'data_type'
                key = '%s:%s' % (
                    column['data_worksheet_column'],'decimal_places')
                column['decimal_places'] = parse_val(
                    column['decimal_places'],key,'integer')
            except ValidationError as e:
                # "except ... as" is valid Python 2.6+ and Python 3
                errors.update(e.errors)
        logger.debug('parsed_col: %r', parsed_col)
def parse_columns(columns_sheet):
    '''
    Parse the Screen Result input file Data Columns worksheet into valid API
    Data Columns input.

    @param columns_sheet workbook sheet containing the Data Columns
    '''
    logger.info('parsing columns sheet: %r', columns_sheet.name)
    columns = data_column_generator(sheet_cols(columns_sheet))
    # NOTE(review): parsed_cols/errors are built but never returned from this
    # block - confirm whether a trailing return belongs here
    parsed_cols = OrderedDict()
    errors = {}
    for i,column in enumerate(columns):
        parsed_col = {
            'is_derived': False,
            'is_follow_up_data': False,
            'ordinal': i
        }
        logger.debug('parsing column: %r', column['data_worksheet_column'])
        parsed_cols[column['data_worksheet_column']] = parsed_col
        for key,val in column.items():
            if key == 'is_follow_up_data':
                parsed_col[key] = ( val and val.lower() == 'follow up')
            elif key == 'data_type':
                val = default_converter(val)
                # handle validation errors in the api
                if val not in DATA_TYPES:
                    # BUGFIX: use a separate name for the error label; the
                    # original reassigned "key", so parsed_col stored the
                    # value under e.g. 'A:data_type' instead of 'data_type'
                    error_key = '%s:%s' % (
                        column['data_worksheet_column'],'data_type')
                    errors[error_key] = (
                        'val: %r must be one of %r' % (val,DATA_TYPES))
                parsed_col[key] = val
            elif key == 'assay_readout_type':
                parsed_col[key] = default_converter(val)
            else:
                if key == 'how_derived':
                    # BUGFIX: compare strings with !=, not identity
                    # ("is not ''" relies on interning and is unreliable)
                    parsed_col['is_derived'] = (
                        val is not None and val.strip() != '' )
                parsed_col[key] = val
        # BUGFIX: test "is not None" so a decimal_places value of 0 is still
        # parsed (the truthiness test skipped it)
        if parsed_col.get('decimal_places') is not None:
            try:
                # BUGFIX: label the error with 'decimal_places'; it was
                # mislabeled 'data_type'
                key = '%s:%s' % (
                    column['data_worksheet_column'],'decimal_places')
                column['decimal_places'] = parse_val(
                    column['decimal_places'],key,'integer')
            except ValidationError as e:
                errors.update(e.errors)
        logger.debug('parsed_col: %r', parsed_col)
def top_level(self, request, api_name=None):
    '''
    A view that returns a serialized list of all resources registers
    to the API.
    '''
    fullschema = parse_val(
        request.GET.get('fullschema', False),
        'fullschema', 'boolean')
    if api_name is None:
        api_name = self.api_name
    available_resources = {}
    for name, resource in self._registry.items():
        url_kwargs = {
            'api_name': api_name,
            'resource_name': name,
        }
        # Either inline the full schema, or just link to the schema endpoint
        if fullschema:
            schema = resource.build_schema()
        else:
            schema = self._build_reverse_url('api_get_schema',
                kwargs=url_kwargs)
        available_resources[name] = {
            'list_endpoint': self._build_reverse_url('api_dispatch_list',
                kwargs=url_kwargs),
            'schema': schema,
        }
    serializer = LimsSerializer()
    content_type = serializer.get_accept_content_type(request)
    serialized = serializer.serialize(available_resources, content_type)
    return HttpResponse(
        content=serialized, content_type=content_type)
def top_level(self, request, api_name=None):
    '''
    A view that returns a serialized list of all resources registers
    to the API.
    '''
    fullschema = parse_val(
        request.GET.get('fullschema', False), 'fullschema', 'boolean')
    api_name = self.api_name if api_name is None else api_name

    def build_url(view_name, resource_name):
        # helper: reverse a URL for the named view/resource pair
        return self._build_reverse_url(view_name, kwargs={
            'api_name': api_name,
            'resource_name': resource_name,
        })

    available_resources = {}
    for name, resource in self._registry.items():
        schema = (
            resource.build_schema() if fullschema
            else build_url('api_get_schema', name))
        available_resources[name] = {
            'list_endpoint': build_url('api_dispatch_list', name),
            'schema': schema,
        }
    serializer = LimsSerializer()
    content_type = serializer.get_accept_content_type(request)
    return HttpResponse(
        content=serializer.serialize(available_resources, content_type),
        content_type=content_type)
def parse_result_row(i,parsed_columns,result_row):
    '''
    Parse the Screen Result input file format into a valid API input format:
    - Convert plate_number and well_name into a well_id
    - Convert the assay_well_control_type input: use the
    ASSAY_WELL_CONTROL_TYPES to map api schema assaywell.control_type
    - Convert the exclude column specifiers into known column letters:
      "all" is converted to a list of all column letters
    - Parse value columns according to the data_type specified:
        - Create default values for positive columns
        - (TODO: validation rules can be moved to API)
        - Verify that PARTITION_POSITIVE_MAPPING values are used
        - Verify that CONFIRMED_POSITIVE_MAPPING values are used
        - Verify that integer values are integers
        - Verify that decimal values can be parsed as float
    '''
    logger.debug(
        'parse result row: %d, %r: %r', i, parsed_columns.keys(), result_row)

    meta_columns = RESULT_VALUE_FIELD_MAP.values()
    parsed_row = {}
    excluded_cols = []

    # Build the well_id from plate_number and well_name; both errors are
    # collected first so the user sees all well_id problems together
    well_id_errors = []
    meta_key = 'plate_number'
    val = result_row[meta_key]
    logger.debug('plate value to parse: %r', val)
    plate_number = parse_val(val, meta_key, 'integer')
    if plate_number is None:
        well_id_errors.append('%s is required' % meta_key)
    meta_key = 'well_name'
    val = result_row[meta_key]
    if not val:
        well_id_errors.append('%s is required' % meta_key)
    elif WELL_NAME_PATTERN.match(val):
        wellname = val
    else:
        well_id_errors.append(
            'Well_name val %r does not follow the pattern: %r'
            % (val, WELL_NAME_PATTERN.pattern))
    if well_id_errors:
        raise ParseError(errors={ 'row: %d'%i: well_id_errors })
    # well_id format: 5-digit zero-padded plate number, colon, well name
    parsed_row['well_id'] = \
        '%s:%s' % (str(plate_number).zfill(5), wellname)

    # Map the (case-insensitive) control type input to the api vocabulary
    meta_key = 'assay_well_control_type'
    val = result_row.get(meta_key)
    parsed_row[meta_key] = None
    if val is not None:
        if val.lower() in ASSAY_WELL_CONTROL_TYPES:
            parsed_row[meta_key] = \
                ASSAY_WELL_CONTROL_TYPES[val.lower()]
        else:
            msg = ('%s: val %r is not one of the choices: %r'
                % (meta_key, val, ASSAY_WELL_CONTROL_TYPES))
            logger.error(msg)
            raise ValidationError(key=parsed_row['well_id'], msg=msg)

    # Expand the exclude specifier: "all" means every parsed column;
    # otherwise a comma-separated list of column letters
    meta_key = 'exclude'
    val = result_row.get(meta_key)
    if val is not None:
        if val.lower() == 'all':
            excluded_cols = parsed_columns.keys()
        else:
            excluded_cols = [x.strip().upper() for x in val.split(',')]
            unknown_excluded_cols = (
                set(excluded_cols) - set(parsed_columns.keys()))
            if unknown_excluded_cols:
                raise ValidationError(
                    key = parsed_row['well_id'],
                    msg = 'unknown excluded cols: %r' % unknown_excluded_cols
                    )
        parsed_row[meta_key] = excluded_cols

    # Parse each result value column according to its declared data_type
    for colname, raw_val in result_row.items():
        logger.debug('colname: %r, raw_val: %r', colname, raw_val)
        if colname in meta_columns:
            continue
        if colname not in parsed_columns:
            # NOTE: this is no longer an error, as the result value sheet may
            # contain extra columns (selected by user on output)
            logger.debug(
                'result value column %r is not in recognized columns: %r',
                colname, parsed_columns.keys())
            parsed_row[colname] = raw_val
            continue
        column = parsed_columns[colname]
        if raw_val is None:
            # 20180315 - verified with DJW, default values for
            # positive indicator columns
            if column['data_type'] == DATA_TYPE.BOOLEAN_POSITIVE:
                raw_val = False
            elif column['data_type'] == DATA_TYPE.PARTITIONED_POSITIVE:
                raw_val = 'NP'
            elif column['data_type'] == DATA_TYPE.CONFIRMED_POSITIVE:
                raw_val = 'NT'
            else:
                # non-positive columns: a missing value is simply skipped
                continue
        key = '%s-%s' % (parsed_row['well_id'],colname)
        parsed_row[colname] = raw_val
        if column['data_type'] in DATA_TYPE.numeric_types:
            if column['decimal_places'] > 0:
                # parse, to validate only; use decimal for final parsing
                parse_val(raw_val, key, 'float')
            else:
                parsed_row[colname] = parse_val(raw_val, key, 'integer')
        elif column['data_type'] == DATA_TYPE.PARTITIONED_POSITIVE:
            val = raw_val.upper()
            if val not in PARTITION_POSITIVE_MAPPING:
                raise ValidationError(
                    key=key,
                    msg='val: %r must be one of %r'
                        % (raw_val, PARTITION_POSITIVE_MAPPING.keys()))
            parsed_row[colname] = val
        elif column['data_type'] == DATA_TYPE.CONFIRMED_POSITIVE:
            val = raw_val.upper()
            if val not in CONFIRMED_POSITIVE_MAPPING:
                raise ValidationError(
                    key=key,
                    msg='val: %r must be one of %r'
                        % (raw_val, CONFIRMED_POSITIVE_MAPPING.keys()))
            parsed_row[colname] = val
        elif column['data_type'] == DATA_TYPE.BOOLEAN_POSITIVE:
            val = parse_val(raw_val,key,'boolean')
            parsed_row[colname] = val
    logger.debug('parsed_row: %r', parsed_row)
    return parsed_row
def parse_result_row(i,parsed_columns,result_row):
    '''
    Parse a Screen Result input file row into a valid API input row:
    - Convert plate_number and well_name into a well_id
    - Map assay_well_control_type through ASSAY_WELL_CONTROL_TYPES
    - Expand the exclude specifiers ("all" means every parsed column)
    - Parse value columns according to the data_type specified

    @param i row index, used as the error key
    @param parsed_columns dict of recognized data columns, by column letter
    @param result_row dict of raw input values for one well
    Raises ParseError / ValidationError on invalid input.
    '''
    logger.debug('parse result row: %r', result_row)

    meta_columns = RESULT_VALUE_FIELD_MAP.values()
    parsed_row = {}
    excluded_cols = []

    meta_key = 'plate_number'
    val = result_row[meta_key]
    logger.debug('plate value to parse: %r', val)
    plate_number = parse_val(val, meta_key, 'integer')

    meta_key = 'well_name'
    val = result_row[meta_key]
    # BUGFIX: a missing well_name previously crashed the regex match with a
    # TypeError; report it as a ParseError instead
    if not val:
        raise ParseError(
            key=i, msg=('%s is required' % meta_key))
    if WELLNAME_MATCHER.match(val):
        wellname = val
    else:
        raise ParseError(
            key=i,
            msg=('well_name val %r does not follow the pattern: %r'
            % (val, WELLNAME_MATCHER.pattern)))
    # well_id format: 5-digit zero-padded plate number, colon, well name
    parsed_row['well_id'] = \
        '%s:%s' % (str(plate_number).zfill(5), wellname)

    meta_key = 'assay_well_control_type'
    val = result_row.get(meta_key, None)
    parsed_row[meta_key] = None
    if val:
        if val.lower() in ASSAY_WELL_CONTROL_TYPES:
            parsed_row[meta_key] = \
                ASSAY_WELL_CONTROL_TYPES[val.lower()]
        else:
            msg = ('%s: val %r is not one of the choices: %r'
                % (meta_key, val, ASSAY_WELL_CONTROL_TYPES))
            logger.error(msg)
            raise ValidationError(key=parsed_row['well_id'], msg=msg)

    meta_key = 'exclude'
    val = result_row.get(meta_key, None)
    if val:
        if val.lower() == 'all':
            excluded_cols = parsed_columns.keys()
        else:
            excluded_cols = [x.strip().upper() for x in val.split(',')]
            unknown_excluded_cols = (
                set(excluded_cols) - set(parsed_columns.keys()))
            if unknown_excluded_cols:
                raise ValidationError(
                    key = parsed_row['well_id'],
                    msg = 'unknown excluded cols: %r' % unknown_excluded_cols
                    )
        parsed_row[meta_key] = excluded_cols

    for colname, raw_val in result_row.items():
        if colname in meta_columns:
            continue
        if colname not in parsed_columns:
            # NOTE: this is no longer an error, as the result value sheet may
            # contain extra columns (selected by user on output)
            logger.debug(
                'result value column %r is not in recognized columns: %r',
                colname, parsed_columns.keys())
            parsed_row[colname] = raw_val
            continue
        column = parsed_columns[colname]
        # default values for missing positive indicator columns
        if ( column['data_type'] == 'partition_positive_indicator'
            and not raw_val):
            raw_val = 'NP'
        if ( column['data_type'] == 'confirmed_positive_indicator'
            and not raw_val):
            raw_val = 'NT'
        if raw_val is None:
            continue
        key = '%s-%s' % (parsed_row['well_id'],colname)
        parsed_row[colname] = raw_val
        if column['data_type'] == 'numeric':
            if column['decimal_places'] > 0:
                # parse, to validate
                parse_val(raw_val, key, 'float')
            else:
                parsed_row[colname] = parse_val(raw_val, key, 'integer')
        elif column['data_type'] == 'partition_positive_indicator':
            val = raw_val.upper()
            if val not in PARTITION_POSITIVE_MAPPING:
                raise ValidationError(
                    key=key,
                    msg='val: %r must be one of %r'
                        % (raw_val, PARTITION_POSITIVE_MAPPING.keys()))
            parsed_row[colname] = val
        elif column['data_type'] == 'confirmed_positive_indicator':
            val = raw_val.upper()
            if val not in CONFIRMED_POSITIVE_MAPPING:
                raise ValidationError(
                    key=key,
                    msg='val: %r must be one of %r'
                        % (raw_val, CONFIRMED_POSITIVE_MAPPING.keys()))
            parsed_row[colname] = val
        elif column['data_type'] == 'boolean_positive_indicator':
            val = parse_val(raw_val,key,'boolean')
            parsed_row[colname] = val
    return parsed_row
def _convert_request_to_dict(self, request):
    '''
    Utility method: transfer all values from GET, then POST to a dict.
    Note: uses 'getlist' to retrieve all values:
    - if a value is single valued, then unwrap from the list
    - downstream methods expecting a list value must deal with non-list
      single values
    Note: does not handle request.FILES
    POST values overwrite GET values for the same key.
    '''
    DEBUG = False or logger.isEnabledFor(logging.DEBUG)

    _dict = {}
    if request is None:
        return _dict
    for key in request.GET.keys():
        val = request.GET.getlist(key)
        if DEBUG:
            logger.info('get key: %r, val: %r', key, val)
        # Jquery Ajax will send array list params with a "[]" suffix
        if '[]' in key and key[-2:] == '[]':
            key = key[:-2]
        # unwrap single-valued lists
        if len(val) == 1:
            _dict[key] = val[0]
        else:
            _dict[key] = val
    for key in request.POST.keys():
        val = request.POST.getlist(key)
        if DEBUG:
            logger.info('post key: %r, val: %r', key, val)
        # Jquery Ajax will post array list params with a "[]" suffix
        key = key.replace('[]', '')
        # unwrap single-valued lists
        if len(val) == 1:
            _dict[key] = val[0]
        else:
            _dict[key] = val

    # check for single-valued known list values
    # Note: Jquery Ajax will post array list params with a "[]" suffix
    known_list_values = [
        'includes', 'exact_fields', 'order_by', 'visibilities',
        'other_screens'
    ]
    # extend with alternate possible keys (depending on posted format)
    known_list_values.extend(['%s[]' % k for k in known_list_values])
    for key in known_list_values:
        val = _dict.get(key, [])
        # re-wrap single string values into a one-element list
        if isinstance(val, basestring):
            _dict[key] = [val]

    # Parse known boolean params for convenience
    http_boolean_params = [
        HTTP_PARAM_DATA_INTERCHANGE, HTTP_PARAM_RAW_LISTS,
        HTTP_PARAM_USE_VOCAB, HTTP_PARAM_USE_TITLES
    ]
    for key in http_boolean_params:
        _dict[key] = parse_val(_dict.get(key, False), key, 'boolean')

    if DEBUG:
        logger.info('params: %r', _dict)

    return _dict