async def on_c_got_step_results(self, sid, results):
    """Persist cryogenics results reported by a client for a finished step.

    Args:
        sid: Socket.io session id of the reporting client (unused here).
        results: Dict containing at least 'step_id', plus the cryo data
            consumed by DataPoint.save_cryo_data().
    """
    with db.connection_context():
        try:
            # Bug fix: peewee's .get() raises DoesNotExist when no row
            # matches — it never returns None — so the original
            # `if datapoint is not None` guard was dead code and a missing
            # datapoint crashed the handler. Guard with try/except instead.
            datapoint = DataPoint.select().where(
                DataPoint.step == results['step_id']).order_by(
                    DataPoint.created).get()
        except DoesNotExist:
            # The datapoint should have been generated when the step was
            # sent; if it is missing there is nothing to attach the data to.
            print('No datapoint found for step', results['step_id'])
            return

        # Save the cryogenics results onto the datapoint
        datapoint.save_cryo_data(results)
def get_tags(
        tag: str,
        start: str = None,
        end: str = None,
        unit: str = "day",
):
    """Return daily metrics for datapoints whose value exactly matches *tag*.

    Args:
        tag: Tag value to match exactly (DataPoint.value).
        start: Optional start date (any string dateparser understands).
            Defaults to 1000 days before `end`.
        end: Optional end date string. Defaults to now.
        unit: One of 'week', 'day', 'month', 'year'. Validated but not
            otherwise used by this function.

    Returns:
        Dict with 'status', the resolved 'date' range, and 'results'
        (flat list of per-day metric dicts), or an error dict.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        return {
            'status': 'error',
            'msg': "unit should be :week, day, month, year"
        }

    if start is not None:
        start = dateparser.parse(str(start))

    # Bug fix: the original assigned `end = datetime.now()` unconditionally
    # *before* checking `if end is not None`, which discarded the caller's
    # end date and re-parsed str(now). Only default when no end was given.
    if end is not None:
        end = dateparser.parse(str(end))
    else:
        end = datetime.now()

    # Default window: 1000 days back from the end date.
    if start is None:
        start = end - relativedelta(days=1000)

    # Guard kept because dateparser.parse returns None on unparseable input.
    if end is not None and start is not None:
        if not validate_daterange(start, end):
            return {
                'status': 'error',
                'msg':
                "Invalid daterange, start date must be earlier than end date"
            }

    daily_metrics = []
    datapoints = DataPoint.select().where((DataPoint.key == 'tag')
                                          & (DataPoint.value == tag))
    if datapoints.exists():
        for datapoint in datapoints:
            for point in datapoint.metrics:
                m = point
                # The tag is redundant in each metric row; annotate with the
                # owning region instead.
                m.pop('tag')
                m['region'] = datapoint.region.region_id
                # Metric timestamps look like 'YYYY-MM-DD ...'; keep only
                # the date part for range filtering.
                time = datetime.strptime(m['time'].split(' ')[0], "%Y-%m-%d")
                if start <= time <= end:
                    daily_metrics.append(m)

    return {
        'status': 'ok',
        'date': {
            'start': start.strftime('%Y-%m-%d'),
            'end': end.strftime('%Y-%m-%d')
        },
        'results': daily_metrics
    }
def suggestion(search: str, ratio=0.5, top=20):
    """Suggest up to *top* tag values within levenshtein distance of *search*.

    The allowed edit distance scales with the search length:
    int(len(search) * ratio).

    Args:
        search: User-supplied search string.
        ratio: Fraction of the search length allowed as edit distance.
        top: Maximum number of suggestions returned.

    Returns:
        Dict with a 'tags' list, closest matches first.
    """
    edit = int(len(search) * ratio)
    # Security fix: the original str.format()-ed `search` directly into the
    # SQL text — an injection vector for user input. Bind it as a query
    # parameter instead ('%s' placeholder matches the postgres driver that
    # provides levenshtein()).
    exp = NodeList([
        SQL("levenshtein("),
        DataPoint.value,
        SQL(", %s) <= %s", (search, edit)),
        SQL(" order by levenshtein("),
        DataPoint.value,
        SQL(", %s)", (search,))
    ], glue='')
    datapoints = DataPoint.select().where(exp)
    tags = []
    if datapoints.exists():
        # Results are already ordered by distance; take the closest `top`.
        for datapoint in datapoints[:top]:
            tags.append(datapoint.value)
    return {'tags': tags}
async def get_next_step(self):
    """Fetch the oldest unfinished experiment step as a JSON-safe dict.

    Ensures the step has an associated DataPoint (generating one if
    needed) before returning.

    Returns:
        Dict representation of the step with
        'experiment_configuration_id' flattened in and non-serializable
        fields removed, or None when no unfinished step exists.
    """
    with db.connection_context():
        try:
            # Oldest (lowest id) step that is not yet done.
            step = ExperimentStep.select().where(
                ExperimentStep.step_done == False).order_by(
                    ExperimentStep.id).first()

            # .first() returns None when nothing matches; unify both
            # "no step" paths through the except clause below.
            if step is None:
                raise DoesNotExist('Step does not exist')

            # Bug fix: the original filtered on `ExperimentStep == step`,
            # comparing the model *class* to an instance — that never
            # expresses the intended column filter. Filter on the
            # DataPoint.step foreign key instead.
            if DataPoint.select().where(
                    DataPoint.step == step).count() < 1:
                step.generate_datapoint()

            # Convert the model to a plain dict for the client.
            step_d = model_to_dict(step)

            # Flatten the nested configuration to just its id
            # (different from the step id).
            step_d['experiment_configuration_id'] = step_d[
                'experiment_configuration']['id']

            # Remove fields the client does not need; 'created' is a
            # datetime and not directly JSON-serializable.
            del step_d['created']
            del step_d['experiment_configuration']

            return step_d
        except DoesNotExist:
            # Running out of steps is expected — just stop measuring.
            print('No more steps ready')
            return None
def get_tags(tag: str,
             start: str = None,
             end: str = None,
             unit: str = "day",
             ratio: float = 1,
             top: int = 5):
    """Fuzzy-match tags by levenshtein distance and return their metrics.

    Unlike the exact-match variant, this matches up to `top` tags whose
    value is within int(len(tag) * ratio) edits of *tag*, and groups the
    metrics per matched tag.

    Args:
        tag: Search string to fuzzy-match against DataPoint.value.
        start: Optional start date string; defaults to 1000 days before end.
        end: Optional end date string; defaults to now.
        unit: One of 'week', 'day', 'month', 'year'. Validated only.
        ratio: Fraction of len(tag) allowed as edit distance.
        top: Maximum number of matched tags returned.

    Returns:
        Dict with 'status', resolved 'date' range, and 'results' — a list
        of {'tag': ..., 'data': [metrics]} entries — or an error dict.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        return {
            'status': 'error',
            'msg': "unit should be :week, day, month, year"
        }

    if start is not None:
        start = dateparser.parse(str(start))

    # Bug fix: only default `end` to now when the caller did not supply
    # one; the original overwrote the caller's value unconditionally.
    if end is not None:
        end = dateparser.parse(str(end))
    else:
        end = datetime.now()

    # Default window: 1000 days back from the end date.
    if start is None:
        start = end - relativedelta(days=1000)

    # Guard kept because dateparser.parse returns None on unparseable input.
    if end is not None and start is not None:
        if not validate_daterange(start, end):
            return {
                'status': 'error',
                'msg':
                "Invalid daterange, start date must be earlier than end date"
            }

    daily_metrics = []
    edit = int(len(tag) * ratio)
    # Security fix: bind `tag` as a query parameter instead of
    # str.format()-ing it into raw SQL (injection vector for user input;
    # '%s' placeholder matches the postgres driver providing levenshtein()).
    exp = NodeList([
        SQL("levenshtein("),
        DataPoint.value,
        SQL(", %s) <= %s", (tag, edit)),
        SQL(" order by levenshtein("),
        DataPoint.value,
        SQL(", %s)", (tag,))
    ], glue='')
    datapoints = DataPoint.select().where(exp)
    if datapoints.exists():
        # Ordered by distance; keep the closest `top` tags.
        for datapoint in datapoints[:top]:
            datapoint_metrics = []
            for point in datapoint.metrics:
                m = point
                m.pop('tag')
                m['region'] = datapoint.region.region_id
                # Keep only the date part of the timestamp for filtering.
                time = datetime.strptime(m['time'].split(' ')[0], "%Y-%m-%d")
                if start <= time <= end:
                    datapoint_metrics.append(m)
            daily_metrics.append({
                'tag': datapoint.value,
                'data': datapoint_metrics
            })

    return {
        'status': 'ok',
        'date': {
            'start': start.strftime('%Y-%m-%d'),
            'end': end.strftime('%Y-%m-%d')
        },
        'results': daily_metrics
    }
async def export_data(request):
    """Export a full experiment configuration — steps and all datapoints —
    as a downloadable JSON document.

    Args:
        request: aiohttp request; must carry an 'id' query parameter
            identifying the ExperimentConfiguration.

    Returns:
        web.json_response with the serialized configuration, or an
        HTML error response when the id is missing/unknown.
    """
    # The id query parameter is required.
    if 'id' not in request.query:
        return web.Response(text='Could not find the requested id',
                            content_type='text/html')

    config_id = request.query['id']

    # Open a connection to the database for the whole export.
    with db.connection_context():
        # Grab the configuration first
        ecl = ExperimentConfiguration.select().where(
            ExperimentConfiguration.id == config_id).dicts()

        if len(ecl) > 0:
            # There should only be one row when querying by id.
            ec = ecl[0]

            # Datetimes are not JSON-serializable; use ISO strings.
            ec['created'] = ec['created'].isoformat()

            # Progress counters: completed steps vs. all steps.
            ec['n_points_taken'] = ExperimentStep.select() \
                .where(ExperimentStep.experiment_configuration == ec['id']) \
                .where(ExperimentStep.step_done == True) \
                .count()
            ec['n_points_total'] = ExperimentStep.select() \
                .where(ExperimentStep.experiment_configuration == ec['id']) \
                .count()

            # Steps (and nested datapoints) are appended below.
            ec['steps'] = []

            # Walk every step in the experiment.
            for step in ExperimentStep.select().where(
                    ExperimentStep.experiment_configuration ==
                    ec['id']).dicts():
                step['created'] = step['created'].isoformat()
                step['datapoints'] = []

                # Collect every datapoint recorded for this step.
                for dp in DataPoint.select().where(
                        DataPoint.step == step['id']):
                    datapoint_dict = {
                        'id': dp.id,
                        'created': dp.created.isoformat(),
                        'magnetism_datapoints': [],
                        'temperature_datapoints': [],
                        'pressure_datapoints': []
                    }

                    # Magnetism: the measurements hang off a
                    # MagnetismDataPoint linked to this datapoint.
                    for mdp in MagnetismDataPoint.select().where(
                            MagnetismDataPoint.datapoint == dp):
                        mdps = MagnetismMeasurement.select().where(
                            MagnetismMeasurement.magnetism_data_point == mdp)
                        for magnetism_datapoint in list(mdps.dicts()):
                            datapoint_dict['magnetism_datapoints'].append(
                                magnetism_datapoint)

                    # Cryogenics: pressure and temperature rows hang off a
                    # CryogenicsDataPoint linked to this datapoint.
                    for cdp in CryogenicsDataPoint.select().where(
                            CryogenicsDataPoint.datapoint == dp):
                        pdps = PressureDataPoint.select().where(
                            PressureDataPoint.cryo_data_point == cdp)
                        tdps = TemperatureDataPoint.select().where(
                            TemperatureDataPoint.cryo_data_point == cdp)
                        for pressure_datapoint in list(pdps.dicts()):
                            datapoint_dict['pressure_datapoints'].append(
                                pressure_datapoint)
                        for temperature_datapoint in list(tdps.dicts()):
                            # Convert date format
                            temperature_datapoint['created'] = \
                                temperature_datapoint['created'].isoformat()
                            datapoint_dict['temperature_datapoints'].append(
                                temperature_datapoint)

                    step['datapoints'].append(datapoint_dict)

                ec['steps'].append(step)

            # Bug fix: the original header was f'Attachment' — an f-string
            # with no placeholders. Use a proper RFC 6266 disposition with
            # a filename so browsers save a sensibly named file.
            return web.json_response(
                headers={
                    'Content-Disposition':
                    f'attachment; filename="experiment_{config_id}.json"'
                },
                body=json.dumps(ec))
        else:
            return web.Response(text='Attempted to export ' + str(config_id) +
                                ' but no such config found',
                                content_type='text/html')