def get_ttl(self, key):
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                SELECT `expired` FROM `storage`
                WHERE `key`=? AND `expired`>? LIMIT 1;"""),
            (key, get_timestamp()))
        result = cursor.fetchone()
        if not result:
            return -2
        ttl = int(result[0]) - get_timestamp()
        return int(ttl)
def purge_expired(self):
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                DELETE FROM `storage` WHERE `expired`<=?;"""),
            (get_timestamp(), ))
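# The storage methods above (and several snippets below) call a get_timestamp()
# helper that is not defined in this file. A minimal sketch, assuming it returns
# the current Unix time as an integer number of seconds, which is consistent with
# the `expired` comparisons and the `get_timestamp() + expires` arithmetic:
import time

def get_timestamp():
    """Current Unix time in whole seconds (assumed contract, for illustration)."""
    return int(time.time())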
def publish(self, body, serializer=None, exchange=None, routing_key=None,
            mandatory=False, immediate=False, **properties):
    """Publish a Task formatted message

    :param body: The body of the task message
    :param serializer: Serializer for the body (currently unused here)
    :param exchange: Exchange to use, None if default
    :param routing_key: Routing key for the message
    :param mandatory: AMQP mandatory flag; return the message if it cannot be routed
    :param immediate: AMQP immediate flag; deliver only if a consumer is ready
    :param properties: Headers of the message
    """
    # Add the current time.
    body['time'] = get_timestamp()
    # Encode the message.
    content_type, content_encoding, content = serialize.encode(body)
    properties['content_type'] = content_type
    properties['content_encoding'] = content_encoding
    # Send the Task message.
    return self.basic_publish(body=content,
                              exchange=exchange or '',
                              routing_key=routing_key or '',
                              mandatory=mandatory,
                              immediate=immediate,
                              **properties)
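# publish() above unpacks serialize.encode(body) into (content_type,
# content_encoding, content). The real serialize module is not shown in this
# file; the JSON-based stand-in below only sketches that assumed three-tuple
# contract and is not the project's actual encoder.
import json

def encode(body):
    """JSON stand-in for serialize.encode(): returns (content_type,
    content_encoding, payload_bytes) in the shape publish() expects."""
    payload = json.dumps(body)
    return 'application/json', 'utf-8', payload.encode('utf-8')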
def set(self, key, pyobj, expires=86400, encoding="utf-8"):
    data = self.serialize(pyobj, encoding=encoding)
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                REPLACE INTO `storage` (`key`, `value`, `expired`)
                VALUES (?, ?, ?);"""),
            (key, self._translate_blob(data), get_timestamp() + expires))
def get_all_keys_by_wildcard(self, wildcard="*"):
    wc = wildcard.replace("*", "%").replace("?", "_")
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                SELECT `key` FROM `storage`
                WHERE `expired`>? AND `key` LIKE ?;"""),
            (get_timestamp(), wc))
        result = [k for k, *_ in cursor.fetchall()]
        return result
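# get_all_keys_by_wildcard() maps shell-style wildcards onto SQL LIKE wildcards
# ('*' -> '%', '?' -> '_'). The hypothetical helper below sketches the same
# translation but also escapes literal '%' and '_' so they match themselves;
# the query would then need an ESCAPE '\' clause, which the original does not use.
def wildcard_to_like(pattern):
    """Translate a shell-style pattern to a SQL LIKE pattern (sketch)."""
    escaped = pattern.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_")
    return escaped.replace("*", "%").replace("?", "_")

# Examples:
#   wildcard_to_like("session:*")  -> "session:%"
#   wildcard_to_like("100%_done?") -> literal % and _ come out escaped, '?' becomes '_'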
def is_expired(self, key):
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                SELECT 1 FROM `storage`
                WHERE `key`=? AND `expired`>? LIMIT 1;"""),
            (key, get_timestamp()))
        result = not bool(cursor.fetchone())
        return result
def get(self, key, encoding=None):
    with self.database.cursor(self.Cursor) as cursor:
        cursor.execute(
            self._escape_sql_args_formatter("""
                SELECT `value` FROM `storage`
                WHERE `key`=? AND `expired`>? LIMIT 1;"""),
            (key, get_timestamp()))
        result = cursor.fetchone()
        if not result:
            return
        t = result[0]
        t = self.unserialize(t, encoding=encoding)
        return t
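# A self-contained sqlite3 sketch of the `storage` table and the expiry
# convention the methods above rely on: `expired` holds an absolute Unix
# deadline in seconds, rows are only visible while expired > now, and
# purge_expired() deletes the rest. The real class goes through
# self.database / self._escape_sql_args_formatter rather than raw sqlite3.
import sqlite3
import time

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE storage (`key` TEXT PRIMARY KEY, `value` BLOB, `expired` INTEGER)")
now = int(time.time())
db.execute("REPLACE INTO storage (`key`, `value`, `expired`) VALUES (?, ?, ?)",
           ("greeting", b"hello", now + 60))                      # set(..., expires=60)
print(db.execute("SELECT `value` FROM storage WHERE `key`=? AND `expired`>? LIMIT 1",
                 ("greeting", now)).fetchone())                   # (b'hello',)
db.execute("DELETE FROM storage WHERE `expired`<=?", (now,))      # purge_expired()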
def get_top_song_metadata():
    """
    Fetch the JSON metadata about the latest top song from the fileserver.
    """
    cache_buster = '?v=%s' % get_timestamp()
    response = requests.get(settings.MEDIA_URL + 'musikk/top_meta.json' + cache_buster)
    return response.json()
def getAlgorithm(request, id_record):
    # ADD THE TYPE OF THE SIGNAL ALSO IN URLS!!!
    # read parameters from url
    # get data type list
    allow_ml = False
    id_file = -1
    if request.method == 'POST':
        mydict = dict(request.POST.iterlists())
        # if 'choose_signal' in mydict:
        # print request.POST
        print id_record
        type_sig = ''
        id_num = id_record
        try:
            print "RUNNING FOR ", id_num
            data, cols = QueryDb(id_num)
            # print cols
            time = selcol(data, cols, "TIME")
            labs = selcol(data, cols, "LAB")
            type_sig = get_signal_type(cols)
            # print type_sig
            params = dict()
            if mydict['type'][0] == 'contigous':
                windows, winlab = wd.get_windows_contiguos(
                    time, labs, float(mydict['length'][0]),
                    float(mydict['step'][0]))
            if mydict['type'][0] == 'no_mix':
                # for the values, make reference to .forms --> windowing.!!!!
                windows, winlab = wd.get_windows_no_mix(
                    time, labs, float(mydict['length'][0]),
                    float(mydict['step'][0]))
            if mydict['type'][0] == 'full_label':
                windows, winlab = wd.get_windows_full_label(time, labs)
            params.update({
                "windowing.type": str(mydict["type"][0]),
                "windowing.length": str(mydict["length"][0]),
                "windowing.step": str(mydict["step"][0])
            })
            # extract features from result
            # store feats. in the db
            params.update({"signal_type": type_sig})
            if type_sig == "GSR":  # GSR
                data_in = selcol(data, cols, "PHA")
                funcs, pars = list(
                    Preprocessed_Recording.objects.filter(pk=id_num).values_list(
                        'applied_preproc_funcs_names',
                        'preproc_funcs_parameters'))[0]
                DELTA = float(pars[funcs.index(u"GSR.preproc")][u"DELTA_PEAK"])
                feat_dict = extfeat_GSR(data_in, time, DELTA, windows)
                data_out, cols_out = dict_to_arrays(feat_dict)
                data_out = np.column_stack((data_out, winlab))
                columns_out = np.r_[cols_out, ["LAB"]]
            elif type_sig == "inertial":
                col_acc = ["ACCX", "ACCY", "ACCZ"]
                col_gyr = ["GYRX", "GYRY", "GYRZ"]
                col_mag = ["MAGX", "MAGY", "MAGZ"]
                try:
                    acc = selcol(data, cols, col_acc)
                    thereIsAcc = True
                except IndexError as e:
                    print e
                    thereIsAcc = False
                try:
                    gyr = selcol(data, cols, col_gyr)
                    thereIsGyr = True
                except IndexError as e:
                    print e
                    thereIsGyr = False
                try:
                    mag = selcol(data, cols, col_mag)
                    thereIsMag = True
                except IndexError as e:
                    print e
                    thereIsMag = False
                columns_out = np.array(["LAB"])
                data_out = winlab[:]
                if thereIsAcc:
                    feats_acc, fcol_acc = extfeat_ACC(acc, time, col_acc, windows)
                    data_out = np.column_stack([feats_acc, data_out])
                    columns_out = np.r_[fcol_acc, columns_out]
                if thereIsGyr:
                    feats_gyr, fcol_gyr = extfeat_GYR(gyr, time, col_gyr, windows)
                    data_out = np.column_stack([feats_gyr, data_out])
                    columns_out = np.r_[fcol_gyr, columns_out]
                if thereIsMag:
                    feats_mag, fcol_mag = extfeat_MAG(mag, time, col_mag, windows)
                    data_out = np.column_stack([feats_mag, data_out])
                    columns_out = np.r_[fcol_mag, columns_out]
            elif type_sig == "IBI":
                data_in = selcol(data, cols, ["TIME", "IBI"])
                cols_in = ["TIME", "IBI"]
                data_out, winlab = extfeat_IBI(data_in, cols_in, windows, winlab)
                columns_out = np.array(['RRmean', 'RRSTD', 'pNN50', 'pNN25',
                                        'pNN10', 'RMSSD', 'SDSD'])
                # print data_out.shape, winlab.shape
                data_out = np.column_stack((data_out, winlab))
                columns_out = np.r_[columns_out, ["LAB"]]
            st = datetime.datetime.fromtimestamp(
                get_timestamp()).strftime('%Y%m%d_%H%M%S')
            fname = MEDIA_ROOT + type_sig + "_" + id_num + "_" + st + ".csv"
            print fname
            # print(fname)
            toCsv(data_out, columns_out, fname)
            id_file = WritePathtoDB(fname, id_num, params)
            success = True
            # check distinct label
            print "COLUMNS", columns_out, type(columns_out), np.where(columns_out == "LAB")[0]
            allow_ml = False if len(
                np.unique(data_out[:, np.where(columns_out == "LAB")])) == 1 else True
        except Exception as e:
            print "COULD NOT PROCESS " + id_num + ": " + str(e)
            if type_sig is not None:
                messages.error(
                    request, "Error processing " + id_num + " (" + type_sig +
                    "). Review your parameters! It will not be saved.")
            else:
                messages.error(
                    request,
                    "Error processing. Review your parameters! It will not be saved.")
            success = False
        # else:
        #     success = False
        #     messages.error(request, "Choose at least one preprocessed signal")
    else:
        success = False
    form = windowing()
    # form_signal = form_select_signal(id_record)
    template = "extfeat/choose_alg.html"
    # print urlTmp['id_num']
    context = {
        'form': form,
        'id_record': id_record,
        'success': success,
        'allow_ml': allow_ml,
        'id_file': id_file
    }
    return render(request, template, context)
def getAlgorithm(request, id_record):
    # ADD THE TYPE OF THE SIGNAL ALSO IN URLS!!!
    # read parameters from url
    # get data type list
    if request.method == 'POST':
        mydict = dict(request.POST.iterlists())
        if 'choose_signal' in mydict:
            for id_num in mydict['choose_signal']:
                type_sig = None  # so the except branch below can test it safely
                try:
                    print "RUNNING FOR ", id_num
                    data, cols = QueryDb(id_num)
                    time = selcol(data, cols, "TIME")
                    labs = selcol(data, cols, "LAB")
                    type_sig = get_signal_type(cols)
                    params = dict()
                    if mydict['type'][0] == 'contigous':
                        windows, winlab = wd.get_windows_contiguos(
                            time, labs, float(mydict['length'][0]),
                            float(mydict['step'][0]))
                    if mydict['type'][0] == 'no_mix':
                        # for the values, make reference to .forms --> windowing.!!!!
                        windows, winlab = wd.get_windows_no_mix(
                            time, labs, float(mydict['length'][0]),
                            float(mydict['step'][0]))
                    if mydict['type'][0] == 'full_label':
                        windows, winlab = wd.get_windows_full_label(time, labs)
                    params.update({"windowing.type": str(mydict["type"][0]),
                                   "windowing.length": str(mydict["length"][0]),
                                   "windowing.step": str(mydict["step"][0])})
                    # extract features from result
                    # store feats. in the db
                    params.update({"signal_type": type_sig})
                    if type_sig == "GSR":  # GSR
                        data_in = selcol(data, cols, "PHA")
                        funcs, pars = list(
                            Preprocessed_Recording.objects.filter(pk=id_num).values_list(
                                'applied_preproc_funcs_names',
                                'preproc_funcs_parameters'))[0]
                        DELTA = float(pars[funcs.index(u"GSR.preproc")][u"DELTA_PEAK"])
                        feat_dict = extfeat_GSR(data_in, time, DELTA, windows)
                        data_out, cols_out = dict_to_arrays(feat_dict)
                        data_out = np.column_stack((data_out, winlab))
                        columns_out = np.r_[cols_out, ["LAB"]]
                    elif type_sig == "inertial":
                        col_acc = ["ACCX", "ACCY", "ACCZ"]
                        col_gyr = ["GYRX", "GYRY", "GYRZ"]
                        col_mag = ["MAGX", "MAGY", "MAGZ"]
                        try:
                            acc = selcol(data, cols, col_acc)
                            thereIsAcc = True
                        except IndexError as e:
                            print e
                            thereIsAcc = False
                        try:
                            gyr = selcol(data, cols, col_gyr)
                            thereIsGyr = True
                        except IndexError as e:
                            print e
                            thereIsGyr = False
                        try:
                            mag = selcol(data, cols, col_mag)
                            thereIsMag = True
                        except IndexError as e:
                            print e
                            thereIsMag = False
                        columns_out = np.array(["LAB"])
                        data_out = winlab[:]
                        if thereIsAcc:
                            feats_acc, fcol_acc = extfeat_ACC(acc, time, col_acc, windows)
                            data_out = np.column_stack([feats_acc, data_out])
                            columns_out = np.r_[fcol_acc, columns_out]
                        if thereIsGyr:
                            feats_gyr, fcol_gyr = extfeat_GYR(gyr, time, col_gyr, windows)
                            data_out = np.column_stack([feats_gyr, data_out])
                            columns_out = np.r_[fcol_gyr, columns_out]
                        if thereIsMag:
                            feats_mag, fcol_mag = extfeat_MAG(mag, time, col_mag, windows)
                            data_out = np.column_stack([feats_mag, data_out])
                            columns_out = np.r_[fcol_mag, columns_out]
                    elif type_sig == "IBI":
                        data_in = selcol(data, cols, ["TIME", "IBI"])
                        cols_in = ["TIME", "IBI"]
                        data_out, winlab = extfeat_IBI(data_in, cols_in, windows, winlab)
                        columns_out = np.array(['RRmean', 'RRSTD', 'pNN50', 'pNN25',
                                                'pNN10', 'RMSSD', 'SDSD'])
                        print data_out.shape, winlab.shape
                        data_out = np.column_stack((data_out, winlab))
                        columns_out = np.r_[columns_out, ["LAB"]]
                    st = datetime.datetime.fromtimestamp(
                        get_timestamp()).strftime('%Y%m%d_%H%M%S')
                    fname = MEDIA_ROOT + type_sig + "_" + id_num + "_" + st + ".csv"
                    toCsv(data_out, columns_out, fname)
                    WritePathtoDB(fname, id_num, params)
                except Exception as e:
                    print "COULD NOT PROCESS " + id_num + ": " + str(e)
                    if type_sig is not None:
                        messages.error(
                            request, "Error processing " + id_num + " (" + type_sig +
                            "). Review your parameters! It will not be saved.")
                    else:
                        messages.error(
                            request,
                            "Error processing. Review your parameters! It will not be saved.")
                    success = False
                else:
                    success = True
        else:
            success = False
            messages.error(request, "Choose at least one preprocessed signal")
    else:
        success = False
    form = windowing()
    form_signal = form_select_signal(id_record)
    template = "extfeat/choose_alg.html"
    # print urlTmp['id_num']
    context = {'form': form, 'form_signal': form_signal,
               'id_record': id_record, 'success': success}
    return render(request, template, context)
def get_top_song_metadata():
    """
    Fetch the JSON metadata about the latest top song from the fileserver.
    """
    cache_buster = '?v=%s' % get_timestamp()
    response = requests.get(MUSIC_DIR + 'top_meta.json' + cache_buster)
    return response.json()
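# An equivalent way to add the cache-busting query parameter used by the two
# get_top_song_metadata() variants above, letting requests encode the query
# string. The base_url argument is a placeholder here, not the project's real
# MUSIC_DIR / settings.MEDIA_URL value.
import time
import requests

def fetch_top_meta(base_url):
    response = requests.get(base_url + 'top_meta.json',
                            params={'v': int(time.time())})   # -> ...top_meta.json?v=<timestamp>
    response.raise_for_status()
    return response.json()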