def insert_cog_prot_if_not_exist(self, FK_id_COG, FK_id_prot):
    """
    Verify that no row with the same FKs exists and insert it into the database
    :param FK_id_COG: id of a COG
    :param FK_id_prot: id of a protein
    :type FK_id_COG: int
    :type FK_id_prot: int
    :return: id of the COGS_PROT row inserted (or the existing id)
    :rtype: int
    """
    id_cog_prot = self.get_id_cog_prot_by_id_cog_id_prot(FK_id_COG, FK_id_prot)
    if id_cog_prot == -1:
        sql_string = "INSERT INTO COGS_PROT (FK_id_cog_CO_CP, FK_id_protein_PT_CP) VALUES (%s, %s)"
        params = [FK_id_COG, FK_id_prot]
        dalObj = DAL(self.db_name, sql_string)
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("This COG-protein pair: {0} - {1} already exists".format(FK_id_COG, FK_id_prot))
        return id_cog_prot
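# Usage sketch (hypothetical, shown commented out): the insert-if-not-exist
# helpers return either the new row id or the existing one, so callers can use
# the result unconditionally. The database name and DAO wrapper class below
# are assumptions for illustration, not names from the source.
# dao = CogsDAO("phages_db")                     # hypothetical DAO wrapper
# cog_prot_id = dao.insert_cog_prot_if_not_exist(FK_id_COG=3, FK_id_prot=42)
# print(cog_prot_id)                             # valid id either way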
def insert_contig(self, id_contig_db_outside, head, sequence, fk_id_whole_genome):
    """
    Insert a Contig WITHOUT ANY VERIFICATION
    :param id_contig_db_outside: header of the CONTIG (ACC number) - -1 if unknown
    :param head: first line of the fasta file - "" if unknown
    :param sequence: nucleotide sequence of the CONTIG - "" if unknown
    :param fk_id_whole_genome: id of the whole DNA that contains this contig
    :type id_contig_db_outside: text - not required
    :type head: text - not required
    :type sequence: text - required
    :type fk_id_whole_genome: int - required
    :return: id of the CONTIG object inserted
    :rtype: int
    """
    sql_string = "INSERT INTO CONTIGS (id_contig_db_outside_CT, head_CT, sequence_CT, FK_id_whole_genome_WD_CT) VALUES (%s, %s, %s, %s)"
    dalObj = DAL(self.db_name, sql_string)
    params = [id_contig_db_outside, head, str(sequence), fk_id_whole_genome]
    dalObj.sqlcommand = sql_string
    dalObj.parameters = params
    results = dalObj.executeInsert()
    return results.lastrowid
def create_cog_score_verification(self, grp_a, grp_b, score_cog):
    """
    Insert a COG score and return its id, unless another COG score with the
    same two groups already exists
    :param grp_a: designation of the first COG group
    :param grp_b: designation of the second COG group
    :param score_cog: score of the interaction between the two groups
    :type grp_a: string - required
    :type grp_b: string - required
    :type score_cog: int - required
    :return: id of the COG score inserted (or the existing id)
    :rtype: int
    """
    id_value_COG_score = self.get_id_cog_score_by_grpa_grpb(grp_a, grp_b)
    if id_value_COG_score == -1:
        sql_string = "INSERT INTO COG_INTERACTION (Group1_CI, Group2_CI, Score_CI) VALUES (%s, %s, %s)"
        dalObj = DAL(self.db_name, sql_string)
        params = [grp_a, grp_b, score_cog]
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("A COG score with these groups already exists")
        return id_value_COG_score
def insert_domains_ds_config(self, range_DD, auto_split_DD, nomralization_data_DD, id_user, id_configuration):
    """
    Insert a domains dataset configuration
    :param range_DD: size of the range
    :param auto_split_DD: scikit-learn automatic split
    :param nomralization_data_DD: normalize the data (parameter keeps the original column spelling)
    :param id_user: FK of the user
    :param id_configuration: FK of the configuration
    :type range_DD: int - required
    :type auto_split_DD: int - required
    :type nomralization_data_DD: int - required
    :type id_user: int - required
    :type id_configuration: int - required
    :return: id of the DOMAINS_DS row inserted
    :rtype: int
    """
    sql_string = "INSERT INTO DOMAINS_DS (range_DD, auto_split_DD, nomralization_data_DD, FK_id_user_US_DD, FK_id_configuration_CF_DD) VALUES (%s, %s, %s, %s, %s)"
    dalObj = DAL(self.db_name, sql_string)
    params = [range_DD, auto_split_DD, nomralization_data_DD, id_user, id_configuration]
    dalObj.sqlcommand = sql_string
    dalObj.parameters = params
    results = dalObj.executeInsert()
    return results.lastrowid
def insert_configuration(self, date_time_creation, designation, id_ds, id_user):
    """
    Insert a configuration
    :param date_time_creation: date and time of the creation (now)
    :param designation: configuration name
    :param id_ds: FK of the dataset
    :param id_user: FK of the user
    :type date_time_creation: string (datetime sql format) - required
    :type designation: string - required
    :type id_ds: int - required
    :type id_user: int - required
    :return: id of the configuration inserted
    :rtype: int
    """
    sql_string = "INSERT INTO CONFIGURATIONS (date_time_creation_CF, designation_CF, FK_id_DS_CF, FK_id_user_US_DD) VALUES (%s, %s, %s, %s)"
    dalObj = DAL(self.db_name, sql_string)
    params = [date_time_creation, designation, id_ds, id_user]
    dalObj.sqlcommand = sql_string
    dalObj.parameters = params
    results = dalObj.executeInsert()
    return results.lastrowid
def markSiteAsBuilt(site, mayVal):
    if mayVal[1] == 0:
        DAL.insert_newsSite_lastBuilt(client, site)
        print(str(dtNow()) + " db:insert")
    else:
        DAL.update_newsSite_lastBuilt(client, mayVal[1])
        print(str(dtNow()) + " db:update")
def insert_DDI_source_return_id_if_not_exists(self, date_creation, FK_DDI_interaction, FK_Source_DDI_DB):
    """
    Insert a DDI source and return its id; if the pair of FKs already exists,
    return the existing id instead.
    :param date_creation: creation date of the record
    :param FK_DDI_interaction: FK of the DDI interaction
    :param FK_Source_DDI_DB: FK of the database that provides the information
    :type date_creation: string (datetime sql format) - required
    :type FK_DDI_interaction: int - required
    :type FK_Source_DDI_DB: int - required
    :return: id of the row inserted or id of the existing one
    :rtype: int
    """
    id_DDI_DB_source = self.get_id_DDI_interaction_DB_by_FK_keys(FK_DDI_interaction, FK_Source_DDI_DB)
    if id_DDI_DB_source == -1:
        sqlObj = "INSERT INTO DDI_INTERACTIONS_DB (date_insertion_DDB, FK_id_interaction_DD_DDB, FK_id_db_interaction_DDB) VALUES (%s, %s, %s)"
        params = [date_creation, FK_DDI_interaction, FK_Source_DDI_DB]
        dalObj = DAL(self.db_name, sqlObj, params)
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        return id_DDI_DB_source
def insert_gene_return_id(self, gene_number, dna_head, dna_seq, start_position, end_position, fk_organism, fk_protein):
    """
    Insert a gene and return its id
    :param gene_number: order of the gene in the whole sequence - -1 if unknown
    :param dna_head: first line of the gene's fasta head - "No head" if unknown
    :param dna_seq: NUCLEOTIDE sequence of the gene - "" if unknown
    :param start_position: start position of the gene in the whole genome
    :param end_position: end position of the gene in the whole genome
    :param fk_organism: id of the organism the gene belongs to
    :param fk_protein: id of the protein the gene belongs to
    :type gene_number: int - not required
    :type dna_head: text - required
    :type dna_seq: text - required
    :type start_position: int - not required
    :type end_position: int - not required
    :type fk_organism: int - required
    :type fk_protein: int - required
    :return: id of the inserted gene
    :rtype: int
    """
    sqlObj = "INSERT INTO GENES (gene_number_GE, dna_head_GE, dna_sequence_GE, start_position_GE, end_position_GE, FK_id_organism_OR_GE, FK_id_protein_PT_GE) VALUES (%s, %s, %s, %s, %s, %s, %s)"
    params = [gene_number, dna_head, dna_seq, start_position, end_position, fk_organism, fk_protein]
    dalObj = DAL(self.db_name, sqlObj, params)
    results = dalObj.executeInsert()
    return results.lastrowid
def create_cog_interact_source_verification(self, score_interaction_COG, Fk_source, Fk_interaction):
    """
    Insert a COG interaction source, based on the FK ids of the source and the
    interaction, unless the pair already exists
    :param score_interaction_COG: score of the COG interaction
    :param Fk_source: FK of the source
    :param Fk_interaction: FK of the interaction
    :type score_interaction_COG: int - required
    :type Fk_source: int - required
    :type Fk_interaction: int - required
    :return: id of the COG interaction source inserted (or the existing id)
    :rtype: int
    """
    id_source_interact_cog = self.get_id_cog_interact_by_source_interaction(Fk_source, Fk_interaction)
    if id_source_interact_cog == -1:
        sql_string = "INSERT INTO COG_SOURCES_INTERACT (score_CSI, FK_id_cog_source_CS_CSI, FK_id_cog_interaction_CI_CSI) VALUES (%s, %s, %s)"
        dalObj = DAL(self.db_name, sql_string)
        params = [score_interaction_COG, Fk_source, Fk_interaction]
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("A COG interaction source with these FKs already exists")
        return id_source_interact_cog
def insert_protein_all_info_return_id_procedure(self, id_accession, designation, sequence_prot, sequence_dna, start_point, end_point, start_point_cnt, end_point_cnt, fk_id_contig, organism_id):
    """
    Insert a PROTEIN through a stored procedure and return its id; only the
    parameters cited below are used. NO VERIFICATION IS DONE
    :param id_accession: accession number - -1 if unknown
    :param designation: the text following ">" in the fasta file (first line) - "" if unknown
    :param sequence_prot: protein sequence - "" if unknown
    :param sequence_dna: nucleic sequence - "" if unknown
    :param start_point: start position in the gene - -1 if unknown
    :param end_point: end position in the gene - -1 if unknown
    :param start_point_cnt: start position in the contig - -1 if unknown
    :param end_point_cnt: end position in the contig - -1 if unknown
    :param fk_id_contig: FK of the contig - -1 if unknown
    :param organism_id: id of the organism of the protein - -1 if unknown
    :return: id of the protein
    :rtype: int
    """
    sqlObj = "insert_protein_secure"
    params = [id_accession, designation, sequence_prot, sequence_dna, start_point, end_point, start_point_cnt, end_point_cnt, fk_id_contig, organism_id]
    print(params)
    dalObj = DAL(self.db_name, sqlObj, params)
    results = dalObj.call_procedure()
    return results.lastrowid
def verify_COG_preview_exits(self, FK_id_prot_back, FK_id_prot_phage, FK_id_interact_cog, fk_id_couple):
    """
    Verify whether a given COG_preview exists
    :param FK_id_prot_back: id of the bacterium protein COG
    :param FK_id_prot_phage: id of the phage protein COG
    :param FK_id_interact_cog: id of the COG interaction source
    :param fk_id_couple: id of the couple
    :type FK_id_prot_back: int - required
    :type FK_id_prot_phage: int - required
    :type FK_id_interact_cog: int - required
    :type fk_id_couple: int - required
    :return: number of matching rows (0 if none)
    :rtype: int
    """
    sql_string = ("SELECT count(*) FROM COGS_preview"
                  " WHERE FK_id_prot_cog_bact_CP_CPR = " + str(FK_id_prot_back) +
                  " and FK_id_prot_cog_phage_CP_CPR = " + str(FK_id_prot_phage) +
                  " and FK_id_interaction_cog_source_CSI_CPR = " + str(FK_id_interact_cog) +
                  " and FK_id_couple_CP_CPR = " + str(fk_id_couple))
    dalObj = DAL(self.db_name, sql_string)
    results = dalObj.executeSelect()
    # results[0] is the first row; the count itself is its first column
    # (the original compared the whole row tuple to 0, which was never true)
    return results[0][0]
def create_cog_interact_verification(self, FK_group_cog_a, FK_group_cog_b):
    """
    Insert a COG interaction based on the FK ids of both groups, unless the
    pair already exists
    :param FK_group_cog_a: FK id of COG group A
    :param FK_group_cog_b: FK id of COG group B
    :type FK_group_cog_a: int - required
    :type FK_group_cog_b: int - required
    :return: id of the COG interaction inserted (or the existing id)
    :rtype: int
    """
    id_value_COG_interact = self.get_id_cog_interact_by_grpa_grpb(FK_group_cog_a, FK_group_cog_b)
    if id_value_COG_interact == -1:
        sql_string = "INSERT INTO COGS_INTERACTIONS (FK_cog_a_CI, FK_cog_b_CI) VALUES (%s, %s)"
        dalObj = DAL(self.db_name, sql_string)
        params = [FK_group_cog_a, FK_group_cog_b]
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("A COG interaction with these groups already exists")
        return id_value_COG_interact
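# Usage sketch (hypothetical, commented out): chaining the two verification
# helpers so the COG interaction row is created first and its source/score
# attached afterwards. The wrapper name, db name and ids are illustrative.
# dao = CogsDAO("phages_db")                     # hypothetical DAO wrapper
# interaction_id = dao.create_cog_interact_verification(FK_group_cog_a=1, FK_group_cog_b=2)
# dao.create_cog_interact_source_verification(score_interaction_COG=87,
#                                             Fk_source=5,
#                                             Fk_interaction=interaction_id)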
def insert_specie_if_not_exist_in_Genus(self, specieName, genus_id):
    """
    Insert a species if it does not yet exist (based on the designation)
    :param specieName: name of the species
    :param genus_id: FK of the species' genus - -1 if unknown
    :type specieName: string - required
    :type genus_id: int - required
    :return: id of the species inserted (or the existing id)
    :rtype: int
    :note:: this does not verify the complete taxonomy, only whether the
        species already exists in the given genus.
    """
    id_specie = self.get_specie_id_by_designation_and_genus_id(specieName, genus_id)
    if id_specie == -1:
        sql_string = "INSERT INTO SPECIES (designation_SP, FK_id_genus_GE_SP) VALUES (%s, %s)"
        params = [specieName, genus_id]
        dalObj = DAL(self.db_name, sql_string)
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("The species: %s already exists in the genus id: %d" % (str(specieName), genus_id))
        return id_specie
def whoIsAlive():
    client = DAL.openConnection()
    cursor = DAL.liveness_getAll(client)
    results = list(cursor)
    for d in results:
        # how many minutes/seconds ago the last notification was received
        d['last_notif_min'] = floor((dtNow() - d["timestamp"]).total_seconds() / 60)
        d['last_notif_sec'] = floor((dtNow() - d["timestamp"]).total_seconds())
        # each module has its own tolerated silence window (in minutes)
        if d['name'] == "producer: news":
            d['status'] = d['last_notif_min'] < 25
        elif d['name'] == "producer: predictions":
            d['status'] = d['last_notif_min'] < 61
        elif d['name'] == "producer: telegram":
            d['status'] = d['last_notif_min'] < 60
        elif d['name'] == "worker: news":
            d['status'] = d['last_notif_min'] < 120
        elif d['name'] == "worker: sentiments news":
            d['status'] = d['last_notif_min'] < 120
        else:
            d['status'] = d['last_notif_min'] < 3
        del d['timestamp']
        del d['_id']
        if mailer and not d['status']:
            send_email_server("Module (" + d['name'] + ") is offline",
                              "last notif minutes: " + str(d['last_notif_min']))
    results = sorted(results, key=lambda x: x['name'])
    return results
def get_id_couple_by_phage_bact_type_interaction(self, fk_bact, fk_phage, type_inter):
    """
    Verify whether a couple exists for a given phage, bacterium and interaction
    type. Return its id if it exists, -1 otherwise
    :param fk_bact: id of the bacterium
    :param fk_phage: id of the phage
    :param type_inter: id of the interaction type
    :type fk_bact: int - required
    :type fk_phage: int - required
    :type type_inter: int - required
    :return: id of the couple or -1 if nonexistent
    :rtype: int
    """
    sql_string = ("SELECT id_couple_CP FROM COUPLES"
                  " WHERE FK_id_organism_bact_OR_CP = '" + str(fk_bact) +
                  "' and FK_id_organism_phage_OR_CP = " + str(fk_phage) +
                  " AND interaction_CP = " + str(type_inter))
    dalObj = DAL(self.db_name, sql_string)
    results = dalObj.executeSelect()
    if len(results) == 0:
        return -1
    else:
        return results[0][0]
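# Design note: the SELECT helpers in these snippets build SQL by string
# concatenation, which is fragile and open to SQL injection. If
# DAL.executeSelect honours the same parameter list that the insert path uses
# (an assumption -- verify against the DAL class before relying on it), the
# query above could be written with placeholders instead:
# sql_string = ("SELECT id_couple_CP FROM COUPLES "
#               "WHERE FK_id_organism_bact_OR_CP = %s "
#               "AND FK_id_organism_phage_OR_CP = %s AND interaction_CP = %s")
# dalObj = DAL(self.db_name, sql_string, [fk_bact, fk_phage, type_inter])
# results = dalObj.executeSelect()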
class UserTreatment():
    def __init__(self):
        self.dal = DAL()

    def login(self, data):
        logging.info("Got login request")
        email = data["email"]
        password = data["password"]
        try:
            res = self.dal.check_user_passwd(email, password)
            logging.info("res " + str(res))
            if res:
                return 0, res
            else:
                return 3, "Wrong password"
        except UserNotExistException:
            return 2, "Error, user with this email does not exist"

    def register(self, data):
        logging.info("Got register request")
        logging.info(data)
        firstname = data["firstname"]
        lastname = data["lastname"]
        nickname = data["nickname"]
        password = data["password"]
        email = data["email"]
        try:
            self.dal.add_new_user(firstname, lastname, nickname, password, email, user_role="contributor")
            return 0, "User registered successfully"
        except UserExistException:
            return 1, "Error, a user with this email already exists"
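# Usage sketch (commented out): UserTreatment expects plain dicts, so a caller
# such as a request handler can pass the decoded JSON body straight through.
# All values below are illustrative, not from the source.
# ut = UserTreatment()
# code, payload = ut.register({"firstname": "Ada", "lastname": "Lovelace",
#                              "nickname": "ada", "password": "s3cret",
#                              "email": "ada@example.com"})
# code, payload = ut.login({"email": "ada@example.com", "password": "s3cret"})
# # code 0 means success; 1/2/3 carry an error message in payload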
def insert_protdom_if_not_exist(self, fk_protein, fk_domain):
    """
    Verify that no row with the same FKs exists and insert it into the database
    :param fk_protein: id of a protein
    :param fk_domain: id of a domain
    :type fk_protein: int
    :type fk_domain: int
    :return: id of the ProteinDom row inserted (or the existing id)
    :rtype: int
    """
    id_prot_dom = self.get_id_prot_dom_by_id_prot_id_domain(fk_protein, fk_domain)
    if id_prot_dom == -1:
        sql_string = "INSERT INTO PROT_DOM (FK_id_protein_PT_DP, FK_id_domain_DO_DP) VALUES (%s, %s)"
        params = [fk_protein, fk_domain]
        dalObj = DAL(self.db_name, sql_string)
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("This protein-domain: {0} - {1} pair already exists".format(fk_protein, fk_domain))
        return id_prot_dom
def get_id_ds_conf_ds_by_value_and_fks(self, value_configuration, FK_id_configuration_DCT_DCD, FK_id_dataset_DS_DCD):
    """
    Return the id of a ds_conf_ds
    :param value_configuration: value of the configuration
    :param FK_id_configuration_DCT_DCD: FK of the configuration (DATASET_CONFIGURATIONS_TYPES)
    :param FK_id_dataset_DS_DCD: FK of the dataset (DATASETS)
    :type value_configuration: int - required
    :type FK_id_configuration_DCT_DCD: int - required
    :type FK_id_dataset_DS_DCD: int - required
    :return: id of the DATASET_CONF_DS row or -1 if it does not exist
    :rtype: int
    """
    sql_string = ("SELECT id_ds_conf_ds_DCD FROM DATASET_CONF_DS"
                  " WHERE value_config_DCD = '" + str(value_configuration) +
                  "' AND FK_id_configuration_DCT_DCD = '" + str(FK_id_configuration_DCT_DCD) +
                  "' AND FK_id_dataset_DS_DCD = '" + str(FK_id_dataset_DS_DCD) + "'")
    dalObj = DAL(self.db_name, sql_string)
    results = dalObj.executeSelect()
    if len(results) == 0:
        return -1
    else:
        return results[0][0]
def insert_DS_conf_DS_return_id_if_not_exists(self, value_configuration, FK_id_configuration_DCT_DCD, FK_id_dataset_DS_DCD):
    """
    Insert a dataset configuration and return its id; if the combination
    already exists, return the existing id instead.
    :param value_configuration: value of the configuration
    :param FK_id_configuration_DCT_DCD: FK of the configuration
    :param FK_id_dataset_DS_DCD: FK of the dataset
    :type value_configuration: int - required
    :type FK_id_configuration_DCT_DCD: int - required
    :type FK_id_dataset_DS_DCD: int - required
    :return: id of the dataset configuration inserted or id of the existing one
    :rtype: int
    """
    id_conf_ds = self.get_id_ds_conf_ds_by_value_and_fks(value_configuration, FK_id_configuration_DCT_DCD, FK_id_dataset_DS_DCD)
    if id_conf_ds == -1:
        sqlObj = "INSERT INTO DATASET_CONF_DS (value_config_DCD, FK_id_configuration_DCT_DCD, FK_id_dataset_DS_DCD) VALUES (%s, %s, %s)"
        params = [value_configuration, FK_id_configuration_DCT_DCD, FK_id_dataset_DS_DCD]
        dalObj = DAL(self.db_name, sqlObj, params)
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        return id_conf_ds
def get_contig_by_organism_id(self, id_organism):
    """
    Return all contigs of a given organism (joined through WHOLE_DNA)
    :param id_organism: id of the organism
    :type id_organism: int - required
    :return: rows of CONTIGS belonging to the organism
    """
    sql_string = ("SELECT id_contig_CT, id_contig_db_outside_CT, head_CT, sequence_CT, FK_id_whole_genome_WD_CT"
                  " FROM CONTIGS, ORGANISMS, WHOLE_DNA"
                  " WHERE id_organism_OR = " + str(id_organism) +
                  " and fk_id_whole_dna_dna_or = id_dna_wd and fk_id_whole_genome_wd_ct = id_dna_wd")
    dalObj = DAL(self.db_name, sql_string)
    results = dalObj.executeSelect()
    return results
def send_pulses(index):
    """
    Sends a "pulse" (number) through a socket until the server closes.
    :param index: The index of the information to send.
    """
    path = str(DAL.__file__[:-7])
    conn = DAL.connect(path + r'\DBProject.db')
    patients = [item[0] for item in DAL.get_patient(conn)]
    while get_running():
        time.sleep(1)
        thread_input = get_thread_input(index)
        with PRINT_LOCK:
            print("Thread num:", index, ", input: ", thread_input)
        data = {}
        # TODO - complete: thread_input = 1, 2 or 3
        data["input"] = thread_input
        data["client_num"] = patients[index]
        data["position"] = "123"
        data["event_time"] = datetime.datetime.now()
        data["value"] = "123"
        # {"input": thread_input, "client num": index, "start_time": "123"}
        requests.post(f"http://{SERVER_IP}:{SERVER_PORT}/add_data", data)
        if thread_input == b"2":
            change_thread_input(index)
def insert_PPI_couple(self, FK_id_prot_bact, FK_id_prot_phage, Fk_couple):
    """
    Insert a ppi_couple in the database and return its id.
    :Note: a verification is done in the database, which allows inserting only
        a unique pair of id_prot_bact and id_prot_phage.
    :param FK_id_prot_bact: id of the bacterium protein - -1 if unknown
    :param FK_id_prot_phage: id of the phage protein - -1 if unknown
    :param Fk_couple: id of the couple - -1 if unknown
    :type FK_id_prot_bact: int - required
    :type FK_id_prot_phage: int - required
    :type Fk_couple: int - required
    :return: id of the PPI_couple inserted
    :rtype: int
    """
    sql_string = "INSERT INTO PPI_couple (FK_prot_bact_PT_PCP, FK_prot_phage_PT_PCP, FK_couple_CP_PCP) VALUES (%s, %s, %s)"
    params = [FK_id_prot_bact, FK_id_prot_phage, Fk_couple]
    dalObj = DAL(self.db_name, sql_string)
    dalObj.sqlcommand = sql_string
    dalObj.parameters = params
    results = dalObj.executeInsert()
    return results.lastrowid
def get_id_DDI_interaction_DB_by_FK_keys(self, FK_DDI_pair, FK_DDI_source):
    """
    Return the id of a DDI interaction/source pair
    :param FK_DDI_pair: FK of the DDI interaction
    :param FK_DDI_source: FK of the DDI source database
    :type FK_DDI_pair: int - required
    :type FK_DDI_source: int - required
    :return: id of the DDI_INTERACTIONS_DB row or -1 if it does not exist
    :rtype: int
    """
    sql_string = ("SELECT id_DDI_interaction_DDB FROM DDI_INTERACTIONS_DB"
                  " WHERE FK_id_interaction_DD_DDB = '" + str(FK_DDI_pair) +
                  "' AND FK_id_db_interaction_DDB = '" + str(FK_DDI_source) + "'")
    dalObj = DAL(self.db_name, sql_string)
    results = dalObj.executeSelect()
    if len(results) == 0:
        return -1
    else:
        return results[0][0]
def insert_whole_genome_if_not_exist(self, head, head_id, sequence):
    """
    Insert a Whole DNA, but verify first that no other row exists with the same
    head and head_id
    :param head: header of the whole genome (first line in the fasta file) - "" if unknown
    :param head_id: accession number of the Whole_DNA - "" if unknown
    :param sequence: nucleotide sequence of the WholeDNA - "" if unknown
    :type head: text - required
    :type head_id: text - required
    :type sequence: text - required
    :return: id of the WholeDNA object inserted or id of the existing one
    :rtype: int
    """
    id_wholeDNA = self.get_id_whole_genome_by_head_and_head_id(head, head_id)
    if id_wholeDNA == -1:
        sql_string = "INSERT INTO WHOLE_DNA (head_WD, head_id_WD, sequence_WD) VALUES (%s, %s, %s)"
        dalObj = DAL(self.db_name, sql_string)
        params = [head, head_id, sequence]
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("A Whole DNA with these head data already exists")
        return id_wholeDNA
def insert_contig_if_not_exist(self, id_contig_db_outside, head, sequence, fk_id_whole_genome):
    """
    Insert a Contig unless another contig with the same head already exists
    :param id_contig_db_outside: header of the CONTIG (ACC number) - -1 if unknown
    :param head: first line of the fasta file - "" if unknown
    :param sequence: nucleotide sequence of the CONTIG - "" if unknown
    :param fk_id_whole_genome: id of the whole DNA that contains this contig
    :type id_contig_db_outside: text - not required
    :type head: text - not required
    :type sequence: text - required
    :type fk_id_whole_genome: int - required
    :return: id of the CONTIG object inserted (or the existing id)
    :rtype: int
    """
    id_contig = self.get_id_contig_by_head(head)
    if id_contig == -1:
        sql_string = "INSERT INTO CONTIGS (id_contig_db_outside_CT, head_CT, sequence_CT, FK_id_whole_genome_WD_CT) VALUES (%s, %s, %s, %s)"
        dalObj = DAL(self.db_name, sql_string)
        params = [id_contig_db_outside, head, str(sequence), fk_id_whole_genome]
        dalObj.sqlcommand = sql_string
        dalObj.parameters = params
        results = dalObj.executeInsert()
        return results.lastrowid
    else:
        print("A Contig with this head already exists")
        return id_contig
def process_rdd_social(time, part_iterator):
    try:
        log("----------- %s : SOCIAL --" % str(datetime.fromtimestamp(time)))
        # count mentions per (crypto, source) bucket
        buckets = {}
        for part in part_iterator:
            crypto = part['crypto']
            source = part['source']
            key = crypto + "|" + source
            if key not in buckets:
                buckets[key] = {'crypto': crypto, 'source': source, 'nums': 0}
            buckets[key]['nums'] += 1
        for key, b in buckets.items():
            crypto = b['crypto']
            source = b['source']
            nums = b['nums']
            log(crypto + " (" + source + "): " + str(nums))
            DAL.store_mentions_social(client, nums, datetime.fromtimestamp(time), crypto, source)
        DAL.liveness_IAmAlive(client, "worker: social")
    except Exception as ex:
        log("exception")
        logErr(str(ex), traceback.format_exc())
def login_root():
    rows = []
    if request.method == 'POST':
        conn = DAL.connect('DBProject.db')
        rows = DAL.login(conn, request.form['username'])
        if len(rows) == 0:
            return render_template('login.html', error="username is invalid")
        patientID, chipID, firstname, lastname, medical_state, location, contactID, username, password, salt = rows[0]
        if_true = hash_code.verify_password(password, salt, request.form['password'])
        DAL.close(conn)
        if not if_true:
            return render_template('login.html', error="password is invalid")
        session["patientID"] = patientID
        session["chipID"] = chipID
        session["firstname"] = firstname
        session["lastname"] = lastname
        session["contactID"] = contactID
        session["username"] = username
        # NOTE: the original stored str(hash), i.e. the builtin function;
        # the stored (hashed) password from the DB row is almost certainly what was meant
        session["password"] = str(password)
        session["salt"] = str(salt)
        return get_main_page()
    elif request.method == 'GET':
        return render_template('login.html', error="")
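# Sketch of what a hash_code.verify_password helper could look like; the real
# module is not shown in this collection, so everything below is an assumption
# for illustration. A salted PBKDF2 comparison is one common implementation:
# import hashlib, hmac
# def verify_password(stored_hash, salt, candidate):
#     # Return True when candidate hashes to stored_hash under the given salt.
#     digest = hashlib.pbkdf2_hmac("sha256", candidate.encode(), salt.encode(), 100000)
#     return hmac.compare_digest(digest.hex(), stored_hash)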
def insert_protein_all_info_return_id(self, id_accession, designation, sequence_prot, sequence_dna, start_point, end_point, start_point_cnt, end_point_cnt, fk_id_contig):
    """
    Insert a PROTEIN and return its id; only the parameters cited below are
    used. NO VERIFICATION IS DONE
    :param id_accession: accession number - -1 if unknown
    :param designation: the text following ">" in the fasta file (first line) - "" if unknown
    :param sequence_prot: protein sequence - "" if unknown
    :param sequence_dna: nucleic sequence - "" if unknown
    :param start_point: start position in the gene - -1 if unknown
    :param end_point: end position in the gene - -1 if unknown
    :param start_point_cnt: start position in the contig - -1 if unknown
    :param end_point_cnt: end position in the contig - -1 if unknown
    :param fk_id_contig: FK of the contig - -1 if unknown
    :return: id of the protein
    :rtype: int
    """
    sqlObj = "INSERT INTO PROTEINS (id_protein_BD_online_PT, designation_PT, sequence_PT, DNA_sequence_PT, start_point_PT, end_point_PT, start_point_cnt_PT, end_point_cnt_PT, FK_id_contig_CT_PT) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)"
    params = [id_accession, designation, sequence_prot, sequence_dna, start_point, end_point, start_point_cnt, end_point_cnt, fk_id_contig]
    print(params)
    dalObj = DAL(self.db_name, sqlObj, params)
    results = dalObj.executeInsert()
    return results.lastrowid
def update_handler(client, message):
    print(message)
    print(type(message))  # pyrogram message type/class
    log.info(message)
    try:
        DAL.liveness_IAmAlive(dalclient, "producer: telegram")
        if message['text'] is not None:
            msg = (message['text'].encode('utf-8')).decode('utf-8')
            channelName = message['chat']['title']  # channel's name
            print(msg)  # message text
            # print(update.message.date)  # UTC timestamp of message
            sbody = nltk.wordpunct_tokenize(msg.lower())
            for crypto, kws in CryptoMapping:
                for kw in kws:
                    if kw in sbody:
                        print("sending to kafka <" + crypto + ">: " + msg)
                        log.info("sending to kafka <" + crypto + ">: " + msg)
                        producerMgr.producer_send_mentionsSocial(msg, 'telegram', channelName, crypto, producer)
        else:
            print("non-text message (sticker or image)")
            log.info("non-text message (sticker or image)")
    except Exception as ex:
        # not every update is a message, some are misc. notifications
        print(ex)
        logErr.critical(str(ex), exc_info=True)
def main():
    # Configure the Raspberry Pi I/O
    SPL.configurePi()
    # Create a GUI info window
    root = Tk()
    root.title("Running monitoring system...")
    message = StringVar()
    message.set("Initializing...")
    Label(root, textvariable=message, width=50, height=10).pack()
    root.update()
    while 1:
        try:
            config = json.load(open("config.json"))
            fs = config["sensor"]["fs"]
        except Exception:
            print("Error reading configurations")
        sleep(fs)
        DAL.printLog("\n\n\n---------------------------\nInitializing a sending routine...\n")
        print("\n\n\n---------------------------\nInitializing a sending routine...\n")
        # Get the last measure
        wh = DAL.getLastMeasure()
        # Get the current time
        timestamp = datetime.now().strftime(DAL.getDateFormat())
        # Send data to the cloud server
        DAL.sendData(timestamp, wh)
        # Try to resend unsent data from the queue
        DAL.resendQueue()
        DAL.printLog("Finished a sending routine.\n\n\nWaiting for the next time...")
        print("Finished a sending routine.\n\n\nWaiting for the next time...")
        # Restart the window message display
        try:
            root.destroy()
        except Exception:
            pass
        root = Tk()
        root.title("Running monitoring system...")
        message = StringVar()
        message.set("The system is running...\n Last update: " + timestamp +
                    "\n Everything working fine!\n\n" + str(float(wh) / 1000) +
                    " kWh\n is the accumulated energy generated")
        Label(root, textvariable=message, width=50, height=10).pack()
        root.update()
def proc_light_curve_pl(subset_index):
    import DAL
    import math
    import numpy as np
    #from DAL.datasets.checkpoint import Checkpoint
    lightcurves = DAL.create('lightcurves')
    #checkpoint = Checkpoint()
    if subset_index >= len(s):
        return []
    subset_filename = s[subset_index]
    subset_number = subset_filename[:-9]
    print subset_filename, subset_number
    ret = []
    ccc = 0
    for i in lightcurves.iter(subset_filename):
        ccc += 1
        #if ccc >= 5:
        #    break
        #t1 = time.clock()
        name = i['id']
        lc = i['data']
        time_x = lc[:int(len(lc)/2)]   # first half of the record: time
        flux_y = lc[int(len(lc)/2):]   # second half of the record: flux
        X = np.array(time_x, dtype='float32')
        Y = np.array(flux_y, dtype='float32')
        # drop NaN flux values (and the matching time stamps)
        X = X[np.logical_not(np.isnan(Y))]
        Y = Y[np.logical_not(np.isnan(Y))]
        res = kernel_regress_cross_validation(X, Y)
        Y_hat = res[0]
        Y_band = res[2]
        resids = Y - Y_hat
        sigma = 1.4826 * np.median(abs(resids - np.median(resids)))  # 1.4826 * MAD
        resids_standard = (resids - resids.mean()) / sigma
        beta = math.sqrt(2 * math.log(len(X)))
        resids_truc = resids_standard[np.logical_not(resids_standard >= -1 * beta)]
        norm_one_sum = np.linalg.norm(resids_truc, ord=1)
        ret.append((name, norm_one_sum))
        '''
        print subset_index, name, Y_band, norm_one_sum
        print "temp_spent ", time.clock() - t1
        fig = plt.figure(figsize=(10, 6))
        plt.plot(X, Y, '.k')
        plt.plot(X, Y_hat, '-b')
        title_string = "id:" + str(sample[0]) + ", kernel regression, boxcar, bandwidth=" + str(h)
        plt.title(title_string)
        plt.show()
        '''
    #checkpoint.store("proj4_1_"+str(subset_index), obj=ret)  # also supports fp=<file pointer> and s=<string>
    return ret
def gist(index):
    import DAL
    import math
    import scipy
    import leargist
    import numpy as np
    tinyimages = DAL.create('tinyimages')
    img = scipy.misc.toimage(
        tinyimages.byid(index).reshape(32, 32, 3, order="F").copy())
    a = leargist.color_gist(img)
    vec = np.array(a)
    sd = math.sqrt(np.var(vec))
    mu = np.mean(vec)
    # standardize the 960-dimensional GIST descriptor
    standarized_gist = (vec - mu) / sd
    return list(standarized_gist)
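# Usage sketch (commented out): with an IPython.parallel direct view, as set up
# in the other snippets here (dview = Client()[:]), the descriptor can be
# mapped over many image ids at once. The id list below is illustrative.
# descriptors = dview.map_sync(gist, car_ids[0:100])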
class ContributionEngine():
    def __init__(self, language, keyword=None):
        self.THANKS_BUT_NO_THANKS = "Thank you, but the keyword is already translated"
        self.THANKS_FOR_APPROVE = "Thank you for approving the keyword!"
        self.THANKS_FOR_CONTRIBUTION = "Thank you for your contribution!"
        self.GET_CONTRIBUTION = "Please insert contribution details: "
        self.WRONG_URL = "We could not use this URL. Please recheck spelling"
        self.WRONG_TRANSLATION = "Are you sure this link describes the keyword?"
        self.WRONG_WORD_TYPE = "Please recheck the keyword type; it does not look like a keyword"
        self.ERROR_CODE = -1
        self.OK_CODE = 0
        self.language = language
        self.keyword = keyword
        self.dal = DAL()
        self.res_parser = ResultParser(self.language)
        self.function_mapper = {"id": self.res_parser.find_by_id,
                                "class": self.res_parser.find_by_class,
                                "p": self.res_parser.find_by_p,
                                "clear": self.res_parser.strip_text_from_html,
                                "nothing": lambda html: html}

    def contribute(self):
        """
        Contribution act on a keyword:
        if it exists in the DB and is approved - say thank you
        if it exists in the DB and is not approved - ask for approval
        if it does not exist in the DB - ask for a contribution
        :return: String describing the next step
        """
        logging.info("Got keyword: {}, language: {}".format(self.keyword, self.language))
        res = self.check_keyword_exist()
        if not res:
            return self.get_contribution()
        else:
            if self.check_keyword_approved(res):
                return self.THANKS_BUT_NO_THANKS
            else:
                return self.show_existing_translation(res)

    def user_approve(self):
        """
        If the contributor approved the keyword, send a thank you; otherwise ask
        for a new contribution
        """
        self.set_approved(True)
        return self.THANKS_FOR_APPROVE

    def check_keyword_exist(self):
        """
        Check if the keyword exists in the DB
        :return: dictionary with the keyword details, or an empty list if it does not exist
        """
        try:
            return self.dal.get_data_from_db(self.keyword, self.language)
        except DataNotExistException:
            return []

    def check_keyword_approved(self, res):
        """
        Get a dictionary describing the keyword and return whether it is approved
        :param res: dictionary
        :rtype: boolean
        """
        return eval(res['approved']) if res else False

    def get_contribution(self):
        """
        Return the final String asking to contribute
        """
        return self.GET_CONTRIBUTION

    def show_existing_translation(self, res):
        """
        Return the existing translation of a keyword saved in the DB
        :param res: dictionary of the keyword saved in the DB
        :return: String - the translation
        """
        return res['translation']

    def set_approved(self, approved):
        """
        Change the 'approved' entry in the DB
        :param approved: new value to set
        """
        self.dal.set_approved(self.keyword, self.language, approved)

    def run_check_on_contribution(self, translation):
        """
        Check if the text describes the keyword
        :param translation: String that is supposed to describe the keyword
        :rtype: Boolean
        :return: whether the text describes the keyword
        """
        res = self.res_parser.strip_text_from_html(translation)
        return self.res_parser.find_needed_info(res, self.keyword)

    def check_is_keyword(self):
        """
        Check whether this really is a defined keyword in the language
        """
        lsf = LanguagesSpecificFeatures(self.language)
        list_of_keywords = lsf.find_all_keywords()
        if not list_of_keywords:
            return False
        return self.keyword in list_of_keywords

    def get_translation(self, link, translation_type, name=None, word_type=None):
        """
        Get information from the contributor: which url to access and which
        element to look for with the given name. Using these rules, execute the
        relevant function on the html to get the needed translation.
        If the translation passes the check, add it to the DB
        :param word_type: keyword type (keyword, function, ...)
        :param link: url to try to get info from
        :param translation_type: which function to call
        :param name: the name of the element, e.g. id=name
        :return: a thankful message with the new translation, or an error
        """
        if word_type == "keyword":
            logging.info("marked as keyword")
            res = self.check_is_keyword()
            if not res:
                return self.WRONG_WORD_TYPE, self.ERROR_CODE
        la = LanguagesAPI()
        try:
            result, code = la.http_request_using_urlfetch(http_url=link)
        except WrongURL:
            return self.WRONG_URL, self.ERROR_CODE
        func = self.function_mapper.get(translation_type)
        if name:
            translation = func(result, name)
        else:
            translation = func(result)
        logging.info("Translation is {}".format(translation))
        if not self.run_check_on_contribution(translation):
            return self.WRONG_TRANSLATION, self.ERROR_CODE
        return translation, self.OK_CODE

    def save_in_db(self, word_type, link, translation):
        try:
            DAL.save_data_in_db(self.language, self.keyword, word_type, link, translation, approved=True)
            logging.info("Saving in DB new contributed translation")
        except DataExistException:
            logging.info("Updating data in DB")
            DAL.update_data_in_db(self.language, self.keyword, word_type, link, translation)
        return self.THANKS_FOR_CONTRIBUTION

    def add_new_language_json(self, data):
        """
        This function gets all the parsed data of a new language as a dictionary
        and saves it in the DB
        :param data: dictionary
        :return: True if all saved, False if there were errors in saving
        """
        dal = DAL()
        try:
            dal.save_language_data(self.language, data)
            return True
        except Exception as e:
            logging.error(e.message)
            return False

    def add_urls_for_language(self, data):
        """
        Save in the DB all data about the language's urls
        :param data: dictionary
        :return: True if all saved, False if there were errors in saving
        """
        try:
            self.dal.save_language_details(self.language, "urls", data["urls"])
            for url in data["urls"]:
                self.dal.set_url_details(url, data[url]['type'], data[url]['name'])
            self.dal.add_language(self.language)
            return True
        except Exception as e:
            logging.error(e)
            return False

    def add_classification_for_language(self, data):
        """
        Save in the DB classification data for a new language
        :param data: dictionary
        :return: True if all saved, False if there were errors in saving
        """
        try:
            self.dal.save_classification(self.language, "statement", data["statements"])
            self.dal.save_classification(self.language, "data type", data["data_types"])
            self.dal.save_classification(self.language, "expression", data["expressions"])
            self.dal.save_classification(self.language, "operator", data["operators"])
            other_list = data['other']
            if other_list:
                for key, value in other_list.iteritems():
                    self.dal.save_classification(self.language, key, value)
            return True
        except Exception as e:
            logging.info(20 * "*")
            logging.info(e.message)
            logging.info(e)
            return False
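# Usage sketch of the contribution flow (commented out; the keyword, language
# and URL below are illustrative, not from the source):
# engine = ContributionEngine("java", keyword="for")
# prompt = engine.contribute()          # asks for details or a confirmation
# if prompt == engine.GET_CONTRIBUTION:
#     translation, code = engine.get_translation(
#         "https://docs.oracle.com/javase/tutorial/java/nutsandbolts/for.html",
#         translation_type="clear", word_type="keyword")
#     if code == engine.OK_CODE:
#         engine.save_in_db("statement", "https://docs...", translation)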
from __future__ import division
import time
import string
import re
import fractions
import numpy
import scipy
import DAL
from numpy import arange, array, ones, linalg
from pylab import plot, show
from IPython.parallel import Client
import matplotlib.pyplot as plt

rc = Client()
dview = rc[:]
wishes = DAL.create('wishes')
# use only one recent week of twitter data to build the vocabulary
data = wishes.subsets()[13:18]

# Clean the raw text data with filters.
# Create a very long string comprising the first 5 days of twitter data.
dictionary = {}
for i in range(len(data)):
    print 'day', i
    text = ""
    for tweet in wishes.iter(data[i]):
        if tweet.has_key('text'):
            lower = tweet['text'].lower()
            text += lower
        else:
            #print i
            break
from __future__ import division
import re
import fractions
import math
import scipy
import DAL
from numpy import arange, array, ones, linalg
from pylab import plot, show
from IPython.parallel import Client
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter

lightcurves = DAL.create('lightcurves')
s = lightcurves.subsets()
'''
dat = []
for i in lightcurves.iter(s[101]):
    name = i['id']
    #print type(name), name == 3096237
    if name == 3096237:
        break
lc = i['data']
time = lc[:int(len(lc)/2)]   # first half: time
flux = lc[int(len(lc)/2):]   # second half: flux
dat.append((name, time, flux))
'''
print len(s)
print "files:"
def wave_filter(t_id):
    import DAL
    import math
    import numpy as np
    import matplotlib.pyplot as plt
    from DAL.datasets.checkpoint import Checkpoint
    lightcurves = DAL.create('lightcurves')
    # load the id -> subset mapping once; it is checkpointed between calls
    try:
        reversed_dict
    except NameError:
        checkpoint = Checkpoint()
        reversed_dict = checkpoint.load("reversed_dict", t="obj")
    # get the data by id
    sample_data = []
    subset_index = reversed_dict.get(t_id)
    if subset_index < 0 or subset_index >= len(s):
        return subset_index
    for i in lightcurves.iter(s[subset_index]):
        name = i['id']
        if name == t_id:
            print name
            lc = i['data']
            time = lc[:int(len(lc)/2)]   # first half: time
            flux = lc[int(len(lc)/2):]   # second half: flux
            sample_data.append((name, time, flux))
            break
    for sample in [sample_data[0]]:
        print "-" * 10
        print "id:", sample[0]
        X = np.array(sample[1], dtype='float32')
        Y = np.array(sample[2], dtype='float32')
        X = X[np.logical_not(np.isnan(Y))]
        Y = Y[np.logical_not(np.isnan(Y))]
        h = 0.8  # bandwidth; must be carefully chosen
        #res = nad_wat(X, Y, 0.1)
        # fit the curve
        res = nad_wat_robust(X, Y, h)
        Y_hat = res[0]
        fig = plt.figure(figsize=(10, 6))
        plt.plot(X, Y, '.k')
        plt.plot(X, Y_hat, '-b')
        title_string = "id:" + str(sample[0]) + ", kernel regression, boxcar, bandwidth=" + str(h)
        plt.title(title_string)
        plt.show()
        resids = Y - Y_hat
        sigma = 1.4826 * np.median(abs(resids - np.median(resids)))  # 1.4826 * MAD
        resids_standard = (resids - resids.mean()) / sigma
        beta = math.sqrt(2 * math.log(len(X)))
        resids_truc = resids_standard[np.logical_not(resids_standard >= -1 * beta)]
        norm_one_sum = np.linalg.norm(resids_truc, ord=2)  # L2 norm of the truncated residuals
        print "norm of residuals ", norm_one_sum
        fig = plt.figure(figsize=(10, 6))
        plt.plot(X, resids_standard, '.k')
        plt.plot(X, np.zeros(len(X)) - beta, '-r')
        plt.show()
        # Detect the different signal levels; if there are more levels, treat
        # the object as a binary star.
        wave_level = 0
        outliers = np.logical_not(resids_standard >= -1 * 0.9 * beta)  # let more potential signals in
        # mask[i] == 1 where there is a signal, 0 otherwise
        mask = (np.zeros(len(X)) + 1) * outliers
        if mask.sum() <= 3:
            # no signal; very likely just outliers from white noise
            print "LEVEL: ", wave_level
            print "===" * 10
            return wave_level
        # get the signal out and reflect it above the x-axis
        signal = resids_standard * mask
        signal = -1 * signal
        s_min = signal[signal > 0].min()  # the min non-zero
        s_max = signal.max()              # the max
        if abs(s_min - s_max) <= 0.0001:
            # almost impossible to happen, just in case
            print "LEVEL: ", wave_level
            print "===" * 10
            return wave_level
        # scale the signal to the range (0, 1)
        signal = signal - s_min
        signal = signal / abs(s_max - s_min)
        # stat the area in each cell: reshape the signals into rectangles
        total_num_signal = np.sum(mask)
        re_signal = np.zeros(len(signal))
        is_open = False
        signal_strength = 0
        counts_open = 0
        for i in range(4, len(X) - 4):
            # start to receive
            if mask[i-1] == 0 and mask[i] == 1 and mask[i+1] == 1 and mask[i+2] == 1:
                is_open = True
                counts_open += 1
            if is_open:
                if signal[i] > signal_strength:
                    signal_strength = signal[i]
            # start to refuse
            if mask[i-2] == 1 and mask[i-1] == 1 and mask[i] == 1 and mask[i+1] == 0:
                j = i
                while j >= 0 and mask[j] > 0:
                    re_signal[j] = signal_strength
                    j = j - 1
                signal_strength = 0
                is_open = False
        if re_signal.max() <= 0.0001:
            # no signal after filtering; very likely white-noise outliers
            print "no signal after filtering"
            print "LEVEL: ", wave_level
            print "===" * 10
            return wave_level
        # re-scale after filtering
        re_signal = re_signal / abs(re_signal.max() - re_signal.min())
        left_sum = np.sum(re_signal[int(len(X)/4): int(len(X)*3/8) + int(len(X)/4)]) + 1
        right_sum = np.sum(re_signal[int(len(X)*3/8) + int(len(X)/4) + 1:]) + 1
        if left_sum > 2 * right_sum or right_sum > 2 * left_sum:
            print "unbalanced signal ---"
            print "LEVEL: ", wave_level
            print "===" * 10
            # the original wrote np.sum(counts_open > 5), which reduces to the same test
            if counts_open > 5:
                return wave_level
        # confirm that there is at least one level of signal
        wave_level = 1
        # get the upper level of signal: find the safe zone between waves
        zone_width = 0.1
        up_bound = np.arange(1.0, 0.12, -0.025)
        down_bound = up_bound - zone_width
        up_zone = 1
        down_zone = 1
        for i in range(len(up_bound)):
            is_in_zone = np.logical_and(re_signal <= up_bound[i], re_signal >= down_bound[i])
            num_in_zone = np.sum(is_in_zone * 1)
            if num_in_zone == 0:
                up_zone = up_bound[i] - 0.01
                down_zone = down_bound[i] + 0.01
                break
        is_upper = np.logical_not(re_signal <= up_zone)  # let more potential signals in
        mask = (np.zeros(len(X)) + 1) * is_upper
        up_signal = re_signal * mask
        down_signal = re_signal - up_signal
        up_is_true_signal = True
        down_is_true_signal = True
        # test that the upper side is a single signal
        left_sum = np.sum(up_signal[int(len(X)/4): int(len(X)*3/8) + int(len(X)/4)]) + 1
        right_sum = np.sum(up_signal[int(len(X)*3/8) + int(len(X)/4) + 1:]) + 1
        if left_sum > 2 * right_sum or right_sum > 2 * left_sum:
            print "unbalanced up signal"
            up_is_true_signal = False
            return wave_level
        # test that the lower side is a single signal
        if down_signal.max() <= 0.0001:
            # no signal; very likely white-noise outliers
            print "no signal down_side"
            down_is_true_signal = False
            return wave_level
        down_signal = down_signal / abs(down_signal.max())
        left_sum = np.sum(down_signal[int(len(X)/4): int(len(X)*3/8) + int(len(X)/4)]) + 1
        right_sum = np.sum(down_signal[int(len(X)*3/8) + int(len(X)/4) + 1:]) + 1
        if left_sum > 2 * right_sum or right_sum > 2 * left_sum:
            print "unbalanced down signal"
            down_is_true_signal = False
            return wave_level
        if up_is_true_signal and down_is_true_signal:
            wave_level += 1
        # continue the same procedure on the remaining (lower) signal to detect
        # further levels
        re_signal = down_signal
        re_signal = re_signal / abs(re_signal.max() - re_signal.min())
        # get the upper level of signal: find the safe zone between waves
        zone_width = 0.1
        up_bound = np.arange(1.0, 0.12, -0.025)
        down_bound = up_bound - zone_width
        up_zone = 1
        down_zone = 1
        for i in range(len(up_bound)):
            is_in_zone = np.logical_and(re_signal <= up_bound[i], re_signal >= down_bound[i])
            num_in_zone = np.sum(is_in_zone * 1)
            if num_in_zone == 0:
                up_zone = up_bound[i] - 0.01
                down_zone = down_bound[i] + 0.01
                break
        is_upper = np.logical_not(re_signal <= up_zone)
        mask = (np.zeros(len(X)) + 1) * is_upper
        up_signal = re_signal * mask
        down_signal = re_signal - up_signal
        up_is_true_signal = True
        down_is_true_signal = True
        # test that the upper side is a single signal
        left_sum = np.sum(up_signal[int(len(X)/4): int(len(X)*3/8) + int(len(X)/4)]) + 1
        right_sum = np.sum(up_signal[int(len(X)*3/8) + int(len(X)/4) + 1:]) + 1
        if left_sum > 2 * right_sum or right_sum > 2 * left_sum:
            print "unbalanced up signal"
            up_is_true_signal = False
            return wave_level
        # test that the lower side is a single signal
        if down_signal.max() <= 0.0001:
            print "no signal down_side"
            down_is_true_signal = False
            return wave_level
        down_signal = down_signal / abs(down_signal.max())
        left_sum = np.sum(down_signal[int(len(X)/4): int(len(X)*3/8) + int(len(X)/4)]) + 1
        right_sum = np.sum(down_signal[int(len(X)*3/8) + int(len(X)/4) + 1:]) + 1
        if left_sum > 2 * right_sum or right_sum > 2 * left_sum:
            print "unbalanced down signal"
            down_is_true_signal = False
            return wave_level
        if up_is_true_signal and down_is_true_signal:
            wave_level += 1
    return wave_level
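# Usage sketch (commented out): wave_filter expects the subset list `s` and the
# "reversed_dict" checkpoint to be available in the session, as set up in the
# surrounding snippets. The id below is the one probed in the earlier snippet.
# level = wave_filter(3096237)
# print "wave level:", level    # 0 = noise, 1 = one level, 2 = likely binary star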
from __future__ import division
import numpy as np
import scipy
import DAL
from numpy import arange, array, ones, linalg
from pylab import plot, show
from IPython.parallel import Client
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter

crime = DAL.create('crime')
crime_list = crime.get_crime_list()
# the following can be slow; do this once at the beginning
# of the program and use this data structure throughout
crime_counts = crime.get_crime_counts()
region_list = crime.get_region_list()
K = 10                               # number of crime types
N = int(len(crime_counts) / K)       # number of regions
T = len(crime_counts.get((100, 0)))  # number of time periods
print K, N, T
print crime_list
proj3_1_filename = []
proj3_2_filename = []
class DALTest(unittest.TestCase):
    def setUp(self):
        # First, create an instance of the Testbed class.
        self.testbed = testbed.Testbed()
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed.activate()
        # Next, declare which service stubs you want to use.
        self.testbed.init_datastore_v3_stub(Users)
        self.testbed.init_memcache_stub()
        # Clear ndb's in-context cache between tests.
        # This prevents data from leaking between tests.
        # Alternatively, you could disable caching by
        # using ndb.get_context().set_cache_policy(False)
        ndb.get_context().clear_cache()
        self.mydb = DAL()

    def test_add_user_check_user_added_by_getting_role(self):
        """
        Test saving a user in the DB and getting his role afterwards
        """
        DAL.add_new_user("Olesya", "Shapira", "Oles_ka", "123", "*****@*****.**", "admin")
        role = self.mydb.get_user_level("*****@*****.**")
        print role
        self.assertEqual(role, 'admin')

    def test_check_unique_user_exception_thrown_if_user_already_exist(self):
        """
        Test that users saved in the DB are unique by email
        """
        DAL.add_new_user("Olesya", "Shapira", "Oles_ka", "123", "*****@*****.**", "admin")
        self.assertRaises(UserExistException, DAL.add_new_user,
                          "Olesya", "Shapira", "Oles_ka", "123", "*****@*****.**", "admin")

    def test_add_new_data(self):
        """
        Test saving data in the DB and getting it back
        """
        DAL.save_data_in_db("java", "for", "statement", "https://docs....",
                            "the for statement is ...", approved=True)
        data = DAL.get_data_from_db("for", "java")
        print data
        self.assertIsNotNone(data)

    def test_check_unique_keyword_exception_thrown_if_keyword_already_exist(self):
        """
        Test that the data saved in the DB is unique per keyword
        """
        DAL.save_data_in_db("java", "for", "statement", "https://docs....",
                            "the for statement is ...", approved=True)
        self.assertRaises(DataExistException, DAL.save_data_in_db, "java", "for",
                          "statement", "https://docs....", "the for statement is ...", True)

    def test_save_and_read_new_language_c_statements(self):
        """
        Test saving details in the DB for the new language C
        """
        ndb.get_context().clear_cache()
        stmtns = ["for", "if", "else"]
        DAL.save_language_details("C", "statements", stmtns)
        res = DAL.get_language_details("C", "statements")
        self.assertEqual(stmtns, res)

    def test_save_new_language_get_all_languages(self):
        """
        Test saving a new language in the DB and retrieving the list of languages
        """
        dal = DAL()
        ndb.get_context().clear_cache()
        res = dal.add_language("C")
        get = dal.get_all_languages()
        self.assertEqual(True, res)
        self.assertEqual(["C"], get)

    def test_add_few_languages(self):
        dal = DAL()
        dal.add_language(languages)
        res = dal.get_all_languages()
        self.assertEqual(languages, res)

    def test_add_existing_language_get_false(self):
        dal = DAL()
        for l in languages:
            print "dal add language" + str(dal.add_language(l))
        print "dal get all languages" + str(dal.get_all_languages())
        res = dal.add_language(languages[0])
        self.assertFalse(res)

    def tearDown(self):
        self.testbed.deactivate()
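# Running the suite uses the standard unittest entry point (the module-level
# `languages` list referenced by several tests is assumed to be defined
# alongside the class):
# if __name__ == "__main__":
#     unittest.main()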
###############################
## Project 1 problem 1
## Stat 376
## Zhengjian Song
###############################
# Verify cluster connection and connect to the data set
import DAL
from IPython.parallel import Client

rc = Client()
#print len(rc)
dview = rc[:]
tinyimages = DAL.create('tinyimages')

# Search the image set by keywords: car and bicycle
car_ids = tinyimages.search('car', 2000)
bicycle_ids = tinyimages.search('bicycle', 2000)

# Ground truth, constructed manually
# 100 in each category
car_true = [12025562,12025563,12025564,12025565,12025567,12025568,12025569,12025571,12025572,12025573,12025574,12025576,12025580,12025583,12025584,12025585,12025586,12025587,12025588,12025589,12025590,12025591,12025592,12025593,12025594,12025597,12025599,12025601,12025602,12025603,12025604,12025605,12025606,12025611,12025615,12025618,12025620,12025624,12025627,12025628,12025630,12025631,12025632,12025633,12025634,12025635,12025636,12025661,12025663,12025664,12025668,12025670,12025671,12025674,12025675,12025676,12025677,12025678,12025679,12025682,12025683,12025684,12025686,12025687,12025688,12025689,12025690,12025691,12025692,12025693,12025701,12025702,12025706,12025707,12025709,12025711,12025712,12025713,12025717,12025718,12025725,12025726,12025727,12025747,12025749,12025752,12025753,12025757,12025768,12025769,12025771,12025772,12025773,12025774,12025776,12025778,12025779,12025780,12025781,12025782]
bicycle_true = [7112211,7112212,7112213,7112214,7112215,7112216,7112218,7112219,7112220,7112223,7112224,7112225,7112226,7112227,7112228,7112229,7112231,7112232,7112234,7112235,7112237,7112239,7112240,7112241,7112243,7112244,7112245,7112246,7112247,7112248,7112249,7112250,7112251,7112253,7112259,7112260,7112261,7112262,7112263,7112265,7112266,7112268,7112270,7112272,7112273,7112275,7112276,7112281,7112285,7112287,7112297,7112300,7112301,7112302,7112303,7112304,7112309,7112310,7112312,7112320,7112325,7112328,7112329,7112330,7112335,7112339,7112341,7112342,7112343,7112344,7112347,7112348,7112349,7112350,7112351,7112352,7112355,7112358,7112359,7112360,7112361,7112362,7112365,7112366,7112371,7112374,7112380,7112381,7112386,7112394,7112403,7112409,7112413,7112414,7112416,7112421,7112424,7112426,7112427,7112431]

# Print the results
images = tinyimages.byid(car_ids[0:300])
print "\"cars\""
tinyimages.display(images)
images = tinyimages.byid(car_true)
print "verified cars"
tinyimages.display(images)
print "\n"
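# A small follow-up sketch: with the manually built ground truth above, the
# precision of the keyword search on its first 100 hits can be estimated as
# the overlap between returned ids and verified ids.
car_true_set = set(car_true)
hits = sum(1 for i in car_ids[:100] if i in car_true_set)
print "precision on first 100 'car' results:", hits / 100.0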