def main(reset_process, initialize_db, experiment_name, remove=None):
    """Populate db with experiments to run.

    Args:
        reset_process: if truthy, reset the in-process experiment counter.
        initialize_db: if truthy, (re)initialize the database.
        experiment_name: name of the experiment to prepare, or None to skip.
        remove: optional experiment name to delete from the DB.
    """
    main_config = config.Config()
    log = logger.get(os.path.join(main_config.log_dir, 'prepare_experiments'))
    if reset_process:
        db.reset_in_process()
        log.info('Reset experiment progress counter in DB.')
    if initialize_db:
        db.initialize_database()
        log.info('Initialized DB.')
    if experiment_name is not None:
        # TODO: add capability for bayesian opt.
        db_config = credentials.postgresql_connection()
        experiment_dict = experiments()[experiment_name]()
        # Idiom fix: membership + None check collapses to dict.get().
        if experiment_dict.get('hp_optim') is not None:
            exp_combos = hp_optim_parameters(experiment_dict, log)
            log.info('Preparing an hp-optimization experiment.')
        else:
            exp_combos = package_parameters(experiment_dict, log)
            log.info('Preparing a grid-search experiment.')
        with db.db(db_config) as db_conn:
            db_conn.populate_db(exp_combos)
            db_conn.return_status('CREATE')
            log.info('Added new experiments.')
    if remove is not None:
        db_config = credentials.postgresql_connection()
        with db.db(db_config) as db_conn:
            db_conn.remove_experiment(remove)
        log.info('Removed %s.' % remove)
def GET(self):
    """Return the JSON program list for a given channel and date.

    Query params: 'channel' and 'date'. Returns the literal string
    'error' when either is missing, and 'no keys' when no program
    data is stored for that date even after a scrape attempt.
    """
    input_data = web.input()
    channel = input_data.get('channel')
    date = input_data.get('date')
    # Guard clause: both parameters are required.
    if (not channel) or (not date):
        return 'error'
    model = db()
    url = url_builder(channel).set_data_by_str(date).build()
    # Lazily scrape the EPG the first time this date is requested.
    if not model.select(url):
        epg.get(date)
    plist = model.select(url)
    if not plist:
        return 'no keys'
    programs = []
    for prog in plist:
        # SECURITY: eval() on a stored DB row executes arbitrary code if
        # the DB is ever tainted -- consider ast.literal_eval instead.
        prog = eval(prog)
        programs.append({
            'time': prog[0],
            'name': prog[1],
            'cover_url': "www.qq.com",
        })
    mdict = {}
    mdict['date'] = date
    mdict['channel'] = channel
    mdict['list'] = programs
    mdict['total_size'] = len(plist)
    return json.dumps(mdict)
def GET(self):
    """Return the currently-airing program list as JSON."""
    program_list, now = db().get_showing_list()
    # Attach a static cover image to every program entry.
    for elem in program_list:
        elem['cover_url'] = "www.qq.com"
    response = {
        'total_size': len(program_list),
        'list': program_list,
        'current_time': now,
    }
    return json.dumps(response)
def GET(self):
    """Return the programs airing at a given clock time on a given day."""
    input_data = web.input()
    day_time = input_data.get('daytime')
    clock_time = input_data.get('clocktime')
    url = url_builder('cctv1').set_data_by_str(day_time).build()
    model = db()
    # Scrape the EPG first if this day has not been cached yet.
    if not model.select(url):
        epg.get(day_time)
    program_list, query_time = db().get_certaintime_list(clock_time, day_time)
    # Attach a static cover image to every program entry.
    for elem in program_list:
        elem['cover_url'] = "www.qq.com"
    response = {
        'total_size': len(program_list),
        'list': program_list,
        'query_time': query_time,
    }
    return json.dumps(response)
def __init__(self, Full_name, email, salary, is_manager, health_rate):
    """Validate an employee's fields, set its attributes, and persist it.

    Args:
        Full_name: employee's full name.
        email: must match a basic email pattern.
        salary: must be >= 1000.
        is_manager: must be 0 or 1.
        health_rate: used if strictly between 0 and 100, otherwise a
            random rate in [1, 99] is assigned.

    Raises:
        Exception: if email is malformed, salary < 1000, or is_manager
            is not 0/1.
    """
    self.full_name = Full_name
    # Raw string avoids invalid-escape-sequence warnings in the pattern.
    regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    if re.search(regex, email):
        self.Email = email
    else:
        raise Exception("Sorry, email isn't valid")
    if salary >= 1000:
        self.Salary = salary
    else:
        raise Exception("Sorry, salary must be 1000 or more")
    if 0 < health_rate < 100:
        self.Health_rate = health_rate
    else:
        # BUG FIX: the fallback previously assigned to self.healthRate,
        # leaving self.Health_rate unset for out-of-range input.
        self.Health_rate = random.randrange(1, 100, 1)
    if is_manager == 0 or is_manager == 1:
        self.Is_manager = is_manager
    else:
        raise Exception("Sorry, is_manager must be 0 or 1")
    # NOTE(review): health_rate is not persisted here -- confirm intended.
    db().insert_emp((Full_name, email, salary, is_manager))
def __init__(self, filehandles, process_num):
    """Worker thread that processes entries read from *filehandles*."""
    threading.Thread.__init__(self)
    self.handles = filehandles
    self.__process_num = process_num
    # Progress / bookkeeping counters.
    self.__entries_processed = 0
    self.__successful_entries = 0
    self.__exception_count = 0
    # Seconds to wait before retrying after a failure.
    self.__retry_time = 5
    self.__active = True
    # Collaborators used while processing entries.
    self.db = db()
    self.processor = Processor()
    self.spamhaus = Spamhaus()
    self.phishtank = Phishtank()
    # Browser-like UA string sent with outgoing HTTP requests.
    self.user_agent = ("Mozilla/5.0 (X11; Linux x86_64)"
                       " AppleWebKit/537.36 (KHTML, like Gecko) "
                       "Chrome/47.0.2526.106 Safari/537.36")
class CrudDoctor(Crud):
    """CRUD operations for the doctor table."""

    connection = db()
    cursor = connection.cursor()

    def crear(self, **kwargs):
        """Insert a doctor (doctor_name, speciality, hospital_id)."""
        super().crear(**kwargs)
        doctor_name = kwargs.get("doctor_name")
        e_doctor = kwargs.get("speciality")
        hospital_id = kwargs.get("hospital_id")
        self.cursor.execute(
            "INSERT INTO doctor (doctor_name , speciality , hospital_id) values(%s,%s,%s)",
            (doctor_name, e_doctor, hospital_id),
        )
        self.connection.commit()

    def mostrar(self, **kwargs):
        """Return all doctors joined with their hospital name."""
        super().mostrar(**kwargs)
        self.cursor.execute(
            "SELECT doctor.doctor_name, doctor.speciality, hospital.hospital_name FROM hospital INNER JOIN doctor ON hospital.hospital_id = doctor.hospital_id"
        )
        rows = self.cursor.fetchall()
        return rows

    def mostrarId(self, **kwargs):
        # Not implemented for doctors.
        pass

    def editar(self, **kwargs):
        """Update the doctor row identified by id_doctor."""
        super().editar(**kwargs)
        doctor_name = kwargs.get("doctor_name")
        e_doctor = kwargs.get("speciality")
        hospital_id = kwargs.get("hospital_id")
        id_doctor = kwargs.get("id_doctor")
        self.cursor.execute(
            "UPDATE doctor SET doctor_name = %s, speciality = %s , hospital_id = %s WHERE doctor_id = %s",
            (doctor_name, e_doctor, hospital_id, id_doctor),
        )
        self.connection.commit()

    def eliminar(self, **kwargs):
        """Delete the doctor row identified by id_doctor.

        BUG FIX: the class previously defined eliminar twice (the first
        definition was a dead `pass` stub) and passed (id_doctor) -- a
        bare value, not a 1-tuple -- as the query parameters.
        """
        super().eliminar(**kwargs)
        id_doctor = kwargs.get("id_doctor")
        self.cursor.execute("DELETE FROM doctor WHERE doctor_id = %s", (id_doctor,))
        self.connection.commit()
class CrudHospital(Crud):
    """CRUD operations for the hospital table."""

    connection = db()
    cursor = connection.cursor()

    def crear(self, **kwargs):
        """Insert a hospital row (hospital_name)."""
        super().crear(**kwargs)
        hospital_name = kwargs.get("hospital_name")
        self.cursor.execute("INSERT INTO hospital (hospital_name) values(%s)",
                            (hospital_name, ))
        self.connection.commit()

    def mostrar(self, **kwargs):
        """Return all hospital rows."""
        super().mostrar(**kwargs)
        self.cursor.execute("SELECT * FROM hospital")
        rows = self.cursor.fetchall()
        return rows

    def mostrarId(self, **kwargs):
        """Return the name of the hospital with the given hospital_id.

        BUG FIX: the parameter was passed as (hospital_id) -- a bare
        value, not a 1-tuple.
        NOTE(review): this calls super().mostrar(), not super().mostrarId();
        confirm against the Crud base class.
        """
        super().mostrar(**kwargs)
        hospital_id = kwargs.get("hospital_id")
        self.cursor.execute(
            "SELECT hospital_name FROM hospital where hospital_id = %s ",
            (hospital_id,))
        rows = self.cursor.fetchone()
        return rows

    def editar(self, **kwargs):
        """Update the name of the hospital identified by hospital_id."""
        super().editar(**kwargs)
        hospital_id = kwargs.get("hospital_id")
        hospital_name = kwargs.get("hospital_name")
        self.cursor.execute(
            "UPDATE hospital SET hospital_name = %s WHERE hospital_id = %s",
            (hospital_name, hospital_id),
        )
        self.connection.commit()

    def eliminar(self, **kwargs):
        """Delete the hospital row identified by hospital_id.

        BUG FIX: the class previously defined eliminar twice (the first
        definition was a dead `pass` stub) and passed (hospital_id) -- a
        bare value, not a 1-tuple -- as the query parameters.
        """
        super().eliminar(**kwargs)
        hospital_id = kwargs.get("hospital_id")
        self.cursor.execute("DELETE FROM hospital WHERE hospital_id = %s",
                            (hospital_id,))
        self.connection.commit()
def get_employee(self, id):
    """Look up the employee with the given id and print it."""
    print(db().get_emp_by_id(id))
def get_all_employees(self):
    """Fetch every employee record and print the collection."""
    print(db().get_all_emp())
def exercises(data):
    """Fetch exercise names with their scheduled day/group ids.

    Note: *data* is accepted but not used by the query.
    """
    query = """SELECT o.NAME, e.day, e.dID, e.eID FROM exercises o, groups e where o.exID = e.eID"""
    return db().retrieveState(query)
def setExercise(data):
    """Persist the exercise values contained in data['values']."""
    return db().updateState(data["values"])
def check(self, vid, *args):
    """Invoke func only when the video exists; otherwise return an error."""
    if not db().check_video_exist_by_id(vid):
        return errorno.server_error(errorno.VIDEO_NOT_EXISTED[0],
                                    errorno.VIDEO_NOT_EXISTED[1]).dumps()
    return func(self, vid, *args)
def check(self, username, *args):
    """Invoke func only when the user exists; otherwise return an error."""
    if not db().check_user_exist_by_name(username):
        return errorno.server_error(errorno.USER_NOT_EXISTED[0],
                                    errorno.USER_NOT_EXISTED[1]).dumps()
    return func(self, username, *args)
def __init__(self):
    """Wire up the EPG parser and the backing DB model."""
    self.epg_parser = MyParser()
    self.model = db()
# under the License. import os.path import tornado.auth import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web import sys sys.path.append('..') from db import db import json from tornado.options import define, options DB=db.db() class Application(tornado.web.Application): def __init__(self): handlers = [ (r"/",get_imagelist), (r"/Get/image", get_image), (r"/Get/imagelist", get_imagelist), (r"/Post/image", change_image), (r"/Put/image", create_iamge), (r"/Delete/image",delete_iamge ), ] tornado.web.Application.__init__(self,handlers) class get_imagelist(tornado.web.RequestHandler): def get(self): entries = DB.listAllImage()
""" process to go through csv files downloaded from bts airtravel web """ from scrape.BTS import walk_extractedFiles from scrape.scrape_config import extract_to from db.db import sqlite3 as db import os dbf = os.path.join(extract_to, 'load.' + db.myext) db.create(dbf) ldb = db(dbf) import csv import pandas as pd #from loaddata.types import make_typesDict as mtd #td=mtd('numpy') useless here i think import numpy as np def loadCSVs(**kwargs): walker = walk_extractedFiles() kwargs.setdefault('if_exists', 'append') import db if type(ldb) is db.db.sqlite3: kwargs['flavor'] = 'sqlite' else: raise NotImplementedError
def main(reset_process, initialize_db, experiment_name, remove=None, force_repeat=None):
    """Populate db with experiments to run.

    Args:
        reset_process: if truthy, reset the in-process experiment counter.
        initialize_db: if truthy, (re)initialize the database.
        experiment_name: experiment module name, or a comma-delimited list
            of names, or None to skip preparation.
        remove: optional experiment name to delete from the DB.
        force_repeat: optional repeat count overriding each experiment's
            own 'repeat' setting.
    """
    main_config = config.Config()
    log = logger.get(os.path.join(main_config.log_dir, 'prepare_experiments'))
    if reset_process:
        db.reset_in_process()
        log.info('Reset experiment progress counter in DB.')
    if initialize_db:
        db.initialize_database()
        log.info('Initialized DB.')
    if remove is not None:
        db_config = credentials.postgresql_connection()
        with db.db(db_config) as db_conn:
            db_conn.remove_experiment(remove)
        log.info('Removed %s.' % remove)
    if experiment_name is not None:
        # TODO: add capability for bayesian opt.
        if ',' in experiment_name:
            # Parse a comma-delimeted string of experiments
            experiment_name = experiment_name.split(',')
        else:
            experiment_name = [experiment_name]
        db_config = credentials.postgresql_connection()
        for exp in experiment_name:
            experiment_dict = py_utils.import_module(
                module=exp, pre_path=main_config.experiment_classes)
            experiment_dict = experiment_dict.experiment_params()
            # BUG FIX: hasattr() on a dict never finds the 'get_map' key,
            # so a user-supplied get_map was always clobbered with the
            # default; use key membership instead.
            if 'get_map' not in experiment_dict:
                experiment_dict['get_map'] = [False]
            # Fall back to a single 'loss_function' for both train and val.
            train_loss_function = experiment_dict.get('train_loss_function', None)
            if train_loss_function is None:
                experiment_dict['train_loss_function'] = experiment_dict[
                    'loss_function']
                experiment_dict['val_loss_function'] = experiment_dict[
                    'loss_function']
                experiment_dict.pop('loss_function', None)
            exp_combos = package_parameters(experiment_dict, log)
            log.info('Preparing experiment.')
            assert exp_combos is not None, 'Experiment is empty.'
            # Repeat if requested
            repeats = experiment_dict.get('repeat', 0)
            if force_repeat is not None:
                repeats = force_repeat
            if repeats:
                dcs = []
                for copy in range(repeats):
                    # Need to make deep copies
                    dcs += deepcopy(exp_combos)
                exp_combos = dcs
                # BUG FIX: log the effective repeat count; the 'repeat' key
                # may be absent when force_repeat is used (KeyError before).
                log.info('Expanded %sx to %s combinations.'
                         % (repeats, len(exp_combos)))
            # Convert augmentations to json
            json_combos = []
            for combo in exp_combos:
                combo['train_augmentations'] = json.dumps(
                    deepcopy(combo['train_augmentations']))
                combo['val_augmentations'] = json.dumps(
                    deepcopy(combo['val_augmentations']))
                json_combos += [combo]
            # Add data to the DB
            with db.db(db_config) as db_conn:
                db_conn.populate_db(json_combos)
                db_conn.return_status('CREATE')
            log.info('Added new experiments.')
""" process to go through csv files downloaded from bts airtravel web """ from scrape.BTS import walk_extractedFiles from scrape.scrape_config import extract_to from db.db import sqlite3 as db import os dbf=os.path.join(extract_to,'load.'+db.myext) db.create(dbf) ldb=db(dbf) import csv import pandas as pd #from loaddata.types import make_typesDict as mtd #td=mtd('numpy') useless here i think import numpy as np def loadCSVs(**kwargs): walker=walk_extractedFiles() kwargs.setdefault('if_exists','append') import db if type(ldb) is db.db.sqlite3: kwargs['flavor']='sqlite' else: raise NotImplementedError for atbl,csvfiles in walker:
# License for the specific language governing permissions and limitations # under the License. import os.path import tornado.auth import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web import sys sys.path.append('..') from db import db import json from tornado.options import define, options DB = db.db() class Application(tornado.web.Application): def __init__(self): handlers = [ (r"/", get_imagelist), (r"/Get/image", get_image), (r"/Get/imagelist", get_imagelist), (r"/Post/image", change_image), (r"/Put/image", create_iamge), (r"/Delete/image", delete_iamge), ] tornado.web.Application.__init__(self, handlers)