def watch():
    """Poll the task queue forever, processing one 'todo' task at a time.

    Configures DEBUG logging to stdout for this module's logger and for
    chimptools' logger, opens the database, then loops: claim one task
    in state 'todo' and process it, or sleep for a second when the
    queue is empty. Runs until interrupted.
    """
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(ch)
    chimptools.logger.setLevel(logging.DEBUG)
    chimptools.logger.addHandler(ch)
    db.connect()
    try:
        while True:
            tasks = Task.select().where(Task.state == 'todo').limit(1)
            if not tasks:  # empty query result is falsy
                logger.info("nothing to do")
                time.sleep(1)
            else:
                Tasker(tasks[0]).process()
    finally:
        # The original placed db.close() after the infinite loop, where
        # it was unreachable; run it on the way out (e.g. Ctrl-C).
        db.close()
def process_data(f, i, file_num):
    """Parse one grade file and insert its course rows into GradeData.

    Args:
        f: file name, appended to DATA_DIR to locate the input.
        i: index of this file, for the progress line.
        file_num: total number of files, for the progress line.
    """
    print(f, "{}/{}".format(i, file_num))
    try:
        data = get_course_data(DATA_DIR + f)
    except IndexError as e:
        print("IndexError", e)
        return
    db.connect(True)
    try:
        for d in data:
            d['total'] = sum(d['grades'].values())
            # Weighted mean of grade points over all students; generator
            # expression replaces the original map/lambda pair.
            d['GPA'] = sum(gp_mapping[grade] * count
                           for grade, count in d['grades'].items()) / d['total']
            try:
                # DATA_DIR[-5:-1] — presumably the semester code embedded
                # in the directory name; TODO confirm against callers.
                GradeData.create(semester=DATA_DIR[-5:-1],
                                 course_no=d['course_no'],
                                 course_name=d['course_name'],
                                 grades=json.dumps(d['grades']),
                                 total=d['total'],
                                 GPA=d['GPA'],
                                 type=d['type'])
            except peewee.IntegrityError:
                # Row already exists from a previous run; skip duplicates.
                pass
    finally:
        # Original leaked the connection if a row raised anything other
        # than IntegrityError; always close.
        db.close()
def extract_nn_stats(results, duo_results, nns, frac, submit_to_nndb=False):
    """Collect per-network slice statistics and optionally store them.

    Args:
        results: DataFrame of stats, unstacked to one row per network name
            of the form '<Class>_<number>'.
        duo_results: DataFrame/Series of duo-specific stats indexed by
            network name; missing entries are skipped.
        nns: mapping of network name -> network object (used for
            _target_names ordering).
        frac: fraction value recorded on each PostprocessSlice.
        submit_to_nndb: when True, save each PostprocessSlice to the NNDB.
    """
    db.connect()
    for network_name, res in results.unstack().iterrows():
        network_class, network_number = network_name.split('_')
        nn = nns[network_name]
        if network_class == 'Network':
            res_dict = {'network': network_number}
        elif network_class == 'ComboNetwork':
            res_dict = {'combo_network': network_number}
        elif network_class == 'MultiNetwork':
            res_dict = {'multi_network': network_number}
        else:
            # Original built this message with ''.join(str, str) — a
            # TypeError — and read res_dict before it was ever assigned.
            raise Exception('Error! No network found for ' + network_name)
        res_dict['frac'] = frac
        # .items() replaces the removed pandas iteritems() alias.
        for stat, val in res.unstack(level=0).items():
            res_dict[stat] = val.loc[nn._target_names].values
        try:
            duo_res = duo_results.loc[network_name]
            res_dict.update(duo_res)
        except KeyError:
            # No duo results for this network; that is fine.
            pass
        postprocess_slice = PostprocessSlice(**res_dict)
        if submit_to_nndb is True:
            postprocess_slice.save()
    db.close()
def listing_by_categories():
    """Return all listings matching the 'category_id' query arg as JSON."""
    cat_id = request.args.get('category_id')
    rows = Listing.select().where(Listing.categoryId == cat_id)
    # NOTE(review): exclude=[row.cid] passes an instance attribute, not a
    # model field — verify this is what model_to_dict expects.
    cat_list = [model_to_dict(row, exclude=[row.cid]) for row in rows]
    db.close()
    return jsonify(cat_list)
def getcategories():
    """Return every category row as a JSON list of dicts."""
    print("GET CATEGORIES")
    cat_list = [model_to_dict(category) for category in Categories.select()]
    db.close()
    return jsonify(cat_list)
def create_category(db, catname, meta):
    """Insert a Categories row with a JSON-encoded name and metadata.

    Cycles the connection before inserting; on failure rolls back and
    re-raises so the caller sees the original error.

    Args:
        db: peewee database handle.
        catname: category name (JSON-encoded before storage).
        meta: metadata value stored as-is.
    """
    try:
        # Re-open to guarantee a fresh connection for this insert.
        db.close()
        db.connect()
        # Categories.create() already persists the row; the original's
        # extra cat.save() issued a redundant UPDATE and was dropped.
        Categories.create(name=json.dumps(catname), metadata=meta)
        db.close()
    except Exception:
        # Narrowed from a bare `except:` (PEP 8); still re-raises.
        db.rollback()
        raise
def db_close(response):
    '''Close the database connection after each request.'''
    logging.debug("Closing database connection.")
    if db.is_closed():
        logging.debug("Database connection already closed.")
    else:
        db.close()
    logging.debug("Closed database connection.")
    return response
def get_order():
    """Claim the next unprocessed order, or return None when there is none.

    Marks the returned order as processing so other workers skip it.
    Always releases the database connection before returning.
    """
    if db.is_closed():
        db.connect()
    try:
        order = Order.get_or_none(Order.is_processing == False)
        if not order:
            # Original returned here without closing, leaking the
            # connection opened above.
            return None
        order.is_processing = True
        order.save()
        return order
    finally:
        if not db.is_closed():
            db.close()
def nns_from_manual():
    """Build the hand-picked selection of networks to slice.

    Returns:
        (slicedim, style, nns): the dimension to slice over, the plot
        style, and an OrderedDict mapping label -> QuaLiKizNN.
    """
    nns = OrderedDict()
    # Commented blocks below are alternative manual selections, kept as
    # toggles.
    #div_nn = load_nn(405)
    #sum_nn = load_nn(406)
    #nn = QuaLiKizDuoNN(['efiITG_GB', 'efeITG_GB'], div_nn, sum_nn, [lambda x, y: x * y/(x + 1), lambda x, y: y/(x + 1)])
    #nn.label = 'div_style'
    #nns[nn.label] = nn
    #nn_efi = load_nn(88)
    #nn_efe = load_nn(89)
    #nn = QuaLiKizDuoNN(['efiITG_GB', 'efeITG_GB'], nn_efi, nn_efe, [lambda x, y: x, lambda x, y: y])
    #nn.label = 'sep_style'
    #nns[nn.label] = nn
    #nn = load_nn(205)
    #nn.label = 'combo_style'
    #nns[nn.label] = nn
    #subnn = (ComboNetwork.select()
    #         .where(ComboNetwork.id == 78)
    #         ).get()
    #nn = subnn.to_QuaLiKizComboNN()
    #nn.label = 'bla'
    #nns[nn.label] = nn
    #dbnn = Network.by_id(135).get()
    dbnns = []
    #dbnns.append(MultiNetwork.by_id(119).get())
    dbnns.append(ComboNetwork.by_id(3333).get())
    #dbnns.append(ComboNetwork.by_id(1050).get())
    #dbnns.append(MultiNetwork.by_id(102).get())
    for dbnn in dbnns:
        nn = dbnn.to_QuaLiKizNN()
        nn.label = '_'.join([str(el) for el in [dbnn.__class__.__name__, dbnn.id]])
        nns[nn.label] = nn
    #nns[nn.label] = QuaLiKizNDNN.from_json('nn.json')
    slicedim = 'Ati'
    # style = 'duo'  # dead store in the original: immediately overwritten
    style = 'mono'
    #from qlkANNk import QuaLiKiz4DNN
    #nns['4D'] = QuaLiKiz4DNN()
    #nns['4D'].label = '4D'
    #nns['4D']._target_names = ['efeITG_GB', 'efiITG_GB']
    db.close()
    return slicedim, style, nns
def initdb():
    """Initialize the database."""
    click.echo('Init the db')
    print("remove database")
    if os.path.exists(DATABASE):
        os.remove(DATABASE)
    print("connect database")
    db.connect()
    print("create tables")
    models = [User, Task, Record]
    db.create_tables(models)
    print("close database")
    db.close()
def nns_from_NNDB(max=20, only_dim=None):
    """Fetch up to `max` not-yet-sliced networks from the NNDB.

    Derives the slice dimension (Ati for ITG, Ate for TEM/ETG) and the
    plot style ('duo' for an efe/efi pair, otherwise 'mono') from the
    networks' target names.

    Args:
        max: maximum number of networks to fetch.
        only_dim: restrict to networks of this input dimension, or None.

    Returns:
        (slicedim, style, nns): slice dimension, plot style, and an
        OrderedDict mapping label -> QuaLiKizNN.

    Raises:
        Exception: for unrecognized target-name pairs or mixed
            stability regimes.
    """
    db.connect()
    non_sliced = get_similar_not_in_table(PostprocessSlice, max=max,
                                          only_sep=True,
                                          no_particle=False,
                                          no_divsum=True,
                                          only_dim=only_dim)
    network = non_sliced.get()
    style = 'mono'
    # e.g. 'efiITG_GB' -> ('ef', 'i', 'ITG'); compiled once, not per use.
    flux_re = re.compile('^(.f)(.)(ITG|ETG|TEM)_GB')
    if len(network.target_names) == 2:
        match_0 = flux_re.findall(network.target_names[0])
        match_1 = flux_re.findall(network.target_names[1])
        if len(match_0) == 1 and len(match_1) == 1:
            group_0 = match_0[0]
            group_1 = match_1[0]
            if ((group_0[1] == 'e' and group_1[1] == 'i')
                    or (group_0[1] == 'i' and group_1[1] == 'e')):
                style = 'duo'
            else:
                # Original used '{s}' with a positional argument, which
                # raises KeyError instead of formatting; bind the keyword.
                raise Exception('non-matching target_names. Not sure what to do.. {s}'
                                .format(s=network.target_names))
    regime_re = re.compile('^.f.(ITG|ETG|TEM)_GB')
    matches = []
    for target_name in network.target_names:
        matches.extend(regime_re.findall(target_name))
    if matches[1:] == matches[:-1]:  # all regimes identical
        if matches[0] == 'ITG':
            slicedim = 'Ati'
        elif matches[0] in ('TEM', 'ETG'):
            slicedim = 'Ate'
    else:
        raise Exception('Unequal stability regime. Cannot determine slicedim')
    # (The original also built an unused nn_list dict here; removed.)
    print('Found {:d} {!s} with target {!s}'.format(non_sliced.count(),
                                                    network.__class__,
                                                    network.target_names))
    nns = OrderedDict()
    for dbnn in non_sliced:
        nn = dbnn.to_QuaLiKizNN()
        nn.label = '_'.join([str(el) for el in [dbnn.__class__.__name__, dbnn.id]])
        nns[nn.label] = nn
    db.close()
    return slicedim, style, nns
def close_db(error):
    """Closes the database again at the end of the request."""
    if not hasattr(g, 'sqlite_db'):
        return
    db.close()
    g.sqlite_db = None
def after_request(response):
    """Release the database connection once the response is ready."""
    db.close()
    return response
def disconnect_db():
    """Close the database connection (when open), then report completion."""
    still_open = not db.is_closed()
    if still_open:
        db.close()
    print('Processing finished')
    sys.stdout.flush()
def disconnect_db(response):
    '''Disconnect from db after handling request'''
    if db.is_closed():
        return response
    db.close()
    return response
import random

from model import db, Donor, Donation

db.connect()

# This line will allow you "upgrade" an existing database by
# dropping all existing tables from it.
db.drop_tables([Donor, Donation])

db.create_tables([Donor, Donation])

# Seed three donors, then 30 random donations spread among them.
donors = []
for donor_name in ("Alice", "Bob", "Charlie"):
    donor = Donor(name=donor_name)
    donor.save()
    donors.append(donor)

for _ in range(30):
    Donation(donor=random.choice(donors),
             value=random.randint(100, 10000)).save()

db.close()
def _db_close(exc):
    """Teardown callback: close the database connection unless it already is."""
    if db.is_closed():
        return
    db.close()
def after(req):
    """Release the database connection before the response goes out."""
    if db.is_closed():
        return req
    db.close()
    return req
def close_db(resp):
    """Close the per-request database connection if it is still open."""
    connection_open = not db.is_closed()
    if connection_open:
        db.close()
    return resp
def do_after_request(exception=None):
    """Request-teardown hook: release the database connection.

    Guarded with is_closed() to match the other teardown handlers in
    this codebase and avoid closing an already-closed connection.
    """
    if not db.is_closed():
        db.close()