def insert_outing_pic(apply_no):
    """Generate three random 32-char picture ids and store them for *apply_no*."""
    # One cryptographically secure RNG instance, reused for every character.
    rng = random.SystemRandom()
    alphabet = string.ascii_lowercase + string.digits
    table_id_list = [
        ''.join(rng.choice(alphabet) for _ in range(32))
        for _ in range(3)
    ]
    table_key = DB().search_outing_pic_key(apply_no)
    DB().insert_outing_pic(table_id_list, table_key)
def __init__(
    self,
    table: NamedTuple,
    brand_name: str,
    main_url: str,
    base_page_url: Optional[str] = "",
    model_date_format: str = "%Y-%m-%d",
    encoding: Optional[str] = None,
    custom_config: Optional[Config] = None,  # fixed: was `Config = None` (None is valid)
):
    """Set up the scraper: config, logging, DB connection and HTTP session.

    Args:
        table: NamedTuple class describing the target table's columns.
        brand_name: Human-readable brand identifier.
        main_url: Entry URL of the site being scraped.
        base_page_url: Optional prefix used when building page URLs.
        model_date_format: strptime/strftime format used for model dates.
        encoding: Response-encoding override passed to the HTTP session.
        custom_config: Config instance to use instead of the default one.

    Raises:
        ValueError: if the resulting config fails its own validation.
    """
    self.brand_name = brand_name
    # Fall back to a default Config when the caller supplied none.
    self.config = Config() if custom_config is None else custom_config
    self.init_logger()
    self.base_page_url = base_page_url
    self.curr_ctgr_url = None
    self.curr_post_url = None
    self.table = table
    # Test runs write into a `test.` schema so production data stays untouched.
    schema_prefix = 'test.' if self.config.is_test else 'public.'
    self.table_name = schema_prefix + self.table.__name__
    self.conn = DB(table=table, table_name=self.table_name)
    self.encoding = encoding
    self.session = RetrySession(encoding=encoding)
    self.main_url = main_url
    self.model_date_format = model_date_format
    self.soup = None
    self.posts_soup = None
    self.metas = {}
    if not self.config.is_valid():
        raise ValueError(f"Invalid Config: \n {repr(self.config)}")
def test_ctx_manager_exit_no_obj(mock_connect):
    """__exit__ must tolerate conn/cur having already been cleared to None."""
    db = None
    with DB(DSN) as db:
        # Simulate a connection that was torn down inside the block.
        db.conn = None
        db.cur = None
    # The context manager must still exit cleanly and leave `db` bound.
    assert db
def main(deck_paths, blacklist, columns, sort_by):
    """Render the combined card list of several decks as a markdown table."""
    # Card "database", with blacklisted cards filtered out.
    card_db = DB(blacklist)
    # Parse every deck file into a Deck object.
    decks = [Deck.from_txt_deck(path) for path in deck_paths]
    # Merge all decks into one anonymous Deck; later decks win on duplicate
    # cards, exactly like the original dict comprehension.
    merged = {}
    for deck in decks:
        merged.update(deck.cards)
    cards = Deck(None, merged)
    # Map each card to the identity of a deck containing it (later decks win).
    id_lookup = {}
    for deck in decks:
        for card in deck.cards:
            id_lookup[card] = deck.identity

    def get_identity(deck, db, card):
        # Column callback: which identity does this card belong to?
        identity = id_lookup[card]
        return {'display': identity, 'sort': (identity,)}

    all_columns = {
        'cycle': ('Cycle', Deck.card_cycle),
        'type': ('Type', Deck.card_type),
        'identity': ('Identity', get_identity),
        'faction': ('Faction', Deck.card_faction),
    }
    selected = [(key, all_columns[key][0], all_columns[key][1])
                for key in columns]
    print(cards.to_markdown(card_db, title=None, columns=selected,
                            sort_by=sort_by))
def onCommand(self):
    """Announce a kick in chat, log it, then remove the user from the dialog."""
    # Infraction message: moderator rank/ping, target rank/ping and reason.
    self.dialog.msg(text=getInfraction("userKick").format(
        self.sender.getRank().getName(), self.sender.ping(),
        self.aimed.getRank().getName(), self.aimed.ping(),
        getReason(self.args)))
    # Persist the action (type "kick", duration 0) before actually kicking.
    DB().createLogs(self.aimed, self.sender, "kick", 0, getReason(self.args))
    self.dialog.kickUser(self.aimed)
def test_ctx_manager(mock_connect):
    """Happy path: entering connects with autocommit, exiting closes everything."""
    db = None
    with DB(DSN) as db:
        # Entering the context must open exactly one connection to DSN.
        mock_connect.assert_called_once_with(DSN)
        assert db.conn.autocommit
    # `db` stays bound after the block ...
    assert db
    # ... and both cursor and connection were closed on exit.
    db.cur.close.assert_called_once()
    db.conn.close.assert_called_once()
def getAllDialogs():
    """Return a Dialog wrapper for every group known to the database."""
    groups = DB().getAllGroup()
    # Each Dialog expects an event-style payload carrying the peer id.
    return [
        Dialog({"message": {"peer_id": group['peer']}})
        for group in groups
    ]
def onCommand(self):
    """Blacklist a user: announce it, kick them from every known dialog, log it."""
    dialogsList = getAllDialogs()
    # NOTE(review): `setBalckList` looks like a typo for `setBlackList`, but the
    # method is defined elsewhere — confirm before renaming.
    self.aimed.setBalckList()
    self.dialog.msg(
        text=getInfraction("userAddBlackList").format(
            self.sender.getRank().getName(), self.sender.ping(),
            self.aimed.getRank().getName(), self.aimed.ping()))
    self.dialog.kickUser(self.aimed)
    # Remove the user from every other dialog as well.
    for i in dialogsList:
        i.kickUser(self.aimed)
    # Log type "back_list" (sic — runtime string kept as-is), no duration/reason.
    DB().createLogs(self.aimed, self.sender, "back_list", 0, "NONE")
def __init__(self, config):
    """Wire up all application subsystems around one asyncio event loop."""
    self.config = config
    self.loop = asyncio.get_event_loop()
    # Network frontends; each gets a back-reference to this app object.
    self.ircHandle = IRCHandle(app=self)
    self.rconHandle = RconHandle(app=self)
    self.auth = Modlist(self)
    self.db = DB(app=self)
    # Thread pool for blocking work that must not stall the event loop.
    self.iopool = ThreadPoolExecutor()
    self.dnsresolver = aiodns.DNSResolver(loop=self.loop)
    self.renx = Renx(self)
def _run_db_cmd_on_table(self, cmd: str, table_name: str) -> None:
    """Run one maintenance command ('truncate' or 'vacuum-analyze') on a table.

    Raises:
        ValueError: if *cmd* is not a known command.
    """
    with DB(self.args.dsn) as db:
        # Dispatch table instead of an if/elif chain.
        handlers = {
            'truncate': db.truncate_table,
            'vacuum-analyze': db.vacuum_analyze_table,
        }
        handler = handlers.get(cmd)
        if handler is None:
            raise ValueError(f'Unknown DB command: { cmd }')
        handler(table_name)
def __init__(self):
    """Initialise the captive-portal background process and all its helpers."""
    # open syslog and notice startup
    syslog.openlog('captiveportal', logoption=syslog.LOG_DAEMON,
                   facility=syslog.LOG_LOCAL4)
    syslog.syslog(syslog.LOG_NOTICE, 'starting captiveportal background process')
    # handles to ipfw, arp, the config and the internal administration db
    self.ipfw = IPFW()
    self.arp = ARP()
    self.cnf = Config()
    self.db = DB()
    # zone configuration as currently defined in the config
    self._conf_zone_info = self.cnf.get_zones()
def __init__(self):
    """Initialise the captive-portal background job: syslog, ipfw and DB handles."""
    # open syslog and notice startup
    syslog.openlog('captiveportal', logoption=syslog.LOG_DAEMON,
                   facility=syslog.LOG_LOCAL4)
    syslog.syslog(syslog.LOG_NOTICE, 'starting captiveportal background job')
    # handles to ipfw and the internal administration db
    self.ipfw = IPFW()
    self.db = DB()
def onCommand(self):
    """Demote a user to rank 0 and remove them from every staff dialog."""
    secondDialogList = getAllDialogs()
    self.aimed.setRank(0)
    for i in secondDialogList:
        # Only staff dialogs get the announcement and the kick.
        if i.getType() == 'staff':
            i.msg(text=getInfraction("staffKick").format(
                self.sender.getRank().getName(), self.sender.ping(),
                self.aimed.ping()))
            i.kickUser(self.aimed)
    # Reason (Russian): "No longer a member of the project staff".
    DB().createLogs(self.aimed, self.sender, "staff_kick", 0,
                    "Более не является персоналом проекта")
def __init__(self, metadata):
    """Create exchange objects and per-exchange DB tables from *metadata*.

    Args:
        metadata: mapping of {exchange_name: markets} describing which
            exchanges (and their markets) to initialise.
    """
    self.db = DB()
    self.metadata = metadata
    # exchanges dict has {exchange_name: exchange_object}
    self.exchanges_dict = {}
    # init all exchanges we need
    for exchange_name, markets in metadata.items():
        # init exchange & record
        print("Initializing exchange: " + str(exchange_name))
        exchange = init_exchange(exchange_name)
        self.exchanges_dict[exchange_name] = exchange
        # create tables for exchange
        # NOTE(review): the table name is concatenated straight into the SQL.
        # Safe only if exchange_name is trusted/internal — confirm it never
        # comes from user input.
        self.db.execute("CREATE TABLE IF NOT EXISTS " + exchange_name +
                        "(datestamp TIMESTAMP, ask REAL, bid REAL, market VARCHAR(14), market_sym VARCHAR(14))")
        # hard code what columns we want to record
def main(deck_paths):
    """Print each deck followed by the diff leading from the previous one."""
    card_db = DB()
    decks = [Deck.from_txt_deck(path) for path in deck_paths]
    # The first deck is printed as-is.
    print(decks[0].to_markdown(card_db))
    # For every consecutive pair: the transition diff, then the new deck.
    for prev, curr in zip(decks, decks[1:]):
        transition = curr.diff(prev)
        print(transition.to_markdown(card_db,
                                     title='### Transition',
                                     total=False,
                                     diff=True,
                                     sort_by=['count']))
        print(curr.to_markdown(card_db))
class TestUserReg(BaseCase):
    """Registration-API tests driven by case data from an Excel sheet."""

    db = DB()
    data_file = os.path.join(data_path, 'test_case.xlsx')

    def test_user_reg_normal(self):
        """Registering a brand-new user must create it in the database."""
        case_data = self.get_case_data('test_user_reg_normal')
        user_name = json.loads(case_data.get('args')).get('name')
        # Environment check: the user must not exist yet.
        if self.db.check_user(user_name):
            self.db.del_user(user_name)
        # Fire the registration request.
        self.send_request(case_data)
        # DB assertion: the user was written.
        # BUG FIX: bare `db` raised NameError inside a method (it is a class
        # attribute) — access it via `self.db`.
        self.assertTrue(self.db.check_user(user_name))
        # Environment cleanup (the registration endpoint wrote the user).
        self.db.del_user(user_name)

    def test_user_reg_exist(self):
        """Registering an already-existing user name."""
        case_data = self.get_case_data('test_user_reg_exist')
        user_name = json.loads(case_data.get('args')).get('name')
        # Environment check: the user must already exist.
        if not self.db.check_user(user_name):
            self.db.add_user(user_name)
        # Fire the registration request.
        self.send_request(case_data)
def _run_helper(self, sequence: Sequence[str],
                deps: AbstractSet[Tuple[str, str]], seed: int,
                num_rows: int) -> None:
    """Generate and ingest synthetic rows for every table in *sequence*.

    Args:
        sequence: table names in dependency order (parents before children).
        deps: (table, table) dependency pairs used to prime the cache.
        seed: RNG seed so a run is reproducible.
        num_rows: base row count; each table scales it via its own scaler.
    """
    cache = Cache(deps)
    with DB(self.args.dsn) as dbconn:
        # NOTE(review): `Random(seed=seed)` — presumably a project RNG wrapper,
        # since stdlib random.Random takes no `seed` keyword; confirm.
        rand_gen = Random(seed=seed)
        for table_name in sequence:
            table = self.tables[table_name]
            rows_to_gen = Executor._get_num_rows_to_gen(
                rand_gen, num_rows, table.scaler)
            logger.info(
                f'Generating {rows_to_gen} rows (seed {seed}) for table { table_name }'
            )
            # Sample rows; generators may reference earlier tables via cache.
            data = BaseObject.sample_from_source(rand_gen, rows_to_gen,
                                                 table.schema, cache)
            dbconn.ingest_table(table_name, table.schema, data)
            # Make this table's rows visible to later tables' foreign keys.
            cache.add(table_name, data)
def configure_pipeline(conffile):
    """Build (core, inputs, outputs) pipeline components from a YAML config file.

    Returns:
        Tuple of (core components list, input objects, output objects).
    """
    from lib import inputs
    from lib import outputs
    LOG.info("Creating the pipeline")
    with open(conffile) as f_in:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary Python objects; prefer yaml.safe_load unless the config
        # deliberately relies on python tags.
        conf = yaml.load(f_in)
    # Parse inputs
    LOG.debug("Inputs:")
    ins = []
    outs = []
    for i in conf["inputs"]:
        LOG.debug("- %s (%s)", i["class"], i["name"])
        new_in = inputs.Input.select(i["class"], i["name"],
                                     i.get("options", {}),
                                     conf["core"]["inbound"])
        ins.append(new_in)
    LOG.debug("Outputs:")
    for o in conf.get("outputs", []):
        LOG.debug("- %s (%s)", o["class"], o["name"])
        new_out = outputs.Output.select(o["class"], o["name"],
                                        o.get("options", {}),
                                        conf["core"]["outbound"])
        outs.append(new_out)
    core = [Core(conf["core"]["inbound"], conf["core"]["outbound"])]
    if conf.get("reactor", None) is not None:
        # Import the Reactor only when used
        from lib.reactor import Reactor
        core.append(Reactor(conf["reactor"], conf["core"]["outbound"]))
    if conf.get("db", None) is not None:
        # Import DB here (hence also SQLAlchemy) only when needed
        from lib.db import DB
        core.append(DB(conf["db"], conf["core"]["outbound"]))
    return (core, ins, outs)
def onCommand(self):
    """Ban a user for a given time after validating duration, state and limits."""
    # args[0] is the duration token; the remaining args form the reason.
    time = getInfractionTime(self.args[0])
    if not time:
        self.dialog.msg(
            text=getError("timeError").format(self.sender.ping()))
        return
    # Refuse to double-ban.
    if self.dialog.inBanUser(self.aimed):
        self.dialog.msg(
            text=getError("userAlreadyBan").format(self.aimed.ping()))
        return
    # Senders may not exceed the infraction limit of their own rank.
    if violationLimits(self.sender, time):
        self.dialog.msg(text=getError("rankLimit").format(
            self.sender.getRank().getName(), self.sender.ping(),
            self.sender.getRank().getInfractionLimit()))
        return
    self.dialog.banUser(self.aimed, time)
    # Log before announcing and kicking.
    DB().createLogs(self.aimed, self.sender, "ban", time,
                    getReason(self.args[1:]))
    self.dialog.msg(text=getInfraction("userBan").format(
        self.sender.getRank().getName(), self.sender.ping(),
        self.aimed.getRank().getName(), self.aimed.ping(), self.args[0],
        getReason(self.args[1:])))
    self.dialog.kickUser(self.aimed)
def get_apply_info(apply_no):
    """Return {'sub_type', 'loan_type', 'status'} for one application number.

    Translates the raw codes returned by DB().check_apply_status through the
    module-level `sub_type` / `loan_type` mappings.
    """
    apply_info_key = ['sub_type', 'loan_type', 'status']
    apply_info_list = []
    # apply_info appears to be (sub_type_code, loan_type_code, status) — TODO confirm.
    apply_info = DB().check_apply_status(apply_no)
    if apply_info[0] is not None:
        st = sub_type[apply_info[0]]
        apply_info_list.append(st)
        lt = loan_type[apply_info[1]]
        apply_info_list.append(lt)
        status = apply_info[2]
        apply_info_list.append(status)
    else:
        # sub-type code missing: key off the loan-type code instead.
        if apply_info[1] == 'DY':
            # NOTE(review): for 'DY' only one value is appended, so the dict
            # below ends up as {'sub_type': ''} with loan_type/status dropped
            # (zip stops at the shorter list). Confirm this is intended.
            apply_info_list.append('')
        else:
            # NOTE(review): both lookups use apply_info[1] here, while the
            # branch above uses [0] for sub_type — verify this is not a typo.
            st = sub_type[apply_info[1]]
            apply_info_list.append(st)
            lt = loan_type[apply_info[1]]
            apply_info_list.append(lt)
            status = apply_info[2]
            apply_info_list.append(status)
    apply_info_dic = dict(zip(apply_info_key, apply_info_list))
    return apply_info_dic
def main(deck_paths, shorten):
    """Print each deck with a column showing where else every card is used."""
    card_db = DB()
    decks = [Deck.from_txt_deck(path) for path in deck_paths]

    # card -> {identity: count} across all decks (later decks overwrite).
    owners = {}
    for deck in decks:
        for card, count in deck.cards.items():
            owners.setdefault(card, {})[deck.identity] = count

    def other_owners(deck, db, card):
        # Column callback: list the other decks that use this card.
        usage = owners[card]
        # HACK kept from original: only report cards used more than 3x total.
        if sum(usage.values()) > 3:
            others = {ident: cnt for ident, cnt in usage.items()
                      if ident != deck.identity}
        else:
            others = {}
        parts = []
        for identity, count in others.items():
            label = shorten_name(identity) if shorten else identity
            parts.append('{}x {}'.format(count, label))
        text = ', '.join(parts)
        return {'display': text, 'sort': (text,)}

    for deck in decks:
        print(
            deck.to_markdown(
                card_db,
                columns=[
                    ('owners', 'Used Elsewhere', other_owners),
                    ('type', 'Type', Deck.card_type),
                ],
                sort_by=['type'],
            ))
def basic_page(self, loan_type, sub_type, amount, apply_no):
    """Fill in the "basic" approval page for one application and mark it passed.

    Args:
        loan_type: loan category label (Chinese UI text).
        sub_type: product sub-type label.
        amount: credit/approval amount to type in.
        apply_no: application number used to pre-seed credit data in the DB.
    """
    # XPaths of the form fields on the workbench page (labels are Chinese UI text).
    workspace = '//span[contains(text(),"工作台")]'
    credit = '//span[text()="征信情况"]/following::input[1]'
    loans = '//span[contains(text(),"负债情况")]/following::input[1]'
    other_loans = '//span[contains(text(),"其他负债")]/following::input[1]'
    options = '//span[contains(text(),"审批意见")]/following::textarea[1]'
    options1 = '//span[contains(text(),"审批意见")]/following::textarea[2]'
    loan_amount = '//span[contains(text(),"授信金额")]/following::input[1]'
    percentage = '//span[contains(text(),"授信成数")]/following::input[1]'
    rent_amount = '//span[contains(text(),"租金")]/following::input[1]'
    source = '//span[contains(text(),"还款来源")]/following::input[1]'
    approval_amount = '//span[contains(text(),"审批金额")]/following::input[1]'
    options2 = '//span[text()="处理意见"]/following::input[1]'
    # Wait for the workbench to render before touching the form.
    self.wait_until_visible(workspace)
    # Pre-seed the credit record for this application directly in the DB.
    DB().update_basic_credit(apply_no)
    if loan_type == '房抵信审' or loan_type == '过桥信审':
        self.find_element(loan_amount).send_keys(amount)
        if sub_type == '信用' or loan_type == '房抵信审':
            self.find_element(percentage).send_keys('1')
            if sub_type == '开心租':
                self.find_element(rent_amount).send_keys('1000')
        elif sub_type == '过桥':
            self.find_element(percentage).send_keys('0.6')
        if sub_type == '信用' or sub_type == '过桥':
            self.find_element(source).send_keys('1111111')
    else:
        self.find_element(approval_amount).send_keys(amount)
        # The field is readonly — strip the attribute before typing.
        self.driver.execute_script('arguments[0].removeAttribute("readonly")',
                                   self.find_element(options2))
        self.find_element(options2).send_keys('通过')
    # Credit field is readonly as well.
    self.driver.execute_script('arguments[0].removeAttribute("readonly")',
                               self.find_element(credit))
    self.find_element(credit).send_keys('优')
    self.find_element(loans).click()
    self.find_element(other_loans).send_keys('1')
    self.find_element(options).send_keys('1')
    self.find_element(options1).send_keys('1')
def test_ingest_table(mock_data_obj, mocker):
    """ingest_table must COPY only the non-skipped columns, in schema order."""
    column = mocker.MagicMock()
    skip_column = mocker.MagicMock()
    # Columns whose generator is 'skip' must be excluded from the COPY statement.
    skip_column.gen = 'skip'
    table = 'bla'
    schema = {
        'a': column,
        'b': column,
        'xx': skip_column,
        'c': column
    }
    with DB(DSN) as db:
        db.ingest_table(table, schema, [mock_data_obj, mock_data_obj])
        db.cur.copy_expert.assert_called_once()
        first_call = db.cur.copy_expert.mock_calls[0]

    # Compare SQL while ignoring whitespace differences.
    def cleanup(string):
        return string.replace('\n', '').replace(' ', '')

    ref = "COPY bla(\"a\",\"b\",\"c\") FROM STDIN WITH(FORMAT CSV, DELIMITER '|')"
    assert cleanup(first_call.args[0]) == cleanup(ref)
def __init__(self, dsn: str):
    """Open a database handle for *dsn*; no session is active yet."""
    self.database = DB(dsn)
    # Populated once a session is started elsewhere.
    self.session_id = None
s = smtp_connect() smtp_login(s) s.close() smtp_connected = True except smtplib.SMTPServerDisconnected: logging.info('...timed out. Please check your SMTP settings in .env') except Exception as e: logging.info(str(e)) # open database user_home_dir = os.path.expanduser("~") user_config_dir = os.path.expanduser("~") + "/.config/nginx-odoo" Path(user_config_dir).mkdir(parents=True, exist_ok=True) db_path = user_config_dir + "/database.db" db = DB(db_path) os.chmod(db_path, 0o600) db_perm = os.stat(db_path).st_mode & 0o777 if db_perm != 0o600: sys.exit('File permissions of {} must be 600 but are: {:o}'.format( db_path, db_perm)) db.cleanup() # check Odoo settings ODOO_PORT = os.environ.get('NGINX_ODOO_ODOO_PORT', '8069') ODOO_HOST = os.environ.get('NGINX_ODOO_ODOO_HOST', 'localhost') ODOO_DATABASE = os.environ.get('NGINX_ODOO_ODOO_DATABASE') if not ODOO_DATABASE: sys.exit('Odoo settings not set in .env') # try to connect to odoo
mix = row[1] t = mix.partition('(') if t[1] == '': type = mix length = '' else: type = t[0] length = t[2][:len(t[2]) - 1] row = (row[0], type, '', length, row[3]) data.append(row) # print row # print data writeExcel('f:/' + tablename + '.xlsx', data) def excel2db(path): list = readExcel(path) # 第一列为字段名 sqlExecute = DB('192.168.1.15', 'ymt', 'yimiaotong2015', 'dlb') db2excel(sqlExecute, 'users') sqlExecute.close() # print 'int(11)'.partition('(') # print 'int11)'.partition('(') # print len('12312')
def test_postgre(self):
    """Run the shared query suite against a PostgreSQL connection."""
    # Arguments: (engine, database, host, port, user, password).
    self.queries(
        DB('postgre', 'framework', 'localhost', 5432, 'myname', '123456'))
def test_mysql(self):
    """Run the shared query suite against a MySQL connection."""
    # Arguments: (engine, database, host, port, user, password).
    self.queries(
        DB('mysql', 'framework', 'localhost', 3306, 'root', '123456'))
# parse input parameters parameters = {'sessionid': None, 'zoneid': None, 'output_type': 'plain'} current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/': current_param = param[1:].lower() elif current_param is not None: if current_param in parameters: parameters[current_param] = param.strip() current_param = None # disconnect client response = {'terminateCause': 'UNKNOWN'} if parameters['sessionid'] is not None and parameters['zoneid'] is not None: cp_db = DB() # remove client client_session_info = cp_db.del_client(parameters['zoneid'], parameters['sessionid']) if client_session_info is not None: cpIPFW = IPFW() cpIPFW.delete_from_table(parameters['zoneid'], client_session_info['ip_address']) client_session_info['terminateCause'] = 'User-Request' response = client_session_info # output result as plain text or json if parameters['output_type'] != 'json': for item in response: print '%20s %s' % (item, response[item]) else:
'zoneid': None, 'authenticated_via': None, 'output_type': 'plain' } current_param = None for param in sys.argv[1:]: if len(param) > 1 and param[0] == '/': current_param = param[1:].lower() elif current_param is not None: if current_param in parameters: parameters[current_param] = param.strip() current_param = None # create new session if parameters['ip_address'] is not None and parameters['zoneid'] is not None: cpDB = DB() cpIPFW = IPFW() arp_entry = ARP().get_by_ipaddress(parameters['ip_address']) if arp_entry is not None: mac_address = arp_entry['mac'] else: mac_address = None response = cpDB.add_client( zoneid=parameters['zoneid'], authenticated_via=parameters['authenticated_via'], username=parameters['username'], ip_address=parameters['ip_address'], mac_address=mac_address) # check if address is not already registered before adding it to the ipfw table if not cpIPFW.ip_or_net_in_table(table_number=parameters['zoneid'],