def __init__(self, addr, port):
    """Initialize the repository server.

    Picks this node's slice of UPLOAD_SERVERS, chooses the transfer
    backend (HDFS or FTP), opens the database, and builds the lock ring.

    Args:
        addr: bind address for the underlying RPC server.
        port: bind port for the underlying RPC server.

    Raises:
        Exception: if UPLOAD_SERVERS cannot be split evenly across
            REPOSITORY_SERVERS, or this host is not a repository server.
    """
    RPCServer.__init__(self, addr, port)
    len_up = len(UPLOAD_SERVERS)
    len_repo = len(REPOSITORY_SERVERS)
    # Every repository server must own the same, non-empty number of
    # upload servers.
    if len_up < len_repo or len_up % len_repo != 0:
        show_error(self, 'failed to initialize')
        raise Exception('failed to initialize')
    addr = localhost()
    if addr not in REPOSITORY_SERVERS:
        show_error(self, 'failed to initialize')
        raise Exception('failed to initialize REPOSITORY_SERVERS')
    pos = REPOSITORY_SERVERS.index(addr)
    # FIX: integer division -- '/' yields a float under Python 3 and float
    # slice bounds raise TypeError.  Divisibility was checked above, so
    # '//' is exact (and behaves identically on Python 2).
    total = len_up // len_repo
    self._upload_servers = UPLOAD_SERVERS[pos * total:(pos + 1) * total]
    self._print('upload_servers=%s' % str(self._upload_servers))
    if HDFS:
        self._port = HDFS_PORT
        self._client = HDFSClient()
    else:
        self._port = FTP_PORT
        self._client = FTPClient()
        self._server = FTPServer()
    if REPO_DB:
        self._db = Database(addr=REPO_DB)
    else:
        self._db = Database(addr=addr)
    # A hash ring over LOCK_MAX locks gives fine-grained per-key locking.
    self._locks = HashRing([Lock() for _ in range(LOCK_MAX)])
    if DEBUG:
        self._upload_cnt = 0
        self._download_cnt = 0
def run_parsing(config):
    """Parse the CLI and dispatch to either a search (scrape, format, print)
    or a database prune.

    Args:
        config: configuration object handed to the scraper components.

    Exits with status 1 when DB_PATH is unset or a sub-command fails.
    """
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    # Search parser group
    search_parser = subparsers.add_parser('search')
    search_parser.add_argument('QUERY', action='store', type=str, default=False)
    search_parser.add_argument('--output', '-o', action='store', required=False,
                               default='json', choices=['tsv', 'json', 'binary'])
    search_parser.add_argument('--columns', '-c', action='store', required=False,
                               help='Coma separated names of columns')
    # Prune parser group
    prune_parser = subparsers.add_parser('prune')
    prune_parser.add_argument('PRUNE', action='store', type=int, default=False)
    args = parser.parse_args()
    try:
        db_path = os.environ['DB_PATH']
    except KeyError:
        print("Env variable DB_PATH is missing!", file=sys.stderr)
        exit(1)
    db = Database(db_path)
    if 'QUERY' in args:
        try:
            # Scrape and download
            down = Downloader()
            parser = Parser(config)
            d = Driver(config, args.QUERY, down, parser, db)
            torrents, headers = d.run()
            # Format and print
            fm = lib.utils.Formatter(headers, torrents)
            if args.columns:
                fm.filter_data(args.columns.split(','))
            fm.format_data(args.output)
            fm.print_data()
        except Exception:
            # FIX: was a bare 'except:', which also swallowed SystemExit
            # and KeyboardInterrupt.
            traceback.print_exc()
            exit(1)
    elif 'PRUNE' in args:
        try:
            # Prune DB
            db.connect()
            db.prune(args.PRUNE, time.time())
        except Exception:
            traceback.print_exc()
            exit(1)
def test_init(self, db):
    """The fixture DB opens fine; a root-owned path must be rejected."""
    assert db is not None
    with pytest.raises(Exception) as exec_info:
        Database("/usr/bin/db.json")
    error_text = str(exec_info.value)
    assert "not permitted" in error_text or "Permission denied" in error_text
def post(self):
    """Exchange a LINE Notify authorization code for an access token and
    persist it.

    Reads 'code' from the request body, posts it to the LINE Notify OAuth
    endpoint, and on success stores the returned access token in the
    'notify' table.

    Returns:
        ({'access_token': ...}, 200) on success,
        ({'message': <raw response>}, 200) otherwise.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('code', required=True, help='code can not be blank!')
    args = parser.parse_args()
    code = args['code']
    # LINE notify json data
    client = {
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': os.getenv("NOTIFY_REDIRECT_URI"),
        'client_id': os.getenv("NOTIFY_CLIENT_ID"),
        'client_secret': os.getenv("NOTIFY_CLIENT_SECRET")
    }
    # send request to auth
    r = requests.post('https://notify-bot.line.me/oauth/token', data=client)
    req = json.loads(r.text)
    if req['status'] == 200:
        token = req['access_token']
        # Here is use PostgreSQL, you can change your love db
        with Database() as db, db.connect() as conn:
            with conn.cursor(
                    cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                # FIX: parameterized insert -- the old f-string query was
                # open to SQL injection through the token value.
                cur.execute("INSERT INTO notify(token) VALUES (%s)", (token,))
        return {'access_token': req['access_token']}, 200
    else:
        return {'message': r.text}, 200
def prepare(self, IP):
    """Bind one zombie account record from the database to this instance."""
    self.db = Database(DBPATH)
    self.ip = IP
    self.zombie = sina_zombie(IP)
    # Take the first (and only requested) zombie record.
    record = self.db.get_zombie(0, 1, now())[0]
    self.uid = record[0]
    self.uname = record[1]
    # Password may carry trailing CR/LF from the import file.
    self.upass = record[2].strip('\r').strip('\n')
def get(self):
    """Broadcast the 'msg' query parameter to every stored user via LINE push."""
    msg = request.args.get('msg')
    with Database() as db, db.connect() as conn, conn.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute("SELECT * FROM users")
        for row in cur.fetchall():
            # LINE push message
            self.line_bot_api.push_message(row['id'],
                                           TextSendMessage(text=msg))
    return {'message': msg}, 200
def __init__(self, **kwargs):
    """Initialise a behaviour with optional injected collaborators.

    Keyword Args:
        db: database handle; a fresh Database is created when omitted.
        dir: base directory for behaviour files.
        config: configuration object.
        logging: logger handle.
    """
    self.act = {}
    # FIX: only build a default Database when none is supplied.
    # kwargs.get('db', Database()) constructed (and possibly opened) a
    # throwaway Database on every call, even when one was passed in.
    self.db = kwargs['db'] if 'db' in kwargs else Database()
    self.dir = kwargs.get('dir', None)
    # NOTE(review): raises TypeError when 'dir' is omitted (None + str);
    # callers appear to always pass dir -- confirm.
    self.files = self.dir + '/files'
    self.config = kwargs.get('config', None)
    self.collection = ''
    self.match = None
    self.execution_order = 1
    self.logging = kwargs.get('logging', None)
    self.history = {}
    self.idle_methods = []
    self.define_idle(self.export_db, 24, lib.dt.datetime_from_time(0, 0))  # export db nightly
def get(self):
    """Relay the 'msg' query parameter to every stored LINE Notify token."""
    msg = request.args.get('msg')
    with Database() as db, db.connect() as conn:
        with conn.cursor(
                cursor_factory=psycopg2.extras.RealDictCursor) as cur:
            cur.execute("SELECT token FROM notify")
            for row in cur.fetchall():
                auth_headers = {
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Authorization': f"Bearer {row['token']}"
                }
                requests.post('https://notify-api.line.me/api/notify',
                              data={'message': msg},
                              headers=auth_headers)
    return {'result': 'ok'}, 200
def post(self):
    """Queue one SQS job per stored LINE Notify token broadcasting 'message'."""
    parser = reqparse.RequestParser()
    parser.add_argument('message', required=True,
                        help='message can not be blank!')
    msg = parser.parse_args()['message']
    with Database() as db, db.connect() as conn:
        with conn.cursor(
                cursor_factory=psycopg2.extras.RealDictCursor) as cur:
            cur.execute("SELECT token FROM notify")
            for row in cur.fetchall():
                job = {
                    'token': f"{row['token']}",
                    'message': f"Hello everyone, {msg}"
                }
                cli.send_message(QueueUrl=os.getenv("SQS_URL"),
                                 DelaySeconds=0,
                                 MessageAttributes={},
                                 MessageBody=json.dumps(job))
    return {'result': 'ok'}, 200
def __init__(self, *args, **kwargs):
    """ Initialise attributes and register all behaviours """
    # Keep a module-logger handle on the instance so behaviours can log.
    self.logging = logging
    self.__log('Starting Assistant')
    self.db = Database()
    # 'console' is the default interaction mode unless the caller overrides.
    self.mode = kwargs.get('mode', 'console')
    self.behaviours = {}
    # Resolve paths relative to this source file, not the working directory.
    self.dir = os.path.dirname(os.path.realpath(__file__))
    self.files = self.dir + '/files'
    self.config = config.Config()
    self.responder = None
    # Load (or interactively request) the admin identity from config.
    self.admin = self.config.get_or_request('Admin')
    self.register_behaviours()
    self.register_responders()
    # Drop any stale scheduled jobs, then poll idle work every 5 minutes.
    schedule.clear()
    schedule.every(5).minutes.do(self.idle)
def getEvolutionData(players):
    """Write topPlayerRating.csv: mean yearly overall_rating (2007-2015)
    for the ten highest-rated players in *players*."""
    top_ten = sorted(players, key=lambda p: p[1], reverse=True)[:10]
    ratings_by_player = dict()
    for player in top_ten:
        name = player[-2]
        api_id = player[0]
        rows = []
        for row in Database().execute(
                'select date,overall_rating from Player_Attributes '
                'where player_api_id is ' + str(api_id)):
            rows.append(row)
        # Bucket ratings by calendar year (date format: YYYY-MM-DD ...).
        yearly = dict()
        for date_str, rating in rows:
            year = date_str.split('-')[0]
            yearly.setdefault(year, []).append(rating)
        ratings_by_player[name] = {yr: np.mean(vals)
                                   for yr, vals in yearly.items()}
    data = []
    featureVector = []
    for name in ratings_by_player.keys():
        featureVector.append(name)
        means = ratings_by_player.get(name)
        # Fixed 2007..2015 columns; missing years stay None.
        data.append([means.get(str(year)) for year in range(2007, 2016)])
    data = np.asarray(data)
    utils.createFile(data.T, "topPlayerRating.csv", featureVector)
from lib.db import Database

if __name__ == "__main__":
    db = Database()
    # Seed three demo users; map values become their DB ids after insertion.
    users = {'micha': None, 'luke': None, 'steven': None}
    for user in users:
        # NOTE(review): the original line was redacted in the source
        # ('"Inserting user: "******"@example.com"'); reconstructed as
        # creating the user from '<name>@example.com' and keeping the
        # returned id -- confirm the Database API method name.
        print("Inserting user: " + user)
        uid = db.new_user(user + "@example.com")
        users[user] = uid
    db.new_conversation(users['micha'], users['luke'])
    db.new_message(users['luke'], users['micha'], "Hi micha... this is luke")
    db.deanonymize_user(users['luke'])
    db.new_conversation(users['micha'], users['steven'])
    db.new_message(users['micha'], users['steven'], "hey steven... how are you?")
    db.new_message(users['steven'], users['micha'], "good! what about you?")
def _store_record(db, record):
    """Best-effort persist of *record*: insert, or update when the URL row
    already exists (UNIQUE constraint). Failures are swallowed -- a failed
    save must not abort the crawl."""
    try:
        db.insert(record)
    except Exception as e:
        # Row already present from an earlier visit -> refresh it instead.
        if "UNIQUE constraint failed" in str(e):
            try:
                db.update(record)
            except Exception:
                pass


def crawl(urloc: str) -> (str, list):
    """Fetch one URL over Tor, persist the page (or the error), and return
    (url, outbound onion links).

    Returns (urloc, []) when the URL was already crawled, when the fetch
    fails, or on any unexpected error.
    """
    db = Database(PATH)
    parser = Parser()
    session = connect_to_tor()
    # Skip URLs we have already visited.
    try:
        if len(db.isCrawled(urloc)) > 0:
            return urloc, []
    except Exception as e:
        print(e)
    try:
        try:
            r = session.get(urloc, headers=TORBUNDLEHEADER, timeout=20)
            r.raise_for_status()
        except Exception as err:
            # Record the failure so the URL is not retried immediately.
            _store_record(db, {
                "protocol": "Error",
                "url": urloc,
                "data": base64.b64encode(str(err).encode()),
                "lastvisit": int(time.time()),
            })
            # FIX: the original fell through and hit a NameError on the
            # undefined 'urls', relying on the outer handler to return.
            # Return the empty result explicitly instead.
            return urloc, []
        urls = parser.urlExtractor(urloc, r.text)
        protocol = urloc.split("://")[0]
        _store_record(db, {
            "protocol": protocol,
            "url": urloc,
            "data": base64.b64encode(r.content),
            "lastvisit": int(time.time()),
        })
        retUrls = []
        for key in urls:
            # crawl only http protocol
            if key == "http" or key == "https":
                for url in urls[key]:
                    tld = parser.tldExtractor(url)
                    # crawl only onion sites
                    if tld == "onion":
                        retUrls.append(url)
        return urloc, retUrls
    except Exception:
        return urloc, []
def __init__(self, **kwargs):
    """Forward construction to the parent behaviour and attach a database.

    FIX: super(self.__class__, self) recurses infinitely if this class is
    ever subclassed (self.__class__ is always the leaf class); zero-argument
    super() resolves the MRO correctly.
    """
    super().__init__(**kwargs)
    self.db = Database()
from lib.db import Database
from lib import utils
import random
import json

if __name__ == '__main__':
    # Touch the player_attributes table, then report table sizes.
    database = Database()
    database.cursor.execute('select * from player_attributes')
    team_count = len(utils.getAllDatafromTable('team'))
    player_count = len(utils.getAllDatafromTable('player'))
    print("Total Teams= " + str(team_count))
    print("Total Players= " + str(player_count))
    #utils.prediction1()
def setUp(self) -> None:
    # Use an in-memory SQLite database so every test starts from a clean,
    # throwaway store.
    self.db = Database(':memory:')
    # Keep the recorded path consistent with the in-memory location.
    self.db.full_db_path = ':memory:'
    self.db.connect()
    # Seed the fixture rows through the live cursor.
    populate_db(self.db.cur)
def __init__(self) -> None:
    """Initialize the controller"""
    # Each controller owns its own Database handle.
    self.db = Database()
#!/usr/bin/python2.6
# encoding: utf-8
# FIX: shebang was missing the leading '/' ('#!usr/bin/python2.6'), so the
# script could not be executed directly.
from lib.db import Database

# Open (creating if needed) the zombie-account SQLite database.
db = Database('db/zombie.sqlite')
# One-off import helpers -- enable as needed:
# db.ImportSpeech('data/new/')
# db.ImportAccount('data/account/sohu.csv')
def post(self):
    """LINE webhook endpoint.

    Verifies the X-Line-Signature, upserts the sender into the users
    table, and reacts to the message: rich-menu paging for the page
    keywords, plain echo otherwise.

    Returns:
        'OK' (200) when the event was processed; aborts with 400 on an
        invalid signature.
    """
    # get X-Line-Signature header value
    signature = request.headers['X-Line-Signature']
    body = request.get_data(as_text=True)
    event = json.loads(body)
    print(event)
    try:
        self.handler.handle(body, signature)
    except InvalidSignatureError:
        print(
            "Invalid signature. Please check your channel access token/channel secret.")
        abort(400)
    token = event['events'][0]['replyToken']
    if token == "00000000000000000000000000000000":
        # LINE's webhook-verification dummy token -- nothing to answer.
        pass
    else:
        profile = self.line_bot_api.get_profile(
            event['events'][0]['source']['userId'])
        print(profile)
        state = f"Hello 👉 `{profile.display_name}` 👈"
        id = profile.user_id
        picture = profile.picture_url
        name = profile.display_name
        try:
            with Database() as db, db.connect() as conn, conn.cursor(
                    cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                # FIX: parameterized insert -- the old f-string query was
                # injectable through the user-controlled display name.
                cur.execute(
                    "INSERT INTO users(id, name, picture) VALUES (%s, %s, %s)",
                    (id, name, picture))
        except Exception as e:
            # Most likely a duplicate primary key for a returning user.
            print(e)
            pass
        message = event['events'][0]['message']['text']
        if message == "上一頁" or message == "下一頁":
            # FIX: rich_menu_id was referenced even when the lookup raised,
            # causing an unhandled NameError; default it to None instead.
            rich_menu_id = None
            try:
                rich_menu_id = self.line_bot_api.get_rich_menu_id_of_user(id)
            except Exception:
                # link default rich menu
                self.line_bot_api.link_rich_menu_to_user(
                    id, "richmenu-269cc28b8e8497d76c2df062b274a2ce")
            if rich_menu_id == "richmenu-269cc28b8e8497d76c2df062b274a2ce":
                self.line_bot_api.link_rich_menu_to_user(
                    id, "richmenu-e31be74ad7e577b4752ab70c9c2a3fba")
            elif rich_menu_id is not None:
                self.line_bot_api.link_rich_menu_to_user(
                    id, "richmenu-269cc28b8e8497d76c2df062b274a2ce")
        else:
            self.line_bot_api.reply_message(token, TextSendMessage(
                text=message))
    # NOTE(review): the original ended with a large commented-out
    # buttons-template example plus a live call referencing the undefined
    # 'buttons_template_message' (a NameError had it ever run); both removed.
    return 'OK'
from lib import utils
from lib.db import Database
from datetime import datetime
from sklearn.decomposition.pca import PCA
from sklearn.preprocessing import StandardScaler
from sklearn.manifold import TSNE
import numpy as np


def generatePlayerStats():
    """Collect, for each player, one attribute row dated after 2015-01-01
    (first occurrence per player id wins)."""
    query = ('SELECT * FROM Player_Attributes a INNER JOIN '
             '(SELECT player_name, player_api_id AS id FROM Player) b '
             'ON a.player_api_id = b.id;')
    rows = []
    for row in Database().execute(query):
        rows.append(row)
    # Drop rows with a missing rating column.
    rows = [player for player in rows if player[4] is not None]
    cutoff = datetime(2015, 1, 1)
    recent = []
    for player in rows:
        # Date column looks like 'YYYY-MM-DD hh:mm:ss'.
        y, m, d = map(int, player[3].split()[0].split('-'))
        if datetime(y, m, d) > cutoff:
            recent.append(player)
    # Keep only the first row seen per player id.
    filtered = []
    seen_ids = set()
    for entry in recent:
        if entry[2] in seen_ids:
            continue
        filtered.append(entry)
        seen_ids.add(entry[2])
def __init__(self):
    """Create the per-app lock and open the application database."""
    self._lock = Lock()
    # Fall back to the local host when no dedicated DB address is configured.
    target = APP_DB if APP_DB else localhost()
    self._db = Database(addr=target, domain=APP)
def validate_config(self):
    """Delegate configuration validation to a fresh Database instance."""
    database = Database()
    database.validate_config()
def db() -> Database:
    """Provide a clean test database at /tmp/db.json, purged before use."""
    test_db = Database("/tmp/db.json")
    test_db.purge()
    return test_db
if key == "http" or key == "https" : # crawl only http protocol for url in urls[key]: tld = parser.tldExtractor(url) # crawl only onion sites if tld == "onion": retUrls.append(url) return urloc, retUrls except Exception as e: return urloc,[] if __name__ == "__main__": db = Database(PATH) urls = [WIKI,URLLISTING,OLDWIKI] while len(urls)>0: # multi threading function TPE default max workers == cpu count * 5 with concurrent.futures.ThreadPoolExecutor() as executor: # optimally defined number of threads urls = [executor.submit(crawl, url) for url in urls] concurrent.futures.wait(urls) newUrls = [] for result in urls: try: data = result.result()
def __init__(self):
    """Create the server lock and connect to the application database."""
    self._lock = Lock()
    # Prefer the first configured server DB address; default to localhost.
    target = SERVER_APPDB[0] if SERVER_APPDB else localhost()
    self._db = Database(addr=target, domain=APP)
    # NOTE(review): this chunk opens mid-method -- the lines above belong to
    # an update handler whose 'try' and signature are outside this view.
    except NameError:
        return
    val = {}
    # Collect one entry-widget value per database column; abort the update
    # if any field was left blank.
    for name in self.db.get_column_names():
        val[name] = self.ui.ent[name].get()
        if not val[name]:
            return
    self.db.update_record(id=id, **val)
    self.view_collection()

    def delete_item(self):
        """Delete a record from the database table."""
        # The UI selection may not exist yet; treat that as a no-op.
        try:
            id = self.ui.selected[0]
        except NameError:
            return
        self.db.delete_record(id)
        self.view_collection()

################################################################################

# Application bootstrap: parse CLI args, optionally seed the database,
# then start the Tk main loop.
app = AppInterface()
app.parse_args(args=sys.argv[1:])
if app.seed:
    seed_database(path=app.db_path, name=app.db_name)
app.window = tk.Tk()
Window(window=app.window, title=app.title,
       db=Database(path=app.db_path, name=app.db_name))
app.window.mainloop()