def __init__(self, **params):
    DBWorker.__init__(self, **params)
    try:
        metadata = MetaData(self.engine)
        self.nodes = Table('nodes', metadata, autoload=True)
        self.policy = Table('policy', metadata, autoload=True)
        self.statistics = Table('statistics', metadata, autoload=True)
        self.versions = Table('versions', metadata, autoload=True)
        self.attributes = Table('attributes', metadata, autoload=True)
    except NoSuchTableError:
        tables = create_tables(self.engine)
        # a plain loop instead of map(): map is lazy under Python 3 and
        # would silently skip the setattr side effects
        for t in tables:
            setattr(self, t.name, t)

    s = self.nodes.select().where(and_(self.nodes.c.node == ROOTNODE,
                                       self.nodes.c.parent == ROOTNODE))
    wrapper = self.wrapper
    wrapper.execute()
    try:
        rp = self.conn.execute(s)
        r = rp.fetchone()
        rp.close()
        if not r:
            # bootstrap the root node on first run
            s = self.nodes.insert().values(node=ROOTNODE, parent=ROOTNODE,
                                           path='')
            self.conn.execute(s)
    finally:
        wrapper.commit()
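# A minimal, self-contained sketch of the reflect-or-create pattern used
# above: try to reflect existing tables, fall back to creating the schema on
# first run. Table and column names here are illustrative; only the
# NoSuchTableError handling mirrors the worker code. Assumes a modern
# SQLAlchemy, where reflection is spelled autoload_with=engine.
from sqlalchemy import MetaData, Table, Column, Integer, Text, create_engine
from sqlalchemy.exc import NoSuchTableError

engine = create_engine('sqlite:///:memory:')
metadata = MetaData()
try:
    nodes = Table('nodes', metadata, autoload_with=engine)  # reflect existing
except NoSuchTableError:
    nodes = Table('nodes', metadata,
                  Column('node', Integer, primary_key=True),
                  Column('parent', Integer, default=0),
                  Column('path', Text, nullable=False, default=''))
    metadata.create_all(engine)  # first run: create the schema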
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" create table if not exists config ( key text primary key, value text ) """)
def __init__(self, **params):
    DBWorker.__init__(self, **params)
    try:
        metadata = MetaData(self.engine)
        self.config = Table('config', metadata, autoload=True)
    except NoSuchTableError:
        tables = create_tables(self.engine)
        for t in tables:  # map() would be a no-op under Python 3
            setattr(self, t.name, t)
def __init__(self, **params):
    DBWorker.__init__(self, **params)
    try:
        metadata = MetaData(self.engine)
        self.groups = Table('groups', metadata, autoload=True)
    except NoSuchTableError:
        tables = create_tables(self.engine)
        for t in tables:  # map() would be a no-op under Python 3
            setattr(self, t.name, t)
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" create table if not exists groups ( owner text, name text, member text, primary key (owner, name, member) ) """) execute(""" create index if not exists idx_groups_member on groups(member) """)
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" create table if not exists public ( public_id integer primary key autoincrement, path text not null, active boolean not null default 1, url text) """) execute(""" create unique index if not exists idx_public_path on public(path) """) execute(""" create unique index if not exists idx_public_url on public(url) """)
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" pragma foreign_keys = on """) execute(""" create table if not exists xfeatures ( feature_id integer primary key, path text ) """) execute(""" create unique index if not exists idx_features_path on xfeatures(path) """) execute(""" create table if not exists xfeaturevals ( feature_id integer, key integer, value text, primary key (feature_id, key, value) foreign key (feature_id) references xfeatures(feature_id) on delete cascade ) """)
def main() -> None:
    config = config_parser.parse('config.json')
    db_worker = DBWorker(config)
    redis_client = redis.Redis(password=config['redis_pass'])
    try:
        redis_client.get('1')  # probe the connection before doing any work
    except redis.exceptions.ConnectionError:
        print('Redis connection error.')
        exit()
    args = parse_args()
    if args.s:  # start
        start(config, db_worker, redis_client)
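# Note: redis-py also exposes ping() for exactly this connectivity probe; a
# sketch of an alternative to the get('1') call above, with the same
# exception handling (redis_available is an assumed helper, not part of the
# original script).
import redis

def redis_available(client: redis.Redis) -> bool:
    try:
        return bool(client.ping())
    except redis.exceptions.ConnectionError:
        return False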
def main(self, conf):
    # create synchronized printer
    self.printer = Printer('Collector')

    # create queues
    self.queue = gevent.queue.Queue()
    self.db_fail_queue = gevent.queue.Queue()
    self.db_recovery_queue = gevent.queue.Queue()

    # create workers
    self.db_recovery_worker = DBRecoveryWorker(self.db_recovery_queue,
                                               self.db_fail_queue,
                                               self.queue, conf)
    self.db_fail_worker = DBFailWorker(self.db_fail_queue,
                                       self.db_recovery_queue, conf)
    self.db_workers = [DBWorker(i, self.queue, self.db_fail_queue, conf)
                       for i in range(conf.num_workers)]

    # register atexit handler
    atexit.register(self.atexit)
    try:
        # call additional prepare() functions
        self.db_fail_worker.prepare()

        # start workers
        self.db_fail_worker.start()
        self.db_recovery_worker.start()
        for w in self.db_workers:
            w.start()

        # connect to twitter API
        auth = tweepy.auth.OAuthHandler(conf.API_key, conf.API_secret)
        auth.set_access_token(conf.Access_token, conf.Access_token_secret)
        stream = tweepy.Stream(auth, self, timeout=conf.max_silence)
        self.printer.log("Connected to twitter API.")

        # start collection
        self.last_time = datetime.now()
        stream.filter(**conf.api_filter)
    except KeyboardInterrupt:
        pass
    except Exception:
        traceback.print_exc()
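# Minimal sketch of the queue fan-out used above: several workers drain a
# shared gevent queue concurrently. The worker logic and the StopIteration
# sentinel convention here are illustrative, not taken from the collector.
import gevent
import gevent.queue

queue = gevent.queue.Queue()

def worker(n):
    # iterating a gevent Queue blocks until a StopIteration sentinel arrives
    for item in queue:
        print('worker %d got %r' % (n, item))

workers = [gevent.spawn(worker, i) for i in range(2)]
for i in range(4):
    queue.put(i)
for _ in workers:
    queue.put(StopIteration)  # one sentinel per worker ends its loop
gevent.joinall(workers)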
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" pragma foreign_keys = on """) execute(""" create table if not exists nodes ( node integer primary key, parent integer default 0, path text not null default '', latest_version integer, foreign key (parent) references nodes(node) on update cascade on delete cascade ) """) execute(""" create unique index if not exists idx_nodes_path on nodes(path) """) execute(""" create index if not exists idx_nodes_parent on nodes(parent) """) execute(""" create table if not exists policy ( node integer, key text, value text, primary key (node, key) foreign key (node) references nodes(node) on update cascade on delete cascade ) """) execute(""" create table if not exists statistics ( node integer, population integer not null default 0, size integer not null default 0, mtime integer, cluster integer not null default 0, primary key (node, cluster) foreign key (node) references nodes(node) on update cascade on delete cascade ) """) execute(""" create table if not exists versions ( serial integer primary key, node integer, hash text, size integer not null default 0, type text not null default '', source integer, mtime integer, muser text not null default '', uuid text not null default '', checksum text not null default '', cluster integer not null default 0, foreign key (node) references nodes(node) on update cascade on delete cascade ) """) execute(""" create index if not exists idx_versions_node_mtime on versions(node, mtime) """) execute(""" create index if not exists idx_versions_node_uuid on versions(uuid) """) execute(""" create table if not exists attributes ( serial integer, domain text, key text, value text, primary key (serial, domain, key) foreign key (serial) references versions(serial) on update cascade on delete cascade ) """) execute(""" create index if not exists idx_attributes_domain on attributes(domain) """) wrapper = self.wrapper wrapper.execute() try: q = "insert or ignore into nodes(node, parent) values (?, ?)" execute(q, (ROOTNODE, ROOTNODE)) finally: wrapper.commit()
from dbworker import DBWorker
import art  # the wildcard "from art import *" duplicated this import

if __name__ == '__main__':
    worker = DBWorker()
    print(worker.get_changings('product_id=1'))
def __init__(self, **params): DBWorker.__init__(self, **params) execute = self.execute execute(""" create table if not exists qh_serials ( serial bigint primary key) """)
# !/usr/bin/env python
# -*- coding: utf-8 -*-
from aiogram import Dispatcher, types
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters import Text
from aiogram.dispatcher.filters.state import State, StatesGroup
from dbworker import DBWorker
import random

db = DBWorker("db_file.db")


class States(StatesGroup):
    waiting_for_players_list = State()
    waiting_for_teams_number = State()
    waiting_for_user_confirm = State()


async def cmd_start(message: types.Message, state: FSMContext):
    await state.finish()
    if not db.user_exist(message.from_user.id):
        db.add_user(message.from_user.id)
        await message.answer(f'Hi, {message.from_user.first_name}!'
                             '\nLooks like you are new here, welcome!'
                             '\nLet\'s split your group into teams.'
                             '\nUse the /begin command to get started')
    else:
        await message.answer(f'Hi, {message.from_user.first_name}, welcome back!'
                             '\nLet\'s split your group into teams.'
                             '\nUse the /begin command to get started')
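# Hypothetical wiring for the handler above, assuming aiogram 2.x: the
# cmd_start coroutine still has to be registered on the Dispatcher.
# register_handlers is an assumed helper, not part of the original bot.
def register_handlers(dp: Dispatcher):
    dp.register_message_handler(cmd_start, commands="start", state="*")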
} # config = {"database": {"host": "localhost", # "port": 5432, # "dbname": "postgres", # "user": "******", # "password": "******" # }} # # config = {"database": {"host": "localhost", # "port": 5432, # "dbname": "postgres", # "user": "******", # "password": "******" # }} db_worker = DBWorker(config) root = Tk() root.title('DB') root.geometry('800x600') class Table(tk.Frame): def __init__(self, parent=None, headings=tuple(), rows=tuple()): super().__init__(parent) table = ttk.Treeview(self, show="headings", selectmode="browse") table["columns"] = headings table["displaycolumns"] = headings for head in headings:
class PluginParser:
    movies = []
    tvshows = []
    plugin_name = 'plugin.video.fs.ua'
    dbworker = DBWorker()

    # def __init__(self):
    # def __del__(self):

    def get_original_title_and_year(self, root):
        pattern = ".+video%2F.+%2F([^-]+).+"
        id = re.match(pattern, root).groups()[0]
        xbmc.log(id)
        url = "http://fs.to/video/films/iframeplayer/{0}"
        headers = {'X-Requested-With': 'XMLHttpRequest'}
        req = urllib2.Request(url.format(id), None, headers)
        response = urllib2.urlopen(req)
        try:
            data = json.loads(response.read())
            original_title = data['coverData']['title_origin']
            year = data['coverData']['year'][0]['title']
        except ValueError:
            return (None, None)
        return (original_title, year)

    def get_movies_from_source(self):
        request = '{ "jsonrpc": "2.0", "method": "Files.GetDirectory", "params": {"directory": "plugin://plugin.video.fs.ua/?section=video&page=%d&type=favorites&mode=readfavorites&subsection=film"},"id": 1 }'
        page = 0
        while True:
            response = json.loads(xbmc.executeJSONRPC(request % page))
            if 'error' in response.keys():
                break
            for file in response['result']['files']:
                # xbmc.log(str(file))
                if not file['label'].startswith('['):
                    root = file['file']
                    title_year = self.dbworker.get_movie_title_by_root(root)
                    if title_year is None:
                        title, year = self.get_original_title_and_year(root)
                        label = file['label'].split(' / ')[-1]
                        if title is None:
                            title = label.split('(')[0].replace(')', '')
                        if year is None:
                            year = (label.split('(')[1]
                                    if len(label.split('(')) > 1 else "")
                    else:
                        title, year = title_year
                    self.movies.append({'title': title, 'year': year,
                                        'root': root})
            page += 1

    def get_movie_links_from_root(self, root):
        request = '{ "jsonrpc": "2.0", "method": "Files.GetDirectory", "params": {"directory": "%s"},"id": 1 }'
        links = {}
        response = json.loads(xbmc.executeJSONRPC(request % root))
        xbmc.log(str(response))
        xbmc.log(request % root)
        lang_dirs = response['result']['files']
        for lang_dir in lang_dirs:
            if lang_dir['file'].endswith('quality=None'):
                lang = lang_dir['label']
                quality_dirs = json.loads(
                    xbmc.executeJSONRPC(request % lang_dir['file']))['result']['files']
                qualities = {}
                for quality_dir in quality_dirs:
                    if quality_dir['filetype'] == 'directory':
                        qualities[quality_dir['label']] = quality_dir['file']
                links[lang] = qualities
        return links

    def get_direct_link_from_source(self, title, year, source):
        request = '{ "jsonrpc": "2.0", "method": "Files.GetDirectory", "params": {"directory": "%s"},"id": 1 }'
        response = json.loads(xbmc.executeJSONRPC(request % source))
        xbmc.log(str(response))
        xbmc.log(request % source)
        direct_link = response['result']['files'][0]['file']
        if urllib.urlopen(direct_link).getcode() == 200:
            self.dbworker.update_link(title, year, self.plugin_name,
                                      direct_link, source)
            return direct_link

    def get_movies(self):
        self.get_movies_from_source()
        xbmc.log(str(self.movies))
        for movie in self.movies:
            self.dbworker.add_movie(movie['title'], movie['year'],
                                    self.plugin_name, movie['root'])
        return self.dbworker.read_movies(self.plugin_name)

    def get_movie(self, title, year):
        movie = self.dbworker.get_movie_link(title, year, self.plugin_name)
        if movie is None:
            return None
        if movie['link'] != '':
            if urllib.urlopen(movie['link']).getcode() == 200:
                return movie['link']
            else:
                return self.get_direct_link_from_source(title, year,
                                                        movie['source'])
        else:
            links = self.get_movie_links_from_root(movie['root'])
            lang = links.keys()[xbmcgui.Dialog().select(title, links.keys())]
            source = links[lang].values()[xbmcgui.Dialog().select(
                lang, links[lang].keys())]
            return self.get_direct_link_from_source(title, year, source)
def __init__(self, **params):
    self._props = params.pop('props')
    for p in self._props:
        setattr(self, p.upper(), self._props[p])
    self.mapfile_prefix = params.pop('mapfile_prefix', 'snf_file_')

    DBWorker.__init__(self, **params)
    execute = self.execute

    execute(""" pragma foreign_keys = on """)
    execute(""" create table if not exists nodes
                ( node           integer primary key,
                  parent         integer default 0,
                  path           text    not null default '',
                  latest_version integer,
                  foreign key (parent) references nodes(node)
                  on update cascade
                  on delete cascade ) """)
    execute(""" create unique index if not exists idx_nodes_path
                on nodes(path) """)
    execute(""" create index if not exists idx_nodes_parent
                on nodes(parent) """)
    execute(""" create index if not exists idx_latest_version
                on nodes(latest_version) """)

    execute(""" create table if not exists policy
                ( node  integer,
                  key   text,
                  value text,
                  primary key (node, key),
                  foreign key (node) references nodes(node)
                  on update cascade
                  on delete cascade ) """)

    execute(""" create table if not exists statistics
                ( node       integer,
                  population integer not null default 0,
                  size       integer not null default 0,
                  mtime      integer,
                  cluster    integer not null default 0,
                  primary key (node, cluster),
                  foreign key (node) references nodes(node)
                  on update cascade
                  on delete cascade ) """)

    execute(""" create table if not exists versions
                ( serial              integer primary key,
                  node                integer,
                  hash                text,
                  size                integer not null default 0,
                  type                text    not null default '',
                  source              integer,
                  mtime               integer,
                  muser               text    not null default '',
                  uuid                text    not null default '',
                  checksum            text    not null default '',
                  cluster             integer not null default 0,
                  available           integer not null default 1,
                  map_check_timestamp integer,
                  mapfile             text,
                  is_snapshot         boolean not null default false,
                  foreign key (node) references nodes(node)
                  on update cascade
                  on delete cascade ) """)
    execute(""" create index if not exists idx_versions_node_mtime
                on versions(node, mtime) """)
    execute(""" create index if not exists idx_versions_node
                on versions(node) """)
    execute(""" create index if not exists idx_versions_node_uuid
                on versions(uuid) """)

    execute(""" create table if not exists attributes
                ( serial    integer,
                  domain    text,
                  key       text,
                  value     text,
                  node      integer not null default 0,
                  is_latest boolean not null default 1,
                  primary key (serial, domain, key),
                  foreign key (serial) references versions(serial)
                  on update cascade
                  on delete cascade ) """)
    execute(""" create index if not exists idx_attributes_domain
                on attributes(domain) """)
    execute(""" create index if not exists idx_attributes_serial_node
                on attributes(serial, node) """)

    execute(""" create table if not exists mapfile_seq
                ( serial integer primary key,
                  dummy  boolean default -1 ) """)

    wrapper = self.wrapper
    wrapper.execute()
    try:
        # bootstrap the root node; idempotent thanks to "insert or ignore"
        q = "insert or ignore into nodes(node, parent) values (?, ?)"
        execute(q, (ROOTNODE, ROOTNODE))
    finally:
        wrapper.commit()