def __initiate_db(self):
    """Create the DB connection, building and seeding the schema on first run.

    If the database file does not exist yet, the ``feeds`` and ``news``
    tables are created and, when a ``./feeds.sql`` file is present, its
    statements are executed to prepopulate the feeds.
    """
    # Check existence BEFORE constructing the worker: Sqlite3Worker creates
    # the file, which would otherwise make the first-run branch unreachable.
    first_run = not os.path.exists(self.__db_path)
    self.__db_worker = Sqlite3Worker(self.__db_path)
    if first_run:
        self.__db_worker.execute(
            'CREATE TABLE feeds (id INTEGER PRIMARY KEY AUTOINCREMENT, '
            'name CHAR(200) UNIQUE, url CHAR(200) UNIQUE, '
            'frequency INTEGER(3))')
        self.__db_worker.execute(
            'CREATE TABLE news (id INTEGER PRIMARY KEY AUTOINCREMENT, '
            'title CHAR(255), url CHAR(255), feedid INTEGER, '
            'published TEXT, FOREIGN KEY(feedid) REFERENCES feeds(id))')
        # Seed initial feeds when a seed script ships alongside the app.
        if os.path.exists("./feeds.sql"):
            # Context manager guarantees the file is closed even on error;
            # the original leaked the handle if execute() raised.
            with open("./feeds.sql", "r") as f:
                for insert in f:
                    self.__db_worker.execute(insert.strip())
def __initiate_db(self):
    """Create a DB connection and build tables if the DB file doesn't exist.

    Attempts to create tables every time; when they already exist the
    CREATE TABLE statements fail and the worker handles/logs the error.
    """
    # If the database doesn't exist, create and prepopulate it with feeds.sql
    self.__db_worker = Sqlite3Worker(self.__db_path)
    self.__db_worker.execute(
        'CREATE TABLE feeds (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'name CHAR(200) UNIQUE, url CHAR(200) UNIQUE, '
        'frequency INTEGER(3))'
    )
    self.__db_worker.execute(
        'CREATE TABLE news (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'title CHAR(255), url CHAR(255), feedid INTEGER, '
        'published TEXT, version INTEGER, '
        'FOREIGN KEY(feedid) REFERENCES feeds(id))'
    )
    self.__db_worker.execute(
        'CREATE TABLE chat (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'chan CHAR(255), time REAL)'
    )
    if os.path.exists("./feeds.sql"):
        # Context manager replaces open/readlines/close and guarantees the
        # handle is released even if an execute() call raises.
        with open("./feeds.sql", "r") as f:
            for insert in f:
                self.__db_worker.execute(insert.strip())
def __init__(self, recreate=False):
    """Open (or create) the per-platform SQLite DB and ensure the schema.

    :param recreate: when True, drop and rebuild the index as well.
    """
    # One DB file per platform, kept next to this module.
    db_file = r"%s/sqlite.db" % CUR_DIR
    log.info("[DB] Start the module")
    self.conn = Sqlite3Worker(db_file)
    self.conn.execute(self.create_table)
    if not recreate:
        return
    self.conn.execute(self.drop_index)
    self.conn.execute(self.create_index)
def insertimgurl(uuid, imgfile):
    """Insert a new image record (frame URL + mask URL) into imgStore.

    :param uuid: unique identifier for the image pair
    :param imgfile: 2-sequence holding (frame_url, mask_url)
    """
    sql_worker = Sqlite3Worker("mydatabase.sqlite3")
    # Parameterized query: the previous %-interpolation was vulnerable to
    # SQL injection and broke on values containing single quotes.
    exsql = (
        'INSERT INTO "main"."imgStore"'
        '("id", "uuid", "frameUrl", "maskUrl", "isComplete", "createTime") '
        "VALUES (NULL, ?, ?, ?, 0, datetime('now','localtime'))"
    )
    sql_worker.execute(exsql, (uuid, imgfile[0], imgfile[1]))
def getimages():
    """Return (as JSON) all imgStore rows not yet complete, stamping handlTime.

    Side effect: every returned row gets its handlTime set to now.
    """
    sql_worker = Sqlite3Worker("mydatabase.sqlite3")
    results = sql_worker.execute('SELECT * from imgStore where isComplete =0')
    for m in results:
        # Parameterized to avoid SQL injection via stored ids; the previous
        # %-interpolation also broke on ids containing quotes.
        sql_worker.execute(
            "update imgStore set handlTime=datetime('now','localtime') "
            "where id=?",
            (m[0],))
    return jsonify(results)
def request(self, vacancy_name, area):
    """Fetch 20 result pages for the given vacancy/area on a 6-thread pool."""
    self.vacancy_name = vacancy_name
    self.area = area
    self.connect_ = Sqlite3Worker(r'Result.db')
    executor = concurrent.futures.ThreadPoolExecutor(max_workers=6)
    with executor:
        executor.map(self.request_items, range(20))
    self.connect_.close()
def __init__(self, db_path):
    """Wrap a threaded SQLite worker around the given database file.

    :param db_path: Path to the database file
    :type db_path: str
    """
    self.sql_worker = Sqlite3Worker(db_path)
    self._db_path = db_path
def __init__(self):
    # Rebuild the spiderurls table from scratch on every run: results of
    # any previous crawl are dropped so the new crawl starts clean.
    self.sql_worker = Sqlite3Worker("../config/spiderurls.db")
    self.sql_worker.execute("DROP TABLE IF EXISTS spiderurls")
    # NOTE(review): the trailing backslashes continue the string literal,
    # so the embedded indentation whitespace becomes part of the SQL text
    # (harmless to SQLite, but any reformatting changes the runtime string).
    self.sql_worker.execute("CREATE TABLE IF NOT EXISTS spiderurls( \
        id INTEGER PRIMARY KEY, \
        url TEXT, \
        depth INTEGER,\
        method TEXT,\
        param TEXT\
        )")
def delete_images():
    """Mark the POSTed list of image ids complete; return the ids handled.

    Expects the request body to be a JSON array of imgStore ids.
    """
    sql_worker = Sqlite3Worker("mydatabase.sqlite3")
    data = json.loads(request.get_data())
    successful = {"list": []}
    for img_id in data:  # renamed from `id`, which shadowed the builtin
        # Parameterized to avoid SQL injection from client-supplied ids.
        sql_worker.execute(
            "update imgStore set handlTime=datetime('now','localtime'),"
            "isComplete=1 where id=?",
            (img_id,))
        successful["list"].append(img_id)
    return jsonify(successful)
def Conn(database):
    """Open a Sqlite3Worker on *database*, or return "" when no path is given.

    Exits the process when the worker yields an error string instead of a
    connection object.
    """
    if database:
        print("[+] Inserting into Database: " + str(database))
        conn = Sqlite3Worker(database)
        if isinstance(conn, str):
            # Bug fix: previously printed the builtin ``str`` type object
            # instead of the error message held in ``conn``.
            print(conn)
            sys.exit(1)
    else:
        conn = ""
    return conn
def __init__(self, frame, thread_num, login_session):
    """Prepare the checker: UI frame, worker count, shared session, URL list."""
    self.frame = frame
    self.thread_num = thread_num
    self.login_session = login_session
    self.threads = []
    self.lock = threading.Lock()
    CheckMain.stoped = False
    # Report writer shared by every checker thread.
    CheckMain.reporter = Reporter()
    # Load the crawl results captured by the spider stage.
    self.sql_worker = Sqlite3Worker("../config/spiderurls.db")
    CheckMain.spiderurls = self.sql_worker.execute(
        "SELECT method,url,param from spiderurls")
def __init__(self, topics_and_parsers, **kwargs):
    """Open the SQLite sink and subscribe a callback per (topic, parser)."""
    from sqlite3worker import Sqlite3Worker

    super(MqttToDBStreamer, self).__init__(job_name=JOB_NAME, **kwargs)
    # raise_on_error=False: a failed insert must not kill the stream job.
    self.sqliteworker = Sqlite3Worker(
        config["storage"]["database"], max_queue_size=250,
        raise_on_error=False)

    # One subscription entry per (topic, parser) pair.
    self.topics_and_callbacks = []
    for topic_and_parser in topics_and_parsers:
        self.topics_and_callbacks.append({
            "topic": topic_and_parser.topic,
            "callback": self.create_on_message(topic_and_parser),
        })

    self.start_passive_listeners()
def xss_check_main(self): if os.path.exists('../config/spiderurls.db'): sql_worker = Sqlite3Worker("../config/spiderurls.db") else: print "db is not exit" return results = sql_worker.execute("SELECT method,url,param from spiderurls") for method, url, param in results: # if method == "GET" or method == "get": # yield self.do_xss_check(url) if method.lower() == "post": if param is None: print "post parm is none" continue data = param print method, url, data yield self.do_xss_check(url, data) sql_worker.close()
def __init__(self, config, botName: str):
    """Bind the DB worker for this bot and cache the record field names."""
    self.config = config
    self.worker = Sqlite3Worker(getDbFullPath(self.config, botName))
    # Field orderings derive from the class annotations of the row types.
    self.dispatchListFields = [*DispatchListItem.__annotations__]
    self.userFields = [*User.__annotations__]
from Levenshtein import distance as levenshtein_distance except ImportError: from distance import levenshtein as levenshtein_distance import common import config try: __import__('pysqlite3') import sys sys.modules['sqlite3'] = sys.modules.pop('pysqlite3') except ModuleNotFoundError: pass from sqlite3worker import Sqlite3Worker common.load_tabs(config.all_tabs) db = Sqlite3Worker("bot.db") class ChatConfig: def __init__(self, guild_id=None, channel_id=None): self.guild_id = guild_id self.channel_id = channel_id self.tabs = config.default_tabs[:] self.lock = False def __str__(self): return f"<ChatConfig(guild_id={self.guild_id}, channel_id={self.channel_id}, tabs={self.tabs}, lock={self.lock})>" chat_configs = {}
def initdb(self):
    """Initialise the threaded SQLite worker from the stored connection string."""
    worker = Sqlite3Worker(self._conn_string)
    self._db = worker
#PRESS F TO PAY RESPECT, tempo medio senza thread: 50 secondi con il link 6jb6yp # tempo medio Con thread: 20-25 secondi con il link 6jb6yp # nohup python3 /var/www/redditBot.py >& /var/www/tmp & import sys import logging import time import _thread import threading import praw from praw.models import MoreComments import sqlite3 from sqlite3 import IntegrityError from sqlite3worker import Sqlite3Worker sql_worker = Sqlite3Worker("respect.db") logging.getLogger("sqlite3worker").setLevel(logging.CRITICAL) commentiBigArray = [] lucchetto = False Master = False #select count(*) as RespectAmount from rispetto; def check_updates( ): #Metodo che guarda i post e guarda se ci sono almeno 10 commenti nuovi, if so aggiorna query = "SELECT link,commenti,data FROM post" reddit = getCredential() while (True): f = 0 for post in sql_worker.execute(query): #Apro tutti i link nel db
script = fd.read() # Close SQL script file fd.close() # Open database connection db = sqlite3.connect(os.path.join(args.config_dir, 'db', 'bazarr.db'), timeout=30) c = db.cursor() # Execute script and commit change to database c.executescript(script) # Close database connection db.close() logging.info('BAZARR Database created successfully') database = Sqlite3Worker(os.path.join(args.config_dir, 'db', 'bazarr.db'), max_queue_size=256, as_dict=True) class SqliteDictConverter: def __init__(self): self.keys_insert = tuple() self.keys_update = tuple() self.values = tuple() self.question_marks = tuple() def convert(self, values_dict): if type(values_dict) is dict: self.keys_insert = tuple() self.keys_update = tuple() self.values = tuple()
from sqlite3worker import Sqlite3Worker from multiprocessing.dummy import Pool as ThreadPool from datetime import datetime import json import requests import logging import argparse sql_worker = Sqlite3Worker("habr.db") sql_worker.execute("CREATE TABLE IF NOT EXISTS comments(" "id INTEGER," "parent_id INTEGER," "article INTEGER," "level INTEGER," "timePublished TEXT," "score INTEGER," "message TEXT," "children TEXT," "author TEXT)" ) def worker(i): url = "https://m.habr.com/kek/v2/articles/{}/comments/?fl=ru%2Cen&hl=ru".format(i) try: r = requests.get(url) if r.status_code == 503: logging.critical("503 Error") raise SystemExit if r.status_code != 200:
from sqlite3worker import Sqlite3Worker from .db import init_followers_db, init_campaign_db, init_state_db from .utils import load_state, store_state, load_state_worker from tweepy.error import TweepError MODE = os.environ.get('MODE','ALL') #ALL, SERVER, INDEX FOLLOWERS_DB = 'followers.db' CAMPAIGN_DB = 'campaign.db' STATE_DB = 'state.db' USERNAME = '' if not os.path.isfile(STATE_DB): state_db = sqlite3.connect(STATE_DB) init_state_db(state_db) state_db_worker = Sqlite3Worker(STATE_DB) else: state_db = sqlite3.connect(STATE_DB) state_db_worker = Sqlite3Worker(STATE_DB) curr_state = load_state(state_db) _auth = None tweepyapi = None IS_AUTH = False def reset_tweepyapi(): global IS_AUTH, tweepyapi, _auth, state_db_worker curr_state = load_state_worker(state_db_worker) if 'CONSUMER_KEY' in curr_state and 'CONSUMER_SECRET_KEY' in curr_state: _auth = tweepy.OAuthHandler(curr_state['CONSUMER_KEY'], curr_state['CONSUMER_SECRET_KEY'])
"209.127.127.58:7156" ] TICKERS = [ "CLOV", "COIN", "TSLA", "AAPL", "IZEA", "HCMC", "AMD", "BABA", "PTON", "SWRM", "NIO", "RECAF", "AMZN", "AMC", "PLTR", "DIS", "SPY", "NFLX", "MARA", "BFT", "FB", "GME", "MSFT", "RIOT", "CCIV", "EEENF", "EBON", "NVDA", "BBKCF", "RIDE", "SEGI", "ADHC", "PLUG", "OPEN", "DSCR", "TWTR", "DKNG", "SNAP", "WTII", "GOOG", "SQ", "BNGO", "QQQ", "SNDL", "PLX", "SOS", "ARDX", "NNDM", "AABB" ] proxy_username = "******" proxy_password = "******" sql_worker = Sqlite3Worker('test_db.db') def make_request(ticker): host_port = random.choice(PROXY_LIST) proxies = { "http": f"http://{proxy_username}:{proxy_password}@{host_port}", "https": f"http://{proxy_username}:{proxy_password}@{host_port}" } url = f'https://api.stocktwits.com/api/2/streams/symbol/{ticker}.json?since=9999999999' r = requests.get(str(url), proxies=proxies) res = r.json() watchlist_count = res['symbol']['watchlist_count'] print(f"{ticker}:{watchlist_count} -- Time: {time.time()}") query = f"INSERT INTO tickers VALUES('{ticker}', '{watchlist_count}', '{time.time()}')"