def __init__(self, ref: str, creation_strategy: str, save_dir="", file_limit=1000000, table_name="TEMP", write_ahead_mode=True):
    """
    Open (or create) a SQLite file used as a disk buffer and execute the
    caller-supplied table-creation SQL.

    :param ref: file name of the database, appended to the save directory.
    :param creation_strategy: SQL statement run to create the buffer table.
    :param save_dir: directory for the db file; project default dir when empty.
    :param file_limit: stored as exclusive_access_file_limit for callers.
    :param table_name: name of the buffer table (default "TEMP").
    :param write_ahead_mode: when True, enable WAL journaling and disable
        synchronous writes — faster, at the cost of durability on crash.
    """
    if len(save_dir) == 0:
        default_dir = get_db_buffer_default_dir()
    else:
        default_dir = save_dir
    self._table_name = table_name
    self._write_ahead_mode = write_ahead_mode
    FileHandler.create_file_if_not_exist(default_dir)
    self.filename = default_dir + ref
    # file_exist = os.path.exists(self.filename)
    self.db = sqlite3.connect(self.filename, timeout=10)
    self.cur = self.db.cursor()
    # self.cur.execute("PRAGMA journal_mode = MEMORY")
    # if not file_exist:
    if self._write_ahead_mode:
        # PRAGMAs must run before the DDL below; WAL allows concurrent readers.
        self.cur.execute("PRAGMA journal_mode = WAL;")
        self.cur.execute("PRAGMA synchronous = OFF;")
    self.exclusive_access_file_limit = file_limit
    # cannot ensure uniqueness of data in multithread access
    # self.cur.execute("CREATE TABLE IF NOT EXISTS TEMP (LINK TEXT, RS_CODE INTEGER, LEV INTEGER, L_TYPE INTEGER, PRIMARY KEY(LINK));")
    self.cur.execute(creation_strategy)
    self.db.commit()
def __init__(self, interval: int, ref: ProgressLogInterface, stop_event: Event):
    """
    Thread that periodically logs progress of a long-running task to a CSV file.

    :param interval: period of logging in seconds
    :param ref: the reference object whose progress is logged
    :param stop_event: event used to stop the logging thread
    :return:
    """
    threading.Thread.__init__(self)
    self._interval = interval
    self._ref = ref
    self._stop_event = stop_event
    now = int(time.time())
    self.begin_time = now
    self._ref_time = now
    self._path = get_log_dir() + "Progress/"
    raw_name = ref.get_file_name()
    # cap over-long names, then make sure the .csv suffix is present
    filename = raw_name[0:199] if len(raw_name) > 200 else raw_name
    if not filename.endswith(".csv"):
        filename += ".csv"
    self._file_path = self._path + filename
    FileHandler.create_file_if_not_exist(self._file_path)
    self._limit = ref.get_limit()
    self.limit_counter = 0
def __init__(self, interval: int, ref: ProgressLogInterface, stop_event: Event):
    """
    Thread that periodically logs progress of a long-running task to a CSV file.

    :param interval: period of logging in seconds
    :param ref: the reference object whose progress is logged
    :param stop_event: event used to stop the logging thread
    :return:
    """
    threading.Thread.__init__(self)
    self._interval = interval
    self._ref = ref
    self._stop_event = stop_event
    now = int(time.time())
    self.begin_time = now
    self._ref_time = now
    self._path = get_log_dir() + "Progress/"
    raw_name = ref.get_file_name()
    # cap over-long names, then make sure the .csv suffix is present
    filename = raw_name[0:199] if len(raw_name) > 200 else raw_name
    if not filename.endswith(".csv"):
        filename += ".csv"
    self._file_path = self._path + filename
    FileHandler.create_file_if_not_exist(self._file_path)
    self._limit = ref.get_limit()
    self.limit_counter = 0
def add_proxies(self, proxies: []):
    """
    Append the given proxies to the proxy CSV file.

    Entries that are not ProxyStruct instances are silently skipped;
    a None argument is a no-op.
    """
    if proxies is not None:
        rows = [(p.addr, p.port, p.alt_port, p.user_name, p.psd)
                for p in proxies if isinstance(p, ProxyStruct)]
        FileHandler.create_file_if_not_exist(self._file_path)
        CsvLogger.log_to_file_path(self._file_path, rows)
def run(self):
    """
    Thread main loop: write the CSV header row, then report progress every
    _interval seconds until the stop event fires or the limit is reached.
    """
    FileHandler.create_file_if_not_exist(self._file_path)
    header = ["Index", "Time/Min"] + self._ref.get_column_names()
    self._append(header)
    while not self._stop_event.is_set() and self.limit_counter < self._limit:
        now = int(time.time())
        if now - self._ref_time >= self._interval:
            self._ref_time = now
            self.report_progress()
        # poll once a second so the stop event is noticed promptly
        time.sleep(1)
def run(self):
    """
    Thread main loop: write the CSV header row, then report progress every
    _interval seconds until the stop event fires or the limit is reached.
    """
    FileHandler.create_file_if_not_exist(self._file_path)
    header = ["Index", "Time/Min"] + self._ref.get_column_names()
    self._append(header)
    while not self._stop_event.is_set() and self.limit_counter < self._limit:
        now = int(time.time())
        if now - self._ref_time >= self._interval:
            self._ref_time = now
            self.report_progress()
        # poll once a second so the stop event is noticed promptly
        time.sleep(1)
def __init__(self, file_dir: str = "", file_name="UserAccounts.db"):
    """
    Open (or create) the user-accounts SQLite database and ensure the
    ACCOUNTS table exists.

    :param file_dir: directory of the db file; project temp-db dir when empty.
    :param file_name: db file name (default "UserAccounts.db").
    """
    if len(file_dir) == 0:
        file_dir = get_temp_db_dir()
    FileHandler.create_file_if_not_exist(file_dir)
    self._file_name = file_name
    self.db = sqlite3.connect(file_dir + self._file_name)
    self.cur = self.db.cursor()
    self.cur.execute(
        "CREATE TABLE IF NOT EXISTS ACCOUNTS(TYPE INTEGER, USER_ID TEXT, PSD TEXT,"
        " LINK TEXT,ACCESS_ID TEXT, API_KEY TEXT, PROXY TEXT);")
    self.db.commit()
def __init__(self, file_name, worker: ExternalTempInterface, stop_event: Event, buf_size=200, output_f=1000, dir_path="", table_name="temp", convert_input=True, convert_output=True, terminate_callback=None):
    """
    Disk-backed temp-data buffer built on a SQLite file.

    :param file_name: db file name (appended to the resolved directory).
    :param worker: supplier/consumer implementing ExternalTempInterface.
    :param stop_event: event used to stop buffer processing.
    :param buf_size: in-memory buffer size before flushing to disk.
    :param output_f: output frequency passed to FileBuffInterface.
    :param dir_path: directory for the db file; project temp-db dir when empty.
    :param table_name: name of the temp table.
    :param convert_input: convert input rows before storing.
    :param convert_output: convert output to OnSiteLink by default, else return raw tuple data.
    :param terminate_callback: forwarded to FileBuffInterface.
    :return:
    """
    self._file_name = file_name
    if len(dir_path) > 0:
        self._file_dir = dir_path
    else:
        self._file_dir = get_temp_db_dir()
    self._file_path = self._file_dir + self._file_name
    PrintLogger.print("ExternalTempDataDiskBuffer create path in init: " + self._file_path)
    FileHandler.create_file_if_not_exist(self._file_path)
    self.stop_event = stop_event
    self._tab = table_name
    self._worker = worker
    # separate re-entrant locks so reads and writes do not block each other
    self._get_lock = threading.RLock()
    self._put_lock = threading.RLock()
    self._convert_input = convert_input
    self._convert_output = convert_output
    # base-class init must run before set_db_update_interval/count_all below
    FileBuffInterface.__init__(self, self._file_name, buf_size, output_f=output_f, power_save_mode=True, terminate_callback=terminate_callback)
    self.set_db_update_interval(10)
    self._is_reading = Event()
    self._need_to_vaccum = Event()
    self._total_record = self.count_all()
def log_to_file_path(file_path: str, rows: [()]):
    """
    Append rows to a CSV file, creating the file first if necessary.

    A ".csv" suffix is added to file_path when missing. Failures are logged
    via ErrorLogger instead of being raised; an empty rows list is a no-op.

    :param file_path: destination path; ".csv" is appended if absent.
    :param rows: list of row tuples to write.
    """
    if not rows:
        return
    try:
        path = file_path if file_path.endswith(".csv") else file_path + ".csv"
        FileHandler.create_file_if_not_exist(path)
        # the with-statement closes the file; the old explicit close() was redundant
        with open(path, mode="a", newline="") as csv_file:
            csv.writer(csv_file, delimiter=",").writerows(rows)
    except Exception as ex:
        ErrorLogger.log_error("CsvLogger", ex, "log_to_file_path()")
def log_to_file_path(file_path: str, rows: [()]):
    """
    Append rows to a CSV file, creating the file first if necessary.

    A ".csv" suffix is added to file_path when missing. Failures are logged
    via ErrorLogger instead of being raised; an empty rows list is a no-op.

    :param file_path: destination path; ".csv" is appended if absent.
    :param rows: list of row tuples to write.
    """
    if not rows:
        return
    try:
        path = file_path if file_path.endswith(".csv") else file_path + ".csv"
        FileHandler.create_file_if_not_exist(path)
        # the with-statement closes the file; the old explicit close() was redundant
        with open(path, mode="a", newline="") as csv_file:
            csv.writer(csv_file, delimiter=",").writerows(rows)
    except Exception as ex:
        ErrorLogger.log_error("CsvLogger", ex, "log_to_file_path()")
def log_error(ref: str, error: Exception, addtional: str = ""):
    """
    Append one error record (caller ref, counter + UTC timestamp, error text,
    optional extra detail) to the shared error CSV file.

    Best-effort: any failure while logging is swallowed so logging can never
    crash the caller.

    :param ref: name of the component reporting the error.
    :param error: the exception being recorded.
    :param addtional: optional extra context appended to the record.
    """
    path = get_log_dir() + ErrorLogger.FILE_NAME
    try:
        FileHandler.create_file_if_not_exist(path)
        lines = [
            ref,
            "{0:d} {1:s}".format(ErrorLogger.Counter, str(datetime.datetime.now(tz=pytz.utc))),
            str(error),
        ]
        if len(addtional) > 0:
            lines.append(addtional)
        with open(path, mode="a", newline="") as csv_file:
            csv.writer(csv_file, delimiter=",").writerow(lines)
        ErrorLogger.Counter += 1
    except Exception:
        # was a bare `except:` — that also swallowed SystemExit/KeyboardInterrupt;
        # still deliberately ignore ordinary errors so logging never raises
        pass
def log_error(ref: str, error: Exception, addtional: str = ""):
    """
    Append one error record (caller ref, counter + UTC timestamp, error text,
    optional extra detail) to the shared error CSV file.

    Best-effort: any failure while logging is swallowed so logging can never
    crash the caller.

    :param ref: name of the component reporting the error.
    :param error: the exception being recorded.
    :param addtional: optional extra context appended to the record.
    """
    path = get_log_dir() + ErrorLogger.FILE_NAME
    try:
        FileHandler.create_file_if_not_exist(path)
        lines = [
            ref,
            "{0:d} {1:s}".format(ErrorLogger.Counter, str(datetime.datetime.now(tz=pytz.utc))),
            str(error),
        ]
        if len(addtional) > 0:
            lines.append(addtional)
        with open(path, mode="a", newline="") as csv_file:
            csv.writer(csv_file, delimiter=",").writerow(lines)
        ErrorLogger.Counter += 1
    except Exception:
        # was a bare `except:` — that also swallowed SystemExit/KeyboardInterrupt;
        # still deliberately ignore ordinary errors so logging never raises
        pass
def __init__(self, ref: str, creation_strategy: str, save_dir="", file_limit=1000000, table_name="TEMP", write_ahead_mode=True):
    """
    Open (or create) a SQLite file used as a disk buffer and execute the
    caller-supplied table-creation SQL.

    :param ref: file name of the database, appended to the save directory.
    :param creation_strategy: SQL statement run to create the buffer table.
    :param save_dir: directory for the db file; project default dir when empty.
    :param file_limit: stored as exclusive_access_file_limit for callers.
    :param table_name: name of the buffer table (default "TEMP").
    :param write_ahead_mode: when True, enable WAL journaling and disable
        synchronous writes — faster, at the cost of durability on crash.
    """
    if len(save_dir) == 0:
        default_dir = get_db_buffer_default_dir()
    else:
        default_dir = save_dir
    self._table_name = table_name
    self._write_ahead_mode = write_ahead_mode
    FileHandler.create_file_if_not_exist(default_dir)
    self.filename = default_dir + ref
    # file_exist = os.path.exists(self.filename)
    self.db = sqlite3.connect(self.filename, timeout=10)
    self.cur = self.db.cursor()
    # self.cur.execute("PRAGMA journal_mode = MEMORY")
    # if not file_exist:
    if self._write_ahead_mode:
        # PRAGMAs must run before the DDL below; WAL allows concurrent readers.
        self.cur.execute("PRAGMA journal_mode = WAL;")
        self.cur.execute("PRAGMA synchronous = OFF;")
    self.exclusive_access_file_limit = file_limit
    # cannot ensure uniqueness of data in multithread access
    # self.cur.execute("CREATE TABLE IF NOT EXISTS TEMP (LINK TEXT, RS_CODE INTEGER, LEV INTEGER, L_TYPE INTEGER, PRIMARY KEY(LINK));")
    self.cur.execute(creation_strategy)
    self.db.commit()
def _write_to_power_save_db(self) -> bool:
    """
    Persist the current state to the recovery SQLite db (power-save mode).

    The state object must be Serializable; its JSON form is upserted under
    the single key "state" in STATE_TAB.

    :return: True when the state was written and committed, False when the
             state is not Serializable or any db operation fails (logged).
    """
    data = self.get_state_for_power_save_mode()
    if not isinstance(data, Serializable):
        return False
    FileHandler.create_file_if_not_exist(self._recovery_file_path)
    db = None
    try:
        db = sqlite3.connect(self._recovery_file_path)
        cur = db.cursor()
        cur.execute("CREATE TABLE IF NOT EXISTS STATE_TAB(STATE TEXT UNIQUE, STATE_V TEXT);")
        cur.execute("INSERT OR REPLACE INTO STATE_TAB (STATE, STATE_V) VALUES ( ?, ?);",
                    ("state", data.get_serializable_json()))
        db.commit()
        return True
    except Exception as ex:
        ErrorLogger.log_error("FileBuffInterface", ex,
                              "_write_to_power_save_db() " + self._recovery_file_path)
        return False
    finally:
        # original closed the connection only on success, leaking it on error
        if db is not None:
            db.close()
def __init__(self, file_name, worker: ExternalTempInterface, stop_event: Event, buf_size=200, output_f=1000, dir_path="", table_name="temp", convert_input=True, convert_output=True, terminate_callback=None):
    """
    Disk-backed temp-data buffer built on a SQLite file.

    :param file_name: db file name (appended to the resolved directory).
    :param worker: supplier/consumer implementing ExternalTempInterface.
    :param stop_event: event used to stop buffer processing.
    :param buf_size: in-memory buffer size before flushing to disk.
    :param output_f: output frequency passed to FileBuffInterface.
    :param dir_path: directory for the db file; project temp-db dir when empty.
    :param table_name: name of the temp table.
    :param convert_input: convert input rows before storing.
    :param convert_output: convert output to OnSiteLink by default, else return raw tuple data.
    :param terminate_callback: forwarded to FileBuffInterface.
    :return:
    """
    self._file_name = file_name
    if len(dir_path) > 0:
        self._file_dir = dir_path
    else:
        self._file_dir = get_temp_db_dir()
    self._file_path = self._file_dir + self._file_name
    PrintLogger.print("ExternalTempDataDiskBuffer create path in init: " + self._file_path)
    FileHandler.create_file_if_not_exist(self._file_path)
    self.stop_event = stop_event
    self._tab = table_name
    self._worker = worker
    # separate re-entrant locks so reads and writes do not block each other
    self._get_lock = threading.RLock()
    self._put_lock = threading.RLock()
    self._convert_input = convert_input
    self._convert_output = convert_output
    # base-class init must run before set_db_update_interval/count_all below
    FileBuffInterface.__init__(self, self._file_name, buf_size, output_f=output_f, power_save_mode=True, terminate_callback=terminate_callback)
    self.set_db_update_interval(10)
    self._is_reading = Event()
    self._need_to_vaccum = Event()
    self._total_record = self.count_all()