def remove(self, configuration):
    """Remove the Updater that matches the given Configuration."""
    UPDATER_REMOVED_MESSAGE = "Updater removed for {configuration}."

    # Validate the request before touching any state.
    assert isinstance(configuration, Configuration)
    mirror_hostnames = configuration.get_repository_mirror_hostnames()
    assert configuration.hostname in self.__updaters
    assert mirror_hostnames.issubset(self.__repository_mirror_hostnames)

    # Look up the stored Updater, let it release its resources, then forget
    # both the Updater and its associated repository mirror hostnames.
    updater = self.__updaters.get(configuration.hostname)
    updater.cleanup()
    del self.__updaters[configuration.hostname]
    self.__repository_mirror_hostnames.difference_update(mirror_hostnames)

    Logger.info(UPDATER_REMOVED_MESSAGE.format(configuration=configuration))
def retrieve(self, url, filename=None, reporthook=None, data=None):
    """Download the TUF-verified target for *url*; return (filename, headers)."""
    INTERPOSITION_MESSAGE = "Interposing for {url}"
    Logger.info(INTERPOSITION_MESSAGE.format(url=url))

    # The given URL may be transformed into the real download target,
    # e.g. "/simple/" => "/simple/index.html".
    target_filepath = self.get_target_filepath(url)

    # TODO: Set valid headers fetched from the actual download.
    # NOTE: Guess the mime type from the target_filepath, not the raw URL.
    content_type, content_encoding = mimetypes.guess_type(target_filepath)
    # pip reads this header under both of these spellings, so set both.
    headers = {"content-type": content_type, "Content-Type": content_type}

    # Download the target filepath determined by the original URL.
    temporary_directory, temporary_filename = self.download_target(target_filepath)

    if filename is None:
        # No destination given: hand back the temporary file directly.
        filename = temporary_filename
    else:
        # Copy the TUF-downloaded file to the caller-specified location.
        shutil.copy2(temporary_filename, filename)

    return filename, headers
def retrieve(self, url, filename=None, reporthook=None, data=None):
    """Fetch the TUF target corresponding to *url* and return (filename, headers)."""
    INTERPOSITION_MESSAGE = "Interposing for {url}"
    Logger.info(INTERPOSITION_MESSAGE.format(url=url))

    # Map the URL onto its actual download target; sometimes the URL is
    # transformed first (e.g. "/simple/" => "/simple/index.html").
    target_filepath = self.get_target_filepath(url)

    # TODO: Set valid headers fetched from the actual download.
    # NOTE: the mime type must be guessed from target_filepath, never from
    # the unmodified URL.
    guessed_type, guessed_encoding = mimetypes.guess_type(target_filepath)
    headers = {}
    # pip looks this same header up under two different capitalizations.
    for header_name in ("content-type", "Content-Type"):
        headers[header_name] = guessed_type

    # Download the target filepath determined by the original URL.
    temporary_directory, temporary_filename = self.download_target(target_filepath)

    if filename is None:
        # Without an explicit destination, return the temporary file itself.
        filename = temporary_filename
    else:
        # Copy the TUF-downloaded file out of its own directory into the
        # location the user specified.
        shutil.copy2(temporary_filename, filename)

    return filename, headers
def get(self, url):
    """Get an Updater, if any, for this URL.

    Assumptions:
      - @url is a string.

    Returns the matching Updater, or None when no updater is registered for
    the URL's hostname/port or the URL cannot be transformed for TUF.
    """
    GENERIC_WARNING_MESSAGE = "No updater or interposition for url={url}"
    DIFFERENT_NETLOC_MESSAGE = "We have an updater for netloc={netloc1} but not for netlocs={netloc2}"
    HOSTNAME_FOUND_MESSAGE = "Found updater for hostname={hostname}"
    HOSTNAME_NOT_FOUND_MESSAGE = "No updater for hostname={hostname}"

    updater = None

    try:
        parsed_url = urlparse.urlparse(url)
        hostname = parsed_url.hostname
        port = parsed_url.port or 80
        netloc = parsed_url.netloc
        network_location = "{hostname}:{port}".format(hostname=hostname, port=port)

        # Sometimes parsed_url.netloc does not have a port (e.g. 80),
        # so we do a double check.
        network_locations = set((netloc, network_location))

        updater = self.__updaters.get(hostname)

        if updater is None:
            Logger.warn(HOSTNAME_NOT_FOUND_MESSAGE.format(hostname=hostname))
        else:
            # Ensure that the updater is meant for this (hostname, port).
            if updater.configuration.network_location in network_locations:
                Logger.info(HOSTNAME_FOUND_MESSAGE.format(hostname=hostname))
                # Raises an exception in case we do not recognize how to
                # transform this URL for TUF. In that case, there will be no
                # updater for this URL.
                target_filepath = updater.get_target_filepath(url)
            else:
                # Same hostname, but different (not user-specified) port.
                Logger.warn(DIFFERENT_NETLOC_MESSAGE.format(
                    netloc1=updater.configuration.network_location,
                    netloc2=network_locations))
                updater = None

    # BUG FIX: this was a bare "except:", which also swallowed SystemExit
    # and KeyboardInterrupt; catch Exception so interpreter-level exits
    # still propagate while URL/lookup errors are logged as before.
    except Exception:
        Logger.exception(GENERIC_WARNING_MESSAGE.format(url=url))
        updater = None

    finally:
        if updater is None:
            Logger.warn(GENERIC_WARNING_MESSAGE.format(url=url))

    return updater
def get(self, url):
    """Get an Updater, if any, for this URL.

    Assumptions:
      - @url is a string.

    Returns the matching Updater, or None when the hostname/port is not
    registered or the URL cannot be transformed for TUF.
    """
    GENERIC_WARNING_MESSAGE = "No updater or interposition for url={url}"
    DIFFERENT_NETLOC_MESSAGE = "We have an updater for netloc={netloc1} but not for netlocs={netloc2}"
    HOSTNAME_FOUND_MESSAGE = "Found updater for hostname={hostname}"
    HOSTNAME_NOT_FOUND_MESSAGE = "No updater for hostname={hostname}"

    updater = None

    try:
        parsed_url = urlparse.urlparse(url)
        hostname = parsed_url.hostname
        port = parsed_url.port or 80
        netloc = parsed_url.netloc
        network_location = "{hostname}:{port}".format(hostname=hostname, port=port)

        # Sometimes parsed_url.netloc does not have a port (e.g. 80),
        # so we do a double check.
        network_locations = set((netloc, network_location))

        updater = self.__updaters.get(hostname)

        if updater is None:
            Logger.warn(HOSTNAME_NOT_FOUND_MESSAGE.format(hostname=hostname))
        else:
            # Ensure that the updater is meant for this (hostname, port).
            if updater.configuration.network_location in network_locations:
                Logger.info(HOSTNAME_FOUND_MESSAGE.format(hostname=hostname))
                # Raises an exception in case we do not recognize how to
                # transform this URL for TUF. In that case, there will be no
                # updater for this URL.
                target_filepath = updater.get_target_filepath(url)
            else:
                # Same hostname, but different (not user-specified) port.
                Logger.warn(DIFFERENT_NETLOC_MESSAGE.format(
                    netloc1=updater.configuration.network_location,
                    netloc2=network_locations))
                updater = None

    # BUG FIX: narrowed from a bare "except:" so SystemExit and
    # KeyboardInterrupt are no longer swallowed; real errors are still
    # logged and converted into a None result as before.
    except Exception:
        Logger.exception(GENERIC_WARNING_MESSAGE.format(url=url))
        updater = None

    finally:
        if updater is None:
            Logger.warn(GENERIC_WARNING_MESSAGE.format(url=url))

    return updater
def add(self, configuration):
    """Build an Updater from *configuration* and register it."""
    UPDATER_ADDED_MESSAGE = "Updater added for {configuration}."

    # Validation happens in the checker, which also yields the mirrors.
    mirror_hostnames = self.__check_configuration(configuration)

    # Checks passed: store the new Updater and remember its mirror hostnames.
    self.__updaters[configuration.hostname] = Updater(configuration)
    self.__repository_mirror_hostnames.update(mirror_hostnames)

    Logger.info(UPDATER_ADDED_MESSAGE.format(configuration=configuration))
def add(self, configuration):
    """Register a new Updater built from the given Configuration."""
    UPDATER_ADDED_MESSAGE = "Updater added for {configuration}."

    # The add-time checker validates the configuration and returns the
    # repository mirror hostnames it references.
    mirror_hostnames = self.__check_configuration_on_add(configuration)

    # All is well: build and store an Updater, and remember the hostnames.
    self.__updaters[configuration.hostname] = Updater(configuration)
    self.__repository_mirror_hostnames.update(mirror_hostnames)

    Logger.info(UPDATER_ADDED_MESSAGE.format(configuration=configuration))
def remove(self, configuration):
    """Remove an Updater matching the given Configuration."""
    UPDATER_REMOVED_MESSAGE = "Updater removed for {configuration}."

    # Sanity-check the request against the registered state.
    assert isinstance(configuration, Configuration)
    mirror_hostnames = configuration.get_repository_mirror_hostnames()
    assert configuration.hostname in self.__updaters
    assert mirror_hostnames.issubset(self.__repository_mirror_hostnames)

    # All is well: drop the stored Updater together with its associated
    # repository mirror hostnames.
    del self.__updaters[configuration.hostname]
    self.__repository_mirror_hostnames.difference_update(mirror_hostnames)

    Logger.info(UPDATER_REMOVED_MESSAGE.format(configuration=configuration))
def retrieve(self, url, filename=None, reporthook=None, data=None):
    """Download the TUF-verified target for *url*.

    Returns a (filename, headers) tuple in the style of urllib's retrieve.
    """
    INTERPOSITION_MESSAGE = "Interposing for {url}"
    Logger.info(INTERPOSITION_MESSAGE.format(url=url))

    # What is the actual target to download given the URL? Sometimes the
    # URL is transformed into the intended target, e.g. "/simple/" =>
    # "/simple/index.html".
    target_filepath = self.get_target_filepath(url)

    # TODO: set valid headers fetched from the actual download.
    # BUG FIX: guess the mime type from the transformed target_filepath,
    # not the unmodified URL (the URL may lack the real extension), and
    # provide the header under both spellings that pip consults.
    content_type, content_encoding = mimetypes.guess_type(target_filepath)
    headers = {"content-type": content_type, "Content-Type": content_type}

    temporary_directory, temporary_filename = self.download_target(target_filepath)

    if filename is None:
        # If no filename is given, use the temporary file.
        filename = temporary_filename
    else:
        # Otherwise, copy the TUF-downloaded file in its own directory
        # to the location the user specified.
        shutil.copy2(temporary_filename, filename)

    return filename, headers
class Database(object):
    '''
    Singleton wrapper around the sqlite3 accounting database.

    The goal of this class is to move all SQL composition out of the
    program logic and place it here. Obtain the single instance with
    Database.get_instance(); calling the constructor twice raises.

    NOTE(review): most methods build SQL via %-interpolation, so they are
    only safe for trusted, program-generated table/column names and values.
    Parameterized queries are used only where values come from record dicts.
    '''

    # Class-level slot holding the one-and-only instance.
    __instance = None

    @staticmethod
    def get_instance():
        '''
        Return the singleton object for this class, creating it on first use.
        '''
        if Database.__instance is None:
            Database()  # the constructor stores itself in Database.__instance
        return Database.__instance

    def __init__(self):
        '''
        Create the singleton, open (creating if needed) the database file and
        configure the locale used by convert_value().

        Raises:
            Exception: if an instance already exists; use get_instance().
        '''
        # Gate the access to __init__().
        if Database.__instance is not None:
            raise Exception(
                "Database class is a singleton. Use get_instance() instead.")
        Database.__instance = self

        # Continue with init exactly once.
        self.logger = Logger(self, Logger.DEBUG)
        self.logger.debug("enter constructor")
        self.data_version = '1.0'
        self.database_name = 'accounting.db'
        self.db_create_file = 'database.sql'
        self.db_pop_file = 'populate.sql'
        self.open()
        # convert_value() relies on this locale for number parsing.
        locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
        self.logger.debug("leave constructor")

    @debugger
    def open(self):
        '''
        Open the database, creating and populating it first when the file
        does not exist. Rows come back as sqlite3.Row (mapping-style access).
        '''
        if not os.path.isfile(self.database_name):
            self.create_database()
        self.db = sql.connect(self.database_name)
        self.db.row_factory = sql.Row

    @debugger
    def close(self):
        '''Commit any pending changes and close the connection.'''
        self.db.commit()
        self.db.close()

    @debugger
    def read_statement(self, fh):
        '''
        Read one statement from an open *.sql file and return it.

        Skips comments and concatenates lines until a ';' is read. A comment
        is text that starts with a '#' and continues to the end of the line.
        '''
        retv = ''
        for line in fh:
            # Strip a trailing comment, if any.
            # BUG FIX: find() returns -1 when there is no '#', and the old
            # line[0:idx] slice then silently dropped the last character of
            # a final line that lacked a trailing newline.
            idx = line.find('#')
            if idx >= 0:
                line = line[:idx]
            line = line.strip()
            # If there is anything left, append it to the return value.
            if len(line) > 0:
                retv += " %s" % (line)
                if line[-1] == ';':
                    break
        return retv

    @debugger
    def run_file(self, db, name):
        '''
        Execute all of the statements in the *.sql file *name* against the
        cursor *db*.
        '''
        with open(name) as fh:
            while True:
                statement = self.read_statement(fh)
                if len(statement) > 0:
                    db.execute(statement)
                else:
                    break

    @debugger
    def create_database(self):
        '''
        Create and populate the database file from the schema scripts.
        '''
        self.logger.info("creating database")
        c = sql.connect(self.database_name)
        db = c.cursor()
        self.run_file(db, self.db_create_file)
        self.run_file(db, self.db_pop_file)
        c.commit()
        c.close()

    @debugger
    def get_columns(self, table):
        '''
        Return a dict where all column names of *table* are keys with blank
        values.
        '''
        # TODO: make the data the type of element that the column uses.
        retv = {}
        cols = self.execute('PRAGMA table_info(%s);' % (table))
        for item in cols:
            retv[item[1]] = ''
        # BUG FIX: previously returned the exhausted cursor ("cols") instead
        # of the dict this method's docstring promises.
        return retv

    @debugger
    def get_column_list(self, table):
        '''Return a list of all of the column names of *table*.'''
        retv = []
        cols = self.execute('PRAGMA table_info(%s);' % (table))
        for item in cols:
            retv.append(item[1])  # column 1 of table_info is the name
        return retv

    @debugger
    def execute(self, sql):
        '''
        Execute an arbitrary SQL statement and return the cursor.

        NOTE: the parameter intentionally keeps its historical name even
        though it shadows the sqlite3 module alias inside this method.
        '''
        self.logger.debug("SQL=%s" % (sql))
        return self.db.execute(sql)

    @debugger
    def commit(self):
        '''Commit the database to disk.'''
        self.db.commit()

    @debugger
    def populate_list(self, table, column):
        '''
        Return a list with all of the items in *column* of *table*.
        '''
        curs = self.execute('select %s from %s;' % (column, table))
        retv = []
        for item in curs:
            # Assumes the selected values are strings — TODO confirm callers.
            retv.append(' '.join(item))
        return retv

    @debugger
    def get_row_by_id(self, table, ID):
        '''
        Return a dict of all of the columns in the row with the given ID,
        or None when no such row exists.
        '''
        curs = self.execute(
            'select * from %s where ID = %d;' % (table, ID)).fetchall()
        try:
            return dict(curs[0])
        except IndexError:
            return None

    @debugger
    def get_id_by_row(self, table, col, val):
        '''
        Return the ID of the first row where *col* equals *val*, or None
        when nothing matches.
        '''
        if type(val) is str:
            sql = 'SELECT ID FROM %s WHERE %s = \"%s\";' % (table, col, val)
        else:
            sql = 'SELECT ID FROM %s WHERE %s = %s;' % (table, col, val)
        row = self.execute(sql).fetchall()
        if len(row) == 0:
            return None
        else:
            return dict(row[0])['ID']

    @debugger
    def get_cursor(self):
        '''Return a cursor for the current database connection.'''
        return self.db.cursor()

    @debugger
    def get_id_list(self, table, where=None):
        '''
        Get a list of all of the IDs in the table, optionally restricted by
        a raw SQL *where* clause.
        '''
        retv = []
        if where is None:
            sql = 'SELECT ID FROM %s;' % (table)
        else:
            sql = 'SELECT ID FROM %s WHERE %s;' % (table, where)
        cur = self.execute(sql)
        for item in cur:
            retv.append(item[0])
        return retv

    @debugger
    def get_row_list(self, table, where):
        '''
        Get a list of row-dicts matching the raw *where* clause, or None
        when nothing matches.
        '''
        retv = []
        sql = 'SELECT * FROM %s WHERE %s' % (table, where)
        cur = self.execute(sql)
        for item in cur:
            retv.append(dict(item))
        if len(retv) == 0:
            return None
        else:
            return retv

    @debugger
    def get_row_list_by_col(self, table, col, val):
        '''
        Get the list of all rows where *col* has the given value, or None
        when nothing matches.
        '''
        retv = []
        if type(val) is str:
            sql = 'SELECT * FROM %s WHERE %s = \"%s\";' % (table, col, val)
        else:
            sql = 'SELECT * FROM %s WHERE %s = %s;' % (table, col, val)
        self.logger.debug("SQL=%s" % (sql))
        cur = self.execute(sql)
        for item in cur:
            retv.append(dict(item))
        if len(retv) == 0:
            return None
        else:
            return retv

    @debugger
    def get_id_by_name(self, table, col, val):
        '''
        Return the ID where the data in *col* matches *val*. Only the first
        match is returned; None when there is no match.
        '''
        if type(val) is str:
            sql = 'select ID from %s where %s = \"%s\";' % (table, col, val)
        else:
            sql = 'select ID from %s where %s = %s;' % (table, col, str(val))
        curs = self.execute(sql)
        recs = curs.fetchall()
        retv = None
        for row in recs:
            retv = row[0]
            break
        return retv

    @debugger
    def get_single_value(self, table, col, row_id):
        '''
        Retrieve a single value where the table, column and row ID are known.
        Returns None when the row does not exist.
        '''
        sql = 'SELECT %s FROM %s WHERE ID=%d;' % (col, table, row_id)
        self.logger.debug("SQL=%s" % (sql))
        curs = self.execute(sql)
        recs = curs.fetchall()
        retv = None
        for row in recs:
            retv = row[0]
            break
        return retv

    @debugger
    def set_single_value(self, table, col, row_id, val):
        '''
        Store a single value where the table, column and row ID are known.
        (The value itself is bound as a parameter.)
        '''
        vals = tuple([val])
        sql = 'UPDATE %s SET %s=? WHERE ID=%d;' % (table, col, row_id)
        self.logger.debug("SQL=%s (%s)" % (sql, vals))
        return self.db.execute(sql, vals)

    @debugger
    def insert_row(self, table, rec):
        '''
        Insert a row from a dictionary mapping column names to the values to
        insert. Returns the new row's ID.
        '''
        keys = ','.join(rec.keys())
        qmks = ','.join(list('?' * len(rec)))
        vals = tuple(rec.values())
        sql = 'INSERT INTO %s (%s) VALUES (%s);' % (table, keys, qmks)
        self.logger.debug("SQL=%s (%s)" % (sql, vals))
        return self.db.execute(sql, vals).lastrowid

    @debugger
    def update_row(self, table, rec, where):
        '''
        Update a row from a dictionary mapping column names to values.

        A condition must be specified, such as ID=123; otherwise incorrect
        data would be placed in every row of the table.
        '''
        keys = '=?,'.join(rec.keys()) + '=?'
        vals = tuple(rec.values())
        sql = 'UPDATE %s SET %s WHERE %s;' % (table, keys, where)
        self.logger.debug("SQL=%s (%s)" % (sql, vals))
        return self.db.execute(sql, vals)

    @debugger
    def update_row_by_id(self, table, rec, id):
        '''
        Update the row with the given ID from a dictionary mapping column
        names to the values to place in those columns.
        '''
        keys = '=?,'.join(rec.keys()) + '=?'
        vals = tuple(rec.values())
        sql = 'UPDATE %s SET %s WHERE ID = %d;' % (table, keys, id)
        self.logger.debug("SQL=%s (%s)" % (sql, vals))
        return self.db.execute(sql, vals)

    @debugger
    def delete_row(self, table, id):
        '''Delete the row given by the ID.'''
        sql = 'DELETE FROM %s WHERE ID = %d;' % (table, id)
        self.logger.debug("SQL=%s" % (sql))
        return self.db.execute(sql)

    @debugger
    def delete_where(self, table, where):
        '''Delete rows that conform to the raw "where" clause.'''
        sql = 'DELETE FROM %s WHERE %s;' % (table, where)
        self.logger.debug("SQL=%s" % (sql))
        return self.db.execute(sql)

    @debugger
    def if_rec_exists(self, table, column, value):
        '''
        Return True if there is a row that has *column* equal to *value*.
        '''
        if type(value) is int or type(value) is float:
            sql = 'SELECT %s FROM %s WHERE %s = %s;' % (
                column, table, column, str(value))
        else:
            sql = 'SELECT %s FROM %s WHERE %s = \"%s\";' % (
                column, table, column, value)
        cursor = self.db.execute(sql)
        return cursor.fetchone() is not None

    @debugger
    def convert_value(self, val, value_type, abs_val=True):
        '''
        Convert *val* to *value_type* (an actual Python type object such as
        int, float or str) using locale-aware number parsing.

        When abs_val is True (the default), numeric results are absolute
        values. An empty string converts to 0.0 for a float target. Returns
        None when no conversion rule applies.
        '''
        retv = None
        self.logger.debug('val type: %s, value: %s, target type: %s'
                          % (type(val), val, value_type))
        if type(val) is value_type:
            # Already the requested type; pass through unchanged.
            retv = val
        elif value_type is str:
            retv = str(val)
        elif value_type is float:
            if type(val) is str:
                if val == '':
                    retv = 0.0
                elif abs_val:
                    retv = abs(locale.atof(val))
                else:
                    retv = locale.atof(val)
            else:
                # NOTE(review): locale.atof expects a string; a non-str val
                # reaching this branch may raise — confirm callers.
                if abs_val:
                    retv = abs(locale.atof(val))
                else:
                    retv = locale.atof(val)
        elif value_type is int:
            if abs_val:
                retv = int(abs(locale.atof(val)))
            else:
                retv = int(locale.atof(val))
        self.logger.debug('made it here: %s' % (str(retv)))
        return retv
class SetupFormBase(object):
    '''
    This class provides common services for forms in the setup notebook.

    Subclasses are expected to define self.form_contents: a list of dicts
    with keys 'self' (the widget), 'column' (the DB column), and 'hasid'
    (None, or a dict with 'table'/'column'/'id' describing a foreign-key
    indirection) — inferred from set_form()/get_form(); confirm in callers.
    '''

    def __init__(self, master, table, empty_ok=False):
        # master: parent widget; table: DB table this form edits;
        # empty_ok: suppress the "no records" dialogs when the table is empty.
        self.logger = Logger(self, level=Logger.DEBUG)
        self.logger.debug("Setup Dialog start constructor")
        self.master = master
        self.table = table
        self.empty_ok = empty_ok
        self.data = Database.get_instance()
        self.events = EventHandler.get_instance()
        # id_list holds the IDs of the records this form pages through;
        # crnt_index is the position within that list.
        self.id_list = self.get_id_list()
        self.crnt_index = 0

    @debugger
    def get_id_list(self):
        '''
        This method exists so that a form can manage a smaller set of records
        than every single one. To do that, override this default method in the
        form class.
        '''
        return self.data.get_id_list(self.table)

    @debugger
    def get_id(self):
        '''
        Returns the id of the record in the current form.
        '''
        return self.id_list[self.crnt_index]

    @debugger
    def select_button_command(self):
        '''Open the record-selection dialog and jump to the chosen record.'''
        self.id_list = self.get_id_list()
        sel = SelectItem(self.master, self.table)
        # item_id semantics: -1 = dialog canceled, 0 = item not found,
        # anything else = the selected record's ID (per the branches below).
        #if not hasattr(sel, 'item_id'):
        if sel.item_id == -1:
            self.logger.debug('Select dialog was canceled')
        elif sel.item_id == 0:
            self.logger.debug('Select dialog item was not found')
        else:
            try:
                self.logger.debug('Select dialog item selected = %d' % (sel.item_id))
                self.crnt_index = self.id_list.index(sel.item_id)
                self.set_form()
            except TypeError:
                # id_list may be None (cleared form), making .index() raise.
                mb.showerror(
                    'ERROR',
                    'No record was selected. (no records are available?)')
        self.events.raise_event('select_button')

    @debugger
    def new_button_command(self):
        '''
        Clear the form
        '''
        self.clear_form()
        self.events.raise_event('new_button')

    @debugger
    def save_button_command(self):
        '''
        Save the form to the database
        '''
        if not self.id_list is None:
            self.get_form()
        self.events.raise_event('save_button')

    @debugger
    def del_button_command(self):
        '''
        Delete the item given in the form from the database
        '''
        if not self.id_list is None:
            val = mb.askokcancel(
                "Sure?",
                "Are you sure you want to delete item from %s?" % (self.table))
            if val:
                self.logger.info("Deleting item %d from %s"
                                 % (self.id_list[self.crnt_index], self.table))
                self.data.delete_row(self.table, self.id_list[self.crnt_index])
                self.data.commit()
                # Refresh the ID list and clamp the index to the new end.
                self.id_list = self.get_id_list()
                if self.crnt_index >= len(self.id_list):
                    self.crnt_index -= 1
                self.set_form()
        self.events.raise_event('del_button')

    @debugger
    def next_btn_command(self):
        '''
        Go to the next item in the form table
        '''
        if not self.id_list is None:
            self.crnt_index += 1
            # Clamp at the last record rather than wrapping around.
            if self.crnt_index >= len(self.id_list):
                self.crnt_index = len(self.id_list) - 1
            self.set_form()
        self.events.raise_event('next_button')

    @debugger
    def prev_btn_command(self):
        '''
        Go to the previous item in the table
        '''
        if not self.id_list is None:
            self.crnt_index -= 1
            # Clamp at the first record rather than wrapping around.
            if self.crnt_index < 0:
                self.crnt_index = 0
            self.set_form()
        self.events.raise_event('prev_button')

    @debugger
    def clear_form(self):
        '''
        Clear the form.
        '''
        for item in self.form_contents:
            # NOTE(review): debug print left in — consider removing.
            print(item)
            item['self'].clear()
        # A None id_list marks the form as "cleared"/unbound to any record.
        self.id_list = None
        self.events.raise_event('clear_form')

    @debugger
    def set_form(self): #, row_id):
        '''
        Read the database and place the data in the form.
        '''
        if self.id_list is None:
            return
        try:
            self.id_list = self.get_id_list()
            row_id = self.id_list[self.crnt_index]
        except IndexError:
            # No record at the current index (table emptied?).
            if not self.empty_ok:
                self.logger.info('No records defined for table \'%s\'' % (self.table))
                mb.showinfo(
                    'Records',
                    'There are no records available for this form: \'%s\".'
                    % (self.table))
            self.clear_form()
            return
        row = self.data.get_row_by_id(self.table, row_id)
        if row is None:
            if not self.empty_ok:
                self.logger.info('No records defined for table \'%s\'' % (self.table))
                mb.showinfo(
                    'Records',
                    'There are no records available for this table: \'%s\'.'
                    % (self.table))
            self.clear_form()
            return
        # NOTE(review): debug print left in — consider removing.
        print(self.form_contents)
        for item in self.form_contents:
            if not item['hasid'] is None:
                # swap in the value that the ID points to rather than the actual ID
                item['hasid']['id'] = int(row[item['column']])
                tmp_row = self.data.get_row_by_id(item['hasid']['table'],
                                                  item['hasid']['id'])
                item['self'].write(tmp_row[item['hasid']['column']])
            else:
                item['self'].write(row[item['column']])
        self.events.raise_event('set_form')

    @debugger
    def get_form(self):
        '''
        Read the form and place the data in the database.
        '''
        if self.id_list is None:
            return
        row = {}
        for item in self.form_contents:
            if not item['hasid'] is None:
                # If in the future, forms that require a writable ID in the
                # form is implemented, then this line will have to change.
                row[item['column']] = item['hasid']['id']
            else:
                row[item['column']] = item['self'].read()
        # NOTE(review): this insert branch is unreachable — the early return
        # above already filtered out id_list is None, so new records are
        # never inserted from here; confirm intended behavior.
        if self.id_list is None:
            self.data.insert_row(self.table, row)
        else:
            row_id = self.id_list[self.crnt_index]
            self.data.update_row_by_id(self.table, row, row_id)
        self.id_list = self.get_id_list()
        self.events.raise_event('get_form')
def main(svc_input, configs):
    """Run one Dell warranty query end-to-end.

    svc_input is the service-code input (may contain '?' placeholders,
    judging by the replace() below — TODO confirm); configs is a dict of
    settings that must include "email_api_key".

    NOTE(review): temp_dir, excel_dir, history_zipfile and
    invalid_history_file_path are module-level globals — confirm they are
    defined before main() runs.
    """
    logger = Logger("查询日志", verbose=True)
    log_file_name = "log%s_%s.txt" % (svc_input.replace(
        "?", "#"), DateTimeUtil.get_current_datetime(is_date=True))
    log_file_path = WindowsUtil.convert_win_path(
        os.path.join(temp_dir, log_file_name))
    logger.info("[开始查询] %s" % svc_input)
    try:
        # Open the local zip archive of matching warranty-history records.
        history_zip = ZipFileSVC(zip_file_path=history_zipfile, mode='a')
        start_time = DateTimeUtil.get_current_datetime()
        # Generate every possible service-code candidate from the input.
        svc_generator = SVCGenerator(svc_input, logger)
        logger.info("创建出所有可能查询码:%s" % len(svc_generator.target_svc_set))
        # Using the locally recorded invalid codes, split the candidates
        # into target codes to query and known-invalid codes.
        existed_svc = history_zip.find_file_regex(svc_generator.regex)
        svc_generator.generate_target_svc_batch(existed_svc,
                                                invalid_history_file_path)
        # Call the Dell query API and convert the API data into entities.
        output_dell_asset_list = list([])
        if svc_generator.target_svc_set:
            batch = Batch(logger, configs)
            api_dell_asset_list = batch.begin(svc_generator.target_svc_set)
            output_dell_asset_list = api_dell_asset_list
            logger.info("从API中总共得到%s个结果" % (len(api_dell_asset_list)))
            logger.info("将实体类序列化到本地临时TXT文件")
            temp_text_files_path = DellAsset.serialize_txt_batch(
                api_dell_asset_list, temp_dir)
            logger.info("将序列化临时文件存到本地zip历史记录,总数:%s" % len(temp_text_files_path))
            history_zip.add_new_file_batch(temp_text_files_path)
            logger.info("删除临时 %s 个TXT文件" % len(temp_text_files_path))
            for file_path in temp_text_files_path:
                FileUtil.delete_file(file_path)
            logger.info("将API得到的实体类和历史记录实体类合并")
        else:
            logger.warn("目标查询码为空,仅从从历史记录中导出结果")
        # Merge in the entities recorded in the local history archive.
        for svc in svc_generator.existed_svc_set:
            dell_asset_content = history_zip.get_member_content(
                file_name="%s.txt" % svc)
            output_dell_asset_list.append(
                DellAsset.deserialize_txt(dell_asset_content))
        logger.info("添加历史记录,总共得到%s个结果" % (len(output_dell_asset_list)))
        excel_output_path = WindowsUtil.convert_win_path(
            os.path.join(excel_dir, "%s.xlsx" % svc_generator.get_file_name()))
        DellAsset.save_as_excel_batch(output_dell_asset_list, excel_output_path)
        if FileUtil.is_path_existed(excel_output_path):
            logger.info("存为Excel文档成功")
            end_time = DateTimeUtil.get_current_datetime()
            logger.info("总用时 %s " % DateTimeUtil.datetime_diff(start_time,
                                                               end_time))
            logger.info("[查询结束] 总共%s个结果 保存在:%s"
                        % (len(output_dell_asset_list), excel_output_path))
        else:
            logger.error("[保存结果失败] %s" % excel_output_path)
    except Exception as e:
        # On any error, email a failure report with the log attached.
        logger.error("[查询失败] 已发送报告 请等待解决")
        logger.error("%s\n%s" % (e, traceback.format_exc()))
        logger.save(log_file_path)
        email_api_key = configs["email_api_key"]
        email = Email(
            email_api_key,
            subject="[查询失败] %s %s" % (
                DateTimeUtil.get_current_datetime(is_date=True), svc_input))
        email.add_attachment(log_file_path)
        email.send(cc_mode=logger.has_error)
def main(svc_input, configs):
    """Query Dell warranty data for *svc_input* and export results to Excel.

    configs must contain "email_api_key" (used for the failure report).
    Relies on module-level globals temp_dir, excel_dir, history_zipfile and
    invalid_history_file_path — presumably initialized at import time;
    verify before calling.
    """
    logger = Logger("查询日志", verbose=True)
    log_file_name = "log%s_%s.txt" % (
        svc_input.replace("?", "#"),
        DateTimeUtil.get_current_datetime(is_date=True))
    log_file_path = WindowsUtil.convert_win_path(
        os.path.join(temp_dir, log_file_name))
    logger.info("[开始查询] %s" % svc_input)
    try:
        # Open the local warranty-history zip archive (append mode).
        history_zip = ZipFileSVC(zip_file_path=history_zipfile, mode='a')
        start_time = DateTimeUtil.get_current_datetime()
        # Build the full set of candidate service codes from the input.
        svc_generator = SVCGenerator(svc_input, logger)
        logger.info("创建出所有可能查询码:%s" % len(svc_generator.target_svc_set))
        # Filter candidates against the local history of invalid codes,
        # yielding the target codes and the already-known codes.
        existed_svc = history_zip.find_file_regex(svc_generator.regex)
        svc_generator.generate_target_svc_batch(existed_svc,
                                                invalid_history_file_path)
        # Query the Dell API and turn the responses into entity objects.
        output_dell_asset_list = list([])
        if svc_generator.target_svc_set:
            batch = Batch(logger, configs)
            api_dell_asset_list = batch.begin(svc_generator.target_svc_set)
            output_dell_asset_list = api_dell_asset_list
            logger.info("从API中总共得到%s个结果" % (len(api_dell_asset_list)))
            logger.info("将实体类序列化到本地临时TXT文件")
            temp_text_files_path = DellAsset.serialize_txt_batch(
                api_dell_asset_list, temp_dir)
            logger.info("将序列化临时文件存到本地zip历史记录,总数:%s" % len(temp_text_files_path))
            history_zip.add_new_file_batch(temp_text_files_path)
            logger.info("删除临时 %s 个TXT文件" % len(temp_text_files_path))
            for file_path in temp_text_files_path:
                FileUtil.delete_file(file_path)
            logger.info("将API得到的实体类和历史记录实体类合并")
        else:
            logger.warn("目标查询码为空,仅从从历史记录中导出结果")
        # Append the entities stored in the history archive to the output.
        for svc in svc_generator.existed_svc_set:
            dell_asset_content = history_zip.get_member_content(
                file_name="%s.txt" % svc)
            output_dell_asset_list.append(
                DellAsset.deserialize_txt(dell_asset_content))
        logger.info("添加历史记录,总共得到%s个结果" % (len(output_dell_asset_list)))
        excel_output_path = WindowsUtil.convert_win_path(
            os.path.join(excel_dir, "%s.xlsx" % svc_generator.get_file_name()))
        DellAsset.save_as_excel_batch(output_dell_asset_list, excel_output_path)
        if FileUtil.is_path_existed(excel_output_path):
            logger.info("存为Excel文档成功")
            end_time = DateTimeUtil.get_current_datetime()
            logger.info("总用时 %s " % DateTimeUtil.datetime_diff(start_time,
                                                               end_time))
            logger.info("[查询结束] 总共%s个结果 保存在:%s"
                        % (len(output_dell_asset_list), excel_output_path))
        else:
            logger.error("[保存结果失败] %s" % excel_output_path)
    except Exception as e:
        # If anything fails, send an email report and attach the saved log.
        logger.error("[查询失败] 已发送报告 请等待解决")
        logger.error("%s\n%s" % (e, traceback.format_exc()))
        logger.save(log_file_path)
        email_api_key = configs["email_api_key"]
        email = Email(
            email_api_key,
            subject="[查询失败] %s %s" % (
                DateTimeUtil.get_current_datetime(is_date=True), svc_input))
        email.add_attachment(log_file_path)
        email.send(cc_mode=logger.has_error)