def download_system_logs():
    """Collect all Log records (newest first) into a plain-text report and
    send it back to the browser as a downloadable attachment.

    The report is written into a per-user temporary directory so concurrent
    downloads by different users do not clobber each other.
    """
    db_session = DBSession()
    logs = db_session.query(Log) \
        .order_by(Log.created_time.desc())

    # Build the report with a list + join instead of repeated string
    # concatenation (repeated += is quadratic in the number of records).
    parts = []
    for log in logs:
        parts.append(get_datetime_string(log.created_time) + ' UTC\n')
        parts.append(log.level + ':' + log.msg + '\n')
        if log.trace is not None:
            parts.append(log.trace + '\n')
        parts.append('-' * 70 + '\n')
    contents = ''.join(parts)

    # Create the destination directory for the report file.
    # (Previous comment claimed this was about an image-file size; it is not.)
    temp_user_dir = create_temp_user_directory(current_user.username)
    log_file_path = os.path.normpath(os.path.join(temp_user_dir, "system_logs"))
    create_directory(log_file_path)
    make_file_writable(log_file_path)

    # Context manager guarantees the file handle is closed even if write() fails.
    with open(os.path.join(log_file_path, 'system_logs'), 'w') as log_file:
        log_file.write(contents)

    return send_file(os.path.join(log_file_path, 'system_logs'), as_attachment=True)
def api_create_tar_job():
    """Create a CreateTarJob from the request arguments and return its id as JSON.

    Reads server/directory/tar-composition parameters from the query string,
    persists the job with status 'Job Submitted.' and answers
    {'status': 'OK', 'job_id': <id>}.
    """
    db_session = DBSession()
    # NOTE(review): 'form' is constructed but not validated/used below —
    # kept for compatibility; confirm whether validation was intended.
    form = CreateTarForm(request.form)

    server_id = request.args.get('server')
    server_directory = request.args.get('server_directory')
    source_tars = request.args.getlist('source_tars[]')
    contents = request.args.getlist('tar_contents[]')
    additional_packages = request.args.getlist('additional_packages[]')

    # BUG FIX: str.strip('.tar') removes any of the characters '.', 't',
    # 'a', 'r' from BOTH ends of the string (e.g. 'tart.tar' -> ''); it does
    # not remove the '.tar' extension.  Remove the suffix explicitly.
    new_tar_name = request.args.get('new_tar_name')
    if new_tar_name.endswith('.tar'):
        new_tar_name = new_tar_name[:-len('.tar')]

    create_tar_job = CreateTarJob(
        server_id=server_id,
        server_directory=server_directory,
        source_tars=','.join(source_tars),
        contents=','.join(contents),
        additional_packages=','.join(additional_packages),
        new_tar_name=new_tar_name,
        created_by=current_user.username,
        status='Job Submitted.')

    db_session.add(create_tar_job)
    db_session.commit()

    job_id = create_tar_job.id
    return jsonify({'status': 'OK', 'job_id': job_id})
def api_get_scheduled_download_jobs():
    """DataTables endpoint: return download jobs that are still scheduled.

    A DownloadJob whose status is None is considered 'scheduled'.  Implements
    the DataTables server-side protocol (global search, column ordering,
    paging) driven by DataTableParams parsed from the request.
    """
    dt_params = DataTableParams(request)
    db_session = DBSession()

    clauses = []
    if len(dt_params.search_value):
        # Apply the global search box to filename, scheduled time and creator.
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(DownloadJob.cco_filename.like(criteria))
        clauses.append(DownloadJob.scheduled_time.like(criteria))
        clauses.append(DownloadJob.created_by.like(criteria))

    query = db_session.query(DownloadJob)
    total_count = query.filter(DownloadJob.status == None).count()
    # NOTE(review): and_() wraps only the status test; the or_() of the
    # search clauses is a second filter argument, which SQLAlchemy
    # implicitly ANDs with the first.
    filtered_count = query.filter(and_(DownloadJob.status == None),
                                  or_(*clauses)).count()

    # Sortable column expressions in DataTables column order; the empty
    # string is a placeholder for a non-sortable column.
    columns = [getattr(DownloadJob.cco_filename, dt_params.sort_order)(),
               getattr(DownloadJob.scheduled_time, dt_params.sort_order)(),
               '',
               getattr(DownloadJob.created_by, dt_params.sort_order)()]

    download_jobs = query.order_by(columns[dt_params.column_order])\
        .filter(and_(DownloadJob.status == None), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()

    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response.update(get_download_job_json_dict(db_session, download_jobs))

    return jsonify(**response)
def api_get_session_logs(table):
    """Return the session log file list for a job record as DataTables JSON.

    :param table: one of 'install_job', 'install_job_history',
        'inventory_job_history'; the record id comes from the 'record_id'
        query argument.  Responds 404 when the record or its log directory
        cannot be found, or when the table name is unrecognized.
    """
    id = request.args.get("record_id")
    db_session = DBSession()

    # BUG FIX: initialize first — an unrecognized table name previously left
    # install_job undefined and the 'is None' check raised a NameError.
    install_job = None
    if table == 'install_job':
        install_job = db_session.query(InstallJob).filter(InstallJob.id == id).first()
    elif table == 'install_job_history':
        install_job = db_session.query(InstallJobHistory).filter(InstallJobHistory.id == id).first()
    elif table == 'inventory_job_history':
        install_job = db_session.query(InventoryJobHistory).filter(InventoryJobHistory.id == id).first()

    if install_job is None:
        abort(404)

    log_folder = install_job.session_log
    file_path = os.path.join(get_log_directory(), log_folder)

    if not os.path.isdir(file_path):
        abort(404)

    rows = []
    log_file_list = get_file_list(file_path)
    for file in log_file_list:
        row = dict()
        row['filepath'] = os.path.join(file_path, file)
        row['filename'] = file
        rows.append(row)

    return jsonify(**{'data': rows})
def user_create():
    """Render and process the 'create user' form.

    Access is gated by can_create_user(); a duplicate username is sent back
    to the form with duplicate_error set.
    """
    if not can_create_user(current_user):
        abort(401)

    form = UserForm(request.form)
    # Password is mandatory on creation: the Required validator is removed
    # globally for the edit flow, so re-attach it here.
    add_validator(form.password, Required)
    fill_user_privileges(form.privilege.choices)

    if request.method != 'POST' or not form.validate():
        # Initial GET (or failed validation): show the form, defaulting the
        # account to Active.
        form.active.data = True
        return render_template('user/edit.html', form=form)

    db_session = DBSession()
    if get_user(db_session, form.username.data) is not None:
        return render_template('user/edit.html', form=form, duplicate_error=True)

    new_user = User(username=form.username.data,
                    password=form.password.data,
                    privilege=form.privilege.data,
                    fullname=form.fullname.data,
                    email=form.email.data)
    new_user.preferences.append(Preferences())

    db_session.add(new_user)
    db_session.commit()

    return redirect(url_for('home'))
def api_get_hosts_by_region(region_id, role, software):
    """Return hosts in a region, optionally filtered by role and software.

    :param region_id: region primary key to filter on.
    :param role: comma-delimited role names; the token 'ALL' disables the filter.
    :param software: comma-delimited 'platform (version)' strings; 'ALL'
        disables the filter.
    """
    selected_roles = []
    selected_software = []

    if 'ALL' not in role:
        selected_roles = role.split(',')

    if 'ALL' not in software:
        selected_software = software.split(',')

    rows = []
    db_session = DBSession()

    hosts = db_session.query(Host).filter(Host.region_id == region_id). \
        order_by(Host.hostname.asc())

    for host in hosts:
        host_roles = [] if host.roles is None else host.roles.split(',')
        # FIX: the generator variable previously shadowed the 'role'
        # parameter; renamed for clarity (behavior unchanged).
        if not selected_roles or any(r in host_roles for r in selected_roles):
            if host.software_platform is not None and host.software_version is not None:
                host_platform_software = host.software_platform + ' (' + host.software_version + ')'
            else:
                host_platform_software = UNKNOWN

            if not selected_software or host_platform_software in selected_software:
                rows.append({'hostname': host.hostname,
                             'roles': host.roles,
                             'platform_software': host_platform_software})

    return jsonify(**{'data': rows})
def api_get_smu_details(smu_id):
    """Return the full detail record for a single SMU as DataTables JSON.

    An unknown smu_id yields an empty data list.
    """
    rows = []
    db_session = DBSession()

    smu_info = db_session.query(SMUInfo).filter(SMUInfo.id == smu_id).first()
    if smu_info is not None:
        rows.append({
            'id': smu_info.id,
            'name': smu_info.name,
            'status': smu_info.status,
            'type': smu_info.type,
            'posted_date': smu_info.posted_date,
            'ddts': smu_info.ddts,
            'description': smu_info.description,
            'functional_areas': smu_info.functional_areas,
            'impact': smu_info.impact,
            'package_bundles': smu_info.package_bundles,
            'compressed_image_size': str(smu_info.compressed_image_size),
            'uncompressed_image_size': str(smu_info.uncompressed_image_size),
            'prerequisites': smu_info.prerequisites,
            'supersedes': smu_info.supersedes,
            'superseded_by': smu_info.superseded_by,
            'composite_DDTS': smu_info.composite_DDTS,
            # Resolve the related SMU name lists to database ids for linking.
            'prerequisites_smu_ids': get_smu_ids(db_session, smu_info.prerequisites),
            'supersedes_smu_ids': get_smu_ids(db_session, smu_info.supersedes),
            'superseded_by_smu_ids': get_smu_ids(db_session, smu_info.superseded_by),
        })

    return jsonify(**{'data': rows})
def software_profile_edit(profile_name):
    """Edit an existing software profile (name, description, package list).

    404 when the profile does not exist; renaming onto an existing profile
    name is rejected back to the form with duplicate_error set.
    """
    db_session = DBSession()

    software_profile = get_software_profile(db_session, profile_name)
    if software_profile is None:
        abort(404)

    form = SoftwareProfileForm(request.form)
    server_dialog_form = ServerDialogForm(request.form)
    fill_servers(server_dialog_form.server_dialog_server.choices, get_server_list(db_session), False)

    if request.method == 'POST' and form.validate():
        # Renaming to a name that already belongs to another profile is an error.
        if profile_name != form.profile_name.data and \
                get_software_profile(db_session, form.profile_name.data) is not None:
            return render_template('conformance/profile_edit.html',
                                   form=form, server_dialog_form=server_dialog_form,
                                   system_option=SystemOption.get(db_session), duplicate_error=True)

        software_profile.name = form.profile_name.data
        software_profile.description = form.description.data
        # BUG FIX: a stray trailing comma previously made this assignment a
        # one-element tuple instead of the joined package string.
        software_profile.packages = ','.join([l for l in form.software_packages.data.splitlines() if l])

        db_session.commit()

        return redirect(url_for('conformance.home'))
    else:
        form.profile_name.data = software_profile.name
        form.description.data = software_profile.description
        if software_profile.packages is not None:
            form.software_packages.data = '\n'.join(software_profile.packages.split(','))

    return render_template('conformance/profile_edit.html',
                           form=form, server_dialog_form=server_dialog_form,
                           system_option=SystemOption.get(db_session))
def software_profile_create():
    """Create a new software profile from the submitted form.

    A duplicate profile name is bounced back to the edit template with
    duplicate_error set.
    """
    # if not can_create_user(current_user):
    #     abort(401)
    db_session = DBSession()

    form = SoftwareProfileForm(request.form)
    server_dialog_form = ServerDialogForm(request.form)
    fill_servers(server_dialog_form.server_dialog_server.choices, get_server_list(db_session), False)

    if request.method == 'POST' and form.validate():
        existing = get_software_profile(db_session, form.profile_name.data)
        if existing is not None:
            return render_template('conformance/profile_edit.html',
                                   form=form,
                                   system_option=SystemOption.get(db_session),
                                   duplicate_error=True)

        # Package list comes in one-per-line; store it comma-delimited.
        package_lines = [l for l in form.software_packages.data.splitlines() if l]
        new_profile = SoftwareProfile(name=form.profile_name.data,
                                      description=form.description.data,
                                      packages=','.join(package_lines),
                                      created_by=current_user.username)
        db_session.add(new_profile)
        db_session.commit()

        return redirect(url_for('conformance.home'))

    return render_template('conformance/profile_edit.html',
                           form=form,
                           server_dialog_form=server_dialog_form,
                           system_option=SystemOption.get(db_session))
def dispatch(self):
    """Run one dispatch cycle: process pending email jobs, then pending
    create-tar jobs, sharing a single database session.
    """
    db_session = DBSession()
    try:
        self.handle_email_jobs(db_session)
        self.handle_create_tar_jobs(db_session)
    finally:
        # Close the session even if a handler raises, so the connection is
        # returned to the pool (previously an exception leaked the session).
        db_session.close()
def command_profile_edit(profile_name):
    """Edit an existing custom command profile.

    404 when the profile does not exist; renaming onto an existing profile
    name is rejected back to the form with duplicate_error set.
    """
    db_session = DBSession()

    command_profile = get_command_profile(db_session, profile_name)
    if command_profile is None:
        abort(404)

    form = CustomCommandProfileForm(request.form)

    if request.method == 'POST' and form.validate():
        if profile_name != form.profile_name.data and \
                get_command_profile(db_session, form.profile_name.data) is not None:
            # BUG FIX: this template path was misspelled 'custom_commad/...',
            # which raised TemplateNotFound on the duplicate-name path.
            return render_template('custom_command/command_profile_edit.html',
                                   form=form, duplicate_error=True)

        command_profile.profile_name = form.profile_name.data
        # BUG FIX: a stray trailing comma previously made this assignment a
        # one-element tuple instead of the joined command string.
        command_profile.command_list = ','.join([l for l in form.command_list.data.splitlines() if l])

        db_session.commit()

        return redirect(url_for('custom_command.home'))
    else:
        form.profile_name.data = command_profile.profile_name
        if command_profile.command_list is not None:
            form.command_list.data = '\n'.join(command_profile.command_list.split(','))

    return render_template('custom_command/command_profile_edit.html', form=form)
def command_profile_create():
    """Create a new custom command profile from the submitted form.

    A duplicate profile name is bounced back to the form with
    duplicate_error set.
    """
    db_session = DBSession()
    form = CustomCommandProfileForm(request.form)

    if request.method != 'POST' or not form.validate():
        # Initial GET or failed validation: just show the form.
        return render_template('custom_command/command_profile_edit.html', form=form)

    if get_command_profile(db_session, form.profile_name.data) is not None:
        return render_template('custom_command/command_profile_edit.html',
                               form=form, duplicate_error=True)

    # Commands arrive one-per-line; store them comma-delimited.
    commands = [l for l in form.command_list.data.splitlines() if l]
    new_profile = CustomCommandProfile(profile_name=form.profile_name.data,
                                       command_list=','.join(commands),
                                       created_by=current_user.username)
    db_session.add(new_profile)
    db_session.commit()

    return redirect(url_for('custom_command.home'))
def start(self):
    """Best-effort execution of the statements in sql_statements.

    A statement that fails (e.g. a schema change that was already applied)
    is skipped rather than aborting the run.
    """
    db_session = DBSession()
    for sql in sql_statements:
        try:
            db_session.execute(sql)
        except Exception:
            # Deliberate best-effort: failures are ignored.  Narrowed from a
            # bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed.
            pass
def get_smu_info_from_cco(self, platform, release):
    """Fetch SMU metadata from CCO for platform/release and sync it to the DB.

    Loads the SMU XML via self.load(); if the database already holds a
    SMUMeta row with the same created_time only its retrieval_time is
    refreshed, otherwise the stale row is deleted and the new one stored.
    Exceptions are logged, not propagated.
    """
    save_to_db = True
    db_session = DBSession()
    platform_release = platform + '_' + release

    try:
        self.smu_meta = SMUMeta(platform_release=platform_release)
        # Load data from the SMU XML file
        self.load()

        # This can happen if the given platform and release is not valid.
        # The load() method calls get_smu_info_from_db and failed.
        if not self.is_valid:
            logger.error('get_smu_info_from_cco() hit exception, platform_release=' + platform_release)
            return

        db_smu_meta = db_session.query(SMUMeta).filter(SMUMeta.platform_release == platform_release).first()
        if db_smu_meta:
            if db_smu_meta.created_time == self.smu_meta.created_time:
                # Upstream data unchanged: keep the existing rows.
                save_to_db = False
            else:
                # Delete the existing smu_meta and smu_info for this platform and release
                db_session.delete(db_smu_meta)
                db_session.commit()

        if save_to_db:
            db_session.add(self.smu_meta)
        else:
            # Record that CCO was re-checked even though nothing changed.
            db_smu_meta.retrieval_time = datetime.datetime.utcnow()

        db_session.commit()
    except Exception:
        logger.exception('get_smu_info_from_cco() hit exception, platform_release=' + platform_release)
def api_get_distinct_host_regions(platform, software_versions):
    """Return the distinct regions containing hosts of the given platform.

    software_versions may equal to 'ALL' or multiple software versions
    (comma-delimited); versions other than 'ALL' narrow the host filter.
    """
    clauses = []
    db_session = DBSession()

    clauses.append(Host.software_platform == platform)

    if 'ALL' not in software_versions:
        clauses.append(Host.software_version.in_(software_versions.split(',')))

    region_ids = db_session.query(Host.region_id).filter(and_(*clauses)).distinct()

    # Change a list of tuples to a list
    region_ids_list = [region_id[0] for region_id in region_ids]

    rows = []
    # BUG FIX: previously tested is_empty() on the Query object (which is
    # always truthy) instead of on the materialized id list.
    if not is_empty(region_ids_list):
        regions = db_session.query(Region).filter(Region.id.in_(region_ids_list)). \
            order_by(Region.name.asc()).all()

        for region in regions:
            rows.append({'region_id': region.id, 'region_name': region.name})

    return jsonify(**{'data': rows})
def run(self):
    """Arm the daily inventory scheduler.

    Reads inventory_hour from SystemOption, then uses a sched scheduler with
    absolute times to fire self.scheduling() at the next occurrence of that
    hour (today if it has not passed, otherwise tomorrow).
    """
    db_session = DBSession()
    try:
        system_option = SystemOption.get(db_session)
        inventory_hour = system_option.inventory_hour
        db_session.close()

        # Build a scheduler object that will look at absolute times
        scheduler = sched.scheduler(time.time, time.sleep)
        current_hour = datetime.datetime.now().hour

        # Put task for today at the designated hour.
        daily_time = datetime.time(inventory_hour)

        # If the scheduled time already passed, schedule it for tomorrow
        # NOTE(review): the comparison is '>' not '>=', so during the
        # scheduled hour itself the job is still set for today (possibly in
        # the past within the hour) — confirm this is intended.
        if current_hour > inventory_hour:
            first_time = datetime.datetime.combine(datetime.datetime.now() + datetime.timedelta(days=1), daily_time)
        else:
            first_time = datetime.datetime.combine(datetime.datetime.now(), daily_time)

        scheduler.enterabs(time.mktime(first_time.timetuple()), 1,
                           self.scheduling, (scheduler, daily_time,))
        scheduler.run()
    except:
        logger.exception('InventoryManagerScheduler hit exception')
        db_session.close()
def api_get_distinct_host_roles(platform, software_versions, region_ids):
    """Return the distinct, sorted host roles for a software platform.

    software_versions may equal to 'ALL' or multiple software versions
    region_ids may equal to 'ALL' or multiple region ids
    """
    db_session = DBSession()

    # Build the filter incrementally; the token 'ALL' disables a criterion.
    clauses = [Host.software_platform == platform]
    if 'ALL' not in software_versions:
        clauses.append(Host.software_version.in_(software_versions.split(',')))
    if 'ALL' not in region_ids:
        clauses.append(Host.region_id.in_(region_ids.split(',')))

    host_roles = db_session.query(Host.roles).filter(and_(*clauses)).distinct()

    # Each result row is a one-element tuple; drop empty role strings.
    # Example: [u'PE Router', u'PE1,R0', u'PE1,PE4', u'PE2,R1', u'Core']
    roles_list = [entry[0] for entry in host_roles if not is_empty(entry[0])]

    # Collapse the comma-delimited role strings into individual role names.
    if is_empty(roles_list):
        roles_list = []
    else:
        roles_list = ",".join(roles_list).split(',')

    # De-duplicate and sort before shaping the JSON payload.
    rows = [{'role': r} for r in sorted(set(roles_list))]

    return jsonify(**{'data': rows})
def user_preferences():
    """Render and process the CCO preferences page for the current user.

    On POST, stores the CCO username (and password only when a new one was
    typed) and the checked platform/release exclusions, then redirects home.
    On GET, pre-fills the form from the stored preferences.
    """
    db_session = DBSession()
    form = PreferencesForm(request.form)
    user = get_user_by_id(db_session, current_user.id)

    if request.method == 'POST' and form.validate():
        user.preferences[0].cco_username = form.cco_username.data

        # An empty password field means "keep the password already on file".
        if len(form.cco_password.data) > 0:
            user.preferences[0].cco_password = form.cco_password.data

        # All the checked checkboxes (i.e. platforms and releases to exclude).
        values = request.form.getlist('check')
        excluded_platform_list = ','.join(values)

        preferences = Preferences.get(db_session, current_user.id)
        preferences.excluded_platforms_and_releases = excluded_platform_list

        db_session.commit()

        return redirect(url_for('home'))
    else:
        preferences = user.preferences[0]
        form.cco_username.data = preferences.cco_username

        # Never echo the stored password back; show a placeholder instead.
        if not is_empty(user.preferences[0].cco_password):
            form.password_placeholder = 'Use Password on File'
        else:
            form.password_placeholder = 'No Password Specified'

    return render_template('cco/preferences.html', form=form,
                           platforms_and_releases=get_platforms_and_releases_dict(db_session))
def dispatch(self):
    """Submit due install jobs for execution.

    Skips jobs that previously failed, jobs still waiting on a software
    download, and jobs whose declared dependency has not completed.
    All exceptions are deliberately suppressed; the session is always closed.
    """
    db_session = DBSession()
    try:
        # Check if Scheduled Installs are allowed to run.
        if not db_session.query(SystemOption).first().can_install:
            return

        # All jobs whose scheduled time has arrived, oldest first.
        install_jobs = db_session.query(InstallJob).filter(
            InstallJob.scheduled_time <= datetime.datetime.utcnow()).order_by(InstallJob.scheduled_time.asc()).all()

        download_job_key_dict = get_download_job_key_dict()

        if len(install_jobs) > 0:
            for install_job in install_jobs:
                if install_job.status != JobStatus.FAILED:
                    # If there is pending download, don't submit the install job
                    if self.is_pending_on_download(download_job_key_dict, install_job):
                        continue

                    # This install job has a dependency, check if the expected criteria is met
                    if install_job.dependency is not None:
                        dependency_completed = self.get_install_job_dependency_completed(db_session, install_job)
                        # If the dependency has not been completed, don't proceed
                        if len(dependency_completed) == 0:
                            continue

                    self.submit_job(InstallWorkUnit(install_job.host_id, install_job.id))
    except Exception:
        # print(traceback.format_exc())
        # Purpose ignore. Otherwise, it may generate continue exception
        pass
    finally:
        db_session.close()
def get_download_job_key_dict():
    """Map every DownloadJob to a composite key of user id, CCO filename,
    server id and server directory; used to detect pending downloads.
    """
    db_session = DBSession()
    download_jobs = db_session.query(DownloadJob).all()
    # The key is the plain concatenation of the four identifying fields.
    return {
        "{}{}{}{}".format(job.user_id, job.cco_filename,
                          job.server_id, job.server_directory): job
        for job in download_jobs
    }
def api_get_conformance_report(id):
    """DataTables endpoint: return the entries of one conformance report.

    Supports the DataTables server-side protocol: a global search over
    hostname/platform/version/conformance/package columns, per-column
    ordering and paging.  An unknown report id yields an empty payload.
    """
    rows = []
    dt_params = DataTableParams(request)
    db_session = DBSession()

    conformance_report = get_conformance_report_by_id(db_session, id)
    if not conformance_report:
        # Unknown report: answer with an empty but well-formed payload.
        response = dict()
        response['draw'] = dt_params.draw
        response['recordsTotal'] = 0
        response['recordsFiltered'] = 0
        response['data'] = rows
        return jsonify(**response)

    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(ConformanceReportEntry.hostname.like(criteria))
        clauses.append(ConformanceReportEntry.software_platform.like(criteria))
        clauses.append(ConformanceReportEntry.software_version.like(criteria))
        clauses.append(ConformanceReportEntry.conformed.like(criteria))
        clauses.append(ConformanceReportEntry.host_packages.like(criteria))
        clauses.append(ConformanceReportEntry.missing_packages.like(criteria))

    query = db_session.query(ConformanceReportEntry)

    total_count = query.filter(ConformanceReportEntry.conformance_report_id == id).count()
    # NOTE(review): and_() wraps only the report-id test; the or_() of the
    # search clauses is a second filter argument that SQLAlchemy ANDs in.
    filtered_count = query.filter(and_(ConformanceReportEntry.conformance_report_id == id),
                                  or_(*clauses)).count()

    # Sortable column expressions in DataTables column order; empty strings
    # are placeholders for non-sortable columns.
    columns = [getattr(ConformanceReportEntry.hostname, dt_params.sort_order)(),
               getattr(ConformanceReportEntry.software_platform, dt_params.sort_order)(),
               getattr(ConformanceReportEntry.software_version, dt_params.sort_order)(),
               '',
               '',
               getattr(ConformanceReportEntry.conformed, dt_params.sort_order)()]

    entries = query.order_by(columns[dt_params.column_order])\
        .filter(and_(ConformanceReportEntry.conformance_report_id == id), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()

    for entry in entries:
        row = dict()
        row['hostname'] = entry.hostname
        row['software_platform'] = entry.software_platform
        row['software_version'] = entry.software_version
        row['missing_packages'] = entry.missing_packages
        row['host_packages'] = entry.host_packages
        row['conformed'] = entry.conformed
        row['comments'] = entry.comments
        rows.append(row)

    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response['data'] = rows

    return jsonify(**response)
def start(self):
    """Best-effort execution of the statements in sql_statements.

    A statement that fails (e.g. a schema change that was already applied)
    is skipped rather than aborting the run.
    """
    db_session = DBSession()
    for sql in sql_statements:
        try:
            db_session.execute(sql)
        except Exception:
            # Deliberate best-effort: failures are ignored.  Narrowed from a
            # bare 'except:' so KeyboardInterrupt/SystemExit are not swallowed;
            # stale commented-out debug code removed.
            pass
def api_get_host_software_profile_list(id):
    """DataTables endpoint: list the hosts assigned to a software profile.

    Supports the DataTables server-side protocol (global search across
    hostname/region/roles/platform/software columns, column ordering and
    paging).  An unknown profile id yields an empty payload.
    """
    rows = []
    dt_params = DataTableParams(request)
    db_session = DBSession()

    software_profile = get_software_profile_by_id(db_session, id)
    if not software_profile:
        # Unknown profile: answer with an empty but well-formed payload.
        response = dict()
        response['draw'] = dt_params.draw
        response['recordsTotal'] = 0
        response['recordsFiltered'] = 0
        response['data'] = rows
        return jsonify(**response)

    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(Region.name.like(criteria))
        clauses.append(Host.roles.like(criteria))
        clauses.append(Host.platform.like(criteria))
        clauses.append(Host.software_platform.like(criteria))
        clauses.append(Host.software_version.like(criteria))

    # Join Region so its name is both searchable and sortable.
    query = db_session.query(Host).join(Region, Host.region_id == Region.id)

    total_count = query.filter(Host.software_profile_id == id).count()
    # NOTE(review): and_() wraps only the profile-id test; the or_() of the
    # search clauses is a second filter argument that SQLAlchemy ANDs in.
    filtered_count = query.filter(and_(Host.software_profile_id == id),
                                  or_(*clauses)).count()

    # Sortable column expressions in DataTables column order.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(Region.name, dt_params.sort_order)(),
               getattr(Host.roles, dt_params.sort_order)(),
               getattr(Host.platform, dt_params.sort_order)(),
               getattr(Host.software_platform, dt_params.sort_order)(),
               getattr(Host.software_version, dt_params.sort_order)()]

    hosts = query.order_by(columns[dt_params.column_order])\
        .filter(and_(Host.software_profile_id == id), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()

    for host in hosts:
        row = dict()
        row['hostname'] = host.hostname
        row['region'] = '' if host.region is None else host.region.name
        row['roles'] = host.roles
        row['platform'] = host.platform
        row['software_platform'] = host.software_platform
        row['software_version'] = host.software_version
        rows.append(row)

    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response['data'] = rows

    return jsonify(**response)
def get_progress():
    """Report the status of the CreateTarJob named by the 'job_id' query arg."""
    db_session = DBSession()
    requested_id = request.args.get('job_id')

    tar_job = db_session.query(CreateTarJob).filter(CreateTarJob.id == requested_id).first()
    if tar_job is None:
        logger.error('Unable to retrieve Create Tar Job: %s' % requested_id)
        return jsonify(status='Unable to retrieve job')

    return jsonify(status='OK', progress=tar_job.status)
def api_get_distinct_host_platforms():
    """Return the distinct, non-null software platforms across all hosts."""
    db_session = DBSession()
    platforms = db_session.query(Host.software_platform).order_by(Host.software_platform.asc()).distinct()
    # Each result row is a one-element tuple; skip NULL platforms.
    rows = [{'platform': entry[0]} for entry in platforms if entry[0] is not None]
    return jsonify(**{'data': rows})
def api_get_model_name_summary(region_id):
    """
    Return the model name, in use (count), available (count) summary
    datatable json data
    """
    session = DBSession()
    summary_rows = get_model_name_summary_query_results(session, region_id)
    session.close()
    return jsonify(**{'data': summary_rows})
def api_get_log_trace(log_id):
    """
    Returns the log trace of a particular log record.

    Responds 404 when no Log row matches log_id (previously this raised an
    AttributeError and produced a 500).
    """
    db_session = DBSession()
    log = db_session.query(Log).filter(Log.id == log_id).first()

    if log is None:
        abort(404)

    return jsonify(**{'data': [
        {'severity': log.level,
         'message': log.msg,
         'trace': log.trace,
         'created_time': log.created_time}
    ]})
def store_inventory(self, ctx, inventory_data, chassis_indices):
    """
    Store/update the processed inventory data in database
    :param ctx: context object
    :param inventory_data: parsed inventory data as a list of dictionaries
    :param chassis_indices: a list of index/indices of chassis inventory dictionary in inventory_data
    :return: None
    """
    if len(chassis_indices) == 0 or len(chassis_indices) > len(inventory_data):
        logger = get_db_session_logger(ctx.db_session)
        # NOTE(review): logger.exception() is called here outside any
        # 'except' block, so no traceback is attached — confirm .error()
        # was not intended.
        logger.exception('index/indices of chassis found in inventory output is out of range for host ' +
                         '{}.'.format(ctx.host.hostname))
        return

    # Assign the ordering or "position" of inventory in output from show inventory
    # to each inventory entry, but adjust the ordering so that chassis always have
    # negative position(s) (so as to mark corresponding inventory as chassis)
    # and non-chassis always have consecutive non-negative positions in ascending order,
    # It goes like this - if there is only one chassis, its position will be -1,
    # the non-chassis inventories will have positions starting from 0
    # If there are multiple chassis, for example 3 chassis, chassis 0 will have position -3,
    # chassis 1 will have position -2, chassis 2 will have position -1, non-chassis will
    # still have positions starting from 0
    chassis_position = 0 - len(chassis_indices)
    for chassis_idx in chassis_indices:
        inventory_data[chassis_idx]['position'] = chassis_position
        chassis_position += 1

    # Walk the entries and give every non-chassis entry a consecutive
    # position 0, 1, 2, ...; rack_number counts how many chassis indices
    # (assumed ascending) have been skipped so far.
    idx = 0
    position = 0
    rack_number = 0
    while idx < len(inventory_data):
        if rack_number < len(chassis_indices):
            if idx == chassis_indices[rack_number]:
                # Chassis entry: already positioned above, skip it.
                rack_number += 1
            else:
                inventory_data[idx]['position'] = position
                position += 1
        else:
            # All chassis indices consumed: remaining entries are non-chassis.
            inventory_data[idx]['position'] = position
            position += 1
        idx += 1

    db_session = DBSession()
    # this is necessary for now because somewhere in the thread, can be
    # anywhere in the code, the db_session was not closed - to be found out later.
    db_session.close()

    if len(ctx.host.host_inventory) > 0:
        # Host already has inventory rows: diff and update in place.
        self.compare_and_update(ctx, db_session, inventory_data)
    else:
        self.store_new_inventory(db_session, inventory_data, ctx.host.id)
    db_session.close()
    return
def refresh_all(cls):
    """
    Retrieves all the catalog data and SMU XML file data and updates the database.
    Returns True when the catalog was refreshed and committed; False when the
    catalog was empty or an error forced a rollback.
    """
    db_session = DBSession()

    catalog = SMUInfoLoader.get_catalog_from_cco()
    if len(catalog) > 0:
        system_option = SystemOption.get(db_session)
        try:
            # Remove all rows first
            db_session.query(CCOCatalog).delete()

            for platform in catalog:
                releases = catalog[platform]
                for release in releases:
                    cco_catalog = CCOCatalog(platform=platform,release=release)
                    db_session.add(cco_catalog)
                    # Instantiated for its side effect: loads and stores the
                    # SMU metadata for this platform/release.
                    SMUInfoLoader(platform, release)

            # Remember when CCO was last consulted successfully.
            system_option.cco_lookup_time = datetime.datetime.utcnow()
            db_session.commit()
            return True
        except Exception:
            logger.exception('refresh_all() hit exception')
            db_session.rollback()

    return False
def delete_custom_command_profile(profile_name):
    """Delete the named custom command profile; 404 when it does not exist."""
    db_session = DBSession()

    profile = get_command_profile(db_session, profile_name)
    if profile is None:
        abort(404)

    db_session.delete(profile)
    db_session.commit()

    return jsonify({'status': 'OK'})
def query_add_inventory():
    """
    Provide service for user to:
    1. Query an inventory based on serial number.
    2. Add/Edit the inventory with input serial number, model name(optional), and notes(optional).
    3. Delete the inventory with input serial number.
    Any user can query the inventory, but only Admin user can add, update or delete inventory.
    """
    sn_form = QueryInventoryBySerialNumberForm(request.form)
    update_inventory_form = UpdateInventoryForm(request.form)

    # Template context describing the queried inventory; stays at these
    # defaults until a serial-number lookup fills it in.
    inventory_data_fields = {
        'serial_number_submitted': None,
        'new_inventory': False,
        'inventory_name': None,
        'description': None,
        'hardware_revision': None,
        'hostname': '',
        'region_name': None,
        'chassis': None,
        'platform': None,
        'software': None,
        'last_successful_retrieval': None,
        'inventory_retrieval_status': None
    }
    error_msg = None
    success_msg = None

    if request.method == 'GET':
        init_query_add_inventory_forms(sn_form, update_inventory_form)
    elif request.method == 'POST':
        db_session = DBSession()
        # if user submitted the form with the serial number
        # then we display back all the information for the inventory with this serial number
        if sn_form.hidden_submit_sn.data == 'True':
            get_inventory_data_by_serial_number(db_session, sn_form, update_inventory_form,
                                                inventory_data_fields)
        # if user submitted the form with the updated inventory info
        # we update/create or delete the inventory
        else:
            if current_user.privilege != UserPrivilege.ADMIN:
                error_msg = "User not authorized to create, update or delete inventory."
            # there is restriction on front end - serial_number submitted cannot be empty string, double check here
            elif update_inventory_form.hidden_serial_number.data:
                inventory_entry = db_session.query(Inventory).filter(
                    Inventory.serial_number == update_inventory_form.hidden_serial_number.data).first()
                if update_inventory_form.hidden_action.data == "Update":
                    update_or_add_inventory(db_session,
                                            inventory_entry,
                                            update_inventory_form.hidden_serial_number.data,
                                            update_inventory_form.model_name.data,
                                            update_inventory_form.notes.data)
                elif update_inventory_form.hidden_action.data == "Delete":
                    success_msg, error_msg = delete_inventory(db_session, inventory_entry)
                else:
                    error_msg = "Unknown request is received."
            else:
                error_msg = "Failed to create inventory because serial number submitted is empty."

            if not error_msg:
                # clear the forms to indicate a successful inventory update/add action!
                init_query_add_inventory_forms(sn_form, update_inventory_form)
            else:
                # Re-run the lookup so the page redisplays the inventory
                # alongside the error message.
                sn_form.serial_number.data = update_inventory_form.hidden_serial_number.data
                get_inventory_data_by_serial_number(db_session, sn_form, update_inventory_form,
                                                    inventory_data_fields)

    return render_template('inventory/query_add_inventory.html', sn_form=sn_form,
                           update_inventory_form=update_inventory_form, success_msg=success_msg,
                           error_msg=error_msg, current_user=current_user, **inventory_data_fields)
def get_smu_info_from_cco(self, platform, release):
    """Fetch SMU metadata from CCO for platform/release and sync it to the DB.

    Loads the SMU XML via self.load(); if the database already holds a
    SMUMeta row with the same created_time only its retrieval_time is
    refreshed, otherwise the stale row is deleted and the new one stored.
    Exceptions are logged, not propagated.
    """
    save_to_db = True
    db_session = DBSession()
    platform_release = platform + '_' + release

    try:
        self.smu_meta = SMUMeta(platform_release=platform_release)
        # Load data from the SMU XML file
        self.load()

        # This can happen if the given platform and release is not valid.
        # The load() method calls get_smu_info_from_db and failed.
        if not self.is_valid:
            logger.error(
                'get_smu_info_from_cco() hit exception, platform_release=' + platform_release)
            return

        db_smu_meta = db_session.query(SMUMeta).filter(
            SMUMeta.platform_release == platform_release).first()
        if db_smu_meta:
            if db_smu_meta.created_time == self.smu_meta.created_time:
                # Upstream data unchanged: keep the existing rows.
                save_to_db = False
            else:
                # Delete the existing smu_meta and smu_info for this platform and release
                db_session.delete(db_smu_meta)
                db_session.commit()

        if save_to_db:
            db_session.add(self.smu_meta)
        else:
            # Record that CCO was re-checked even though nothing changed.
            db_smu_meta.retrieval_time = datetime.datetime.utcnow()

        db_session.commit()
    except Exception:
        logger.exception(
            'get_smu_info_from_cco() hit exception, platform_release=' + platform_release)
def api_create_install_request(request):
    """
    Install Action: Pre-Upgrade, Post-Upgrade, and Commit
    POST: http://localhost:5000/api/v1/install
    BODY:
        [ {'hostname': 'My Host',
           'install_action': 'Post-Upgrade',
           'scheduled_time': '05-02-2016 08:00 AM',
           'command_profile': 'Edge Devices',
           'dependency': 'Add'} ]

    Install Action: Add
    BODY:
        [ {'hostname': 'My Host',
           'install_action': 'Add',
           'scheduled_time': '05-02-2016 08:00 AM',
           'server_repository': 'My FTP Server',
           'software_packages': ['asr9k-px-5.3.3.CSCuz05961.pie', 'asr9k-px-5.3.3.CSCux89921.pie'],
           'dependency': 'Pre-Upgrade'} ]

    Install Action: Activate, Remove, Deactivate
    BODY:
        [ {'hostname': 'My Host',
           'install_action': 'Activate',
           'scheduled_time': '05-02-2016 08:00 AM',
           'software_packages': ['asr9k-px-5.3.3.CSCuz05961.pie', 'asr9k-px-5.3.3.CSCux89921.pie'],
           'dependency': '101'} ]

    RETURN:
        {"api_response": {
            "install_job_list": [ {"status": "SUCCESS", "hostname": "My Host", "id": 101},
                                  {"status": "FAILED", "hostname": "My Host 2",
                                   "status_message": "Unable to locate host"} ]
        }}

    Processing is two-phase: phase 1 validates every entry (nothing is
    written); phase 2 creates the jobs only when phase 1 found no error.
    """
    rows = []
    error_found = False
    db_session = DBSession()

    custom_command_profile_dict = get_custom_command_profile_name_to_id_dict(db_session)

    # ---------------------------- first phase is to attempt the data validation ---------------------------- #
    entries = []
    json_list = convert_json_request_to_list(request)
    for data in json_list:
        row = dict()
        try:
            validate_required_keys_in_dict(data, [KEY_INSTALL_ACTION])

            install_action = data[KEY_INSTALL_ACTION]
            if install_action not in supported_install_actions:
                raise ValueError("'{}' is an invalid install action.".format(install_action))

            # Per-action key requirements come from module-level tables.
            validate_acceptable_keys_in_dict(data, acceptable_keys)
            validate_required_keys_in_dict(data, required_keys_dict[install_action])

            hostname = data[KEY_HOSTNAME]
            host = get_host(db_session, hostname)
            if host is None:
                raise ValueError("'{}' is an invalid hostname.".format(data[KEY_HOSTNAME]))

            if KEY_SERVER_REPOSITORY in data.keys():
                server = get_server(db_session, data[KEY_SERVER_REPOSITORY])
                if server is None:
                    raise ValueError("'{}' is an invalid server repository.".format(
                        data[KEY_SERVER_REPOSITORY]))

            if KEY_CUSTOM_COMMAND_PROFILE in data.keys():
                custom_command_profile_names = convert_value_to_list(data, KEY_CUSTOM_COMMAND_PROFILE)
                for custom_command_profile_name in custom_command_profile_names:
                    custom_command_profile_id = custom_command_profile_dict.get(custom_command_profile_name)
                    if custom_command_profile_id is None:
                        raise ValueError("'{}' is an invalid custom command profile.".format(
                            custom_command_profile_name))

            if KEY_SOFTWARE_PACKAGES in data.keys() and is_empty(data[KEY_SOFTWARE_PACKAGES]):
                raise ValueError("Software packages when specified cannot be empty.")

            # Check time fields and validate their values.
            # No scheduled_time means "run now" (UTC); otherwise a utc_offset
            # is mandatory so the local time can be converted to UTC.
            if KEY_SCHEDULED_TIME not in data.keys():
                row[KEY_UTC_SCHEDULED_TIME] = datetime.utcnow()
            elif KEY_UTC_OFFSET not in data.keys():
                raise ValueError("Missing utc_offset. If scheduled_time is submitted, utc_offset is also required.")
            elif not verify_utc_offset(data[KEY_UTC_OFFSET]):
                raise ValueError("Invalid utc_offset: Must be in '<+|->dd:dd' format and be between -12:00 and +14:00.")
            else:
                try:
                    time = datetime.strptime(data[KEY_SCHEDULED_TIME], "%m-%d-%Y %I:%M %p")
                    row[KEY_UTC_SCHEDULED_TIME] = get_utc_time(time, data[KEY_UTC_OFFSET])
                except ValueError:
                    raise ValueError("Invalid scheduled_time: {} must be in 'mm-dd-yyyy hh:mm AM|PM' format.".format(
                        data[KEY_SCHEDULED_TIME]))

            # Handle duplicate entry. It is defined by the hostname and install_action pair.
            if (hostname, install_action) not in entries:
                entries.append((hostname, install_action))
            else:
                raise ValueError("More than one entry with the same hostname: '{}' and install_action: '{}'. "
                                 "Remove any duplicate and resubmit.".format(hostname, install_action))

        except Exception as e:
            row[RESPONSE_STATUS] = APIStatus.FAILED
            row[RESPONSE_STATUS_MESSAGE] = e.message
            error_found = True

        # Add the original key value pairs to the new array.
        for key in data.keys():
            row[key] = data[key]

        rows.append(row)
    # End of loop

    if error_found:
        # Mark every entry that did not itself fail as "not submitted" so the
        # caller can tell nothing was written, then strip the internal
        # UTC time before echoing the payload back.
        for row in rows:
            if RESPONSE_STATUS not in row.keys():
                row[RESPONSE_STATUS] = APIStatus.FAILED
                row[RESPONSE_STATUS_MESSAGE] = 'Not submitted. Check other jobs for error message.'
            if KEY_UTC_SCHEDULED_TIME in row.keys():
                row.pop(KEY_UTC_SCHEDULED_TIME)

        return jsonify(**{RESPONSE_ENVELOPE: {KEY_INSTALL_JOB_LIST: rows}}), HTTP_BAD_REQUEST

    # ---------------------------- Second phase is to attempt the job creation ---------------------------- #
    # Sort so that dependency-producing actions are created before the jobs
    # that implicitly depend on them (Python 2 cmp-style comparator).
    sorted_list = sorted(rows, cmp=get_key)

    rows = []
    error_found = False
    implicit_dependency_list = {}

    for install_request in sorted_list:
        row = dict()
        try:
            hostname = install_request[KEY_HOSTNAME]
            install_action = install_request[KEY_INSTALL_ACTION]

            row[KEY_INSTALL_ACTION] = install_action
            row[KEY_HOSTNAME] = hostname

            host_id = get_host(db_session, hostname).id
            utc_scheduled_time = install_request[KEY_UTC_SCHEDULED_TIME].strftime("%m/%d/%Y %I:%M %p")

            server_id = -1
            if KEY_SERVER_REPOSITORY in install_request.keys():
                server = get_server(db_session, install_request[KEY_SERVER_REPOSITORY])
                if server is not None:
                    server_id = server.id

            server_directory = ''
            if KEY_SERVER_DIRECTORY in install_request.keys():
                server_directory = install_request[KEY_SERVER_DIRECTORY]

            software_packages = []
            if KEY_SOFTWARE_PACKAGES in install_request.keys():
                software_packages = install_request[KEY_SOFTWARE_PACKAGES]

            custom_command_profile_ids = []
            if KEY_CUSTOM_COMMAND_PROFILE in install_request.keys():
                custom_command_profile_names = convert_value_to_list(install_request, KEY_CUSTOM_COMMAND_PROFILE)
                for custom_command_profile_name in custom_command_profile_names:
                    custom_command_profile_id = custom_command_profile_dict.get(custom_command_profile_name)
                    if custom_command_profile_id is not None:
                        custom_command_profile_ids.append(str(custom_command_profile_id))

            install_job = create_or_update_install_job(
                db_session,
                host_id=host_id,
                install_action=install_action,
                scheduled_time=utc_scheduled_time,
                software_packages=software_packages,
                server_id=server_id,
                server_directory=server_directory,
                custom_command_profile_ids=custom_command_profile_ids,
                dependency=get_dependency_id(db_session, implicit_dependency_list, install_request, host_id),
                created_by=g.api_user.username)

            row[KEY_ID] = install_job.id

            # Record jobs that later entries on the same host may implicitly
            # depend on (ordered install actions only).
            if install_action in ordered_install_actions:
                if hostname not in implicit_dependency_list:
                    implicit_dependency_list[hostname] = []
                implicit_dependency_list[hostname].append(
                    (install_job.id, install_action, install_request[KEY_UTC_SCHEDULED_TIME]))

            row[RESPONSE_STATUS] = APIStatus.SUCCESS

        except Exception as e:
            row[RESPONSE_STATUS] = APIStatus.FAILED
            row[RESPONSE_STATUS_MESSAGE] = e.message
            row[RESPONSE_TRACE] = traceback.format_exc()
            error_found = True

        rows.append(row)

    return jsonify(**{RESPONSE_ENVELOPE: {KEY_INSTALL_JOB_LIST: rows}}), \
        (HTTP_OK if not error_found else HTTP_MULTI_STATUS_ERROR)
def init():
    """Bootstrap the application's database: default user, system options,
    and encryption settings, committed in a single transaction."""
    session = DBSession()
    for initializer in (init_user, init_system_option, init_encrypt):
        initializer(session)
    session.commit()
def _collect_video_info(self, tid, simple_info, session: DBSession, collect_id=0):
    """Persist one video record, creating its uploader row first if absent.

    :param tid: channel id the video belongs to
    :param simple_info: dict with the video's summary fields
    :param session: active SQLAlchemy session
    :param collect_id: sequence number of this collection run
    """
    author_id = simple_info['author_id']

    # Ensure the uploader exists before referencing it from the video row.
    has_uploader = session.query(
        exists().where(database.UploaderInfo.mid == author_id)
    ).scalar()
    if not has_uploader:
        session.add(database.UploaderInfo(
            mid=author_id,
            name=simple_info['author_name'],
        ))
        session.commit()

    video_row = database.VideoInfo(
        aid=simple_info['id'],
        bvid=simple_info['bvid'],
        tid=tid,
        title=simple_info['name'],
        owner_id=author_id,
        collect_time=datetime.datetime.now(),
        collect_id=collect_id,
    )
    try:
        session.add(video_row)
        session.commit()
    except Exception:
        logger.error(f"向数据库中写入视频信息出错: {simple_info['name']}")
        session.rollback()
def load_user(user_id):
    """Hook for Flask-Login to load a User instance from a user ID."""
    return DBSession().query(User).get(user_id)
def api_host(hostname):
    """Return the JSON representation of the host with the given hostname."""
    session = DBSession()
    matched = get_host(session, hostname)
    return get_host_json([matched], request)
def check_if_email_notify_enabled():
    """Report (as JSON) whether system-wide email notification is enabled."""
    session = DBSession()
    option = SystemOption.get(session)
    session.close()
    return jsonify({'email_notify_enabled': option.enable_email_notify})
class Watcher_Thread(threading.Thread):
    """Worker thread that drains a queue of videos and records a stats
    snapshot (online viewers + cumulative counters) for each one.

    Each thread owns its own DB session and spider instance; threads share
    only the task queue.
    """

    # Per-instance attributes assigned in __init__.
    task_queue: Queue
    thread_id: int
    use_proxy: bool

    def __init__(self, thread_id: int, task_queue: Queue, use_proxy=False):
        threading.Thread.__init__(self)
        self.thread_id = thread_id
        self.task_queue = task_queue
        self.use_proxy = use_proxy
        self.session = DBSession()
        self.spider = VideoInfoSpider(use_proxy=use_proxy)

    def _watch_video(self, video):
        # Resolve the video's cid lazily on first contact; back off briefly
        # and give up on this item if the fetch fails.
        if not video.cid:
            try:
                video.cid = self.spider.get_cid(video.aid)
            except Exception as e:
                logger.error(f"T:{self.thread_id} 获取 {video.aid} cid出错:\n {e}")
                time.sleep(2)
                return
            # NOTE(review): placement inferred — appears to mark when the
            # first state fetch happened for this video; confirm indentation
            # against the original source.
            video.first_get_state_time = datetime.datetime.now()

        try:
            online_count, zid = self.spider.get_online_count_and_tid(
                video.aid, video.cid)
        except Exception as e:
            logger.error(f"T:{self.thread_id} 获取 {video.aid} 在线观看人数出错:\n {e}")
            time.sleep(10)
            return

        # First time we learn this video's zone: make sure the zone row
        # exists, then remember the zid on the video.
        if not video.zid:
            zone_exists = self.session.query(
                exists().where(ZoneInfo.zid == zid)).scalar()
            if not zone_exists:
                try:
                    new_zone = ZoneInfo(zid=zid, )
                    self.session.add(new_zone)
                    self.session.commit()
                except Exception as e:
                    logger.error(f"写入 {video.aid} zid 出错:\n")
                    self.session.rollback()
                    return
            video.zid = zid

        try:
            total_count = self.spider.get_total_count(video.aid)
        except Exception as e:
            logger.error(f"T: {self.thread_id} 获取 {video.aid} 当前总状态出错:\n{e}")
            time.sleep(10)
            return

        # Persist one VideoState snapshot; roll back on any DB failure.
        try:
            new_state = VideoState(
                aid=total_count['aid'],
                time=datetime.datetime.now(),
                online=online_count,
                view=total_count['view'],
                danmaku=total_count['danmaku'],
                reply=total_count['reply'],
                favorite=total_count['favorite'],
                coin=total_count['coin'],
                share=total_count['share'],
                like=total_count['like'],
                dislike=total_count['dislike'],
                now_rank=total_count['now_rank'],
                his_rank=total_count['his_rank'],
            )
            self.session.add(new_state)
            video.last_get_state_time = datetime.datetime.now()
            logger.info(
                f"T:{self.thread_id} {video.aid} {video.title} online: {online_count}, total: {new_state.view}"
            )
            self.session.commit()
        except Exception as e:
            logger.error(f"写入数据库出错\n {e}")
            self.session.rollback()

    def run(self):
        # Consume the queue until it is empty; a failed item is logged and
        # the loop continues with the next one.
        while True:
            if self.task_queue.empty():
                logger.info(
                    f"thread:{self.thread_id} Empty task queue, Done!Done!Done!"
                )
                break
            try:
                video = self.task_queue.get()
                self._watch_video(video)
            except Exception as e:
                logger.error(f"T:{self.thread_id} 爬取任务出错:{e}")
def api_import_inventory():
    """ API for importing inventory
        Note:
        1. If inventory already exists in db, and the model name has been
           discovered by CSM, we will not overwrite the model name with
           data from here.
        2. For data with duplicate serial numbers, only the first data entry
           will be created in db or used to update an existing inventory.
        return either status: OK with unimported_inventory: list of
               unimported data rows (for noting duplicated serial numbers)
               or status: errors in the imported data separated by comma's
    """
    # Only administrators may import inventory.
    if current_user.privilege != UserPrivilege.ADMIN:
        abort(401)

    importable_header = [HEADER_FIELD_SERIAL_NUMBER, HEADER_FIELD_MODEL_NAME, HEADER_FIELD_NOTES]
    general_notes = request.form['general_notes']
    data_list = request.form['data_list']

    db_session = DBSession()

    reader = csv.reader(data_list.splitlines(), delimiter=',')
    header_row = next(reader)

    # Check mandatory data fields
    error = []
    if HEADER_FIELD_SERIAL_NUMBER not in header_row:
        error.append('"serial_number" is missing in the header.')

    for header_field in header_row:
        if header_field not in importable_header:
            error.append('"' + header_field + '" is not a valid header field.')

    if error:
        return jsonify({'status': ','.join(error)})

    # Data rows start at line 2 (line 1 is the header).
    row = 2
    # already checked that HEADER_FIELD_SERIAL_NUMBER is in header
    serial_number_idx = header_row.index(HEADER_FIELD_SERIAL_NUMBER)
    data_list = list(reader)

    # Check if each row has the same number of data fields as the header
    for row_data in data_list:
        if len(row_data) > 0:
            if len(row_data) != len(header_row):
                error.append('line %d has wrong number of data fields.' % row)
            else:
                if not row_data[serial_number_idx]:
                    error.append('line %d missing serial number value.' % row)
        row += 1

    if error:
        return jsonify({'status': ','.join(error)})

    # Import the data
    unique_serial_numbers = set()
    unimported_inventory = []
    row = 1
    for data in data_list:
        row += 1
        if len(data) == 0:
            # Blank line in the pasted CSV - skip it.
            continue

        serial_number = ''
        model_name = ''
        notes = general_notes  # per-row notes override the general note
        for column in range(len(header_row)):
            header_field = header_row[column]
            data_field = data[column].strip()
            if header_field == HEADER_FIELD_SERIAL_NUMBER:
                serial_number = data_field
            elif header_field == HEADER_FIELD_MODEL_NAME:
                model_name = data_field
            elif header_field == HEADER_FIELD_NOTES and data_field:
                notes = data_field

        if serial_number:
            inventory_obj = db_session.query(Inventory).filter(
                Inventory.serial_number == serial_number).first()
            # only create/update inventory data if the serial number is unique among the imported data
            if serial_number not in unique_serial_numbers:
                # Deferred commit: all rows are committed in one transaction below.
                update_or_add_inventory(db_session, inventory_obj, serial_number,
                                        model_name, notes, commit=False)
                unique_serial_numbers.add(serial_number)
            else:
                unimported_inventory.append('line %d: ' % row + ','.join(data))
        else:
            return jsonify({'status': 'Serial number data field cannot be empty.'})

    db_session.commit()
    db_session.close()

    if unimported_inventory:
        return jsonify({'status': 'OK', 'unimported_inventory': unimported_inventory})
    return jsonify({'status': 'OK', 'unimported_inventory': []})
def logs():
    """Render the system log viewer page."""
    session = DBSession()
    return render_template('log.html', system_option=SystemOption.get(session))
def host_session_log(hostname, table, id):
    """
    This route is also used by mailer.py for email notification.

    Shows the session log(s) for one install/inventory job record.  When
    file_path points at a directory, the directory listing is paired with
    any matching '<name>.diff.html' files; when it points at a file, the
    file's contents are rendered inline.
    """
    db_session = DBSession()

    record = None
    doc_central_log_file_path = ''

    # Resolve the record from whichever job table was requested.
    if table == 'install_job':
        record = db_session.query(InstallJob).filter(InstallJob.id == id).first()
    elif table == 'install_job_history':
        record = db_session.query(InstallJobHistory).filter(InstallJobHistory.id == id).first()
        doc_central_log_file_path = get_doc_central_log_path(record)
    elif table == 'inventory_job_history':
        record = db_session.query(InventoryJobHistory).filter(InventoryJobHistory.id == id).first()

    if record is None:
        abort(404)

    file_path = request.args.get('file_path')
    log_file_path = get_log_directory() + file_path

    if not (os.path.isdir(log_file_path) or os.path.isfile(log_file_path)):
        abort(404)

    file_pairs = {}
    log_file_contents = ''

    file_suffix = '.diff.html'
    if os.path.isdir(log_file_path):
        # Returns all files under the requested directory
        log_file_list = get_file_list(log_file_path)
        diff_file_list = [filename for filename in log_file_list if file_suffix in filename]

        # Map each plain log file to its diff file (empty string when none).
        for filename in log_file_list:
            diff_file_path = ''
            if file_suffix not in filename:
                if filename + file_suffix in diff_file_list:
                    diff_file_path = os.path.join(file_path, filename + file_suffix)
                file_pairs[os.path.join(file_path, filename)] = diff_file_path

        file_pairs = collections.OrderedDict(sorted(file_pairs.items()))
    else:
        # latin-1 avoids decode errors on device output with odd bytes.
        with io.open(log_file_path, "rt", encoding='latin-1') as fo:
            log_file_contents = fo.read()

    return render_template('host/session_log.html', hostname=hostname, table=table,
                           record_id=id, file_pairs=file_pairs, log_file_contents=log_file_contents,
                           is_file=os.path.isfile(log_file_path),
                           doc_central_log_file_path=doc_central_log_file_path)
def __enter__(self):
    """Open a DB session for the lifetime of the with-block and expose it
    through a Users facade."""
    session = DBSession()
    self.db_session = session
    return Users(session)
def api_get_nonlocal_servers_by_region_name(region_name):
    """List non-local server repositories for the region with this name."""
    session = DBSession()
    return get_nonlocal_servers(session, get_region(session, region_name))
def api_get_nonlocal_servers_by_region(region_id):
    """List non-local server repositories for the region with this id."""
    session = DBSession()
    return get_nonlocal_servers(session, get_region_by_id(session, region_id))
def export_inventory_information():
    """Export the inventory search result to CSV, HTML or Excel format.

    Reads the search criteria from the posted ExportInventoryInformationForm,
    runs the available/in-use inventory queries, writes the report in the
    requested format, optionally queues an email with the file attached, and
    returns the file as a download.  Returns None (logging an error) on an
    unrecognized export format.
    """
    db_session = DBSession()
    export_results_form = ExportInventoryInformationForm(request.form)

    # Collect the search criteria; empty hidden fields become None/[].
    export_data = dict()
    export_data['export_format'] = export_results_form.export_format.data
    export_data['serial_number'] = export_results_form.hidden_serial_number.data \
        if export_results_form.hidden_serial_number.data != "" else None
    export_data['region_ids'] = export_results_form.hidden_region_ids.data.split(',') \
        if export_results_form.hidden_region_ids.data else []
    export_data['chassis_types'] = export_results_form.hidden_chassis_types.data.split(',') \
        if export_results_form.hidden_chassis_types.data else []
    export_data['software_versions'] = export_results_form.hidden_software_versions.data.split(',') \
        if export_results_form.hidden_software_versions.data else []
    export_data['model_names'] = export_results_form.hidden_model_names.data.split(',') \
        if export_results_form.hidden_model_names.data else []
    export_data['partial_model_names'] = export_results_form.hidden_partial_model_names.data.split(',') \
        if export_results_form.hidden_partial_model_names.data else []
    export_data['vid'] = export_results_form.hidden_vid.data \
        if export_results_form.hidden_vid.data != "" else None

    if export_data['region_ids']:
        region_names = db_session.query(Region.name).filter(
            Region.id.in_(map(int, export_data['region_ids']))).order_by(
            Region.name.asc()).all()
        # The query yields 1-tuples; flatten them.  (Was a side-effect-only
        # list comprehension calling .append().)
        export_data['region_names'] = [query_tuple[0] for query_tuple in region_names]
    else:
        export_data['region_names'] = []

    export_data['available_inventory_iter'] = query_available_inventory(
        db_session, export_data.get('serial_number'),
        export_data.get('model_names'), export_data.get('partial_model_names'),
        export_data.get('vid'))
    export_data['in_use_inventory_iter'] = query_in_use_inventory(db_session, export_data)
    export_data['user'] = current_user

    # Choose the writer that matches the requested output format.
    writer = None
    if export_data.get('export_format') == ExportInformationFormat.HTML:
        writer = ExportInventoryInfoHTMLWriter(**export_data)
    elif export_data.get('export_format') == ExportInformationFormat.MICROSOFT_EXCEL:
        writer = ExportInventoryInfoExcelWriter(**export_data)
    elif export_data.get('export_format') == ExportInformationFormat.CSV:
        writer = ExportInventoryInfoCSVWriter(**export_data)

    if writer:
        file_path = writer.write_report()

        if export_results_form.send_email.data:
            email_message = "<html><head></head><body>Please find in the attachment the inventory search results " \
                            "matching the following search criteria: "
            search_criteria_in_html = get_search_filter_in_html(export_data)
            if search_criteria_in_html:
                email_message += search_criteria_in_html + '</body></html>'
            else:
                email_message += ' None</body></html>'
            create_email_job_with_attachment_files(
                db_session, email_message, file_path,
                export_results_form.user_email.data)

        return send_file(file_path, as_attachment=True)

    logger.error('inventory: invalid export format "%s" chosen.' % export_data.get('export_format'))
    return
def api_host_list():
    """Return every host as JSON."""
    session = DBSession()
    return get_host_json(get_host_list(session), request)
def api_create_server_repositories(request):
    """Create or update server repositories from a JSON list.

    Example payload:
    [{
        "hostname": "Repository_1",
        "server_type": "TFTP",
        "tftp_server_path": "223.255.254.245",
        "home_directory": "/auto/tftp-sjc-users1"
    },{
        "hostname": "Repository_2",
        "server_type": "FTP",
        "server_address": "172.27.153.150",
        "home_directory": "/tftpboot"
    },{
        "hostname": "Repository_3",
        "server_type": "SFTP",
        "server_address": "nb-server3",
        "home_directory": "/auto/tftp-vista"
    }]

    Returns a per-entry status list; HTTP 207 when any entry failed.
    """
    rows = []
    db_session = DBSession()
    error_found = False

    json_list = convert_json_request_to_list(request)

    for data in json_list:
        row = dict()
        try:
            validate_required_keys_in_dict(data, [KEY_HOSTNAME, KEY_SERVER_TYPE])

            hostname = get_acceptable_string(data[KEY_HOSTNAME])
            row[KEY_HOSTNAME] = hostname

            if len(hostname) == 0:
                raise ValueError("Server repository name '{}' is not valid.".format(
                    data[KEY_HOSTNAME]))

            server_type = data.get(KEY_SERVER_TYPE)
            if server_type not in [ServerType.TFTP_SERVER, ServerType.FTP_SERVER,
                                   ServerType.SFTP_SERVER, ServerType.LOCAL_SERVER,
                                   ServerType.SCP_SERVER]:
                raise ValueError("'{}' is not a supported server type.".format(server_type))

            row[KEY_SERVER_TYPE] = server_type

            # NOTE(review): the required keys are validated even when updating
            # an existing repository, which prevents partial updates despite
            # the inherit-from-existing logic below - confirm whether this
            # check should apply only on creation.
            validate_required_keys_in_dict(data, required_keys_dict[server_type])
            validate_acceptable_keys_in_dict(data, acceptable_keys_dict[server_type])

            server = get_server(db_session, hostname)
            # (A second, identical required-keys check that ran only for new
            # repositories was removed: the unconditional check above already
            # performs it, so it was dead code.)

            # For each optional attribute, prefer the submitted value, then
            # fall back to the existing server's value when updating.
            server_url = data.get(server_url_dict[server_type])
            server_url = server_url if server_url is not None else \
                (None if server is None else server.server_url)

            server_directory = data.get(server_directory_dict[server_type])
            server_directory = server_directory if server_directory is not None else \
                (None if server is None else server.server_directory)

            vrf = data.get(KEY_VRF) if data.get(KEY_VRF) is not None else \
                (None if server is None else server.vrf)

            username = data.get(KEY_USERNAME) if data.get(KEY_USERNAME) is not None else \
                (None if server is None else server.username)

            password = data.get(KEY_PASSWORD) if data.get(KEY_PASSWORD) is not None else \
                (None if server is None else server.password)

            destination_on_host = data.get(KEY_DESTINATION_ON_HOST) if data.get(KEY_DESTINATION_ON_HOST) is not None else \
                (None if server is None else server.destination_on_host)

            create_or_update_server_repository(
                db_session,
                hostname=hostname,
                server_type=server_type,
                server_url=server_url,
                username=username,
                password=password,
                vrf=vrf,
                server_directory=server_directory,
                destination_on_host=destination_on_host,
                created_by=g.api_user.username,
                server=get_server(db_session, hostname))

            row[RESPONSE_STATUS] = APIStatus.SUCCESS

        except Exception as e:
            row[RESPONSE_STATUS] = APIStatus.FAILED
            row[RESPONSE_STATUS_MESSAGE] = e.message
            error_found = True

        rows.append(row)

    return jsonify(**{RESPONSE_ENVELOPE: {'server_repository_list': rows}}), \
        (HTTP_OK if not error_found else HTTP_MULTI_STATUS_ERROR)
def startCollect(self, detail=False):
    """Collect popular videos per channel and store them in the database.

    First ensures every channel exists in the DB, then walks each channel's
    hot-video list.  With detail=False only summary info is stored (via
    self._collect_video_info); with detail=True the full video record,
    its uploader and its zone are fetched and stored.
    """
    collect_id = 0  # sequence number shared by all videos of this run
    popular_spider = PopularSpider()
    video_info_spider = VideoInfoSpider()
    channels = popular_spider.get_all_channels()
    session = DBSession()
    logger.info("开始写入数据库...")

    # Pass 1: make sure every channel row exists.
    for channel in channels:
        try:
            channel_exists = session.query(
                exists().where(database.ChannelInfo.tid == channel['id'])
            ).scalar()
            if not channel_exists:
                new_channel = database.ChannelInfo(
                    tid=channel['id'],
                    name=channel['name'],
                    cover=channel['cover'],
                    subscribed_count=channel['subscribed_count'],
                    archive_count=channel['archive_count'],
                    featured_count=channel['featured_count'],
                )
                session.add(new_channel)
                session.commit()
        except Exception:
            logger.error("频道写入失败,继续下一个频道")
            session.rollback()

    # Pass 2: fetch each channel's hot videos and persist them.
    db_channels = session.query(database.ChannelInfo).all()
    for channel in db_channels:
        try:
            logger.info(f"开始爬取频道 {channel.tid}, {channel.name}"
                        f"top100 video")
            hot_videos = popular_spider.get_channel_hot_video(channel_id=channel.tid, video_num=20)
        except Exception:
            logger.error("频道获取失败,继续下一个频道")
            continue

        for video in hot_videos:
            # Iterate over all recommended videos.
            collect_id += 1

            # Summary-only mode: delegate and move on.
            if not detail:
                self._collect_video_info(channel.tid, video, session, collect_id)
                continue

            info = video_info_spider.get_video_info(video['id'])

            # Ensure the uploader row exists before the video references it.
            uploader_exists = session.query(
                exists().where(database.UploaderInfo.mid == info['owner']['mid'])
            ).scalar()
            if not uploader_exists:
                new_up = database.UploaderInfo(
                    mid=info['owner']['mid'],
                    name=info['owner']['name'],
                    face=info['owner']['face'],
                )
                session.add(new_up)
                session.commit()

            # Likewise ensure the zone row exists.
            zone_exists = session.query(
                exists().where(database.ZoneInfo.zid == info['tid'])
            ).scalar()
            if not zone_exists:
                new_zone = database.ZoneInfo(
                    zid=info['tid'],
                )
                session.add(new_zone)
                session.commit()

            new_video = database.VideoInfo(
                aid=info['aid'],
                bvid=info['bvid'],
                tid=channel.tid,
                zid=info['tid'],
                title=info['title'],
                pubdate=datetime.datetime.fromtimestamp(info['pubdate']),
                desc=info['desc'],
                attribute=info['attribute'],
                duration=info['duration'],
                owner_id=info['owner']['mid'],
                collect_time=datetime.datetime.now(),
                collect_id=collect_id,
            )
            try:
                session.add(new_video)
                session.commit()
            except Exception as e:
                logger.error(f"向数据库中写入视频信息出错: {info['title']}")
                session.rollback()

    session.close()
def get_smu_info_from_cco(self, platform, release):
    """Refresh SMU metadata for a platform/release from CCO.

    Saves the freshly loaded SMUMeta unless the DB already holds an
    identical copy (same created_time), in which case only the
    retrieval_time is refreshed.  Concurrent saves by another process are
    tolerated via flush + IntegrityError handling.
    """
    same_as_db = False
    db_session = DBSession()
    platform_release = platform + '_' + release
    try:
        self.smu_meta = SMUMeta(platform_release=platform_release)
        # Load data from the SMU XML file
        self.load()

        # This can happen if the given platform and release is not valid.
        # The load method calls get_smu_info_from_db and failed.
        if self.smu_meta is None:
            return

        db_smu_meta = db_session.query(SMUMeta).filter(
            SMUMeta.platform_release == platform + '_' + release).first()
        if db_smu_meta is not None:
            if db_smu_meta.created_time == self.smu_meta.created_time:
                # Downloaded data is identical to the stored copy.
                same_as_db = True
            else:
                # Delete the existing smu_meta and smu_info for this platform and release
                db_session.delete(db_smu_meta)

        if not same_as_db:
            db_session.add(self.smu_meta)
        else:
            # Only record when we last checked CCO.
            db_smu_meta.retrieval_time = datetime.datetime.utcnow()

        # Use Flush to detect concurrent saving condition.  It is
        # possible that another process may perform the same save.
        # If this happens, Duplicate Key may result.
        db_session.flush()
        db_session.commit()
    except IntegrityError:
        # Another process won the race and saved first; discard our copy.
        db_session.rollback()
    except Exception:
        db_session.rollback()
        logger.exception('get_smu_info_from_cco hit exception')
def load_user(user_id):
    """Look up the User with the given userid; returns None when absent."""
    session = DBSession()
    matched = session.query(User).filter(User.userid == user_id).first()
    session.close()
    return matched
def insert_or_update_user(user_id, fullname, username):
    """Create the user if missing; otherwise sync fullname/username when
    either has changed.  No write occurs when nothing changed."""
    session = DBSession()
    existing = session.query(User).get(user_id)
    if not existing:
        session.add(User(id=user_id, fullname=fullname, username=username))
        session.commit()
    elif (existing.fullname, existing.username) != (fullname, username):
        existing.fullname = fullname
        existing.username = username
        session.commit()
    session.close()
def home():
    """Admin console: edit SMTP and system-wide options.

    POST with valid forms persists the settings and redirects back here;
    otherwise the forms are populated from the stored values and rendered.
    Admin-only (401 for everyone else).
    """
    if current_user.privilege != UserPrivilege.ADMIN:
        abort(401)

    db_session = DBSession()

    smtp_form = SMTPForm(request.form)
    admin_console_form = AdminConsoleForm(request.form)

    smtp_server = get_smtp_server(db_session)
    system_option = SystemOption.get(db_session)

    fill_user_privileges(admin_console_form.ldap_default_user_privilege.choices)

    if request.method == 'POST' and \
            smtp_form.validate() and \
            admin_console_form.validate():

        # Create the SMTP record on first save.
        if smtp_server is None:
            smtp_server = SMTPServer()
            db_session.add(smtp_server)

        smtp_server.server = smtp_form.server.data
        smtp_server.server_port = smtp_form.server_port.data if len(smtp_form.server_port.data) > 0 else None
        smtp_server.sender = smtp_form.sender.data
        smtp_server.use_authentication = smtp_form.use_authentication.data
        smtp_server.username = smtp_form.username.data
        # Empty password field means "keep the password on file".
        if len(smtp_form.password.data) > 0:
            smtp_server.password = smtp_form.password.data
        smtp_server.secure_connection = smtp_form.secure_connection.data

        system_option.inventory_threads = admin_console_form.num_inventory_threads.data
        system_option.install_threads = admin_console_form.num_install_threads.data
        system_option.download_threads = admin_console_form.num_download_threads.data
        system_option.can_schedule = admin_console_form.can_schedule.data
        system_option.can_install = admin_console_form.can_install.data
        system_option.enable_email_notify = admin_console_form.enable_email_notify.data
        system_option.enable_inventory = admin_console_form.enable_inventory.data

        # The LDAP UI may be hidden if it is not supported.
        # In this case, the flag is not set.
        if not is_empty(admin_console_form.enable_ldap_auth.data):
            system_option.enable_ldap_auth = admin_console_form.enable_ldap_auth.data
            system_option.ldap_server_url = admin_console_form.ldap_server_url.data
            system_option.ldap_default_user_privilege = admin_console_form.ldap_default_user_privilege.data
            system_option.ldap_server_distinguished_names = admin_console_form.ldap_server_distinguished_names.data.strip()

        system_option.inventory_hour = admin_console_form.inventory_hour.data
        system_option.inventory_history_per_host = admin_console_form.inventory_history_per_host.data
        system_option.download_history_per_user = admin_console_form.download_history_per_user.data
        system_option.install_history_per_host = admin_console_form.install_history_per_host.data
        system_option.total_system_logs = admin_console_form.total_system_logs.data
        system_option.enable_default_host_authentication = admin_console_form.enable_default_host_authentication.data
        system_option.default_host_authentication_choice = admin_console_form.default_host_authentication_choice.data
        system_option.enable_cco_lookup = admin_console_form.enable_cco_lookup.data
        system_option.use_utc_timezone = admin_console_form.use_utc_timezone.data
        system_option.default_host_username = admin_console_form.default_host_username.data

        # Empty password field means "keep the password on file".
        if len(admin_console_form.default_host_password.data) > 0:
            system_option.default_host_password = admin_console_form.default_host_password.data

        system_option.enable_user_credential_for_host = admin_console_form.enable_user_credential_for_host.data

        db_session.commit()

        return redirect(url_for('home'))
    else:
        # GET (or failed validation): populate the forms from stored values.
        admin_console_form.num_inventory_threads.data = system_option.inventory_threads
        admin_console_form.num_install_threads.data = system_option.install_threads
        admin_console_form.num_download_threads.data = system_option.download_threads
        admin_console_form.can_schedule.data = system_option.can_schedule
        admin_console_form.can_install.data = system_option.can_install
        admin_console_form.enable_email_notify.data = system_option.enable_email_notify
        admin_console_form.enable_ldap_auth.data = system_option.enable_ldap_auth
        admin_console_form.ldap_server_url.data = system_option.ldap_server_url
        admin_console_form.ldap_default_user_privilege.data = system_option.ldap_default_user_privilege
        admin_console_form.ldap_server_distinguished_names.data = system_option.ldap_server_distinguished_names
        admin_console_form.enable_inventory.data = system_option.enable_inventory
        admin_console_form.inventory_hour.data = system_option.inventory_hour
        admin_console_form.inventory_history_per_host.data = system_option.inventory_history_per_host
        admin_console_form.download_history_per_user.data = system_option.download_history_per_user
        admin_console_form.install_history_per_host.data = system_option.install_history_per_host
        admin_console_form.total_system_logs.data = system_option.total_system_logs
        admin_console_form.enable_default_host_authentication.data = system_option.enable_default_host_authentication
        admin_console_form.default_host_authentication_choice.data = system_option.default_host_authentication_choice
        admin_console_form.default_host_username.data = system_option.default_host_username
        admin_console_form.enable_cco_lookup.data = system_option.enable_cco_lookup
        admin_console_form.use_utc_timezone.data = system_option.use_utc_timezone
        admin_console_form.cco_lookup_time.data = get_datetime_string(system_option.cco_lookup_time)
        admin_console_form.enable_user_credential_for_host.data = system_option.enable_user_credential_for_host

        # Never echo stored passwords back to the browser - show a
        # placeholder instead.
        if not is_empty(system_option.default_host_password):
            admin_console_form.default_host_password_placeholder = 'Use Password on File'
        else:
            admin_console_form.default_host_password_placeholder = 'No Password Specified'

        if smtp_server is not None:
            smtp_form.server.data = smtp_server.server
            smtp_form.server_port.data = smtp_server.server_port
            smtp_form.sender.data = smtp_server.sender
            smtp_form.use_authentication.data = smtp_server.use_authentication
            smtp_form.username.data = smtp_server.username
            smtp_form.secure_connection.data = smtp_server.secure_connection

            if not is_empty(smtp_server.password):
                smtp_form.password_placeholder = 'Use Password on File'
            else:
                smtp_form.password_placeholder = 'No Password Specified'

    return render_template('admin/index.html',
                           admin_console_form=admin_console_form,
                           smtp_form=smtp_form,
                           system_option=SystemOption.get(db_session),
                           is_ldap_supported=is_ldap_supported())
def delete_chat_or_do_nothing(chat_id):
    """Delete a chat and its messages, but only when the chat exists and is
    disabled; otherwise leave everything untouched.  Returns a status text."""
    session = DBSession()
    chat = session.query(Chat).get(chat_id)
    if chat and not chat.enable:
        session.delete(chat)
        session.commit()
        for message in session.query(Message).filter(Message.from_chat == chat_id):
            session.delete(message)
        session.commit()
        msg_text = '成功删除相关记录'
    else:
        msg_text = '此前未停用或未启用'
    session.close()
    return msg_text
def host_edit(hostname):
    """Flask view: edit an existing host record.

    GET (or failed POST validation) renders the edit form pre-filled from the
    stored host; a valid POST persists the changes via create_or_update_host
    and redirects back to the caller's return URL (or 'home').

    Args:
        hostname: the current hostname used to look up the Host row.

    Aborts:
        404 if no host with this hostname exists; 401 if the current user
        lacks edit permission on a POST.
    """
    db_session = DBSession()
    host = get_host(db_session, hostname)
    if host is None:
        abort(404)

    form = HostForm(request.form)
    # Populate the select-field choices from the database.
    fill_jump_hosts(db_session, form.jump_host.choices)
    fill_regions(db_session, form.region.choices)
    fill_software_profiles(db_session, form.software_profile.choices)

    if request.method == 'POST' and form.validate():
        if not can_edit(current_user):
            abort(401)

        # Editing a hostname which has already existed in the database.
        if hostname != form.hostname.data and get_host(
                db_session, form.hostname.data) is not None:
            return render_template('host/edit.html', form=form, duplicate_error=True)

        create_or_update_host(
            db_session=db_session,
            hostname=form.hostname.data,
            region_id=form.region.data,
            location=form.location.data,
            roles=form.roles.data,
            software_profile_id=form.software_profile.data,
            connection_type=form.connection_type.data,
            host_or_ip=form.host_or_ip.data,
            username=form.username.data,
            # An empty password field means "keep the password on file" —
            # fall back to the stored connection_param values.
            password=form.password.data if len(form.password.data) > 0
                else host.connection_param[0].password,
            enable_password=form.enable_password.data if len(form.enable_password.data) > 0
                else host.connection_param[0].enable_password,
            port_number=form.port_number.data,
            jump_host_id=form.jump_host.data,
            created_by=current_user.username,
            host=host)

        # Redirect to wherever the user came from; default to the home page.
        return_url = get_return_url(request, 'home')
        if return_url is None:
            return redirect(url_for('home'))
        else:
            return redirect(url_for(return_url, hostname=hostname))
    else:
        # Assign the values to form fields
        # NOTE(review): this branch also runs on a POST that fails
        # validation, so user input is replaced with the stored values.
        form.hostname.data = host.hostname
        form.region.data = host.region_id
        form.software_profile.data = host.software_profile_id
        form.location.data = host.location
        form.roles.data = host.roles
        form.host_or_ip.data = host.connection_param[0].host_or_ip
        form.username.data = host.connection_param[0].username
        form.jump_host.data = host.connection_param[0].jump_host_id
        form.connection_type.data = host.connection_param[0].connection_type
        form.port_number.data = host.connection_param[0].port_number

        # Never echo stored passwords back to the browser — show a
        # placeholder indicating whether one is on file.
        if not is_empty(host.connection_param[0].password):
            form.password_placeholder = 'Use Password on File'
        else:
            form.password_placeholder = 'No Password Specified'

        if not is_empty(host.connection_param[0].enable_password):
            form.enable_password_placeholder = 'Use Password on File'
        else:
            form.enable_password_placeholder = 'No Password Specified'

        return render_template('host/edit.html', form=form)
def shutdown_session(exception=None):
    """Teardown hook: release the scoped database session.

    Args:
        exception: the exception that ended the request/app context, if any
            (required by Flask's teardown signature; unused here).
    """
    DBSession().close()
# NOTE(review): the statements down to the first "logger = ..." line are the
# tail of a logging.Handler.emit() override whose "def" and enclosing class
# (LogHandler) lie outside this chunk; the layout here is a best-effort
# reconstruction of a collapsed source line — confirm against the full file.
# Lazy %-style interpolation: only format when the record carried arguments.
if len(args) >= 1:
    msg = msg % args
# Persist the record as a Log row: level name, optional traceback text, the
# formatted message, and a UTC timestamp, written through the handler's own
# database session.
log = Log(level=record.__dict__['levelname'], trace=trace, msg=msg,
          created_time=datetime.datetime.utcnow())
self.db_session.add(log)
self.db_session.commit()


# Module-level application logger: DEBUG and above are persisted to the
# database through a LogHandler bound to a fresh session.
logger = logging.getLogger('logger')
logger.setLevel(logging.DEBUG)
logger.addHandler(LogHandler(DBSession()))


def get_db_session_logger(db_session):
    """Return a logger dedicated to the given db_session.

    This is necessary especially if the db_session is from a different
    process address space: each session gets its own named logger whose
    LogHandler writes through that session.
    """
    session_logger = logging.getLogger('session_logger_%s' % db_session.hash_key)
    # logging.getLogger returns the same object for the same name, so guard
    # with a one-shot flag to avoid attaching a duplicate handler (which
    # would double-write every record).
    if not hasattr(session_logger, 'initialized'):
        session_logger.setLevel(logging.DEBUG)
        session_logger.addHandler(LogHandler(db_session))
        session_logger.initialized = True
    return session_logger
def init_system_option():
    """Seed the SystemOption table with a default row when it is empty.

    Idempotent: a subsequent call finds an existing row and does nothing.
    """
    db_session = DBSession()
    if db_session.query(SystemOption).count() > 0:
        return
    db_session.add(SystemOption())
    db_session.commit()