def __get_all_keys(self):
    """Return a template dict of every notification key with its default.

    Timestamps default to the current time (``self.now``), agent/tag
    lists default to empty, ``AllAgents`` to ``'false'``, everything
    else to ``None``.
    """
    defaults = {
        NotificationKeys.NotificationId: None,
        NotificationKeys.NotificationType: None,
        NotificationKeys.RuleName: None,
        NotificationKeys.RuleDescription: None,
        NotificationKeys.CreatedBy: None,
        NotificationKeys.CreatedTime: r.epoch_time(self.now),
        NotificationKeys.ModifiedBy: None,
        NotificationKeys.ModifiedTime: r.epoch_time(self.now),
        NotificationKeys.Plugin: None,
        NotificationKeys.User: None,
        NotificationKeys.Group: None,
        NotificationKeys.AllAgents: 'false',
        NotificationKeys.Agents: [],
        NotificationKeys.Tags: [],
        NotificationKeys.CustomerName: None,
        NotificationKeys.AppThreshold: None,
        NotificationKeys.RebootThreshold: None,
        NotificationKeys.ShutdownThreshold: None,
        NotificationKeys.CpuThreshold: None,
        NotificationKeys.MemThreshold: None,
        NotificationKeys.FileSystemThreshold: None,
        NotificationKeys.FileSystem: None,
    }
    return defaults
def __get_all_keys(self):
    """Build the full notification-key template with default values.

    Most keys default to ``None``; the time stamps are set to ``self.now``
    as ReQL epoch terms, the agent/tag lists start empty, and
    ``AllAgents`` starts as the string ``'false'``.
    """
    # Bulk-default the plain None-valued keys first.
    template = {
        key: None
        for key in (
            NotificationKeys.NotificationId,
            NotificationKeys.NotificationType,
            NotificationKeys.RuleName,
            NotificationKeys.RuleDescription,
            NotificationKeys.CreatedBy,
            NotificationKeys.ModifiedBy,
            NotificationKeys.Plugin,
            NotificationKeys.User,
            NotificationKeys.Group,
            NotificationKeys.CustomerName,
            NotificationKeys.AppThreshold,
            NotificationKeys.RebootThreshold,
            NotificationKeys.ShutdownThreshold,
            NotificationKeys.CpuThreshold,
            NotificationKeys.MemThreshold,
            NotificationKeys.FileSystemThreshold,
            NotificationKeys.FileSystem,
        )
    }
    # Keys whose defaults are not None.
    template[NotificationKeys.CreatedTime] = r.epoch_time(self.now)
    template[NotificationKeys.ModifiedTime] = r.epoch_time(self.now)
    template[NotificationKeys.AllAgents] = 'false'
    template[NotificationKeys.Agents] = []
    template[NotificationKeys.Tags] = []
    return template
def insert_into_agent_queue(operation, conn=None):
    """Insert data into the agent_queue
    DO NOT CALL DIRECTLY

    Args:
        operation (list|dict): operation data

    Basic Usage:
        >>> from vFense.queue._db import insert_into_agent_queue
        >>> operation = [{'operation': 'data'}]
        >>> insert_into_agent_queue(operation)

    Returns:
        Tuple (status_code, count, error, generated ids)
        >>> (2001, 1, None, [])
    """
    result = {}
    try:
        # Convert the three timestamp fields to ReQL epoch terms in place.
        for time_key in (AgentQueueKey.CreatedTime,
                         AgentQueueKey.ServerQueueTTL,
                         AgentQueueKey.AgentQueueTTL):
            operation[time_key] = r.epoch_time(operation[time_key])
        result = (
            r.table(QueueCollections.Agent)
            .insert(operation)
            .run(conn)
        )
    except Exception as e:
        # Best-effort: on failure the empty dict is returned.
        logger.exception(e)
    return result
def add_or_update_applications(collection=AppCollections.AppsPerAgent,
                               pkg_list=None, delete_afterwards=True,
                               conn=None):
    """Upsert a list of application documents and optionally prune stale ones.

    Each package is stamped with the current time, upserted into
    ``collection``, and — when ``delete_afterwards`` is True — any older
    documents for the same agent are deleted afterwards.

    Args:
        collection (str): Table to write to.
        pkg_list (list|None): Package dicts to upsert; defaults to empty.
        delete_afterwards (bool): Remove documents for the same agent whose
            ``last_modified_time`` predates this run.
        conn: RethinkDB connection.

    Returns:
        dict: {'pass', 'inserted', 'replaced', 'deleted', 'pkg_count'}
    """
    # BUG FIX: was a mutable default argument (pkg_list=[]), shared
    # across calls.
    if pkg_list is None:
        pkg_list = []
    # BUG FIX: 'completed' was initialized False and never updated, so
    # 'pass' was always False. It now reports whether all ops succeeded.
    completed = True
    inserted_count = 0
    replaced_count = 0
    deleted_count = 0
    pkg_count = len(pkg_list)
    last_modified_time = mktime(datetime.now().timetuple())
    for pkg in pkg_list:
        pkg['last_modified_time'] = r.epoch_time(last_modified_time)
        try:
            updated = (
                r
                .table(collection)
                .insert(pkg, upsert=True)
                .run(conn)
            )
            logger.info(updated)
            inserted_count += updated['inserted']
            replaced_count += updated['replaced']
        except Exception as e:
            completed = False
            logger.exception(e)
        try:
            if delete_afterwards:
                # Anything for this agent not touched in this run is stale.
                deleted = (
                    r
                    .table(collection)
                    .get_all(
                        pkg[AppsPerAgentKey.AgentId],
                        index=AppsPerAgentIndexes.AgentId
                    )
                    .filter(
                        r.row['last_modified_time'] <
                        r.epoch_time(last_modified_time)
                    )
                    .delete()
                    .run(conn)
                )
                deleted_count += deleted['deleted']
        except Exception as e:
            completed = False
            logger.exception(e)
    return {
        'pass': completed,
        'inserted': inserted_count,
        'replaced': replaced_count,
        'deleted': deleted_count,
        'pkg_count': pkg_count,
    }
def add_custom_app_to_agents(username, customer_name, uri, method,
                             file_data, agent_id=None, app_id=None):
    """Associate a custom application with one or more agents.

    Exactly one of ``agent_id``/``app_id`` selects the behavior:
      * only ``app_id``: register the app with every agent whose OS code
        matches the app.
      * only ``agent_id``: register every matching custom app with that
        agent.

    Args:
        username, uri, method: request context (not used directly here).
        customer_name (str): customer scope for agent/app lookups.
        file_data: file metadata to register for the app.
        agent_id (str|None): target agent.
        app_id (str|None): target application.
    """
    if app_id and not agent_id:
        app_info = fetch_app_data(
            app_id, collection=AppCollections.CustomApps
        )
        agent_ids = get_all_agent_ids(
            customer_name, agent_os=app_info[AgentKey.OsCode]
        )
        for agentid in agent_ids:
            # BUG FIX: previously passed agent_id (None in this branch)
            # instead of the loop variable agentid.
            add_file_data(app_id, file_data, agentid)
            agent_info_to_insert = {
                CustomAppsPerAgentKey.AgentId: agentid,
                CustomAppsPerAgentKey.AppId: app_id,
                CustomAppsPerAgentKey.Status: CommonAppKeys.AVAILABLE,
                CustomAppsPerAgentKey.CustomerName: customer_name,
                CustomAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
            }
            insert_app_data(
                agent_info_to_insert,
                collection=AppCollections.CustomAppsPerAgent
            )
    if agent_id and not app_id:
        agent_info = get_agent_info(agent_id)
        apps_info = fetch_apps_data_by_os_code(
            agent_info[AgentKey.OsCode], customer_name,
            collection=AppCollections.CustomApps
        )
        for app_info in apps_info:
            app_id = app_info.get(CustomAppsKey.AppId)
            file_data = fetch_file_data(app_id)
            add_file_data(app_id, file_data, agent_id)
            agent_info_to_insert = {
                CustomAppsPerAgentKey.AgentId: agent_id,
                CustomAppsPerAgentKey.AppId: app_id,
                CustomAppsPerAgentKey.Status: CommonAppKeys.AVAILABLE,
                CustomAppsPerAgentKey.CustomerName: customer_name,
                CustomAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
            }
            insert_app_data(
                agent_info_to_insert,
                collection=AppCollections.CustomAppsPerAgent
            )
def add_custom_app_to_agents(username, customer_name, uri, method,
                             file_data, agent_id=None, app_id=None):
    """Link a custom app to agents (by app) or custom apps to an agent.

    Behavior is driven by which id is supplied:
      * ``app_id`` only: the app is pushed to all agents matching its
        OS code within ``customer_name``.
      * ``agent_id`` only: every custom app matching the agent's OS code
        is pushed to that agent.

    Args:
        username, uri, method: request context (unused here).
        customer_name (str): customer scope for lookups.
        file_data: file metadata for the app.
        agent_id (str|None): target agent id.
        app_id (str|None): target app id.
    """
    if app_id and not agent_id:
        app_info = (
            fetch_app_data(
                app_id,
                collection=AppCollections.CustomApps
            )
        )
        agent_ids = get_all_agent_ids(
            customer_name, agent_os=app_info[AgentKey.OsCode]
        )
        if len(agent_ids) > 0:
            for agentid in agent_ids:
                # BUG FIX: was add_file_data(app_id, file_data, agent_id);
                # agent_id is None in this branch, so the file data was
                # never tied to the agent being processed.
                add_file_data(app_id, file_data, agentid)
                agent_info_to_insert = {
                    CustomAppsPerAgentKey.AgentId: agentid,
                    CustomAppsPerAgentKey.AppId: app_id,
                    CustomAppsPerAgentKey.Status: CommonAppKeys.AVAILABLE,
                    CustomAppsPerAgentKey.CustomerName: customer_name,
                    CustomAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
                }
                insert_app_data(
                    agent_info_to_insert,
                    collection=AppCollections.CustomAppsPerAgent
                )
    if agent_id and not app_id:
        agent_info = get_agent_info(agent_id)
        apps_info = fetch_apps_data_by_os_code(
            agent_info[AgentKey.OsCode],
            customer_name,
            collection=AppCollections.CustomApps
        )
        for app_info in apps_info:
            app_id = app_info.get(CustomAppsKey.AppId)
            file_data = fetch_file_data(app_id)
            add_file_data(app_id, file_data, agent_id)
            agent_info_to_insert = {
                CustomAppsPerAgentKey.AgentId: agent_id,
                CustomAppsPerAgentKey.AppId: app_id,
                CustomAppsPerAgentKey.Status: CommonAppKeys.AVAILABLE,
                CustomAppsPerAgentKey.CustomerName: customer_name,
                CustomAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
            }
            insert_app_data(
                agent_info_to_insert,
                collection=AppCollections.CustomAppsPerAgent
            )
def add_supported_app_to_agents(username, customer_name, uri, method,
                                agent_id=None):
    """Register every supported app matching an agent's OS with that agent.

    No-op unless ``agent_id`` is supplied. Each matching app has its file
    data attached to the agent and a per-agent record inserted with
    AVAILABLE status and a zero install date.
    """
    if not agent_id:
        return
    agent_info = get_agent_info(agent_id)
    apps_info = fetch_apps_data_by_os_code(
        agent_info[AgentKey.OsCode], customer_name,
        collection=AppCollections.SupportedApps,
    )
    for app_info in apps_info:
        app_id = app_info.get(SupportedAppsKey.AppId)
        file_data = fetch_file_data(app_id)
        add_file_data(app_id, file_data, agent_id)
        per_agent_record = {
            SupportedAppsPerAgentKey.AgentId: agent_id,
            SupportedAppsPerAgentKey.AppId: app_id,
            SupportedAppsPerAgentKey.Status: AVAILABLE,
            SupportedAppsPerAgentKey.CustomerName: customer_name,
            SupportedAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
        }
        insert_app_data(
            per_agent_record,
            collection=AppCollections.SupportedAppsPerAgent
        )
def _set_app_per_node_parameters(self, app):
    """Stamp agent-specific fields onto an app document.

    Sets the agent id and OS code from this instance, normalizes the
    vendor severity, and converts the release/install dates to ReQL
    epoch terms. Mutates *app* in place and returns it.
    """
    app[AppsPerAgentKey.AgentId] = self.agent_id
    app[AppsKey.OsCode] = self.os_code
    severity = get_proper_severity(app[AppsKey.VendorSeverity])
    app[AppsKey.RvSeverity] = severity
    app[AppsKey.ReleaseDate] = r.epoch_time(app[AppsKey.ReleaseDate])
    app[AppsPerAgentKey.InstallDate] = r.epoch_time(
        app[AppsPerAgentKey.InstallDate]
    )
    return app
def __init__(self):
    """Record the current time as a ReQL epoch and reset search caches."""
    now = mktime(datetime.now().timetuple())
    self.last_modified_time = r.epoch_time(now)
    # Caches for repeated OS-code searches.
    self.previous_os_code_search = None
    self.previous_agent_list = None
def add_supported_app_to_agents(username, customer_name, uri, method,
                                agent_id=None):
    """Attach all OS-matching supported apps to the given agent.

    Does nothing when ``agent_id`` is not provided. For each app that
    matches the agent's OS code, the app's file data is registered for
    the agent and a per-agent record is inserted (AVAILABLE status,
    install date 0).
    """
    if agent_id:
        agent_info = get_agent_info(agent_id)
        apps_info = fetch_apps_data_by_os_code(
            agent_info[AgentKey.OsCode],
            customer_name,
            collection=AppCollections.SupportedApps,
        )
        if len(apps_info) > 0:
            for app_info in apps_info:
                app_id = app_info.get(SupportedAppsKey.AppId)
                file_data = fetch_file_data(app_id)
                add_file_data(app_id, file_data, agent_id)
                record = {
                    SupportedAppsPerAgentKey.AgentId: agent_id,
                    SupportedAppsPerAgentKey.AppId: app_id,
                    SupportedAppsPerAgentKey.Status: AVAILABLE,
                    SupportedAppsPerAgentKey.CustomerName: customer_name,
                    SupportedAppsPerAgentKey.InstallDate: r.epoch_time(0.0)
                }
                insert_app_data(
                    record,
                    collection=AppCollections.SupportedAppsPerAgent
                )
def get_entry_info(self, entry):
    """Parse the top level entry object in the XML file

    Args:
        entry (lxml.etree._Element): This is an lxml Element

    Returns:
        Dictionary
        {
            "cvss_vector": [
                {"metric": "Access Vector", "value": "Network"},
                {"metric": "Access Complexity", "value": "Medium"}
            ],
            "cve_sev": "Medium",
            "cve_id": "CVE-2009-5138",
            "cvss_base_score": "5.8",
            "cvss_exploit_subscore": "8.6",
            "cvss_version": "2.0",
            "cvss_impact_subscore": "4.9",
            "cvss_score": "5.8"
        }
    """
    attrib = entry.attrib
    # Published/modified dates come in as strings; normalize them to
    # ReQL epoch terms via date_parser + timestamp_verifier.
    published = date_parser(attrib.get(CVEStrings.CVE_PUBLISHED_DATE))
    modified = date_parser(attrib.get(CVEStrings.CVE_MODIFIED_DATE))
    return {
        CveKey.CveId: attrib.get(CVEStrings.CVE_NAME),
        CveKey.CveSev: attrib.get(CVEStrings.CVE_SEVERITY),
        CveKey.CvePublishedDate:
            r.epoch_time(timestamp_verifier(published)),
        CveKey.CveModifiedDate:
            r.epoch_time(timestamp_verifier(modified)),
        CveKey.CvssScore: attrib.get(CVEStrings.CVSS_SCORE),
        CveKey.CvssBaseScore: attrib.get(CVEStrings.CVSS_BASE_SCORE),
        CveKey.CvssImpactSubScore:
            attrib.get(CVEStrings.CVSS_IMPACT_SUBSCORE),
        CveKey.CvssExploitSubScore:
            attrib.get(CVEStrings.CVSS_EXPLOIT_SUBSCORE),
        CveKey.CvssVector:
            self._parse_vectors(attrib.get(CVEStrings.CVSS_VECTOR)),
        CveKey.CvssVersion: attrib.get(CVEStrings.CVSS_VERSION),
    }
def get_entry_info(self, entry):
    """Parse the top level entry object in the XML file

    Args:
        entry (lxml.etree._Element): This is an lxml Element

    Returns:
        Dictionary
        {
            "cvss_vector": [
                {"metric": "Access Vector", "value": "Network"},
                {"metric": "Access Complexity", "value": "Medium"}
            ],
            "cve_sev": "Medium",
            "cve_id": "CVE-2009-5138",
            "cvss_base_score": "5.8",
            "cvss_exploit_subscore": "8.6",
            "cvss_version": "2.0",
            "cvss_impact_subscore": "4.9",
            "cvss_score": "5.8"
        }
    """
    info = {}
    attrib = entry.attrib
    info[CveKey.CveId] = attrib.get(CVEStrings.CVE_NAME)
    info[CveKey.CveSev] = attrib.get(CVEStrings.CVE_SEVERITY)
    # Dates are parsed from the feed's string form and stored as
    # ReQL epoch terms.
    published_raw = attrib.get(CVEStrings.CVE_PUBLISHED_DATE)
    modified_raw = attrib.get(CVEStrings.CVE_MODIFIED_DATE)
    info[CveKey.CvePublishedDate] = r.epoch_time(
        timestamp_verifier(date_parser(published_raw))
    )
    info[CveKey.CveModifiedDate] = r.epoch_time(
        timestamp_verifier(date_parser(modified_raw))
    )
    # CVSS scores are kept as the raw attribute strings.
    info[CveKey.CvssScore] = attrib.get(CVEStrings.CVSS_SCORE)
    info[CveKey.CvssBaseScore] = attrib.get(CVEStrings.CVSS_BASE_SCORE)
    info[CveKey.CvssImpactSubScore] = attrib.get(
        CVEStrings.CVSS_IMPACT_SUBSCORE
    )
    info[CveKey.CvssExploitSubScore] = attrib.get(
        CVEStrings.CVSS_EXPLOIT_SUBSCORE
    )
    info[CveKey.CvssVector] = self._parse_vectors(
        attrib.get(CVEStrings.CVSS_VECTOR)
    )
    info[CveKey.CvssVersion] = attrib.get(CVEStrings.CVSS_VERSION)
    return info
def add_or_update_applications(collection=AppCollections.AppsPerAgent,
                               pkg_list=None, delete_afterwards=True,
                               conn=None):
    """Insert-or-replace application documents; optionally delete stale ones.

    Every package is stamped with the current time and written with
    ``conflict="replace"``. When ``delete_afterwards`` is True, documents
    for the same agent that were not touched in this run are removed.

    Args:
        collection (str): Table to write to.
        pkg_list (list|None): Package dicts; defaults to an empty list.
        delete_afterwards (bool): Prune older documents per agent.
        conn: RethinkDB connection.

    Returns:
        dict: {'pass', 'inserted', 'replaced', 'deleted', 'pkg_count'}
    """
    # BUG FIX: the old signature used a mutable default (pkg_list=[]).
    if pkg_list is None:
        pkg_list = []
    # BUG FIX: 'completed' was never flipped to True, so the 'pass'
    # field in the result was always False.
    completed = True
    inserted_count = 0
    replaced_count = 0
    deleted_count = 0
    pkg_count = len(pkg_list)
    last_modified_time = mktime(datetime.now().timetuple())
    for pkg in pkg_list:
        pkg['last_modified_time'] = r.epoch_time(last_modified_time)
        try:
            updated = (r.table(collection).insert(
                pkg, conflict="replace").run(conn))
            logger.info(updated)
            inserted_count += updated['inserted']
            replaced_count += updated['replaced']
        except Exception as e:
            completed = False
            logger.exception(e)
        try:
            if delete_afterwards:
                # Remove this agent's documents not refreshed this run.
                deleted = (r.table(collection).get_all(
                    pkg[AppsPerAgentKey.AgentId],
                    index=AppsPerAgentIndexes.AgentId).filter(
                        r.row['last_modified_time'] < r.epoch_time(
                            last_modified_time)).delete().run(conn))
                deleted_count += deleted['deleted']
        except Exception as e:
            completed = False
            logger.exception(e)
    return {
        'pass': completed,
        'inserted': inserted_count,
        'replaced': replaced_count,
        'deleted': deleted_count,
        'pkg_count': pkg_count,
    }
def update_supported_apps(json_data):
    """Ingest the latest supported-apps feed.

    For each app in the feed: stamp the customer list, release date,
    download status, hidden flag and vulnerability id; store it in the
    latest-downloaded collection; register its file data; upsert it into
    the supported-apps collection; and queue the file downloads on the
    'downloader' RQ queue. Finally, sync the updates to all agents.

    Args:
        json_data (list): supported-app dicts from the feed.
    """
    conn = None
    try:
        rv_q = Queue("downloader", connection=RQ_PKG_POOL)
        conn = db_connect()
        inserted_count = 0
        all_customers = list(
            r.table(Collection.Customers)
            .map(lambda x: x[CustomerKey.CustomerName])
            .run(conn)
        )
        # Mutate the feed dicts in place; sync below reads them again.
        for app in json_data:
            app[SupportedAppsKey.Customers] = all_customers
            app[SupportedAppsKey.ReleaseDate] = r.epoch_time(
                app[SupportedAppsKey.ReleaseDate]
            )
            app[SupportedAppsKey.FilesDownloadStatus] = (
                PackageCodes.FilePendingDownload
            )
            app[SupportedAppsKey.Hidden] = "no"
            app[SupportedAppsKey.VulnerabilityId] = ""
            insert_app_data(app, DownloadCollections.LatestDownloadedSupported)
            file_data = app.get(SupportedAppsKey.FileData)
            add_file_data(app[SupportedAppsKey.AppId], file_data)
            exists = (
                r.table(AppCollections.SupportedApps)
                .get(app[SupportedAppsKey.AppId])
                .run(conn)
            )
            if exists:
                # Existing apps only get their customer list refreshed.
                updated = (
                    r.table(AppCollections.SupportedApps)
                    .get(app[SupportedAppsKey.AppId])
                    .update({SupportedAppsKey.Customers: all_customers})
                    .run(conn)
                )
            else:
                updated = (
                    r.table(AppCollections.SupportedApps)
                    .insert(app)
                    .run(conn)
                )
            rv_q.enqueue_call(
                func=download_all_files_in_app,
                args=(
                    app[SupportedAppsKey.AppId],
                    app[SupportedAppsKey.OsCode],
                    None,
                    file_data,
                    0,
                    AppCollections.SupportedApps,
                ),
                timeout=86400,
            )
            inserted_count += updated["inserted"]
        update_apps = IncomingSupportedApps()
        update_apps.sync_supported_updates_to_all_agents(json_data)
    except Exception as e:
        logger.exception(e)
    finally:
        # BUG FIX: the connection was only closed on the success path,
        # leaking it whenever an exception fired.
        if conn:
            conn.close()
def _set_app_per_agent_properties(self, agent, app_id):
    """Build the per-agent record for a supported app.

    The record starts AVAILABLE with a zero install date, carries this
    instance's last-modified time, and derives its id from the agent id
    and app id.
    """
    record = {}
    record[SupportedAppsPerAgentKey.AgentId] = agent[AgentKey.AgentId]
    record[SupportedAppsPerAgentKey.CustomerName] = (
        agent[AgentKey.CustomerName]
    )
    record[SupportedAppsPerAgentKey.Status] = CommonAppKeys.AVAILABLE
    record[SupportedAppsPerAgentKey.LastModifiedTime] = (
        self.last_modified_time
    )
    record[SupportedAppsPerAgentKey.Update] = PackageCodes.ThisIsAnUpdate
    record[SupportedAppsPerAgentKey.InstallDate] = r.epoch_time(0.0)
    record[SupportedAppsPerAgentKey.AppId] = app_id
    record[SupportedAppsPerAgentKey.Id] = build_agent_app_id(
        agent[SupportedAppsPerAgentKey.AgentId], app_id
    )
    return record
def update_supported_apps(json_data):
    """Process a supported-apps feed and fan out downloads to agents.

    Each entry is annotated (customers, release date, download status,
    hidden flag, vulnerability id), saved to the latest-downloaded
    collection, registered with its file data, upserted into the
    supported-apps table, and its downloads enqueued. The updates are
    then synced to all agents.

    Args:
        json_data (list): supported-app dicts from the feed.
    """
    conn = None
    try:
        rv_q = Queue('downloader', connection=RQ_PKG_POOL)
        conn = db_connect()
        inserted_count = 0
        all_customers = list(
            r.table(Collection.Customers).map(
                lambda x: x[CustomerKey.CustomerName]).run(conn))
        for entry in json_data:
            entry[SupportedAppsKey.Customers] = all_customers
            entry[SupportedAppsKey.ReleaseDate] = \
                r.epoch_time(entry[SupportedAppsKey.ReleaseDate])
            entry[SupportedAppsKey.FilesDownloadStatus] = \
                PackageCodes.FilePendingDownload
            entry[SupportedAppsKey.Hidden] = 'no'
            entry[SupportedAppsKey.VulnerabilityId] = ''
            insert_app_data(entry,
                            DownloadCollections.LatestDownloadedSupported)
            file_data = entry.get(SupportedAppsKey.FileData)
            add_file_data(entry[SupportedAppsKey.AppId], file_data)
            data_to_update = {SupportedAppsKey.Customers: all_customers}
            exists = (r.table(AppCollections.SupportedApps).get(
                entry[SupportedAppsKey.AppId]).run(conn))
            if exists:
                # Existing docs only get the customer list refreshed.
                updated = (r.table(AppCollections.SupportedApps).get(
                    entry[SupportedAppsKey.AppId]).update(
                        data_to_update).run(conn))
            else:
                updated = (r.table(AppCollections.SupportedApps).insert(
                    entry).run(conn))
            rv_q.enqueue_call(func=download_all_files_in_app,
                              args=(entry[SupportedAppsKey.AppId],
                                    entry[SupportedAppsKey.OsCode],
                                    None, file_data, 0,
                                    AppCollections.SupportedApps),
                              timeout=86400)
            inserted_count += updated['inserted']
        update_apps = IncomingSupportedApps()
        update_apps.sync_supported_updates_to_all_agents(json_data)
    except Exception as e:
        logger.exception(e)
    finally:
        # BUG FIX: conn.close() only ran on the success path before,
        # leaking the connection when any step above raised.
        if conn:
            conn.close()
def insert_into_agent_queue(operation, conn=None):
    """Insert data into the agent_queue
    DO NOT CALL DIRECTLY

    Args:
        operation (list|dict): operation data

    Basic Usage:
        >>> from vFense.queue._db import insert_into_agent_queue
        >>> operation = [{'operation': 'data'}]
        >>> insert_into_agent_queue(operation)

    Returns:
        Tuple (status_code, count, error, generated ids)
        >>> (2001, 1, None, [])
    """
    data = {}
    try:
        # Normalize the timestamp fields into ReQL epoch terms before
        # the document is written.
        created = operation[AgentQueueKey.CreatedTime]
        server_ttl = operation[AgentQueueKey.ServerQueueTTL]
        agent_ttl = operation[AgentQueueKey.AgentQueueTTL]
        operation[AgentQueueKey.CreatedTime] = r.epoch_time(created)
        operation[AgentQueueKey.ServerQueueTTL] = r.epoch_time(server_ttl)
        operation[AgentQueueKey.AgentQueueTTL] = r.epoch_time(agent_ttl)
        data = r.table(QueueCollections.Agent).insert(operation).run(conn)
    except Exception as e:
        logger.exception(e)
    return data
def get_date_posted(date_em):
    """Parse em tags, to retrieve the date posted and convert it to epoch.

    The element text is expected to split into day, month, year tokens
    (ordinal letters on the day and a trailing comma on the month are
    stripped). Returns an empty unicode string when parsing fails.

    Args:
        date_em (str):
    """
    date_posted = u''
    try:
        day_txt, month_txt, year_txt = date_em.text.split()
        day = int(re.sub('[a-zA-Z]+', '', day_txt))
        month = month_to_num_month[re.sub(',', '', month_txt)]
        year = int(year_txt)
        posted = datetime(year, month, day)
        date_posted = r.epoch_time(mktime(posted.timetuple()))
    except Exception as e:
        logger.exception(e)
    return date_posted
def _set_app_per_agent_properties(self, agent, app_id):
    """Assemble the per-agent document for one supported app.

    Status starts as AVAILABLE, the install date at epoch zero, and the
    document id is built from the agent id plus the app id.
    """
    doc_id = build_agent_app_id(
        agent[SupportedAppsPerAgentKey.AgentId], app_id
    )
    return {
        SupportedAppsPerAgentKey.Id: doc_id,
        SupportedAppsPerAgentKey.AppId: app_id,
        SupportedAppsPerAgentKey.AgentId: agent[AgentKey.AgentId],
        SupportedAppsPerAgentKey.CustomerName: agent[AgentKey.CustomerName],
        SupportedAppsPerAgentKey.Status: CommonAppKeys.AVAILABLE,
        SupportedAppsPerAgentKey.LastModifiedTime: self.last_modified_time,
        SupportedAppsPerAgentKey.Update: PackageCodes.ThisIsAnUpdate,
        SupportedAppsPerAgentKey.InstallDate: r.epoch_time(0.0),
    }
def get_os_apps_history_for_tag(username, customer_name, uri, method,
                                tag_id, status, start_date=None,
                                end_date=None, conn=None):
    """Build a time-bucketed history of OS apps for every agent in a tag.

    Apps with the given status are joined against the unique-applications
    table, filtered to the [start_date, end_date] release window, grouped
    by release date, and sub-grouped by severity for the graph payload.

    Args:
        username, uri, method: request context used for result envelopes.
        customer_name (str): customer scope (not used in the query itself).
        tag_id (str): tag whose agents are inspected.
        status (str): per-agent app status used in the compound index join.
        start_date (float|None): epoch seconds; defaults to one year ago.
        end_date (float|None): epoch seconds; defaults to now.
        conn: RethinkDB connection.

    Returns:
        GenericResults payload with the grouped data, or a
        something_broke payload on error.
    """
    try:
        # Default the window: no dates -> last 365 days; only start ->
        # up to now; only end -> from the epoch.
        if not start_date and not end_date:
            start_date = mktime(
                (datetime.now() - timedelta(days=1 * 365)).timetuple())
            end_date = mktime(datetime.now().timetuple())
        elif start_date and not end_date:
            end_date = mktime(datetime.now().timetuple())
        elif not start_date and end_date:
            start_date = 0.0
        # One large ReQL pipeline:
        #   tag -> agent ids -> per-agent apps (status+agent compound
        #   index) -> app details -> release-date window filter ->
        #   group by release date -> count + collect details ->
        #   regroup details by severity inside each date bucket.
        data = (r.table(TagsPerAgentCollection, use_outdated=True).get_all(
            tag_id, index=TagsPerAgentIndexes.TagId).pluck(
                TagsPerAgentKey.AgentId).eq_join(
                    lambda x: [status, x[AppsPerAgentKey.AgentId]],
                    r.table(AppCollections.AppsPerAgent),
                    index=AppsPerAgentIndexes.StatusAndAgentId).zip().eq_join(
                        AppsKey.AppId, r.table(
                            AppCollections.UniqueApplications)).zip().filter(
                                r.row[AppsKey.ReleaseDate].during(
                                    r.epoch_time(start_date),
                                    r.epoch_time(end_date))).pluck(
                                        AppsKey.AppId, AppsKey.Name,
                                        AppsKey.Version, AppsKey.RvSeverity,
                                        AppsKey.ReleaseDate).
                group(lambda x: x[AppsKey.ReleaseDate].to_epoch_time()).map(
                    lambda x: {
                        'details': [{
                            AppsKey.AppId: x[AppsKey.AppId],
                            AppsKey.Name: x[AppsKey.Name],
                            AppsKey.Version: x[AppsKey.Version],
                            AppsKey.RvSeverity: x[AppsKey.RvSeverity]
                        }],
                        CommonAppKeys.COUNT: 1,
                    }).reduce(
                        lambda x, y: {
                            "count": x["count"] + y["count"],
                            "details": x["details"] + y["details"],
                        }).ungroup().map({
                            'timestamp': r.row['group'],
                            'total_count': r.row['reduction']['count'],
                            # Within each date bucket, regroup the app
                            # details by rv_severity with per-severity
                            # counts.
                            'details': (r.row['reduction']['details'].group(
                                lambda a: a['rv_severity']).map(
                                    lambda a: {
                                        'apps': [{
                                            AppsKey.AppId: a[AppsKey.AppId],
                                            AppsKey.Name: a[AppsKey.Name],
                                            AppsKey.Version: a[AppsKey.Version],
                                        }],
                                        CommonAppKeys.COUNT: 1
                                    }).reduce(
                                        lambda a, b: {
                                            "count": a["count"] + b["count"],
                                            "apps": a["apps"] + b["apps"],
                                        }).ungroup())
                        }).run(conn))
        results = (GenericResults(username, uri, method).information_retrieved(
            data, len(data)))
    except Exception as e:
        results = (GenericResults(username, uri, method).something_broke(
            'available apps over time graph', 'graph',
            e))
        logger.exception(results)
    return (results)
def parse_spread_sheet(bulletin_file):
    """Parse the entire microsoft excel bulletin data and return the data,
    ready to be inserted into the database.

    Columns are addressed positionally; rows with 15 columns place the
    supersedes/reboot/cve data two positions later than shorter rows.
    NOTE(review): Python 2 only — relies on `unicode` and on `range`
    returning a list (`rows.pop(0)` drops the header row).

    Args:
        bulletin_file (str): The file location on disk

    Returns:
        List of dictionairies
    """
    bulletin_list = []
    workbook = open_workbook(bulletin_file)
    sheet = workbook.sheet_by_name(WindowsBulletinStrings.WORKBOOK_SHEET)
    rows = range(sheet.nrows)
    # Skip the header row.
    rows.pop(0)
    for i in rows:
        row = sheet.row_values(i)
        bulletin_dict = {}
        supercede_list = []
        # KB columns come in as floats; prefix with 'KB' after int cast.
        if row[7] != '':
            row[7] = 'KB' + str(int(row[7]))
        if row[2] != '':
            row[2] = 'KB' + str(int(row[2]))
        # Concatenate the identifying columns to derive a stable id.
        rows_to_use = (
            row[1] + row[2] + row[3] + row[4] + row[6] + row[7] +
            row[8] + row[9]
        )
        rows_to_use = \
            unicode(rows_to_use).encode(sys.stdout.encoding, 'replace')
        built_id = build_bulletin_id(rows_to_use)
        bulletin_dict[WindowsSecurityBulletinKey.Id] = built_id
        # Excel serial date -> epoch -> ReQL epoch term.
        date = xldate_as_tuple(row[0], workbook.datemode)
        epoch_time = mktime(datetime(*date).timetuple())
        bulletin_dict[WindowsSecurityBulletinKey.DatePosted] = (
            r.epoch_time(epoch_time)
        )
        # Need to see if I can pull the column names and use that instead
        # of using the row number
        bulletin_dict[WindowsSecurityBulletinKey.BulletinId] = row[1]
        bulletin_dict[WindowsSecurityBulletinKey.BulletinKb] = row[2]
        bulletin_dict[WindowsSecurityBulletinKey.BulletinSeverity] = row[3]
        bulletin_dict[WindowsSecurityBulletinKey.BulletinImpact] = row[4]
        bulletin_dict[WindowsSecurityBulletinKey.Details] = row[5]
        bulletin_dict[WindowsSecurityBulletinKey.AffectedProduct] = row[6]
        bulletin_dict[WindowsSecurityBulletinKey.ComponentKb] = row[7]
        bulletin_dict[WindowsSecurityBulletinKey.AffectedComponent] = row[8]
        bulletin_dict[WindowsSecurityBulletinKey.ComponentImpact] = row[9]
        bulletin_dict[WindowsSecurityBulletinKey.ComponentSeverity] = row[10]
        # Wider rows shift the trailing columns by one position.
        if len(row) == 15:
            supercedes = row[12]
            reboot = row[13]
            cve_ids = row[14]
        else:
            supercedes = row[11]
            reboot = row[12]
            cve_ids = row[13]
        # Supersedes column looks like "MSxx-xxx[kb],..." — split out the
        # optional bracketed KB number per entry.
        info = supercedes.split(',')
        for j in info:
            bulletin_data = j.split('[')
            if len(bulletin_data) > 1:
                bulletin_id = bulletin_data[0]
                bulletin_kb = re.sub('^', 'KB', bulletin_data[1][:-1])
            else:
                bulletin_id = bulletin_data[0]
                bulletin_kb = None
            supercede_list.append(
                {
                    WindowsSecurityBulletinKey.SupersedesBulletinId: bulletin_id,
                    WindowsSecurityBulletinKey.SupersedesBulletinKb: bulletin_kb
                }
            )
        bulletin_dict[WindowsSecurityBulletinKey.Supersedes] = supercede_list
        bulletin_dict[WindowsSecurityBulletinKey.Reboot] = reboot
        bulletin_dict[WindowsSecurityBulletinKey.CveIds] = cve_ids.split(',')
        bulletin_list.append(bulletin_dict)
    return(bulletin_list)
def get_os_apps_history_for_tag(username, customer_name, uri, method,
                                tag_id, status, start_date=None,
                                end_date=None, conn=None):
    """Return a release-date-bucketed app history for all agents in a tag.

    Joins the tag's agents to their apps (by status), attaches app
    details, restricts to the requested release-date window, then groups
    by release date with severity sub-grouping for graphing.

    Args:
        username, uri, method: request context for result envelopes.
        customer_name (str): customer scope (not referenced in the query).
        tag_id (str): tag id whose agents are considered.
        status (str): app status half of the compound index key.
        start_date (float|None): epoch seconds; defaults to one year back.
        end_date (float|None): epoch seconds; defaults to now.
        conn: RethinkDB connection.

    Returns:
        GenericResults payload (information_retrieved on success,
        something_broke on failure).
    """
    try:
        # Fill in whichever window bound is missing.
        if not start_date and not end_date:
            start_date = mktime((datetime.now() - timedelta(days=1*365)).timetuple())
            end_date = mktime(datetime.now().timetuple())
        elif start_date and not end_date:
            end_date = mktime(datetime.now().timetuple())
        elif not start_date and end_date:
            start_date = 0.0
        data = (
            r
            .table(TagsPerAgentCollection, use_outdated=True)
            .get_all(tag_id, index=TagsPerAgentIndexes.TagId)
            .pluck(TagsPerAgentKey.AgentId)
            # Join each agent against its apps with the given status via
            # the compound (status, agent_id) index.
            .eq_join(
                lambda x: [
                    status,
                    x[AppsPerAgentKey.AgentId]
                ],
                r.table(AppCollections.AppsPerAgent),
                index=AppsPerAgentIndexes.StatusAndAgentId
            )
            .zip()
            # Attach the app's detail document.
            .eq_join(AppsKey.AppId, r.table(AppCollections.UniqueApplications))
            .zip()
            # Keep only apps released inside the requested window.
            .filter(
                r.row[AppsKey.ReleaseDate].during(
                    r.epoch_time(start_date),
                    r.epoch_time(end_date)
                )
            )
            .pluck(
                AppsKey.AppId, AppsKey.Name, AppsKey.Version,
                AppsKey.RvSeverity, AppsKey.ReleaseDate
            )
            # Bucket by release date (as epoch seconds).
            .group(
                lambda x: x[AppsKey.ReleaseDate].to_epoch_time()
            )
            .map(
                lambda x: {
                    'details': [
                        {
                            AppsKey.AppId: x[AppsKey.AppId],
                            AppsKey.Name: x[AppsKey.Name],
                            AppsKey.Version: x[AppsKey.Version],
                            AppsKey.RvSeverity: x[AppsKey.RvSeverity]
                        }
                    ],
                    CommonAppKeys.COUNT: 1,
                }
            )
            # Per-bucket totals and concatenated details.
            .reduce(
                lambda x, y: {
                    "count": x["count"] + y["count"],
                    "details": x["details"] + y["details"],
                }
            )
            .ungroup()
            # Reshape each bucket for the graph: timestamp, total, and
            # details regrouped by severity with per-severity counts.
            .map(
                {
                    'timestamp': r.row['group'],
                    'total_count': r.row['reduction']['count'],
                    'details': (
                        r.row['reduction']['details']
                        .group(
                            lambda a: a['rv_severity']
                        )
                        .map(
                            lambda a: {
                                'apps': [
                                    {
                                        AppsKey.AppId: a[AppsKey.AppId],
                                        AppsKey.Name: a[AppsKey.Name],
                                        AppsKey.Version: a[AppsKey.Version],
                                    }
                                ],
                                CommonAppKeys.COUNT: 1
                            }
                        )
                        .reduce(
                            lambda a, b: {
                                "count": a["count"] + b["count"],
                                "apps": a["apps"] + b["apps"],
                            }
                        )
                        .ungroup()
                    )
                }
            )
            .run(conn)
        )
        results = (
            GenericResults(
                username, uri, method
            ).information_retrieved(data, len(data))
        )
    except Exception as e:
        results = (
            GenericResults(
                username, uri, method
            ).something_broke('available apps over time graph', 'graph', e)
        )
        logger.exception(results)
    return(results)
def begining_of_time():
    """Return a ReQL epoch term for time zero (1970-01-01 UTC).

    NOTE(review): the misspelled name is kept for callers.
    """
    return r.epoch_time(0.0)
def epoch_time_to_db_time(epoch):
    """Wrap an epoch timestamp in a ReQL epoch term for storage."""
    return r.epoch_time(epoch)
def time_now():
    """Return the current time (whole seconds) as a ReQL epoch term."""
    now = int(time())
    return r.epoch_time(now)
def store_package_info_in_db(username, customer_name, uri, method, size,
                             md5, operating_system, uuid, name, severity,
                             arch, major_version, minor_version,
                             release_date=0.0, vendor_name=None,
                             description=None, cli_options=None,
                             support_url=None, kb=None, conn=None):
    """Store an uploaded custom application package in the database.

    Verifies the uploaded file exists on disk, normalizes the release
    date, upserts the app document into the custom-apps collection, and
    registers the app with matching agents.

    Args:
        username, uri, method: request context for the result envelope.
        customer_name (str): owning customer.
        size, md5, name: file size (bytes), hash, and file name.
        operating_system, uuid, severity, arch: app attributes.
        major_version, minor_version (str): combined into the version.
        release_date (float|str): epoch seconds or a date string.
        vendor_name, description, cli_options, support_url, kb: optional
            metadata.
        conn: RethinkDB connection.

    Returns:
        GenericResults payload describing the outcome.
    """
    PKG_FILE = TMP_DIR + uuid + '/' + name
    URL_PATH = 'https://localhost/packages/tmp/' + uuid + '/'
    url = URL_PATH + name
    if not os.path.exists(PKG_FILE):
        # BUG FIX: this branch previously referenced an undefined
        # variable `e`, raising NameError instead of reporting the
        # missing file.
        results = (GenericResults(username, uri, method)
                   .file_doesnt_exist(name, None))
        logger.info(results)
        return results
    # BUG FIX: orig_release_date was only bound when release_date was a
    # string, so numeric dates hit a NameError further down. Preserve the
    # caller-supplied value unconditionally for the response payload.
    orig_release_date = release_date
    if isinstance(release_date, (str, unicode)):
        if (len(release_date.split('-')) == 3 or
                len(release_date.split('/')) == 3):
            release_date = r.epoch_time(date_parser(release_date))
        else:
            release_date = r.epoch_time(timestamp_verifier(release_date))
    data_to_store = {
        CustomAppsKey.Name: name,
        CustomAppsPerAgentKey.Dependencies: [],
        CustomAppsKey.RvSeverity: severity,
        CustomAppsKey.VendorSeverity: severity,
        CustomAppsKey.ReleaseDate: release_date,
        CustomAppsKey.VendorName: vendor_name,
        CustomAppsKey.Description: description,
        CustomAppsKey.MajorVersion: major_version,
        CustomAppsKey.MinorVersion: minor_version,
        CustomAppsKey.Version: major_version + '.' + minor_version,
        CustomAppsKey.OsCode: operating_system,
        CustomAppsKey.Kb: kb,
        CustomAppsKey.Hidden: 'no',
        CustomAppsKey.CliOptions: cli_options,
        CustomAppsKey.Arch: arch,
        CustomAppsKey.RebootRequired: 'possible',
        CustomAppsKey.SupportUrl: support_url,
        CustomAppsKey.Customers: [customer_name],
        CustomAppsPerAgentKey.Update: PackageCodes.ThisIsNotAnUpdate,
        CustomAppsKey.FilesDownloadStatus:
            PackageCodes.FileCompletedDownload,
        CustomAppsKey.AppId: uuid
    }
    file_data = [{
        FilesKey.FileUri: url,
        FilesKey.FileSize: int(size),
        FilesKey.FileHash: md5,
        FilesKey.FileName: name
    }]
    try:
        updated = (r.table(AppCollections.CustomApps).insert(
            data_to_store, upsert=True).run(conn))
        add_custom_app_to_agents(username, customer_name, uri, method,
                                 file_data, app_id=uuid)
        # Echo the original (unconverted) release date back to the caller.
        data_to_store['release_date'] = orig_release_date
        results = (GenericResults(username, uri, method).object_created(
            uuid, 'custom_app', data_to_store))
        logger.info(results)
    except Exception as e:
        results = (GenericResults(username, uri, method).something_broke(
            uuid, 'custom_app', e))
        logger.exception(e)
    return results
def begining_of_time():
    """ReQL epoch term for the Unix epoch (t = 0).

    NOTE(review): name keeps the historical misspelling; callers
    depend on it.
    """
    epoch_zero = 0.0
    return r.epoch_time(epoch_zero)
def epoch_time_to_db_time(epoch):
    """Convert an epoch timestamp into the DB's ReQL epoch representation."""
    db_time = r.epoch_time(epoch)
    return db_time
def time_now():
    """Current wall-clock time, truncated to seconds, as a ReQL epoch term."""
    return r.epoch_time(int(time()))
def store_package_info_in_db(
    username, customer_name, uri, method, size, md5, operating_system,
    uuid, name, severity, arch, major_version, minor_version,
    release_date=0.0, vendor_name=None, description=None,
    cli_options=None, support_url=None, kb=None, conn=None,
):
    """Persist an uploaded custom application package.

    Checks the package file exists on disk, normalizes string release
    dates to ReQL epoch terms, upserts the app document
    (conflict="replace"), and registers the app with matching agents.

    Args:
        username, uri, method: request context for the result envelope.
        customer_name (str): owning customer.
        size, md5, name: file size (bytes), hash, and file name.
        operating_system, uuid, severity, arch: app attributes.
        major_version, minor_version (str): joined into the version.
        release_date (float|str): epoch seconds or a date string.
        vendor_name, description, cli_options, support_url, kb: optional
            metadata.
        conn: RethinkDB connection.

    Returns:
        GenericResults payload describing the outcome.
    """
    PKG_FILE = TMP_DIR + uuid + "/" + name
    URL_PATH = "https://localhost/packages/tmp/" + uuid + "/"
    url = URL_PATH + name
    if not os.path.exists(PKG_FILE):
        # BUG FIX: the old else-branch used an undefined `e`, raising
        # NameError instead of returning the file-missing result.
        results = GenericResults(username, uri, method).file_doesnt_exist(name, None)
        logger.info(results)
        return results
    # BUG FIX: previously only assigned inside the isinstance branch, so
    # a numeric release_date crashed with NameError when echoed back.
    orig_release_date = release_date
    if isinstance(release_date, str) or isinstance(release_date, unicode):
        if len(release_date.split("-")) == 3 or len(release_date.split("/")) == 3:
            release_date = r.epoch_time(date_parser(release_date))
        else:
            release_date = r.epoch_time(timestamp_verifier(release_date))
    data_to_store = {
        CustomAppsKey.Name: name,
        CustomAppsPerAgentKey.Dependencies: [],
        CustomAppsKey.RvSeverity: severity,
        CustomAppsKey.VendorSeverity: severity,
        CustomAppsKey.ReleaseDate: release_date,
        CustomAppsKey.VendorName: vendor_name,
        CustomAppsKey.Description: description,
        CustomAppsKey.MajorVersion: major_version,
        CustomAppsKey.MinorVersion: minor_version,
        CustomAppsKey.Version: major_version + "." + minor_version,
        CustomAppsKey.OsCode: operating_system,
        CustomAppsKey.Kb: kb,
        CustomAppsKey.Hidden: "no",
        CustomAppsKey.CliOptions: cli_options,
        CustomAppsKey.Arch: arch,
        CustomAppsKey.RebootRequired: "possible",
        CustomAppsKey.SupportUrl: support_url,
        CustomAppsKey.Customers: [customer_name],
        CustomAppsPerAgentKey.Update: PackageCodes.ThisIsNotAnUpdate,
        CustomAppsKey.FilesDownloadStatus: PackageCodes.FileCompletedDownload,
        CustomAppsKey.AppId: uuid,
    }
    file_data = [
        {FilesKey.FileUri: url, FilesKey.FileSize: int(size), FilesKey.FileHash: md5, FilesKey.FileName: name}
    ]
    try:
        updated = r.table(AppCollections.CustomApps).insert(data_to_store, conflict="replace").run(conn)
        add_custom_app_to_agents(username, customer_name, uri, method, file_data, app_id=uuid)
        # Echo the caller-supplied (unconverted) release date.
        data_to_store["release_date"] = orig_release_date
        results = GenericResults(username, uri, method).object_created(uuid, "custom_app", data_to_store)
        logger.info(results)
    except Exception as e:
        results = GenericResults(username, uri, method).something_broke(uuid, "custom_app", e)
        logger.exception(e)
    return results