def _append_shares_to_device(ip, device, data, external_priorities=None):
    """Merge scan results with DB data and attach disk-share mounts to *device*.

    :param ip: IP address the scan results were collected from
    :param device: the device object being updated
    :param data: plugin results keyed by plugin name
    :param external_priorities: optional per-plugin priority overrides

    NOTE(review): the mutable default argument ``{}`` was replaced with a
    ``None`` sentinel so a single dict is not shared between calls.
    """
    if external_priorities is None:
        external_priorities = {}
    device_data = get_device_data(device)
    full_data = merge_data(
        data,
        {
            'database': {'device': device_data},
        },
        only_multiple=True,
    )
    append_merged_proposition(full_data, device, external_priorities)
    selected_data = select_data(full_data, external_priorities)
    for share_mount in selected_data.get('disk_shares', []):
        _create_or_update_share_mount(ip, device, share_mount)
def _append_shares_to_device(ip, device, data, external_priorities=None):
    """Merge scan results with DB data and sync disk-share mounts on *device*.

    Share mounts reported by the selected data are created or updated; any
    existing mounts on the device that were not reported are deleted.

    :param ip: IP address the scan results were collected from
    :param device: the device object being updated
    :param data: plugin results keyed by plugin name
    :param external_priorities: optional per-plugin priority overrides

    NOTE(review): the mutable default argument ``{}`` was replaced with a
    ``None`` sentinel so a single dict is not shared between calls.
    """
    if external_priorities is None:
        external_priorities = {}
    device_data = get_device_data(device)
    full_data = merge_data(
        data,
        {
            'database': {'device': device_data},
        },
        only_multiple=True,
    )
    append_merged_proposition(full_data, device, external_priorities)
    selected_data = select_data(full_data, external_priorities)
    parsed_mounts = set()
    for share_mount in selected_data.get('disk_shares', []):
        status, mount = _create_or_update_share_mount(ip, device, share_mount)
        if mount:
            parsed_mounts.add(mount.pk)
    # Remove mounts the scan no longer reports.
    device.disksharemount_set.exclude(pk__in=parsed_mounts).delete()
def _append_shares_to_device(ip, device, data, external_priorities=None):
    """Merge scan results with DB data and sync disk-share mounts on *device*.

    Share mounts present in the selected data are created or updated; mounts
    that exist on the device but were not seen in this scan are deleted.

    :param ip: IP address the scan results were collected from
    :param device: the device object being updated
    :param data: plugin results keyed by plugin name
    :param external_priorities: optional per-plugin priority overrides

    NOTE(review): the mutable default argument ``{}`` was replaced with a
    ``None`` sentinel so a single dict is not shared between calls.
    """
    if external_priorities is None:
        external_priorities = {}
    device_data = get_device_data(device)
    full_data = merge_data(
        data,
        {
            'database': {'device': device_data},
        },
        only_multiple=True,
    )
    append_merged_proposition(full_data, device, external_priorities)
    selected_data = select_data(full_data, external_priorities)
    parsed_mounts = set()
    for share_mount in selected_data.get('disk_shares', []):
        status, mount = _create_or_update_share_mount(ip, device, share_mount)
        if mount:
            parsed_mounts.add(mount.pk)
    # Remove mounts the scan no longer reports.
    device.disksharemount_set.exclude(pk__in=parsed_mounts).delete()
def _append_connections_to_device(device, data, external_priorities):
    """Sync network connections on *device* from merged scan data.

    Connections reported by the selected data are created or updated;
    outbound network connections that were not reported are deleted.

    :param device: the device object being updated
    :param data: plugin results keyed by plugin name
    :param external_priorities: per-plugin priority overrides
    """
    merged = merge_data(
        data,
        {'database': {'device': get_device_data(device)}},
        only_multiple=True,
    )
    append_merged_proposition(merged, device, external_priorities)
    chosen = select_data(merged, external_priorities)
    seen_pks = set()
    for connection_data in chosen.get('connections', []):
        connection = _create_or_update_connection(device, connection_data)
        if connection:
            seen_pks.add(connection.pk)
    # Drop outbound network connections the scan no longer reports.
    Connection.objects.filter(
        Q(outbound=device),
        Q(connection_type=ConnectionType.network),
        ~Q(pk__in=seen_pks),
    ).delete()
def _save_job_results(job_id, start_ts):
    """Persist a finished scan job's results into the device database.

    Fetches the RQ job; if it is still running, re-enqueues this saver and
    returns. Otherwise merges the job result with existing device data and
    saves it. When no existing device matches, leftover plugin results
    ("garbage") may be used to create a brand-new device.

    :param job_id: RQ job identifier
    :param start_ts: unix timestamp of the first save attempt
    """
    # Give up slightly before the 24h mark (24h - 5min = 86100s).
    if (int(time.time()) - start_ts) > 86100:
        return
    try:
        job = rq.job.Job.fetch(job_id, django_rq.get_connection())
    except rq.exceptions.NoSuchJobError:
        return  # job with this id does not exist...
    if job.result is None and not job.is_failed:
        # we must wait...
        _enqueue_save_job_results(job_id)
        return
    elif job.is_failed:
        # nothing to do...
        return
    external_priorities = get_external_results_priorities(job.result)
    # management?
    is_management = False
    if job.args:
        try:
            is_management = IPAddress.objects.filter(
                address=job.args[0],
            ).values_list('is_management', flat=True)[0]
        except IndexError:
            # no such IP in the database -- assume non-management
            pass
    # first... update device
    devices, ids_lookup, sn_lookup, macs_lookup = _find_devices(job.result)
    if len(devices) > 1:
        raise AutomergerError(
            'Many devices found for: ids=%s, sn=%s, macs=%s' % (
                ids_lookup, sn_lookup, macs_lookup))
    used_serial_numbers = set()
    used_mac_addresses = set()
    for device in devices:
        device_data = get_device_data(device)
        if 'serial_number' in device_data:
            # .add()/.update() replace the non-idiomatic ``|= set([...])``
            used_serial_numbers.add(device_data['serial_number'])
        if 'mac_addresses' in device_data:
            used_mac_addresses.update(device_data['mac_addresses'])
        data = merge_data(
            job.result,
            {
                'database': {'device': device_data},
            },
            only_multiple=True,
        )
        append_merged_proposition(data, device, external_priorities)
        selected_data = _select_data(data, external_priorities, is_management)
        set_device_data(device, selected_data, save_priority=SAVE_PRIORITY)
        device.save(priority=SAVE_PRIORITY)
    # now... we create new devices from `garbage`
    if not devices:
        garbage = {}
        for plugin_name, plugin_result in job.result.items():
            if 'device' not in plugin_result:
                continue
            device_info = plugin_result['device']
            # skip results whose identifiers already belong to a known device
            if 'serial_number' in device_info:
                if device_info['serial_number'] in used_serial_numbers:
                    continue
            if 'mac_addresses' in device_info:
                # disjointness test replaces the original redundant
                # ``set(x) != set(x) - used_mac_addresses`` comparison
                if not used_mac_addresses.isdisjoint(
                    device_info['mac_addresses']
                ):
                    continue
            if any((
                device_info.get('serial_number'),
                device_info.get('mac_addresses'),
            )):
                garbage[plugin_name] = plugin_result
        if garbage:
            data = merge_data(garbage)
            selected_data = _select_data(data, external_priorities)
            # only create a device when it is identifiable AND classifiable
            if all((
                any((
                    selected_data.get('serial_number'),
                    selected_data.get('mac_addresses', []),
                )),
                any((
                    selected_data.get('model_name'),
                    selected_data.get('type'),
                )),
            )):
                device_from_data(selected_data, save_priority=SAVE_PRIORITY)
    # mark this scan results
    update_scan_summary(job)
def _save_job_results(job_id, start_ts):
    """Persist a finished scan job's results into the device database.

    Fetches the RQ job; if it is still running, re-enqueues this saver and
    returns. Otherwise merges the job result with existing device data and
    saves it. When no existing device matches, leftover plugin results
    ("garbage") may be used to create a brand-new device.

    :param job_id: RQ job identifier
    :param start_ts: unix timestamp of the first save attempt
    """
    # Give up slightly before the 24h mark (24h - 5min = 86100s).
    if (int(time.time()) - start_ts) > 86100:
        return
    try:
        job = rq.job.Job.fetch(job_id, django_rq.get_connection())
    except rq.exceptions.NoSuchJobError:
        return  # job with this id does not exist...
    if job.result is None and not job.is_failed:
        # we must wait...
        _enqueue_save_job_results(job_id)
        return
    elif job.is_failed:
        # nothing to do...
        return
    external_priorities = get_external_results_priorities(job.result)
    # management?
    is_management = False
    if job.args:
        try:
            is_management = IPAddress.objects.filter(
                address=job.args[0],
            ).values_list(
                'is_management', flat=True,
            )[0]
        except IndexError:
            # no such IP in the database -- assume non-management
            pass
    # first... update device
    devices, ids_lookup, sn_lookup, macs_lookup = _find_devices(job.result)
    if len(devices) > 1:
        raise AutomergerError(
            'Many devices found for: ids=%s, sn=%s, macs=%s' % (
                ids_lookup, sn_lookup, macs_lookup
            )
        )
    used_serial_numbers = set()
    used_mac_addresses = set()
    for device in devices:
        device_data = get_device_data(device)
        if 'serial_number' in device_data:
            # .add()/.update() replace the non-idiomatic ``|= set([...])``
            used_serial_numbers.add(device_data['serial_number'])
        if 'mac_addresses' in device_data:
            used_mac_addresses.update(device_data['mac_addresses'])
        data = merge_data(
            job.result,
            {
                'database': {'device': device_data},
            },
            only_multiple=True,
        )
        append_merged_proposition(data, device, external_priorities)
        selected_data = select_data(data, external_priorities, is_management)
        set_device_data(device, selected_data, save_priority=SAVE_PRIORITY)
        device.save(priority=SAVE_PRIORITY)
    # now... we create new devices from `garbage`
    if not devices:
        garbage = {}
        for plugin_name, plugin_result in job.result.items():
            if 'device' not in plugin_result:
                continue
            device_info = plugin_result['device']
            # skip results whose identifiers already belong to a known device
            if 'serial_number' in device_info:
                if device_info['serial_number'] in used_serial_numbers:
                    continue
            if 'mac_addresses' in device_info:
                # disjointness test replaces the original redundant
                # ``set(x) != set(x) - used_mac_addresses`` comparison
                if not used_mac_addresses.isdisjoint(
                    device_info['mac_addresses']
                ):
                    continue
            if any((
                device_info.get('serial_number'),
                device_info.get('mac_addresses'),
            )):
                garbage[plugin_name] = plugin_result
        if garbage:
            data = merge_data(garbage)
            selected_data = select_data(data, external_priorities)
            # only create a device when it is identifiable AND classifiable
            if all((
                any((
                    selected_data.get('serial_number'),
                    selected_data.get('mac_addresses', []),
                )),
                any((
                    selected_data.get('model_name'),
                    selected_data.get('type'),
                )),
            )):
                device_from_data(selected_data, save_priority=SAVE_PRIORITY)
    # mark this scan results
    update_scan_summary(job)