def test_delete_graph_snapshots(self):
    """Verify that a stored graph snapshot is gone after deletion."""
    graph = GraphGenerator().create_graph()
    self.graph_persistor.last_event_timestamp = utcnow()
    self.graph_persistor.store_graph(graph)
    # Delete all snapshots stored up to now, then try to load one back.
    self.graph_persistor.delete_graph_snapshots(utcnow())
    self.assertIsNone(self.graph_persistor.load_graph(utcnow()))
def test_persist_graph(self):
    """Store a graph snapshot and verify it loads back equal."""
    graph = GraphGenerator().create_graph()
    store_time = utcnow()
    self.graph_persistor.last_event_timestamp = store_time
    self.graph_persistor.store_graph(graph)
    loaded = self.graph_persistor.load_graph(store_time)
    self.assert_graph_equal(graph, loaded)
    # Cleanup so later tests start from an empty snapshot table.
    self.graph_persistor.delete_graph_snapshots(utcnow())
def simple_static_generators(switch_num=2, host_num=10, snapshot_events=0,
                             snap_vals=None, update_events=0,
                             update_vals=None):
    """A function for returning static datasource events generators.

    Returns generators for a given number of switches and hosts.
    Hosts will be distributed across switches in round-robin style.
    Switches are interconnected in a line.

    :param switch_num: number of switches
    :param host_num: number of hosts
    :param snapshot_events: number of snapshot events
    :param snap_vals: preset values for ALL snapshot events
    :param update_events: number of update events
    :param update_vals: preset values for update events
    :return: generators for static datasource events
    """
    # TODO(yujunz) mock routers which connects all switches
    mapping = [(host_index, host_index % switch_num)
               for host_index in range(host_num)]

    test_entity_spec_list = []
    if snapshot_events > 0:
        # Copy the caller's dict before adding keys, so the caller's
        # object is not mutated as a side effect of this call.
        snap_vals = dict(snap_vals) if snap_vals else {}
        snap_vals.update({
            DSProps.DATASOURCE_ACTION: DatasourceAction.SNAPSHOT,
            DSProps.SAMPLE_DATE: str(utcnow())
        })
        test_entity_spec_list.append({
            tg.DYNAMIC_INFO_FKEY: tg.DRIVER_STATIC_SNAPSHOT_D,
            tg.STATIC_INFO_FKEY: tg.DRIVER_STATIC_SNAPSHOT_S,
            tg.EXTERNAL_INFO_KEY: snap_vals,
            tg.MAPPING_KEY: mapping,
            tg.NAME_KEY: 'Static snapshot generator',
            tg.NUM_EVENTS: snapshot_events
        })

    if update_events > 0:
        # Same defensive copy for the update-event preset values.
        update_vals = dict(update_vals) if update_vals else {}
        update_vals.update({
            DSProps.DATASOURCE_ACTION: DatasourceAction.UPDATE,
            DSProps.SAMPLE_DATE: str(utcnow())
        })
        test_entity_spec_list.append({
            tg.DYNAMIC_INFO_FKEY: tg.DRIVER_STATIC_SNAPSHOT_D,
            tg.STATIC_INFO_FKEY: None,
            tg.EXTERNAL_INFO_KEY: update_vals,
            tg.MAPPING_KEY: mapping,
            tg.NAME_KEY: 'Static update generator',
            tg.NUM_EVENTS: update_events
        })

    return tg.get_trace_generators(test_entity_spec_list)
def mark_deleted(g, item):
    """Mark a graph vertex or edge as deleted and refresh its timestamp."""
    if isinstance(item, Vertex):
        item[VProps.VITRAGE_IS_DELETED] = True
        item[VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(utcnow())
        g.update_vertex(item)
        return
    if isinstance(item, Edge):
        item[EProps.VITRAGE_IS_DELETED] = True
        item[EProps.UPDATE_TIMESTAMP] = str(utcnow())
        g.update_edge(item)
def _find_outdated_entities_to_mark_as_deleted(self):
    """Find active non-Vitrage vertices with a stale sample timestamp.

    A vertex is considered outdated when its sample timestamp is older
    than twice the datasource snapshots interval.
    """
    stale_threshold = str(
        utcnow() - timedelta(
            seconds=2 * CONF.datasources.snapshots_interval))
    query = {'and': [
        {'!=': {VProps.VITRAGE_TYPE: VITRAGE_DATASOURCE}},
        {'<': {VProps.VITRAGE_SAMPLE_TIMESTAMP: stale_threshold}},
        {'==': {VProps.VITRAGE_IS_DELETED: False}},
    ]}
    outdated = self.graph.get_vertices(query_dict=query)
    return set(self._filter_vertices_to_be_marked_as_deleted(outdated))
def _filter_and_cache_alarms(self, alarms, filter_):
    """Filter the fetched alarms and refresh the alarm cache.

    Alarms accepted by ``filter_`` are returned for a graph update, and
    every fetched alarm is (re)cached with this poll's timestamp.
    Cached alarms that were NOT refreshed in this poll are assumed to
    have been deleted in the datasource: they are marked with
    DELETE_ENTITY and dropped from the cache.
    """
    alarms_to_update = []

    # One timestamp object per poll; object identity ('is') is used
    # below to tell which cache entries were refreshed in this round.
    now = datetime_utils.utcnow(False)
    for alarm in alarms:
        alarm_key = self._alarm_key(alarm)
        old_alarm, timestamp = self.cache.get(alarm_key, (None, None))
        if filter_(self, alarm, old_alarm):
            # delete state changed alarm: alarm->OK
            if not self._is_erroneous(alarm):
                alarm[DSProps.EVENT_TYPE] = GraphAction.DELETE_ENTITY
            alarms_to_update.append(alarm)
        self.cache[alarm_key] = alarm, now

    # add alarms that were deleted
    # (copy values first - entries are popped from the cache below)
    values = list(self.cache.values())
    for cached_alarm, timestamp in values:
        if self._is_erroneous(cached_alarm) and timestamp is not now:
            LOG.debug("deleting cached_alarm %s", cached_alarm)
            cached_alarm[DSProps.EVENT_TYPE] = GraphAction.DELETE_ENTITY
            alarms_to_update.append(cached_alarm)
            self.cache.pop(self._alarm_key(cached_alarm))

    return alarms_to_update
def test_persist_two_graphs(self):
    """Store two snapshots and verify each loads back by its timestamp."""
    first_graph = GraphGenerator().create_graph()
    first_time = utcnow()
    self.graph_persistor.last_event_timestamp = first_time
    self.graph_persistor.store_graph(first_graph)
    first_snapshot = self.graph_persistor.load_graph(first_time)

    second_graph = GraphGenerator(5).create_graph()
    second_time = utcnow()
    self.graph_persistor.last_event_timestamp = second_time
    self.graph_persistor.store_graph(second_graph)
    second_snapshot = self.graph_persistor.load_graph(second_time)

    self.assert_graph_equal(first_graph, first_snapshot)
    self.assert_graph_equal(second_graph, second_snapshot)
    # Cleanup stored snapshots.
    self.graph_persistor.delete_graph_snapshots(utcnow())
def test_load_last_graph_snapshot_until_timestamp(self):
    """Loading with a timestamp between two snapshots returns the older."""
    first_graph = GraphGenerator().create_graph()
    self.graph_persistor.last_event_timestamp = utcnow()
    self.graph_persistor.store_graph(first_graph)

    # Take a timestamp strictly between the two store operations.
    time.sleep(1)
    time_in_between = utcnow()
    time.sleep(1)

    second_graph = GraphGenerator(5).create_graph()
    self.graph_persistor.last_event_timestamp = utcnow()
    self.graph_persistor.store_graph(second_graph)

    loaded = self.graph_persistor.load_graph(time_in_between)
    self.assert_graph_equal(first_graph, loaded)
    # Cleanup stored snapshots.
    self.graph_persistor.delete_graph_snapshots(utcnow())
def initializing_process(self):
    """Consistency initialization.

    Waits until all datasources have sent their end messages, enables
    and runs the evaluator over the whole graph, and finally deletes
    deduced alarms that were not re-deduced by this run.
    """
    try:
        LOG.info('Consistency Initializing Process - Started')
        if not self._wait_for_action(
                self.initialization_status.is_received_all_end_messages):
            LOG.error('Maximum retries for consistency initializator '
                      'were done')
        LOG.info('All end messages were received')
        self.evaluator.enabled = True
        # Timestamp is taken BEFORE the evaluator run: any deduced
        # alarm not refreshed after this point is considered old.
        timestamp = str(utcnow())
        all_vertices = self.graph.get_vertices()
        self._run_evaluator(all_vertices)
        self._wait_for_processing_evaluator_events()
        self._mark_old_deduced_alarms_as_deleted(timestamp)
        self.initialization_status.status = \
            self.initialization_status.FINISHED
        LOG.info('Consistency Initializing Process - Finished')
    except Exception as e:
        LOG.exception('Error in deleting vertices from entity_graph: %s',
                      e)
def _periodic_process_setup_stage(self, consistency_interval):
    """Prepare the graph for the periodic-consistency test.

    Leaves three groups of instance vertices:
    - [0:3] fresh sample timestamp (should not be touched)
    - [3:6] marked deleted, fresh timestamp
    - [6:9] marked deleted, timestamp 2*interval+1 in the future
    All other vertices get a timestamp 1.5*interval ahead so the
    consistency run will not pick them up.
    """
    self._create_processor_with_graph(self.conf, processor=self.processor)
    current_time = utcnow()

    # set all vertices to be have timestamp that consistency won't get
    self._update_timestamp(self.processor.entity_graph.get_vertices(),
                           current_time +
                           timedelta(seconds=1.5 * consistency_interval))

    # check number of instances in graph
    instance_vertices = self.processor.entity_graph.get_vertices({
        VProps.CATEGORY: EntityCategory.RESOURCE,
        VProps.TYPE: NOVA_INSTANCE_DATASOURCE
    })
    self.assertEqual(self.NUM_INSTANCES, len(instance_vertices))

    # set current timestamp of part of the instances
    self._update_timestamp(instance_vertices[0:3], current_time)

    # set part of the instances as deleted
    for i in range(3, 6):
        instance_vertices[i][VProps.IS_DELETED] = True
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    # set part of the instances as deleted (with an out-of-range timestamp)
    for i in range(6, 9):
        instance_vertices[i][VProps.IS_DELETED] = True
        instance_vertices[i][VProps.SAMPLE_TIMESTAMP] = str(
            current_time +
            timedelta(seconds=2 * consistency_interval + 1))
        self.processor.entity_graph.update_vertex(instance_vertices[i])
def _convert_alarm_rule_change_event(self, event):
    """handle alarm rule change notification

    example of changed rule:
    "detail": {"severity": "critical",
               "rule":
                   {"query": [{"field": "traits.resource_id",
                               "type": "",
                               "value": "1",
                               "op": "eq"}],
                    "event_type": "instance.update"}}
    """
    old_alarm = self._old_alarm(event)
    # Start from a copy of the cached alarm and overlay the changes.
    entity = old_alarm.copy()
    changed_rule = event[AodhProps.DETAIL]
    for (changed_type, changed_info) in changed_rule.items():
        # handle changed rule which may effect the neighbor
        if changed_type == AodhProps.RULE:
            entity.update(self._parse_changed_rule(
                changed_rule[changed_type]))
        # handle other changed alarm properties (only keys that are
        # known AodhProps values are applied)
        elif changed_type in AodhProps.__dict__.values():
            entity[changed_type] = changed_info
    return self._filter_and_cache_alarm(entity, old_alarm,
                                        self._filter_get_erroneous,
                                        datetime_utils.utcnow(False))
def add_template(self, template_def):
    """Validate a template and register it.

    The template is stored together with its validation result even
    when invalid (so its status can be reported); scenarios are added
    to the repository only when the template is valid.
    """
    result = syntax_validation(template_def)
    if not result.is_valid_config:
        LOG.info('Unable to load template, syntax err: %s'
                 % result.comment)
    else:
        result = content_validation(template_def, self._def_templates)
        if not result.is_valid_config:
            LOG.info('Unable to load template, content err: %s'
                     % result.comment)

    template_uuid = uuidutils.generate_uuid()
    current_time = datetime_utils.utcnow()
    self.templates[str(template_uuid)] = Template(template_uuid,
                                                  template_def,
                                                  current_time,
                                                  result)
    if result.is_valid_config:
        template_data = \
            TemplateLoader().load(template_def, self._def_templates)
        # A scenario may expand into several equivalent scenarios.
        for scenario in template_data.scenarios:
            for equivalent_scenario in self._expand_equivalence(scenario):
                self._add_scenario(equivalent_scenario)
def _periodic_process_setup_stage(self, consistency_interval):
    """Prepare the graph for the periodic-consistency test.

    Leaves three groups of instance vertices:
    - [0:3] fresh sample timestamp (should not be touched)
    - [3:6] marked deleted, fresh timestamp
    - [6:9] marked deleted, timestamp 2*interval+1 in the future
    All other vertices get a timestamp 1.5*interval ahead so the
    consistency run will not pick them up.
    """
    self._create_processor_with_graph(self.conf, processor=self.processor)
    current_time = utcnow()

    # set all vertices to be have timestamp that consistency won't get
    self._update_timestamp(self.processor.entity_graph.get_vertices(),
                           current_time +
                           timedelta(seconds=1.5 * consistency_interval))

    # check number of instances in graph
    instance_vertices = self.processor.entity_graph.get_vertices({
        VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
        VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
    })
    self.assertThat(instance_vertices,
                    matchers.HasLength(self.NUM_INSTANCES))

    # set current timestamp of part of the instances
    self._update_timestamp(instance_vertices[0:3], current_time)

    # set part of the instances as deleted
    for i in range(3, 6):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    # set part of the instances as deleted (with an out-of-range timestamp)
    for i in range(6, 9):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        instance_vertices[i][VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(
            current_time +
            timedelta(seconds=2 * consistency_interval + 1))
        self.processor.entity_graph.update_vertex(instance_vertices[i])
def expirer_periodic():
    """Periodically purge alarm-history rows older than the TTL."""
    ttl_days = self.conf.persistency.alarm_history_ttl
    expire_by = utcnow(with_timezone=False) - timedelta(days=ttl_days)
    try:
        self.db.alarms.delete_expired(expire_by)
    except Exception:
        LOG.exception('History tables - periodic cleanup run failed.')
def _convert_alarm_creation_event(self, event):
    """Build an alarm entity from a creation notification and cache it."""
    entity = self._convert_base_event(event)
    entity.update(self._convert_detail_event(event))
    # No previous cached alarm exists for a newly created alarm.
    return self._filter_and_cache_alarm(entity,
                                        None,
                                        self._filter_get_erroneous,
                                        datetime_utils.utcnow(False))
def _convert_alarm_state_transition_event(self, event):
    """Apply the new state from a transition notification to the alarm."""
    old_alarm = self._old_alarm(event)
    new_state = event[AodhProps.DETAIL][AodhProps.STATE]
    entity = old_alarm.copy()
    entity[AodhProps.STATE] = new_state
    return self._filter_and_cache_alarm(entity, old_alarm,
                                        self._filter_get_change,
                                        datetime_utils.utcnow(False))
def _add_resources_with_different_timestamps(self, consistency_interval,
                                             datasource_name,
                                             resource_type):
    """Add three resources of the given type to the graph.

    The resources cover the consistency states:
    - updated_resource: future timestamp (consistency keeps it)
    - outdated_resource: old timestamp (candidate for deletion-mark)
    - deleted_resource: old timestamp and already is_deleted==true
    """
    # add resources to the graph:
    # - updated_resource
    # - outdated_resource with an old timestamp
    # - deleted_resource with an old timestamp and is_deleted==true
    future_timestamp = \
        str(utcnow() + timedelta(seconds=2 * consistency_interval))
    past_timestamp = \
        str(utcnow() - timedelta(seconds=2 * consistency_interval - 1))

    updated_resource = self._create_resource(
        vitrage_id=resource_type + '1234',
        resource_type=resource_type,
        datasource_name=datasource_name,
        sample_timestamp=future_timestamp)
    outdated_resource = self._create_resource(
        vitrage_id=resource_type + '5678',
        resource_type=resource_type,
        datasource_name=datasource_name,
        sample_timestamp=past_timestamp)
    deleted_resource = self._create_resource(
        vitrage_id=resource_type + '9999',
        resource_type=resource_type,
        datasource_name=datasource_name,
        sample_timestamp=past_timestamp,
        is_deleted=True)

    self.graph.add_vertex(updated_resource)
    self.graph.add_vertex(outdated_resource)
    self.graph.add_vertex(deleted_resource)

    # get the list of vertices and verify all three were added
    resource_vertices = self.processor.entity_graph.get_vertices({
        VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
        VProps.VITRAGE_TYPE: resource_type
    })
    self.assertThat(resource_vertices, matchers.HasLength(3),
                    'Wrong number of vertices of type %s', resource_type)
def _push_events_to_queue(self, vertices, action):
    """Queue one consistency event with the given action per vertex."""
    for vertex in vertices:
        self.evaluator_queue.put({
            DSProps.ENTITY_TYPE: CONSISTENCY_DATASOURCE,
            DSProps.DATASOURCE_ACTION: DatasourceAction.UPDATE,
            DSProps.SAMPLE_DATE: str(utcnow()),
            DSProps.EVENT_TYPE: action,
            VProps.VITRAGE_ID: vertex[VProps.VITRAGE_ID],
        })
def _find_stale_entities(self):
    """Find non-Vitrage vertices whose sample timestamp is stale.

    A vertex is stale when its sample timestamp is older than twice
    the datasource snapshots interval. Vertices already marked as
    deleted are excluded, so they are not returned as "stale" again
    on every consistency cycle.
    """
    stale_threshold = str(utcnow() - timedelta(
        seconds=2 * self.conf.datasources.snapshots_interval))
    query = {
        'and': [
            {'!=': {VProps.TYPE: VITRAGE_TYPE}},
            {'<': {VProps.SAMPLE_TIMESTAMP: stale_threshold}},
            # Skip already-deleted vertices (consistent with the other
            # stale-entity query in this code base).
            {'==': {VProps.IS_DELETED: False}}
        ]
    }
    vertices = self.graph.get_vertices(query_dict=query)
    return set(self._filter_vertices_to_be_deleted(vertices))
def _find_old_deleted_entities(self):
    """Find vertices deleted longer ago than min_time_to_delete."""
    deletion_threshold = str(utcnow() - timedelta(
        seconds=self.conf.consistency.min_time_to_delete))
    query = {'and': [
        {'==': {VProps.IS_DELETED: True}},
        {'<': {VProps.SAMPLE_TIMESTAMP: deletion_threshold}},
    ]}
    old_deleted = self.graph.get_vertices(query_dict=query)
    return self._filter_vertices_to_be_deleted(old_deleted)
def _convert_alarm_state_transition_event(self, event):
    """Apply the new state from a transition notification to the alarm.

    If the event payload is malformed, the error is logged and the
    alarm is processed with its previous state.
    """
    old_alarm = self._old_alarm(event)
    entity = old_alarm.copy()
    try:
        new_state = event[AodhProps.DETAIL][AodhProps.STATE]
        entity[AodhProps.STATE] = new_state
    except Exception:
        LOG.exception("Failed to Convert alarm state transition event.")
    return self._filter_and_cache_alarm(entity, old_alarm,
                                        self._filter_get_change,
                                        datetime_utils.utcnow(False))
def _find_stale_entities(self):
    """Find active, non-Vitrage vertices with a stale sample timestamp.

    A vertex is stale when its sample timestamp is older than twice
    the datasource snapshots interval.
    """
    stale_threshold = str(utcnow() - timedelta(
        seconds=2 * self.conf.datasources.snapshots_interval))
    query = {'and': [
        {'!=': {VProps.TYPE: VITRAGE_TYPE}},
        {'<': {VProps.SAMPLE_TIMESTAMP: stale_threshold}},
        {'==': {VProps.IS_DELETED: False}},
    ]}
    stale = self.graph.get_vertices(query_dict=query)
    return set(self._filter_vertices_to_be_deleted(stale))
def _to_events(self, vertices, action):
    """Yield one consistency event with the given action per vertex."""
    for vertex in vertices:
        yield {
            DSProps.ENTITY_TYPE: CONSISTENCY_DATASOURCE,
            DSProps.DATASOURCE_ACTION: DatasourceAction.UPDATE,
            DSProps.SAMPLE_DATE: str(utcnow()),
            DSProps.EVENT_TYPE: action,
            VProps.VITRAGE_ID: vertex[VProps.VITRAGE_ID],
            VProps.ID: vertex.get(VProps.ID, None),
            VProps.VITRAGE_TYPE: vertex[VProps.VITRAGE_TYPE],
            VProps.VITRAGE_CATEGORY: vertex[VProps.VITRAGE_CATEGORY],
            VProps.IS_REAL_VITRAGE_ID: True,
        }
def _push_events_to_queue(self, vertices, action):
    """Send one consistency event with the given action per vertex."""
    for vertex in vertices:
        self.actions_callback('consistency', {
            DSProps.ENTITY_TYPE: CONSISTENCY_DATASOURCE,
            DSProps.DATASOURCE_ACTION: DatasourceAction.UPDATE,
            DSProps.SAMPLE_DATE: str(utcnow()),
            DSProps.EVENT_TYPE: action,
            VProps.VITRAGE_ID: vertex[VProps.VITRAGE_ID],
            VProps.ID: vertex.get(VProps.ID, None),
            VProps.VITRAGE_TYPE: vertex[VProps.VITRAGE_TYPE],
            VProps.VITRAGE_CATEGORY: vertex[VProps.VITRAGE_CATEGORY],
            VProps.IS_REAL_VITRAGE_ID: True,
        })
def _convert_alarm_creation_event(self, event):
    """Build a full alarm entity (common + detail + rule) and cache it."""
    alarm_info = event[AodhProps.DETAIL]

    entity = self._convert_alarm_common(event)
    entity.update(self._convert_alarm_detail(alarm_info))

    alarm_type = self._get_aodh_alarm_type(alarm_info)
    entity.update(
        self._convert_alarm_rule(alarm_type, alarm_info[AodhProps.RULE]))

    # No previous cached alarm exists for a newly created alarm.
    return self._filter_and_cache_alarm(entity, None,
                                        self._filter_get_erroneous,
                                        datetime_utils.utcnow(False))
def _is_highest_score(cls, db_actions, action_info):
    """Get the top action from the list and compare to action_info

    Actions are sorted according to:
    score - primary, ascending
    created_at - secondary, descending

    :return: True when ``action_info`` matches the top-ranked db
        action, or when there are no db actions at all.
    """
    if not db_actions:
        return True
    # min() with key (-score, created_at) selects the highest score;
    # ties on score are broken by the earliest creation time. A None
    # created_at falls back to "now", ranking it last among ties.
    highest_score_action = min(
        db_actions, key=lambda action: (-action.score, action.created_at
                                        or utcnow(False)))
    return highest_score_action.trigger == action_info.trigger_id and \
        highest_score_action.action_id == action_info.action_id
def add_def_template(self, def_template):
    """Validate a definition template and register it.

    The definition template is stored together with its validation
    result even when invalid, so its status can be reported.
    """
    result = def_template_syntax_validation(def_template)
    if not result.is_valid_config:
        LOG.info('Unable to load definition template, syntax err: %s'
                 % result.comment)
    else:
        result = DefValidator.def_template_content_validation(def_template)
        if not result.is_valid_config:
            LOG.info('Unable to load definition template, content err: %s'
                     % result.comment)

    current_time = datetime_utils.utcnow()
    include_uuid = uuidutils.generate_uuid()
    self._def_templates[str(include_uuid)] = Template(include_uuid,
                                                      def_template,
                                                      current_time,
                                                      result)
def _create_alarm(vitrage_id, alarm_type, project_id=None,
                  vitrage_resource_project_id=None, metadata=None):
    """Create an active, non-placeholder alarm vertex for tests."""
    alarm_props = dict(
        vitrage_category=EntityCategory.ALARM,
        vitrage_type=alarm_type,
        vitrage_sample_timestamp=None,
        update_timestamp=str(utcnow()),
        vitrage_is_deleted=False,
        vitrage_is_placeholder=False,
        # The alarm's entity id is its vitrage id.
        entity_id=vitrage_id,
        entity_state='active',
        project_id=project_id,
        vitrage_resource_project_id=vitrage_resource_project_id,
        metadata=metadata,
    )
    return graph_utils.create_vertex(vitrage_id, **alarm_props)
def _find_old_deleted_entities(self):
    """Find vertices deleted longer ago than min_time_to_delete."""
    deletion_threshold = str(utcnow() - timedelta(
        seconds=CONF.consistency.min_time_to_delete))
    query = {'and': [
        {'==': {VProps.VITRAGE_IS_DELETED: True}},
        {'<': {VProps.VITRAGE_SAMPLE_TIMESTAMP: deletion_threshold}},
    ]}
    old_deleted = self.graph.get_vertices(query_dict=query)
    return self._filter_vertices_to_be_deleted(old_deleted)
def test_update_entity_state(self):
    """An entity's state is changed by processor.update_entity."""
    # create instance event with host neighbor and check validity
    (vertex, neighbors, processor) =\
        self._create_and_check_entity(status='STARTING')

    # check added entity
    vertex = processor.entity_graph.get_vertex(vertex.vertex_id)
    self.assertEqual('STARTING', vertex.properties[VProps.STATE])

    # update instance event with state running
    vertex.properties[VProps.STATE] = 'RUNNING'
    vertex.properties[VProps.VITRAGE_SAMPLE_TIMESTAMP] = str(utcnow())
    processor.update_entity(vertex, neighbors)

    # check state (graph size must be unchanged by the update)
    self._check_graph(processor, self.NUM_VERTICES_AFTER_CREATION,
                      self.NUM_EDGES_AFTER_CREATION)
    vertex = processor.entity_graph.get_vertex(vertex.vertex_id)
    self.assertEqual('RUNNING', vertex.properties[VProps.STATE])
def test_update_entity_state(self):
    """An entity's state is changed by processor.update_entity."""
    # create instance event with host neighbor and check validity
    (vertex, neighbors, processor) =\
        self._create_and_check_entity(status='STARTING')

    # check added entity
    vertex = processor.entity_graph.get_vertex(vertex.vertex_id)
    self.assertEqual('STARTING', vertex.properties[VProps.STATE])

    # update instance event with state running
    vertex.properties[VProps.STATE] = 'RUNNING'
    vertex.properties[VProps.SAMPLE_TIMESTAMP] = str(utcnow())
    processor.update_entity(vertex, neighbors)

    # check state (graph size must be unchanged by the update)
    self._check_graph(processor, self.NUM_VERTICES_AFTER_CREATION,
                      self.NUM_EDGES_AFTER_CREATION)
    vertex = processor.entity_graph.get_vertex(vertex.vertex_id)
    self.assertEqual('RUNNING', vertex.properties[VProps.STATE])
def _periodic_process_setup_stage(self, consistency_interval):
    """Prepare the graph for the periodic-consistency test.

    Leaves three groups of instance vertices (fresh, deleted, deleted
    with an out-of-range timestamp) plus static and cinder.volume
    resources in the three consistency states.
    """
    self._create_processor_with_graph(processor=self.processor)
    current_timestamp = datetime.utcnow()
    # NOTE(review): 'datetime' here appears to be a project time-utils
    # module (datetime_delta is not stdlib) - offsets are in seconds.
    current_time = str(datetime.datetime_delta(0, current_timestamp))
    # NOTE(review): name suggests 1.5*interval but 1*interval is used -
    # confirm which offset is intended.
    time_1_5 = str(datetime.datetime_delta(
        1 * consistency_interval, current_timestamp))
    time_2 = str(datetime.datetime_delta(
        2 * consistency_interval + 1, current_timestamp))

    # set all vertices to be have timestamp that consistency won't get
    self._update_timestamp(
        self.processor.entity_graph.get_vertices(), time_1_5)

    # check number of instances in graph
    instance_vertices = self.processor.entity_graph.get_vertices({
        VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
        VProps.VITRAGE_TYPE: NOVA_INSTANCE_DATASOURCE
    })
    self.assertThat(instance_vertices,
                    matchers.HasLength(self.NUM_INSTANCES))

    # set current timestamp of part of the instances
    self._update_timestamp(instance_vertices[0:3], current_time)

    # set part of the instances as deleted
    for i in range(3, 6):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    # set part of the instances as deleted (with an out-of-range timestamp)
    for i in range(6, 9):
        instance_vertices[i][VProps.VITRAGE_IS_DELETED] = True
        instance_vertices[i][VProps.VITRAGE_SAMPLE_TIMESTAMP] = time_2
        self.processor.entity_graph.update_vertex(instance_vertices[i])

    self._add_resources_by_type(consistency_interval=consistency_interval,
                                datasource_name='static',
                                resource_type='nic')
    self._add_resources_by_type(consistency_interval=consistency_interval,
                                datasource_name='cinder.volume',
                                resource_type='cinder.volume')
def add_template(self, template_def):
    """Validate a template and register it.

    The template is stored together with its validation result even
    when invalid (so its status can be reported); scenarios are added
    only when the template is valid. The template key is the md5 of
    its definition, so identical templates share one entry.
    """
    current_time = datetime_utils.utcnow()

    result = syntax_validation(template_def)
    if not result.is_valid:
        LOG.info('Unable to load template: %s' % result.comment)
    else:
        result = content_validation(template_def)
        if not result.is_valid:
            LOG.info('Unable to load template: %s' % result.comment)

    template_uuid = md5(str(template_def).encode()).hexdigest()
    self.templates[str(template_uuid)] = Template(template_uuid,
                                                  template_def,
                                                  current_time,
                                                  result)
    if result.is_valid:
        template_data = TemplateData(template_def)
        self._add_template_scenarios(template_data)
def _create_resource(vitrage_id, resource_type, project_id=None,
                     datasource_name=None, sample_timestamp=None,
                     is_deleted=False):
    """Create an active resource vertex for tests.

    The datasource name defaults to the resource type when not given.
    """
    return graph_utils.create_vertex(
        vitrage_id,
        vitrage_category=EntityCategory.RESOURCE,
        vitrage_type=resource_type,
        vitrage_sample_timestamp=sample_timestamp,
        update_timestamp=str(utcnow()),
        vitrage_is_deleted=is_deleted,
        vitrage_is_placeholder=False,
        entity_id=vitrage_id,
        entity_state='active',
        project_id=project_id,
        datasource_name=datasource_name or resource_type,
    )
def _filter_and_cache_alarms(self, alarms, filter_):
    """Filter the fetched alarms and refresh the alarm cache.

    Alarms accepted by ``filter_`` (via ``_filter_and_cache_alarm``,
    which also refreshes the cache entry) are returned for a graph
    update. Cached alarms not refreshed in this poll are assumed
    deleted in the datasource: marked DELETE_ENTITY and dropped.
    """
    alarms_to_update = []

    # One timestamp object per poll; object identity ('is') is used
    # below to tell which cache entries were refreshed in this round.
    now = datetime_utils.utcnow(False)
    for alarm in alarms:
        alarm_key = self._alarm_key(alarm)
        old_alarm = self.cache.get(alarm_key, (None, None))[0]

        if self._filter_and_cache_alarm(alarm, old_alarm, filter_, now):
            alarms_to_update.append(alarm)

    # add alarms that were deleted
    # (i.e. the alarm definition was deleted from the datasource)
    values = list(self.cache.values())
    for cached_alarm, timestamp in values:
        if self._is_erroneous(cached_alarm) and timestamp is not now:
            LOG.debug('deleting cached_alarm %s', cached_alarm)
            cached_alarm[DSProps.EVENT_TYPE] = GraphAction.DELETE_ENTITY
            alarms_to_update.append(cached_alarm)
            self.cache.pop(self._alarm_key(cached_alarm))

    return alarms_to_update
def initializing_process(self, on_end_messages_func):
    """Initialization: wait for all datasource end messages, run the
    evaluator, then delete deduced alarms that were not re-deduced.

    :param on_end_messages_func: callback invoked once all end
        messages have been received
    """
    try:
        LOG.info('Init Started')

        if not self._wait_for_all_end_messages():
            LOG.error('Initialization - max retries reached')
        else:
            LOG.info('Initialization - All end messages were received')
            on_end_messages_func()

        # Timestamp is taken BEFORE the evaluator run: any deduced
        # alarm not refreshed after this point is considered old.
        timestamp = str(utcnow())
        self.evaluator.run_evaluator()

        if not self._wait_for_action(self.evaluator_queue.empty):
            LOG.error('Evaluator Queue Not Empty')

        self._mark_old_deduced_alarms_as_deleted(timestamp, self.graph,
                                                 self.evaluator_queue)
        self.status = self.FINISHED
        LOG.info('Init Finished')
    except Exception as e:
        LOG.exception('Init Failed: %s', e)
def mark_vertex_as_deleted(self, vertex):
    """Marks the vertex as is deleted, and updates deletion timestamp"""
    vertex[VProps.SAMPLE_TIMESTAMP] = str(utcnow())
    vertex[VProps.IS_DELETED] = True
    self.update_vertex(vertex)
def mark_edge_as_deleted(self, edge):
    """Marks the edge as is deleted, and updates delete timestamp"""
    edge[EProps.UPDATE_TIMESTAMP] = str(utcnow())
    edge[EProps.IS_DELETED] = True
    self.update_edge(edge)
def _add_sampling_time(entity):
    """Stamp the entity with the current sample date."""
    sample_date = str(datetime_utils.utcnow())
    entity[DSProps.SAMPLE_DATE] = sample_date
def _add_default_properties(event):
    """Fill in the default datasource action/type and timestamps."""
    event[DSProps.ENTITY_TYPE] = VITRAGE_TYPE
    event[DSProps.DATASOURCE_ACTION] = AType.UPDATE
    # utcnow(False) is the timezone-naive variant of the helper.
    event[VProps.UPDATE_TIMESTAMP] = str(datetime_utils.utcnow(False))
    event[VProps.SAMPLE_TIMESTAMP] = str(datetime_utils.utcnow())