def _format_action_exec_result(self, action_node, liveaction_db, created_at, updated_at,
                               error=None):
    """
    Format ActionExecution result so it can be used in the final action result output.

    :param action_node: Chain node this result belongs to.
    :param liveaction_db: LiveAction DB object (may be None if scheduling failed).
    :param created_at: Creation timestamp; must be a ``datetime.datetime``.
    :param updated_at: Update timestamp; must be a ``datetime.datetime``.
    :param error: Optional error string which takes precedence over the liveaction result.

    :rtype: ``dict``
    """
    # Explicit checks instead of asserts - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if not isinstance(created_at, datetime.datetime):
        raise TypeError('created_at must be a datetime.datetime (got %s)' % type(created_at))
    if not isinstance(updated_at, datetime.datetime):
        raise TypeError('updated_at must be a datetime.datetime (got %s)' % type(updated_at))

    result = {}

    execution_db = None
    if liveaction_db:
        execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id))

    result['id'] = action_node.name
    result['name'] = action_node.name
    result['execution_id'] = str(execution_db.id) if execution_db else None
    result['workflow'] = None

    result['created_at'] = isotime.format(dt=created_at)
    result['updated_at'] = isotime.format(dt=updated_at)

    # A missing liveaction or an explicit error is reported as a failure
    if error or not liveaction_db:
        result['state'] = LIVEACTION_STATUS_FAILED
    else:
        result['state'] = liveaction_db.status

    if error:
        result['result'] = error
    else:
        result['result'] = liveaction_db.result

    return result
def from_model(cls, model):
    """Convert a DB model to its API representation, serializing timestamps."""
    doc = super(cls, cls)._from_model(model)
    # Only overwrite the timestamps which are actually set on the model
    for field in ('start_timestamp', 'end_timestamp'):
        value = getattr(model, field)
        if value:
            doc[field] = isotime.format(value, offset=False)
    return cls(**doc)
def test_notify_triggers_jinja_patterns(self, dispatch):
    """Jinja patterns in notify message / data are rendered into the payload."""
    # Build a succeeded liveaction whose notify settings contain Jinja patterns.
    liveaction_db = LiveActionDB(action='core.local')
    liveaction_db.id = bson.ObjectId()
    liveaction_db.description = ''
    liveaction_db.status = 'succeeded'
    liveaction_db.parameters = {'cmd': 'mamma mia', 'runner_foo': 'foo'}
    on_success = NotificationSubSchema(
        message='Command {{action_parameters.cmd}} succeeded.',
        data={'stdout': '{{action_results.stdout}}'})
    liveaction_db.notify = NotificationSchema(on_success=on_success)
    liveaction_db.start_timestamp = date_utils.get_datetime_utc_now()
    liveaction_db.end_timestamp = liveaction_db.start_timestamp + datetime.timedelta(seconds=50)
    LiveAction.add_or_update(liveaction_db)

    execution = MOCK_EXECUTION
    execution.liveaction = vars(LiveActionAPI.from_model(liveaction_db))
    execution.status = liveaction_db.status

    notifier = Notifier(connection=None, queues=[])
    notifier.process(execution)

    expected_payload = {
        'status': 'succeeded',
        'start_timestamp': isotime.format(liveaction_db.start_timestamp),
        'route': 'notify.default',
        'runner_ref': 'local-shell-cmd',
        'channel': 'notify.default',
        'message': u'Command mamma mia succeeded.',
        'data': {'result': '{}', 'stdout': 'stuff happens'},
        'action_ref': u'core.local',
        'execution_id': str(MOCK_EXECUTION.id),
        'end_timestamp': isotime.format(liveaction_db.end_timestamp),
    }
    dispatch.assert_called_once_with('core.st2.generic.notifytrigger',
                                     payload=expected_payload, trace_context={})
    # NOTE(review): a second process() call follows the assertion - presumably
    # to verify re-processing does not raise; confirm this is intentional.
    notifier.process(execution)
def _format_action_exec_result(self, action_node, liveaction_db, created_at, updated_at,
                               error=None):
    """
    Format ActionExecution result so it can be used in the final action result output.

    :param action_node: Chain node this result belongs to.
    :param liveaction_db: LiveAction DB object (may be None if scheduling failed).
    :param created_at: Creation timestamp; must be a ``datetime.datetime``.
    :param updated_at: Update timestamp; must be a ``datetime.datetime``.
    :param error: Optional error string which takes precedence over the liveaction result.

    :rtype: ``dict``
    """
    # Explicit checks instead of asserts - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if not isinstance(created_at, datetime.datetime):
        raise TypeError('created_at must be a datetime.datetime (got %s)' % type(created_at))
    if not isinstance(updated_at, datetime.datetime):
        raise TypeError('updated_at must be a datetime.datetime (got %s)' % type(updated_at))

    result = {}

    execution_db = None
    if liveaction_db:
        execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id))

    result['id'] = action_node.name
    result['name'] = action_node.name
    result['execution_id'] = str(execution_db.id) if execution_db else None
    result['workflow'] = None

    result['created_at'] = isotime.format(dt=created_at)
    result['updated_at'] = isotime.format(dt=updated_at)

    # A missing liveaction or an explicit error is reported as a failure
    if error or not liveaction_db:
        result['state'] = LIVEACTION_STATUS_FAILED
    else:
        result['state'] = liveaction_db.status

    if error:
        result['result'] = error
    else:
        result['result'] = liveaction_db.result

    return result
def pull_to_dict(pull):
    """
    Serialize a PyGithub PullRequest object into a plain dict.

    :param pull: PyGithub pull request object.
    :rtype: ``dict``
    """
    result = {}

    author = user_to_dict(pull.user)
    assignee = user_to_dict(pull.assignee)
    merged_by = user_to_dict(pull.merged_by)

    result['id'] = pull.id
    result['pr_id'] = int(re.sub(r'.*/([0-9]+)(#.*)?', r'\1', pull.html_url))
    result['author'] = author
    result['assign'] = assignee
    result['title'] = pull.title
    result['body'] = pull.body
    result['url'] = pull.html_url
    result['html_url'] = pull.html_url
    result['base'] = pull.base.ref
    result['head'] = pull.head.ref
    result['state'] = pull.state
    result['merged'] = pull.merged
    # noinspection SpellCheckingInspection
    result['mergeable_state'] = pull.mergeable_state
    result['merge_commit_sha'] = pull.merge_commit_sha

    if pull.labels:
        labels = [label_to_dict(label) for label in pull.labels]
    else:
        labels = []
    result['labels'] = labels

    # Fetch the commits only once - get_commits() issues a remote API request
    # on each call (the original called it twice)
    pull_commits = pull.get_commits()
    if pull_commits:
        commits = [commit_to_dict(commit) for commit in pull_commits]
    else:
        commits = []
    result['commits'] = commits

    # Note: We convert it to a serialize type (string)
    if pull.created_at:
        created_at = isotime.format(pull.created_at)
    else:
        created_at = None

    if pull.closed_at:
        closed_at = isotime.format(pull.closed_at)
    else:
        closed_at = None

    if pull.merged_at:
        merged_at = isotime.format(pull.merged_at)
    else:
        merged_at = None

    result['created_at'] = created_at
    result['closed_at'] = closed_at
    result['merged_at'] = merged_at
    result['merged_by'] = merged_by

    return result
def setUpClass(cls):
    """Populate the test DB with a randomized mix of chain/local executions."""
    super(TestActionExecutionFilters, cls).setUpClass()

    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100
    cls.refs = {}
    cls.start_timestamps = []

    chain_type = {
        "trigger": copy.deepcopy(fixture.ARTIFACTS["trigger"]),
        "trigger_type": copy.deepcopy(fixture.ARTIFACTS["trigger_type"]),
        "trigger_instance": copy.deepcopy(fixture.ARTIFACTS["trigger_instance"]),
        "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]),
        "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]),
        "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]),
        "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["workflow"]),
        "context": copy.deepcopy(fixture.ARTIFACTS["context"]),
        "children": [],
    }
    local_type = {
        "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]),
        "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]),
        "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]),
    }
    cls.fake_types = [chain_type, local_type]

    def assign_parent(child):
        # Attach the child to a randomly chosen "chain" execution, if any exist yet
        candidates = [v for k, v in cls.refs.items() if v.action["name"] == "chain"]
        if not candidates:
            return
        parent = random.choice(candidates)
        child["parent"] = str(parent.id)
        parent.children.append(child["id"])
        cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for index in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=index)

        data = copy.deepcopy(random.choice(cls.fake_types))
        data["id"] = obj_id
        data["start_timestamp"] = isotime.format(timestamp, offset=False)
        data["end_timestamp"] = isotime.format(timestamp, offset=False)
        data["status"] = data["liveaction"]["status"]
        data["result"] = data["liveaction"]["result"]

        if data["action"]["name"] == "local" and random.choice([True, False]):
            assign_parent(data)

        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def pull_to_dict(pull):
    """
    Serialize a PyGithub PullRequest object into a plain dict.

    :param pull: PyGithub pull request object.
    :rtype: ``dict``
    """
    result = {}

    author = user_to_dict(pull.user)
    assignee = user_to_dict(pull.assignee)
    merged_by = user_to_dict(pull.merged_by)

    result['id'] = pull.id
    result['pr_id'] = int(re.sub(r'.*/([0-9]+)(#.*)?', r'\1', pull.html_url))
    result['author'] = author
    result['assign'] = assignee
    result['title'] = pull.title
    result['body'] = pull.body
    result['url'] = pull.html_url
    result['base'] = pull.base.ref
    result['head'] = pull.head.ref
    result['state'] = pull.state
    result['merged'] = pull.merged
    result['mergeable_state'] = pull.mergeable_state
    result['merge_commit_sha'] = pull.merge_commit_sha

    if pull.labels:
        labels = [label_to_dict(label) for label in pull.labels]
    else:
        labels = []
    result['labels'] = labels

    # Fetch the commits only once - get_commits() issues a remote API request
    # on each call (the original called it twice)
    pull_commits = pull.get_commits()
    if pull_commits:
        commits = [commit_to_dict(commit) for commit in pull_commits]
    else:
        commits = []
    result['commits'] = commits

    # Note: We convert it to a serialize type (string)
    if pull.created_at:
        created_at = isotime.format(pull.created_at)
    else:
        created_at = None

    if pull.closed_at:
        closed_at = isotime.format(pull.closed_at)
    else:
        closed_at = None

    if pull.merged_at:
        merged_at = isotime.format(pull.merged_at)
    else:
        merged_at = None

    result['created_at'] = created_at
    result['closed_at'] = closed_at
    result['merged_at'] = merged_at
    result['merged_by'] = merged_by

    return result
def test_format(self):
    """isotime.format() handles datetime objects, strings and epoch timestamps."""
    dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))

    # datetime object - all four usec/offset combinations
    cases = [
        (True, True, '2000-01-01T12:00:00.000000+00:00'),
        (True, False, '2000-01-01T12:00:00.000000Z'),
        (False, True, '2000-01-01T12:00:00+00:00'),
        (False, False, '2000-01-01T12:00:00Z'),
    ]
    for usec, offset, expected in cases:
        self.assertEqual(isotime.format(dt, usec=usec, offset=offset), expected)

    # str / unicode values are passed through unchanged
    dt_str = '2000-01-01T12:00:00Z'
    dt_unicode = u'2000-01-01T12:00:00Z'
    self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
    self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)

    # unix timestamp (epoch)
    epoch = 1557390483
    self.assertEqual(isotime.format(epoch, usec=True, offset=True),
                     '2019-05-09T08:28:03.000000+00:00')
    self.assertEqual(isotime.format(epoch, usec=False, offset=False),
                     '2019-05-09T08:28:03Z')
    self.assertEqual(isotime.format(epoch, usec=False, offset=True),
                     '2019-05-09T08:28:03+00:00')
def from_model(cls, model, mask_secrets=False):
    """Convert a DB model to API form; serialize timestamps and notify settings."""
    doc = super(cls, cls)._from_model(model, mask_secrets=mask_secrets)
    for field in ('start_timestamp', 'end_timestamp'):
        timestamp = getattr(model, field)
        if timestamp:
            doc[field] = isotime.format(timestamp, offset=False)
    if getattr(model, 'notify', None):
        doc['notify'] = NotificationsHelper.from_model(model.notify)
    return cls(**doc)
def from_model(cls, model):
    """Convert a DB model to API form; serialize timestamps and notify settings."""
    doc = super(cls, cls)._from_model(model)
    start = model.start_timestamp
    end = model.end_timestamp
    if start:
        doc['start_timestamp'] = isotime.format(start, offset=False)
    if end:
        doc['end_timestamp'] = isotime.format(end, offset=False)
    notify = getattr(model, 'notify', None)
    if notify:
        doc['notify'] = NotificationsHelper.from_model(notify)
    return cls(**doc)
def from_model(cls, model, mask_secrets=False):
    """Serialize an execution DB model into its API representation."""
    doc = super(cls, cls)._from_model(model, mask_secrets=mask_secrets)
    for field in ("start_timestamp", "end_timestamp"):
        value = getattr(model, field)
        if value:
            doc[field] = isotime.format(value, offset=False)
    notify = getattr(model, "notify", None)
    if notify:
        doc["notify"] = NotificationsHelper.from_model(notify)
    return cls(**doc)
def from_model(cls, model):
    """Convert a DB model to an API object, dropping falsy attributes."""
    doc = cls._from_model(model)
    doc['start_timestamp'] = isotime.format(model.start_timestamp, offset=False)
    end_timestamp = model.end_timestamp
    doc['end_timestamp'] = (isotime.format(end_timestamp, offset=False)
                            if end_timestamp else end_timestamp)
    # Falsy values are omitted from the resulting API object
    attrs = {}
    for attr, value in six.iteritems(doc):
        if value:
            attrs[attr] = value
    return cls(**attrs)
def test_request(self):
    """Requesting an action produces a matching execution in REQUESTED state."""
    request, execution = self._submit_request()

    self.assertIsNotNone(execution)
    self.assertEqual(execution.id, request.id)

    expected_action = '.'.join([self.actiondb.pack, self.actiondb.name])
    self.assertEqual(execution.action, expected_action)
    self.assertEqual(execution.context['user'], request.context['user'])
    self.assertDictEqual(execution.parameters, request.parameters)
    self.assertEqual(execution.status, action_constants.LIVEACTION_STATUS_REQUESTED)
    self.assertTrue(execution.notify is not None)
    # mongoengine DateTimeField stores datetime only up to milliseconds
    self.assertEqual(isotime.format(execution.start_timestamp, usec=False),
                     isotime.format(request.start_timestamp, usec=False))
def setUpClass(cls):
    """Populate the test DB with a randomized mix of chain/local executions."""
    super(TestActionExecutionFilters, cls).setUpClass()
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100
    cls.refs = {}
    cls.start_timestamps = []
    cls.fake_types = [
        {
            "trigger": copy.deepcopy(fixture.ARTIFACTS["trigger"]),
            "trigger_type": copy.deepcopy(fixture.ARTIFACTS["trigger_type"]),
            "trigger_instance": copy.deepcopy(fixture.ARTIFACTS["trigger_instance"]),
            "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]),
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["workflow"]),
            "context": copy.deepcopy(fixture.ARTIFACTS["context"]),
            "children": [],
        },
        {
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]),
        },
    ]

    def assign_parent(child):
        # Fix: dict.iteritems() was removed in Python 3 - use items() instead
        # (consistent with the sibling setUpClass variants in this codebase)
        candidates = [v for k, v in cls.refs.items() if v.action["name"] == "chain"]
        if candidates:
            parent = random.choice(candidates)
            child["parent"] = str(parent.id)
            parent.children.append(child["id"])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data["id"] = obj_id
        data["start_timestamp"] = isotime.format(timestamp, offset=False)
        data["end_timestamp"] = isotime.format(timestamp, offset=False)
        data["status"] = data["liveaction"]["status"]
        data["result"] = data["liveaction"]["result"]
        if fake_type["action"]["name"] == "local" and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def setUpClass(cls):
    """Seed the DB with num_records randomized chain/local execution records."""
    super(TestActionExecutionFilters, cls).setUpClass()

    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100
    cls.refs = {}
    cls.start_timestamps = []

    artifacts = fixture.ARTIFACTS
    cls.fake_types = [
        {
            'trigger': copy.deepcopy(artifacts['trigger']),
            'trigger_type': copy.deepcopy(artifacts['trigger_type']),
            'trigger_instance': copy.deepcopy(artifacts['trigger_instance']),
            'rule': copy.deepcopy(artifacts['rule']),
            'action': copy.deepcopy(artifacts['actions']['chain']),
            'runner': copy.deepcopy(artifacts['runners']['action-chain']),
            'liveaction': copy.deepcopy(artifacts['liveactions']['workflow']),
            'context': copy.deepcopy(artifacts['context']),
            'children': []
        },
        {
            'action': copy.deepcopy(artifacts['actions']['local']),
            'runner': copy.deepcopy(artifacts['runners']['run-local']),
            'liveaction': copy.deepcopy(artifacts['liveactions']['task1'])
        }
    ]

    def assign_parent(child):
        # Pick a random existing "chain" execution to act as the parent
        candidates = [v for k, v in cls.refs.items() if v.action['name'] == 'chain']
        if not candidates:
            return
        parent = random.choice(candidates)
        child['parent'] = str(parent.id)
        parent.children.append(child['id'])
        cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for offset_seconds in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=offset_seconds)

        data = copy.deepcopy(random.choice(cls.fake_types))
        data['id'] = obj_id
        data['start_timestamp'] = isotime.format(timestamp, offset=False)
        data['end_timestamp'] = isotime.format(timestamp, offset=False)
        data['status'] = data['liveaction']['status']
        data['result'] = data['liveaction']['result']

        if data['action']['name'] == 'local' and random.choice([True, False]):
            assign_parent(data)

        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)

    cls.start_timestamps = sorted(cls.start_timestamps)
def from_model(cls, model, mask_secrets=False):
    """Convert a DB model to an API object, dropping falsy attributes."""
    doc = cls._from_model(model, mask_secrets=mask_secrets)
    doc['start_timestamp'] = isotime.format(model.start_timestamp, offset=False)
    end = model.end_timestamp
    doc['end_timestamp'] = isotime.format(end, offset=False) if end else end
    attrs = {k: v for k, v in six.iteritems(doc) if v}
    return cls(**attrs)
def from_model(cls, model):
    """Serialize the nested execution timestamps to ISO8601 strings."""
    doc = cls._from_model(model)
    execution = doc['execution']
    start_timestamp = isotime.format(execution['start_timestamp'], offset=False)
    end_timestamp = execution.get('end_timestamp', None)
    if end_timestamp is not None:
        end_timestamp = isotime.format(end_timestamp, offset=False)
    execution['end_timestamp'] = end_timestamp
    execution['start_timestamp'] = start_timestamp
    attrs = {k: v for k, v in six.iteritems(doc) if v}
    return cls(**attrs)
def from_model(cls, model, mask_secrets=False):
    """API form with ISO timestamps; log entry timestamps are serialized too."""
    doc = cls._from_model(model, mask_secrets=mask_secrets)
    doc['start_timestamp'] = isotime.format(model.start_timestamp, offset=False)
    end = model.end_timestamp
    doc['end_timestamp'] = isotime.format(end, offset=False) if end else end
    # Each log entry carries its own timestamp which also needs serializing
    for entry in doc.get('log', []):
        entry['timestamp'] = isotime.format(entry['timestamp'], offset=False)
    attrs = {k: v for k, v in six.iteritems(doc) if v}
    return cls(**attrs)
def test_req_non_workflow_action(self):
    """Requesting a non-workflow action yields a matching REQUESTED execution."""
    actiondb = self.actiondbs[ACTION['name']]
    req, ex = self._submit_request(action_ref=ACTION_REF)

    self.assertIsNotNone(ex)
    self.assertEqual(ex.action_is_workflow, False)
    self.assertEqual(ex.id, req.id)

    expected_action = '.'.join([actiondb.pack, actiondb.name])
    self.assertEqual(ex.action, expected_action)
    self.assertEqual(ex.context['user'], req.context['user'])
    self.assertDictEqual(ex.parameters, req.parameters)
    self.assertEqual(ex.status, action_constants.LIVEACTION_STATUS_REQUESTED)
    self.assertTrue(ex.notify is not None)
    # mongoengine DateTimeField stores datetime only up to milliseconds
    self.assertEqual(isotime.format(ex.start_timestamp, usec=False),
                     isotime.format(req.start_timestamp, usec=False))
def _format_action_exec_result(self, action_node, liveaction_db, created_at, updated_at,
                               error=None):
    """
    Format ActionExecution result so it can be used in the final action result output.

    :rtype: ``dict``
    """
    # Validate timestamps up front; TypeError messages preserved verbatim
    if not isinstance(created_at, datetime.datetime):
        raise TypeError(
            f"The created_at is not a datetime object was({type(created_at)})."
        )
    if not isinstance(updated_at, datetime.datetime):
        raise TypeError(
            f"The updated_at is not a datetime object was({type(updated_at)})."
        )

    execution_db = None
    if liveaction_db:
        execution_db = ActionExecution.get(liveaction__id=str(liveaction_db.id))

    result = {
        "id": action_node.name,
        "name": action_node.name,
        "execution_id": str(execution_db.id) if execution_db else None,
        "liveaction_id": str(liveaction_db.id) if liveaction_db else None,
        "workflow": None,
        "created_at": isotime.format(dt=created_at),
        "updated_at": isotime.format(dt=updated_at),
    }

    # A missing liveaction or an explicit error is reported as a failure
    if error or not liveaction_db:
        result["state"] = action_constants.LIVEACTION_STATUS_FAILED
    else:
        result["state"] = liveaction_db.status

    result["result"] = error if error else liveaction_db.result

    return result
def from_model(cls, model, mask_secrets=False):
    """Convert a trigger instance DB model to API form with an ISO timestamp."""
    instance = cls._from_model(model, mask_secrets=mask_secrets)
    occurrence_time = instance.get('occurrence_time', None)
    if occurrence_time:
        instance['occurrence_time'] = isotime.format(occurrence_time, offset=False)
    return cls(**instance)
def from_model(cls, model, mask_secrets=True):
    """Convert a KeyValuePair DB model to API form, decrypting if requested."""
    if not KeyValuePairAPI.crypto_setup:
        KeyValuePairAPI._setup_crypto()

    doc = cls._from_model(model, mask_secrets=mask_secrets)

    if getattr(model, 'expire_timestamp', None) and model.expire_timestamp:
        doc['expire_timestamp'] = isotime.format(model.expire_timestamp, offset=False)

    secret = getattr(model, 'secret', False)
    encrypted = bool(secret)

    # Only decrypt when the caller explicitly asked for unmasked secrets
    if secret and not mask_secrets:
        doc['value'] = symmetric_decrypt(KeyValuePairAPI.crypto_key, model.value)
        encrypted = False

    scope = getattr(model, 'scope', SYSTEM_SCOPE)
    if scope:
        doc['scope'] = scope

    key = doc.get('name', None)
    if scope == USER_SCOPE and key:
        # User-scoped keys are stored as "<user>:<name>" references
        doc['user'] = UserKeyReference.get_user(key)
        doc['name'] = UserKeyReference.get_name(key)

    doc['encrypted'] = encrypted
    attrs = {k: v for k, v in six.iteritems(doc) if v is not None}
    return cls(**attrs)
def _purge_action_executions(self):
    """
    Purge action executions and corresponding live action, stdout and stderr
    object which match the criteria defined in the config.
    """
    LOG.info('Performing garbage collection for action executions and related objects')

    utc_now = get_datetime_utc_now()
    timestamp = (utc_now - datetime.timedelta(days=self._action_executions_ttl))

    # Another sanity check to make sure we don't delete new executions
    if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)):
        raise ValueError('Calculated timestamp would violate the minimum TTL constraint')

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info('Deleting action executions older than: %s' % (timestamp_str))

    # Explicit check instead of an assert - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if timestamp >= utc_now:
        raise ValueError('Calculated timestamp (%s) is later than now in UTC (%s).' %
                         (timestamp, utc_now))

    try:
        purge_executions(logger=LOG, timestamp=timestamp)
    except Exception as e:
        LOG.exception('Failed to delete executions: %s' % (six.text_type(e)))

    return True
def create_token(username, ttl=None, metadata=None):
    """
    :param username: Username of the user to create the token for. If the account for this user
                     doesn't exist yet it will be created.
    :type username: ``str``

    :param ttl: Token TTL (in seconds).
    :type ttl: ``int``

    :param metadata: Optional metadata to associate with the token.
    :type metadata: ``dict``
    """
    # Clamp the TTL to the configured maximum
    if not ttl or ttl > cfg.CONF.auth.token_ttl:
        ttl = cfg.CONF.auth.token_ttl

    if username:
        try:
            User.get_by_name(username)
        except Exception:
            # NOTE: narrowed from a bare "except:" which would also swallow
            # SystemExit / KeyboardInterrupt. User doesn't exist yet - create it.
            user = UserDB(name=username)
            User.add_or_update(user)
            LOG.audit('Registered new user "%s".' % username)

    token = uuid.uuid4().hex
    expiry = datetime.datetime.utcnow() + datetime.timedelta(seconds=ttl)
    expiry = isotime.add_utc_tz(expiry)

    token = TokenDB(user=username, token=token, expiry=expiry, metadata=metadata)
    Token.add_or_update(token)

    LOG.audit('Access granted to %s with the token set to expire at "%s".' %
              ('user "%s"' % username if username else "an anonymous user",
               isotime.format(expiry, offset=False)))

    return token
def test_token_model_null_user(self):
    """A token with a null user must be rejected by the persistence layer."""
    now = date_utils.get_datetime_utc_now()
    token_api = TokenAPI(user=None, token=uuid.uuid4().hex, expiry=isotime.format(now))
    self.assertRaises(ValueError, Token.add_or_update, TokenAPI.to_model(token_api))
def _purge_action_executions(self):
    """
    Purge action executions and corresponding live action, stdout and stderr
    object which match the criteria defined in the config.
    """
    LOG.info(
        'Performing garbage collection for action executions and related objects'
    )

    utc_now = get_datetime_utc_now()
    timestamp = (utc_now - datetime.timedelta(days=self._action_executions_ttl))

    # Another sanity check to make sure we don't delete new executions
    if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)):
        raise ValueError(
            'Calculated timestamp would violate the minimum TTL constraint'
        )

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info('Deleting action executions older than: %s' % (timestamp_str))

    # Explicit check instead of an assert - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if timestamp >= utc_now:
        raise ValueError('Calculated timestamp (%s) is later than now in UTC (%s).' %
                         (timestamp, utc_now))

    try:
        purge_executions(logger=LOG, timestamp=timestamp)
    except Exception as e:
        LOG.exception('Failed to delete executions: %s' % (six.text_type(e)))

    return True
def _purge_action_executions_output(self):
    """Purge execution output objects older than the configured TTL."""
    utc_now = get_datetime_utc_now()
    cutoff = utc_now - datetime.timedelta(days=self._action_executions_output_ttl)

    # Sanity check so we never delete objects newer than the minimum TTL
    minimum_cutoff = utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS_EXECUTION_OUTPUT)
    if cutoff > minimum_cutoff:
        raise ValueError(
            "Calculated timestamp would violate the minimum TTL constraint"
        )

    LOG.info("Deleting action executions output objects older than: %s" %
             (isotime.format(dt=cutoff)))

    if cutoff >= utc_now:
        raise ValueError(f"Calculated timestamp ({cutoff}) is"
                         f" later than now in UTC ({utc_now}).")

    try:
        purge_execution_output_objects(logger=LOG, timestamp=cutoff)
    except Exception as e:
        LOG.exception("Failed to delete execution output objects: %s" %
                      (six.text_type(e)))

    return True
def from_model(cls, model):
    """Serialize the nested execution start timestamp to an ISO8601 string."""
    doc = cls._from_model(model)
    doc['execution']['start_timestamp'] = isotime.format(
        doc['execution']['start_timestamp'], offset=False)
    attrs = {k: v for k, v in six.iteritems(doc) if v}
    return cls(**attrs)
def _purge_action_executions_output(self):
    """Purge execution output objects older than the configured TTL."""
    LOG.info(
        'Performing garbage collection for action executions output objects'
    )

    utc_now = get_datetime_utc_now()
    timestamp = (
        utc_now - datetime.timedelta(days=self._action_executions_output_ttl))

    # Another sanity check to make sure we don't delete new objects
    if timestamp > (utc_now - datetime.timedelta(
            days=MINIMUM_TTL_DAYS_EXECUTION_OUTPUT)):
        raise ValueError(
            'Calculated timestamp would violate the minimum TTL constraint'
        )

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info('Deleting action executions output objects older than: %s' %
             (timestamp_str))

    # Explicit check instead of an assert - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if timestamp >= utc_now:
        raise ValueError('Calculated timestamp (%s) is later than now in UTC (%s).' %
                         (timestamp, utc_now))

    try:
        purge_execution_output_objects(logger=LOG, timestamp=timestamp)
    except Exception as e:
        LOG.exception('Failed to delete execution output objects: %s' %
                      (six.text_type(e)))

    return True
def _purge_rule_enforcements(self):
    """
    Purge rule enforcements which match the criteria defined in the config.
    """
    utc_now = get_datetime_utc_now()
    cutoff = utc_now - datetime.timedelta(days=self._rule_enforcements_ttl)

    # Sanity check so we never delete objects newer than the minimum TTL
    minimum_cutoff = utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)
    if cutoff > minimum_cutoff:
        raise ValueError(
            "Calculated timestamp would violate the minimum TTL constraint"
        )

    LOG.info("Deleting rule enforcements older than: %s" % (isotime.format(dt=cutoff)))

    if cutoff >= utc_now:
        raise ValueError(f"Calculated timestamp ({cutoff}) is"
                         f" later than now in UTC ({utc_now}).")

    try:
        purge_rule_enforcements(logger=LOG, timestamp=cutoff)
    except Exception as e:
        LOG.exception("Failed to delete rule enforcements: %s" % (six.text_type(e)))

    return True
def _purge_task_executions(self):
    """
    Purge workflow task executions and corresponding live action, stdout
    and stderr object which match the criteria defined in the config.
    """
    utc_now = get_datetime_utc_now()
    timestamp = utc_now - datetime.timedelta(days=self._task_executions_ttl)

    # Another sanity check to make sure we don't delete new executions
    if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)):
        raise ValueError(
            "Calculated timestamp would violate the minimum TTL constraint"
        )

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info("Deleting workflow task executions older than: %s" % (timestamp_str))

    # Explicit check instead of an assert - asserts are stripped when Python
    # runs with optimizations enabled ("-O"); matches the sibling purge methods
    if timestamp >= utc_now:
        raise ValueError(f"Calculated timestamp ({timestamp}) is"
                         f" later than now in UTC ({utc_now}).")

    try:
        purge_task_executions(logger=LOG, timestamp=timestamp)
    except Exception as e:
        LOG.exception("Failed to delete workflow task executions: %s" %
                      (six.text_type(e)))

    return True
def _purge_trigger_instances(self):
    """
    Purge trigger instances which match the criteria defined in the config.
    """
    utc_now = get_datetime_utc_now()
    timestamp = (utc_now - datetime.timedelta(days=self._trigger_instances_ttl))

    # Another sanity check to make sure we don't delete new executions
    if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS)):
        raise ValueError(
            'Calculated timestamp would violate the minimum TTL constraint'
        )

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info('Deleting trigger instances older than: %s' % (timestamp_str))

    # Explicit check instead of an assert - asserts are stripped when Python
    # runs with optimizations enabled ("-O")
    if timestamp >= utc_now:
        raise ValueError('Calculated timestamp (%s) is later than now in UTC (%s).' %
                         (timestamp, utc_now))

    try:
        purge_trigger_instances(logger=LOG, timestamp=timestamp)
    except Exception as e:
        # Fixed log message - the original read "Failed to trigger instances"
        LOG.exception('Failed to delete trigger instances: %s' % (six.text_type(e)))

    return True
def from_model(cls, model, mask_secrets=True):
    """Convert a KeyValuePair DB model to API form, decrypting if requested."""
    if not KeyValuePairAPI.crypto_setup:
        KeyValuePairAPI._setup_crypto()

    doc = cls._from_model(model, mask_secrets=mask_secrets)

    if getattr(model, "expire_timestamp", None) and model.expire_timestamp:
        doc["expire_timestamp"] = isotime.format(model.expire_timestamp, offset=False)

    secret = getattr(model, "secret", False)
    encrypted = bool(secret)

    # Only decrypt when the caller explicitly asked for unmasked secrets
    if secret and not mask_secrets:
        doc["value"] = symmetric_decrypt(KeyValuePairAPI.crypto_key, model.value)
        encrypted = False

    scope = getattr(model, "scope", SYSTEM_SCOPE)
    if scope:
        doc["scope"] = scope

    key = doc.get("name", None)
    if scope in (USER_SCOPE, FULL_USER_SCOPE) and key:
        # User-scoped keys are stored as "<user>:<name>" references
        doc["user"] = UserKeyReference.get_user(key)
        doc["name"] = UserKeyReference.get_name(key)

    doc["encrypted"] = encrypted
    attrs = {k: v for k, v in six.iteritems(doc) if v is not None}
    return cls(**attrs)
def from_model(cls, model, mask_secrets=True):
    """Convert a KeyValuePair DB model to API form, decrypting if requested."""
    if not KeyValuePairAPI.crypto_setup:
        KeyValuePairAPI._setup_crypto()

    doc = cls._from_model(model, mask_secrets=mask_secrets)

    if getattr(model, 'expire_timestamp', None) and model.expire_timestamp:
        doc['expire_timestamp'] = isotime.format(model.expire_timestamp, offset=False)

    secret = getattr(model, 'secret', False)
    encrypted = bool(secret)

    # Only decrypt when the caller explicitly asked for unmasked secrets
    if secret and not mask_secrets:
        doc['value'] = symmetric_decrypt(KeyValuePairAPI.crypto_key, model.value)
        encrypted = False

    scope = getattr(model, 'scope', SYSTEM_SCOPE)
    if scope:
        doc['scope'] = scope

    key = doc.get('name', None)
    if scope in (USER_SCOPE, FULL_USER_SCOPE) and key:
        # User-scoped keys are stored as "<user>:<name>" references
        doc['user'] = UserKeyReference.get_user(key)
        doc['name'] = UserKeyReference.get_name(key)

    doc['encrypted'] = encrypted
    attrs = {k: v for k, v in six.iteritems(doc) if v is not None}
    return cls(**attrs)
def test_format_sec_truncated(self):
    """Formatting with usec=False truncates (not rounds) sub-second precision."""
    dt1 = date.add_utc_tz(datetime.datetime.utcnow())
    dt2 = isotime.parse(isotime.format(dt1, usec=False))
    dt3 = datetime.datetime(dt1.year, dt1.month, dt1.day, dt1.hour, dt1.minute, dt1.second)
    # Use assertLessEqual - if utcnow() happens to land exactly on a whole
    # second (microsecond == 0), truncation is a no-op and dt2 == dt1, which
    # made the original assertLess flaky.
    self.assertLessEqual(dt2, dt1)
    self.assertEqual(dt2, date.add_utc_tz(dt3))
def from_model(cls, model, mask_secrets=False):
    """API form with ISO timestamps, elapsed_seconds and serialized log entries."""
    doc = cls._from_model(model, mask_secrets=mask_secrets)

    start_timestamp = model.start_timestamp
    doc['start_timestamp'] = isotime.format(start_timestamp, offset=False)

    end_timestamp = model.end_timestamp
    if end_timestamp:
        doc['end_timestamp'] = isotime.format(end_timestamp, offset=False)
        # Duration is only known once the execution has finished
        doc['elapsed_seconds'] = (end_timestamp - start_timestamp).total_seconds()

    for entry in doc.get('log', []):
        entry['timestamp'] = isotime.format(entry['timestamp'], offset=False)

    attrs = {k: v for k, v in six.iteritems(doc) if v}
    return cls(**attrs)
def test_schedule(self):
    """Scheduling an execution persists it with SCHEDULED status."""
    context = {'user': USERNAME}
    parameters = {'hosts': 'localhost', 'cmd': 'uname -a'}
    req = action_service.schedule(
        ActionExecutionDB(action=ACTION_REF, context=context, parameters=parameters))

    ex = ActionExecution.get_by_id(str(req.id))
    self.assertIsNotNone(ex)
    self.assertEqual(ex.id, req.id)

    expected_action = '.'.join([self.actiondb.pack, self.actiondb.name])
    self.assertEqual(ex.action, expected_action)
    self.assertEqual(ex.context['user'], req.context['user'])
    self.assertDictEqual(ex.parameters, req.parameters)
    self.assertEqual(ex.status, ACTIONEXEC_STATUS_SCHEDULED)
    # mongoengine DateTimeField stores datetime only up to milliseconds
    self.assertEqual(isotime.format(ex.start_timestamp, usec=False),
                     isotime.format(req.start_timestamp, usec=False))
def from_component_model(cls, component_model):
    """Serialize a trace component model into a plain dict."""
    updated_at = isotime.format(component_model.updated_at, offset=False)
    return {'object_id': component_model.object_id,
            'ref': component_model.ref,
            'updated_at': updated_at,
            'caused_by': component_model.caused_by}
def test_get_marker_from_db(self):
    """The exporter reads back the persisted marker as a tz-aware datetime."""
    marker_dt = date_utils.get_datetime_utc_now() - datetime.timedelta(minutes=5)
    marker_db = DumperMarkerDB(
        marker=isotime.format(marker_dt, offset=False),
        updated_at=date_utils.get_datetime_utc_now())
    DumperMarker.add_or_update(marker_db)

    exporter = ExecutionsExporter(None, None)
    self.assertEqual(exporter._get_export_marker_from_db(),
                     date_utils.add_utc_tz(marker_dt))
def issue_to_dict(issue):
    """Flatten a GitHub issue object into a plain, serializable dict."""
    result = {
        'id': issue.id,
        'repository': issue.repository.name,
        'author': user_to_dict(issue.user),
        'assign': user_to_dict(issue.assignee),
        'title': issue.title,
        'body': issue.body,
        'url': issue.html_url,
        'state': issue.state,
        # An issue carrying a pull_request attribute is actually a PR
        'is_pull_request': bool(issue.pull_request),
        'labels': [label_to_dict(label) for label in issue.labels] if issue.labels else [],
        # Note: We convert it to a serialize type (string)
        'created_at': isotime.format(issue.created_at) if issue.created_at else None,
        'closed_at': isotime.format(issue.closed_at) if issue.closed_at else None,
        'closed_by': user_to_dict(issue.closed_by),
    }
    return result
def _add_auth_headers(self, env, token):
    """Write authenticated user data to headers

    Build headers that represent authenticated user:
      * HTTP_X_AUTH_TOKEN_EXPIRY: Token expiration datetime
      * HTTP_X_USER_NAME: Name of confirmed user
    """
    auth_headers = {
        'HTTP_X_AUTH_TOKEN_EXPIRY': isotime.format(token.expiry),
        'HTTP_X_USER_NAME': str(token.user),
    }
    env.update(auth_headers)
def from_model(cls, model, mask_secrets=False):
    """Build an API key API object from its DB model."""
    doc = cls._from_model(model=model, mask_secrets=mask_secrets)
    attrs = {name: value for name, value in six.iteritems(doc) if value is not None}

    if model.created_at:
        attrs['created_at'] = isotime.format(model.created_at, offset=False)
    else:
        attrs['created_at'] = None

    # key_hash is ignored.
    attrs.pop('key_hash', None)
    # key is unknown so the calling code will have to update after conversion.
    attrs['key'] = None

    return cls(**attrs)
def from_model(cls, model, mask_secrets=False):
    """Build a key/value pair API object from its DB model."""
    doc = cls._from_model(model, mask_secrets=mask_secrets)
    doc.pop('id', None)

    if model.expire_timestamp:
        doc['expire_timestamp'] = isotime.format(model.expire_timestamp, offset=False)

    # Drop keys whose value is None so the API object only has populated fields
    return cls(**{name: value for name, value in six.iteritems(doc) if value is not None})
def create_token(username, ttl=None, metadata=None, add_missing_user=True, service=False):
    """
    Create (and persist) a new access token for the provided user.

    :param username: Username of the user to create the token for. If the account for this user
                     doesn't exist yet it will be created.
    :type username: ``str``

    :param ttl: Token TTL (in seconds).
    :type ttl: ``int``

    :param metadata: Optional metadata to associate with the token.
    :type metadata: ``dict``

    :param add_missing_user: Add the user given by `username` if they don't exist
    :type add_missing_user: ``bool``

    :param service: True if this is a service (non-user) token.
    :type service: ``bool``

    :raises TTLTooLargeException: If a non-service token requests a TTL above the configured max.
    :raises UserNotFoundError: If the user doesn't exist and ``add_missing_user`` is False.
    """
    if ttl:
        # Note: We allow arbitrary large TTLs for service tokens.
        if not service and ttl > cfg.CONF.auth.token_ttl:
            msg = ('TTL specified %s is greater than max allowed %s.' %
                   (ttl, cfg.CONF.auth.token_ttl))
            raise TTLTooLargeException(msg)
    else:
        ttl = cfg.CONF.auth.token_ttl

    if username:
        try:
            User.get_by_name(username)
        # Bug fix: was a bare "except:" which also swallowed SystemExit /
        # KeyboardInterrupt. Presumably the lookup raises a "not found" error
        # from the DB layer - TODO confirm the exact exception type and narrow.
        except Exception:
            if add_missing_user:
                user_db = UserDB(name=username)
                User.add_or_update(user_db)

                extra = {'username': username, 'user': user_db}
                LOG.audit('Registered new user "%s".' % (username), extra=extra)
            else:
                raise UserNotFoundError()

    token = uuid.uuid4().hex
    expiry = date_utils.get_datetime_utc_now() + datetime.timedelta(seconds=ttl)
    token = TokenDB(user=username, token=token, expiry=expiry, metadata=metadata,
                    service=service)
    Token.add_or_update(token)

    username_string = username if username else 'an anonymous user'
    token_expire_string = isotime.format(expiry, offset=False)
    extra = {'username': username, 'token_expiration': token_expire_string}
    LOG.audit('Access granted to "%s" with the token set to expire at "%s".' %
              (username_string, token_expire_string), extra=extra)

    return token
def from_model(cls, model, mask_secrets=False):
    """Convert a trace DB model into its API representation."""
    instance = cls._from_model(model, mask_secrets=mask_secrets)
    instance['start_timestamp'] = isotime.format(model.start_timestamp, offset=False)

    # Serialize each non-empty component list via from_component_model
    for field in ('action_executions', 'rules', 'trigger_instances'):
        components = getattr(model, field)
        if components:
            instance[field] = [cls.from_component_model(component)
                               for component in components]

    return cls(**instance)
def create_token(username, ttl=None, metadata=None):
    """
    Create (and persist) a new access token for the provided user.

    :param username: Username of the user to create the token for. If the account for this user
                     doesn't exist yet it will be created.
    :type username: ``str``

    :param ttl: Token TTL (in seconds).
    :type ttl: ``int``

    :param metadata: Optional metadata to associate with the token.
    :type metadata: ``dict``

    :raises TTLTooLargeException: If the requested TTL exceeds the configured maximum.
    """
    if ttl:
        if ttl > cfg.CONF.auth.token_ttl:
            msg = 'TTL specified %s is greater than max allowed %s.' % (
                ttl, cfg.CONF.auth.token_ttl
            )
            raise TTLTooLargeException(msg)
    else:
        ttl = cfg.CONF.auth.token_ttl

    if username:
        try:
            User.get_by_name(username)
        # Bug fix: was a bare "except:" which also swallowed SystemExit /
        # KeyboardInterrupt. Presumably the lookup raises a "not found" error
        # from the DB layer - TODO confirm the exact exception type and narrow.
        except Exception:
            user = UserDB(name=username)
            User.add_or_update(user)

            extra = {'username': username, 'user': user}
            LOG.audit('Registered new user "%s".' % (username), extra=extra)

    token = uuid.uuid4().hex
    expiry = datetime.datetime.utcnow() + datetime.timedelta(seconds=ttl)
    expiry = isotime.add_utc_tz(expiry)
    token = TokenDB(user=username, token=token, expiry=expiry, metadata=metadata)
    Token.add_or_update(token)

    username_string = username if username else 'an anonymous user'
    token_expire_string = isotime.format(expiry, offset=False)
    extra = {'username': username, 'token_expiration': token_expire_string}
    LOG.audit('Access granted to "%s" with the token set to expire at "%s".' %
              (username_string, token_expire_string), extra=extra)

    return token
def _write_marker_to_db(self, new_marker):
    """Persist ``new_marker`` as the single dumper marker document.

    If a marker document already exists its id is reused so the record is
    updated in place instead of inserting a duplicate.

    :param new_marker: Datetime to record as the new export marker.
    :return: The saved :class:`DumperMarkerDB` document.
    """
    LOG.info('Updating marker in db to: %s', new_marker)
    markers = DumperMarker.get_all()

    if len(markers) > 1:
        # Bug fix: this was LOG.exception(), which is only meaningful inside an
        # "except" block (it attaches the active traceback, here a bogus
        # "NoneType: None"). This is a warning condition, not an exception.
        LOG.warning('More than one dumper marker found. Using first found one.')

    marker = isotime.format(new_marker, offset=False)
    updated_at = date_utils.get_datetime_utc_now()

    # Reuse the existing document id (if any) so we update rather than insert
    marker_id = markers[0]['id'] if markers else None

    marker_db = DumperMarkerDB(id=marker_id, marker=marker, updated_at=updated_at)
    return DumperMarker.add_or_update(marker_db)
def test_get_all_filter_by_timestamp(self):
    """Filtering trigger instances with timestamp_gt / timestamp_lt boundaries."""
    resp = self.app.get('/v1/triggerinstances')
    self.assertEqual(resp.status_int, http_client.OK)

    newest_ts = resp.json[0]['occurrence_time']
    middle_ts = resp.json[1]['occurrence_time']

    # Move the boundary one second past the newest instance
    past_newest = isotime.parse(newest_ts) + datetime.timedelta(seconds=1)
    newest_ts = isotime.format(past_newest, offset=False)

    resp = self.app.get('/v1/triggerinstances?timestamp_gt=%s' % newest_ts)
    # Since we sort trigger instances by time (latest first), the previous
    # get should return no trigger instances.
    self.assertEqual(len(resp.json), 0)

    resp = self.app.get('/v1/triggerinstances?timestamp_lt=%s' % (middle_ts))
    self.assertEqual(len(resp.json), 1)
def test_datetime_range(self):
    """Querying executions with a datetime range string matches either ordering."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    for offset in range(60):
        doc = copy.deepcopy(self.fake_history_subtasks[0])
        doc['id'] = str(bson.ObjectId())
        doc['start_timestamp'] = isotime.format(base + datetime.timedelta(seconds=offset))
        api_obj = ActionExecutionAPI(**doc)
        ActionExecution.add_or_update(ActionExecutionAPI.to_model(api_obj))

    # Both orderings of the same range bounds should match the same 10 documents
    for dt_range in ('2014-12-25T00:00:10Z..2014-12-25T00:00:19Z',
                     '2014-12-25T00:00:19Z..2014-12-25T00:00:10Z'):
        matches = ActionExecution.query(start_timestamp=dt_range)
        self.assertEqual(len(matches), 10)
def test_token_model(self):
    """Round-trip a token through API -> DB model -> API."""
    dt = isotime.add_utc_tz(datetime.datetime.utcnow())
    tk1 = TokenAPI(user='******', token=uuid.uuid4().hex,
                   expiry=isotime.format(dt, offset=False))

    tkdb1 = TokenAPI.to_model(tk1)
    self.assertIsNotNone(tkdb1)
    self.assertIsInstance(tkdb1, TokenDB)
    self.assertEqual(tkdb1.user, tk1.user)
    self.assertEqual(tkdb1.token, tk1.token)
    self.assertEqual(tkdb1.expiry, isotime.parse(tk1.expiry))

    tkdb2 = Token.add_or_update(tkdb1)
    self.assertEqual(tkdb1, tkdb2)
    self.assertIsNotNone(tkdb2.id)

    tk2 = TokenAPI.from_model(tkdb2)
    for attr in ('user', 'token', 'expiry'):
        self.assertEqual(getattr(tk2, attr), getattr(tk1, attr))
def test_format(self):
    """isotime.format honors the usec/offset flags and passes strings through."""
    dt = date.add_utc_tz(datetime.datetime(2000, 1, 1, 12))

    # (usec, offset) -> expected rendering
    expected = [
        (True, True, '2000-01-01T12:00:00.000000+00:00'),
        (True, False, '2000-01-01T12:00:00.000000Z'),
        (False, True, '2000-01-01T12:00:00+00:00'),
        (False, False, '2000-01-01T12:00:00Z'),
    ]
    for usec, offset, want in expected:
        self.assertEqual(isotime.format(dt, usec=usec, offset=offset), want)

    # String inputs are returned unchanged
    dt_str = '2000-01-01T12:00:00Z'
    dt_unicode = u'2000-01-01T12:00:00Z'
    self.assertEqual(isotime.format(dt_str, usec=False, offset=False), dt_str)
    self.assertEqual(isotime.format(dt_unicode, usec=False, offset=False), dt_unicode)
def test_sort_by_start_timestamp(self):
    """order_by on execution start_timestamp yields ascending/descending results."""
    base = isotime.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    for offset in range(60):
        doc = copy.deepcopy(self.fake_history_subtasks[0])
        doc['id'] = str(bson.ObjectId())
        doc['execution']['start_timestamp'] = isotime.format(
            base + datetime.timedelta(seconds=offset))
        api_obj = ActionExecutionHistoryAPI(**doc)
        ActionExecutionHistory.add_or_update(ActionExecutionHistoryAPI.to_model(api_obj))

    # Ascending sort: earliest first
    dt_range = '2014-12-25T00:00:10Z..2014-12-25T00:00:19Z'
    objs = ActionExecutionHistory.query(execution__start_timestamp=dt_range,
                                        order_by=['execution__start_timestamp'])
    self.assertLess(objs[0].execution['start_timestamp'],
                    objs[9].execution['start_timestamp'])

    # Descending sort: latest first
    dt_range = '2014-12-25T00:00:19Z..2014-12-25T00:00:10Z'
    objs = ActionExecutionHistory.query(execution__start_timestamp=dt_range,
                                        order_by=['-execution__start_timestamp'])
    self.assertLess(objs[9].execution['start_timestamp'],
                    objs[0].execution['start_timestamp'])
def _purge_action_executions_output(self):
    """Garbage collect action execution output objects older than the configured TTL.

    :return: True on completion. Deletion failures are logged, not raised.
    :rtype: ``bool``
    :raises ValueError: If the computed cut-off would violate the minimum TTL
                        or is not in the past.
    """
    LOG.info('Performing garbage collection for action executions output objects')

    utc_now = get_datetime_utc_now()
    timestamp = (utc_now - datetime.timedelta(days=self._action_executions_output_ttl))

    # Another sanity check to make sure we don't delete new objects
    if timestamp > (utc_now - datetime.timedelta(days=MINIMUM_TTL_DAYS_EXECUTION_OUTPUT)):
        raise ValueError('Calculated timestamp would violate the minimum TTL constraint')

    # Bug fix: this was a bare "assert timestamp < utc_now", which is stripped
    # when Python runs with -O, silently disabling the safety check.
    if timestamp >= utc_now:
        raise ValueError('Calculated timestamp must be in the past')

    timestamp_str = isotime.format(dt=timestamp)
    LOG.info('Deleting action executions output objects older than: %s' % (timestamp_str))

    try:
        purge_execution_output_objects(logger=LOG, timestamp=timestamp)
    except Exception as e:
        LOG.exception('Failed to delete execution output objects: %s' %
                      (six.text_type(e)))

    return True
def from_model(cls, model, mask_secrets=True):
    """Convert a key/value pair model into its API form, decrypting on request."""
    if not KeyValuePairAPI.crypto_setup:
        KeyValuePairAPI._setup_crypto()

    doc = cls._from_model(model, mask_secrets=mask_secrets)
    doc.pop('id', None)

    if model.expire_timestamp:
        doc['expire_timestamp'] = isotime.format(model.expire_timestamp, offset=False)

    encrypted = bool(model.secret)
    if not mask_secrets and model.secret:
        # Caller asked for the plaintext value - decrypt it in place
        doc['value'] = symmetric_decrypt(KeyValuePairAPI.crypto_key, model.value)
        encrypted = False
    doc['encrypted'] = encrypted

    # Drop keys whose value is None so the API object only has populated fields
    return cls(**{name: value for name, value in six.iteritems(doc) if value is not None})