def create(self, docs, **kwargs):
    """Fetch items from the 'aapmm' ingest provider into the archive.

    :param docs: payloads, each holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: list of ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    :raises ProviderError.externalProviderError: when the provider lookup fails
    """
    new_guids = []
    provider = get_resource_service("ingest_providers").find_one(source="aapmm", req=None)
    # FIX: the original checked only "username" but then read
    # provider["config"]["password"] unconditionally, raising KeyError when
    # a username was configured without a password. Require both keys.
    if provider and "config" in provider and "username" in provider["config"] and "password" in provider["config"]:
        self.backend.set_credentials(provider["config"]["username"], provider["config"]["password"])
    for doc in docs:
        if not doc.get("desk"):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
        try:
            archived_doc = self.backend.find_one_raw(doc["guid"], doc["guid"])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, provider)
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc["_id"] = new_id
        generate_unique_id_and_name(dest_doc)
        if provider:
            dest_doc["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route the new item to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get("desk"), stage_id=doc.get("stage"))
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc["_id"]
        dest_doc[FAMILY_ID] = archived_doc["_id"]
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc.get("_id"))
    return new_guids
def create(self, docs, **kwargs):
    """Fetch items from the configured search provider and archive them.

    :param docs: payloads, each with the external ``guid`` plus destination
        ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    :raises ProviderError.externalProviderError: when fetching the item fails
    """
    created_ids = []
    provider = self.get_provider()
    for doc in docs:
        # a destination desk is mandatory
        if not doc.get('desk'):
            raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
        try:
            fetched = self.fetch(doc['guid'])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, provider)
        item = dict(fetched)
        # give the archive copy its own identity
        item['_id'] = generate_guid(type=GUID_TAG)
        created_ids.append(item['_id'])
        generate_unique_id_and_name(item)
        if provider:
            item['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])
        item[config.VERSION] = 1
        # route to the requested desk/stage
        send_to(doc=item, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
        item[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # remember where the item came from
        item[INGEST_ID] = item[FAMILY_ID] = fetched['_id']
        remove_unwanted(item)
        set_original_creator(item)
        superdesk.get_resource_service(ARCHIVE).post([item])
        insert_into_versions(item.get('_id'))
    return created_ids
def create(self, docs, **kwargs):
    """Fetch items from the 'aapmm' ingest provider into the archive.

    :param docs: payloads holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    """
    new_guids = []
    provider = get_resource_service('ingest_providers').find_one(
        source='aapmm', req=None)
    for doc in docs:
        if not doc.get('desk'):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError(
                "Destination desk cannot be empty.")
        archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc['_id'] = new_id
        generate_unique_id_and_name(dest_doc)
        if provider:
            dest_doc['ingest_provider'] = str(
                provider[superdesk.config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route the item to the requested desk/stage
        send_to(dest_doc, doc.get('desk'), doc.get('stage'))
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc['_id']
        dest_doc[FAMILY_ID] = archived_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc.get('_id'))
    return new_guids
def create(self, docs, **kwargs):
    """Fetch items from the 'aapmm' ingest provider into the archive.

    :param docs: payloads holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    """
    new_guids = []
    provider = get_resource_service('ingest_providers').find_one(source='aapmm', req=None)
    for doc in docs:
        if not doc.get('desk'):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
        archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc['_id'] = new_id
        generate_unique_id_and_name(dest_doc)
        if provider:
            dest_doc['ingest_provider'] = str(provider[superdesk.config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route the item to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc['_id']
        dest_doc[FAMILY_ID] = archived_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc.get('_id'))
    return new_guids
def on_create(self, docs):
    """Prepare new events and expand recurring rules before insert.

    Assigns a guid/_id, records the creator, normalizes expiry, and replaces
    each event carrying a ``recurring_rule`` with the series of events the
    rule generates.

    :param docs: list of event dicts, mutated in place
    """
    # events generated by recurring rules
    generated_events = []
    # FIX: iterate over a snapshot — the original iterated `docs` directly
    # while calling docs.remove(event) inside the loop, which skips the
    # element following every removed recurring event.
    for event in list(docs):
        # generates an unique id
        if 'guid' not in event:
            event['guid'] = generate_guid(type=GUID_NEWSML)
        event['_id'] = event['guid']
        # set the author
        set_original_creator(event)
        # overwrite expiry date
        overwrite_event_expiry_date(event)
        # We ignore the 'update_method' on create
        if 'update_method' in event:
            del event['update_method']
        # generates events based on recurring rules
        if event['dates'].get('recurring_rule', None):
            generated_events.extend(generate_recurring_events(event))
            # remove the event that contains the recurring rule. We don't need it anymore
            docs.remove(event)
    if generated_events:
        docs.extend(generated_events)
def on_create(self, docs):
    """Populate default metadata on new agenda docs.

    :param docs: list of agenda dicts, mutated in place
    """
    for item in docs:
        item['guid'] = generate_guid(type=GUID_NEWSML)
        item['planning_type'] = 'agenda'
        set_original_creator(item)
        # reject agendas that collide with an existing one
        self._validate_unique_agenda(item, {})
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    :param docs: payloads naming the ingest item (``_id``), destination
        ``desk``/``stage``, and optional ``macro``/``state``/``destination_groups``
    :param id: when given, overrides the per-doc ``_id``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get('_id') if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                'Fail to found ingest item with _id: %s' % id_of_item_to_be_fetched)
        if not is_workflow_state_transition_valid(
                'fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc, doc.get('macro'))
        # mark the ingest item as archived
        archived = utcnow()
        ingest_service.patch(id_of_item_to_be_fetched, {'archived': archived})
        # build the archive copy with its own identity
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc['_id'] = new_id
        dest_doc['guid'] = new_id
        dest_doc['destination_groups'] = doc.get('destination_groups')
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        send_to(dest_doc, desk_id, stage_id)
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        # keep the link back to the ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages pull their referenced items along
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get('state', STATE_FETCHED),
                                      doc.get('destination_groups'))
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        # reflect the created item back into the request payload
        doc.update(dest_doc)
    if kwargs.get('notify', True):
        push_notification('item:fetch', fetched=1)
    return id_of_fetched_items
def on_create(self, docs):
    """Populate default metadata on new planning docs.

    :param docs: list of planning dicts, mutated in place
    """
    for item in docs:
        # keep a caller-supplied guid, otherwise mint one
        if 'guid' not in item:
            item['guid'] = generate_guid(type=GUID_NEWSML)
        item[config.ID_FIELD] = item['guid']
        set_original_creator(item)
        self._set_planning_event_date(item)
def create(self, docs, **kwargs):
    """Fetch items from the named search provider into the archive.

    :param docs: payloads holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when the provider is missing or
        closed, or when a doc has no desk
    :raises ProviderError.externalProviderError: when the provider lookup fails
    """
    search_provider = get_resource_service('search_providers').find_one(
        search_provider=PROVIDER_NAME, req=None)
    if not search_provider or search_provider.get('is_closed', False):
        raise SuperdeskApiError.badRequestError(
            'No search provider found or the search provider is closed.')
    if 'config' in search_provider:
        self.backend.set_credentials(search_provider['config'])
    new_guids = []
    for doc in docs:
        if not doc.get(
                'desk'):  # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError(
                "Destination desk cannot be empty.")
        try:
            archived_doc = self.backend.find_one_raw(
                doc['guid'], doc['guid'])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, search_provider)
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc[config.ID_FIELD] = new_id
        generate_unique_id_and_name(dest_doc)
        if search_provider:
            dest_doc['ingest_provider'] = str(
                search_provider[config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'),
                stage_id=doc.get('stage'))
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc[config.ID_FIELD]
        dest_doc[FAMILY_ID] = archived_doc[config.ID_FIELD]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc[config.ID_FIELD])
    # record the provider's last fetch time
    get_resource_service('search_providers').system_update(
        search_provider[config.ID_FIELD], {'last_item_update': utcnow()},
        search_provider)
    return new_guids
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    :param docs: payloads naming the ingest item (``_id``), destination
        ``desk``/``stage``, and optional ``macro``/``state``/``destination_groups``
    :param id: when given, overrides the per-doc ``_id``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get('_id') if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError('Fail to found ingest item with _id: %s' %
                                                  id_of_item_to_be_fetched)
        if not is_workflow_state_transition_valid('fetch_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(ingest_doc, doc.get('macro'))
        # mark the ingest item as archived
        archived = utcnow()
        ingest_service.patch(id_of_item_to_be_fetched, {'archived': archived})
        # build the archive copy with its own identity
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc['_id'] = new_id
        dest_doc['guid'] = new_id
        dest_doc['destination_groups'] = doc.get('destination_groups')
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        send_to(dest_doc, desk_id, stage_id)
        dest_doc[config.CONTENT_STATE] = doc.get('state', STATE_FETCHED)
        # keep the link back to the ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages pull their referenced items along
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get('state', STATE_FETCHED),
                                      doc.get('destination_groups'))
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        # reflect the created item back into the request payload
        doc.update(dest_doc)
    if kwargs.get('notify', True):
        push_notification('item:fetch', fetched=1)
    return id_of_fetched_items
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    :param docs: payloads naming the ingest item (``_id``), destination
        ``desk``/``stage``, and optional ``macro``/``state``
    :param id: when given, overrides the per-doc ``_id``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get("_id") if id is None else id
        desk_id = doc.get("desk")
        stage_id = doc.get("stage")
        ingest_service = get_resource_service("ingest")
        ingest_doc = ingest_service.find_one(req=None, _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                "Fail to found ingest item with _id: %s" % id_of_item_to_be_fetched
            )
        if not is_workflow_state_transition_valid("fetch_from_ingest", ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get("macro"):  # there is a macro so transform it
            ingest_doc = get_resource_service("macros").execute_macro(ingest_doc, doc.get("macro"))
        # mark the ingest item as archived
        archived = utcnow()
        ingest_service.patch(id_of_item_to_be_fetched, {"archived": archived})
        # build the archive copy with its own identity
        dest_doc = dict(ingest_doc)
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc["_id"] = new_id
        dest_doc["guid"] = new_id
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc["_id"]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages pull their referenced items along
        self.__fetch_items_in_package(dest_doc, desk_id, stage_id,
                                      doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        # reflect the created item back into the request payload
        doc.update(dest_doc)
    if kwargs.get("notify", True):
        push_notification("item:fetch", fetched=1)
    return id_of_fetched_items
def _update_archive(guid, item):
    """Assign the fetched state to the content, remove unwanted attributes,
    set the original creator and update the item in the archive collection.

    :param guid: id of the archive item to update
    :param item: item dict coming from the ingest collection, mutated in place
    """
    item[config.CONTENT_STATE] = STATE_FETCHED
    remove_unwanted(item)
    set_original_creator(item)
    superdesk.get_resource_service(ARCHIVE).update(guid, item)
def create(self, docs, **kwargs):
    """Fetch ingest items (and their package contents, recursively) as archive items.

    :param docs: payloads naming the ingest item (``guid``), the destination
        ``desk`` and optionally the containing package (``PACKAGE``)
    :return: the guids of the input docs
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    for doc in docs:
        ingest_doc = superdesk.get_resource_service("ingest").find_one(req=None, _id=doc.get("guid"))
        if not ingest_doc:
            msg = "Fail to found ingest item with guid: %s" % doc.get("guid")
            raise SuperdeskApiError.notFoundError(msg)
        if not is_workflow_state_transition_valid("fetch_as_from_ingest", ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        # mark the ingest item as archived
        archived = utcnow()
        superdesk.get_resource_service("ingest").patch(ingest_doc.get("_id"), {"archived": archived})
        doc["archived"] = archived
        archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get("guid"))
        if not archived_doc:
            # not yet in archive: create the archive copy from the ingest doc
            dest_doc = dict(ingest_doc)
            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get("desk"))
            dest_doc[config.CONTENT_STATE] = STATE_FETCHED
            remove_unwanted(dest_doc)
            # repoint package references at the archive collection
            for ref in [
                ref for group in dest_doc.get("groups", []) for ref in group.get("refs", []) if "residRef" in ref
            ]:
                ref["location"] = ARCHIVE
                ref["guid"] = ref["residRef"]
            set_original_creator(dest_doc)
            if doc.get(PACKAGE):
                links = dest_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get("guid"))
            # recurse into the package's referenced items
            desk = doc.get("desk")
            refs = [
                {"guid": ref.get("residRef"), "desk": desk, PACKAGE: dest_doc.get("_id")}
                for group in dest_doc.get("groups", [])
                for ref in group.get("refs", [])
                if "residRef" in ref
            ]
            if refs:
                self.create(refs)
        else:
            # already archived: only record the package link
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get("_id"), {LINKED_IN_PACKAGES: links})
    return [doc.get("guid") for doc in docs]
def create(self, docs, **kwargs):
    """Fetch ingest items (and their package contents, recursively) as archive items.

    :param docs: payloads naming the ingest item (``guid``), the destination
        ``desk`` and optionally the containing package (``PACKAGE``)
    :return: the guids of the input docs
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    for doc in docs:
        ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
        if not ingest_doc:
            msg = 'Fail to found ingest item with guid: %s' % doc.get('guid')
            raise SuperdeskApiError.notFoundError(msg)
        if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
            raise InvalidStateTransitionError()
        # mark the ingest item as archived
        archived = utcnow()
        superdesk.get_resource_service('ingest').patch(ingest_doc.get('_id'), {'archived': archived})
        doc['archived'] = archived
        archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
        if not archived_doc:
            # not yet in archive: create the archive copy from the ingest doc
            dest_doc = dict(ingest_doc)
            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get('desk'))
            dest_doc[config.CONTENT_STATE] = STATE_FETCHED
            remove_unwanted(dest_doc)
            # repoint package references at the archive collection
            for ref in [ref for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]:
                ref['location'] = ARCHIVE
                ref['guid'] = ref['residRef']
            set_original_creator(dest_doc)
            if doc.get(PACKAGE):
                links = dest_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('guid'))
            # recurse into the package's referenced items
            desk = doc.get('desk')
            refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                    for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]
            if refs:
                self.create(refs)
        else:
            # already archived: only record the package link
            if doc.get(PACKAGE):
                links = archived_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                superdesk.get_resource_service(ARCHIVE).patch(archived_doc.get('_id'), {LINKED_IN_PACKAGES: links})
    return [doc.get('guid') for doc in docs]
def create(self, docs, **kwargs):
    """Fetch items from the 'aapmm' ingest provider into the archive.

    :param docs: payloads, each holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: list of ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    :raises ProviderError.externalProviderError: when the provider lookup fails
    """
    new_guids = []
    provider = get_resource_service('ingest_providers').find_one(
        source='aapmm', req=None)
    # FIX: the original checked only 'username' but then read
    # provider['config']['password'] unconditionally, raising KeyError when
    # a username was configured without a password. Require both keys.
    if provider and 'config' in provider and 'username' in provider[
            'config'] and 'password' in provider['config']:
        self.backend.set_credentials(provider['config']['username'],
                                     provider['config']['password'])
    for doc in docs:
        if not doc.get('desk'):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError(
                "Destination desk cannot be empty.")
        try:
            archived_doc = self.backend.find_one_raw(
                doc['guid'], doc['guid'])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, provider)
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc['_id'] = new_id
        generate_unique_id_and_name(dest_doc)
        if provider:
            dest_doc['ingest_provider'] = str(
                provider[superdesk.config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'),
                stage_id=doc.get('stage'))
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc['_id']
        dest_doc[FAMILY_ID] = archived_doc['_id']
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc.get('_id'))
    return new_guids
def create(self, docs, **kwargs):
    """Fetch items from the named search provider into the archive.

    :param docs: payloads holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when the provider is missing or
        closed, or when a doc has no desk
    :raises ProviderError.externalProviderError: when the provider lookup fails
    """
    search_provider = get_resource_service('search_providers').find_one(search_provider=PROVIDER_NAME, req=None)
    if not search_provider or search_provider.get('is_closed', False):
        raise SuperdeskApiError.badRequestError('No search provider found or the search provider is closed.')
    if 'config' in search_provider:
        self.backend.set_credentials(search_provider['config'])
    new_guids = []
    for doc in docs:
        if not doc.get('desk'):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError("Destination desk cannot be empty.")
        try:
            archived_doc = self.backend.find_one_raw(doc['guid'], doc['guid'])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, search_provider)
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc[config.ID_FIELD] = new_id
        generate_unique_id_and_name(dest_doc)
        if search_provider:
            dest_doc['ingest_provider'] = str(search_provider[config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'), stage_id=doc.get('stage'))
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc[config.ID_FIELD]
        dest_doc[FAMILY_ID] = archived_doc[config.ID_FIELD]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc[config.ID_FIELD])
    # record the provider's last fetch time
    get_resource_service('search_providers').system_update(search_provider[config.ID_FIELD],
                                                           {'last_item_update': utcnow()}, search_provider)
    return new_guids
def create(self, docs, **kwargs):
    """Fetch items from the configured search provider into the archive.

    :param docs: payloads holding the provider item ``guid`` plus the
        destination ``desk`` (required) and optional ``stage``
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.badRequestError: when a doc has no desk
    :raises ProviderError.externalProviderError: when fetching the item fails
    """
    new_guids = []
    provider = self.get_provider()
    for doc in docs:
        if not doc.get('desk'):
            # if no desk is selected then it is bad request
            raise SuperdeskApiError.badRequestError(
                _("Destination desk cannot be empty."))
        try:
            archived_doc = self.fetch(doc['guid'])
        except FileNotFoundError as ex:
            raise ProviderError.externalProviderError(ex, provider)
        dest_doc = dict(archived_doc)
        # the archive copy gets its own identity
        new_id = generate_guid(type=GUID_TAG)
        new_guids.append(new_id)
        dest_doc['_id'] = new_id
        generate_unique_id_and_name(dest_doc)
        if provider:
            dest_doc['ingest_provider'] = str(
                provider[superdesk.config.ID_FIELD])
        dest_doc[config.VERSION] = 1
        # route to the requested desk/stage
        send_to(doc=dest_doc, update=None, desk_id=doc.get('desk'),
                stage_id=doc.get('stage'))
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the source item
        dest_doc[INGEST_ID] = archived_doc['_id']
        dest_doc[FAMILY_ID] = archived_doc['_id']
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        superdesk.get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(dest_doc.get('_id'))
    if new_guids:
        # record the provider's last fetch time
        get_resource_service('search_providers').system_update(
            provider.get(config.ID_FIELD), {'last_item_update': utcnow()},
            provider)
    return new_guids
def _duplicate_event(self, updates, original, events_service):
    """Clone *original* (with *updates* applied) as a fresh in-progress event.

    :param updates: field overrides applied on top of the original
    :param original: the event being duplicated
    :param events_service: service used to persist the new event
    :return: the created event
    """
    duplicate = deepcopy(original)
    duplicate.update(updates)
    # strip identity, lock, audit and publish fields that must not carry over
    for field in ('_id', 'guid', 'unique_name', 'unique_id', 'lock_user',
                  'lock_time', 'lock_session', 'lock_action', '_created',
                  '_updated', '_etag', 'pubstatus', 'reason', 'duplicate_to'):
        duplicate.pop(field, None)
    # fresh identity and workflow state, with a pointer back to the source
    duplicate['guid'] = generate_guid(type=GUID_NEWSML)
    duplicate['_id'] = duplicate['guid']
    duplicate[ITEM_STATE] = WORKFLOW_STATE.IN_PROGRESS
    duplicate['duplicate_from'] = original[config.ID_FIELD]
    set_original_creator(duplicate)
    created_event = events_service.create([duplicate])[0]
    get_resource_service('events_history').on_reschedule_from(duplicate)
    return created_event
def on_create(self, docs):
    """Record the creating user on every new doc."""
    for item in docs:
        set_original_creator(item)
def fetch(self, docs, id=None, **kwargs):
    """Fetch ingest items into the archive.

    :param docs: payloads naming the ingest item, destination ``desk``/``stage``,
        and optional ``macro``/``state``/``target`` overrides
    :param id: when given, overrides the per-doc id
    :return: ids of the newly created archive items
    :raises SuperdeskApiError.notFoundError: when the ingest item is missing
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    id_of_fetched_items = []
    for doc in docs:
        id_of_item_to_be_fetched = doc.get(
            config.ID_FIELD) if id is None else id
        desk_id = doc.get('desk')
        stage_id = doc.get('stage')
        ingest_service = get_resource_service('ingest')
        ingest_doc = ingest_service.find_one(req=None,
                                             _id=id_of_item_to_be_fetched)
        if not ingest_doc:
            raise SuperdeskApiError.notFoundError(
                _('Fail to found ingest item with _id: {id}').format(
                    id=id_of_item_to_be_fetched))
        if not is_workflow_state_transition_valid('fetch_from_ingest',
                                                  ingest_doc[ITEM_STATE]):
            raise InvalidStateTransitionError()
        if doc.get('macro'):  # there is a macro so transform it
            ingest_doc = get_resource_service('macros').execute_macro(
                ingest_doc, doc.get('macro'))
        # mark the ingest item as archived
        archived = utcnow()
        ingest_service.patch(id_of_item_to_be_fetched,
                             {'archived': archived})
        # build the archive copy, applying any caller-supplied target overrides
        dest_doc = dict(ingest_doc)
        if doc.get('target'):
            dest_doc.update(doc.get('target'))
        new_id = generate_guid(type=GUID_TAG)
        id_of_fetched_items.append(new_id)
        dest_doc[config.ID_FIELD] = new_id
        dest_doc[GUID_FIELD] = new_id
        generate_unique_id_and_name(dest_doc)
        dest_doc[config.VERSION] = 1
        dest_doc['versioncreated'] = archived
        send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
        dest_doc[ITEM_STATE] = doc.get(ITEM_STATE, CONTENT_STATE.FETCHED)
        # keep the link back to the ingest item
        dest_doc[INGEST_ID] = dest_doc[FAMILY_ID] = ingest_doc[
            config.ID_FIELD]
        dest_doc[ITEM_OPERATION] = ITEM_FETCH
        remove_unwanted(dest_doc)
        set_original_creator(dest_doc)
        # packages and associations pull their referenced items along
        self.__fetch_items_in_package(
            dest_doc, desk_id, stage_id,
            doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        self.__fetch_associated_items(
            dest_doc, desk_id, stage_id,
            doc.get(ITEM_STATE, CONTENT_STATE.FETCHED))
        # apply the desk's default content profile; media items get none
        desk = get_resource_service('desks').find_one(req=None, _id=desk_id)
        if desk and desk.get('default_content_profile'):
            dest_doc['profile'] = desk['default_content_profile']
        if dest_doc.get('type', 'text') in MEDIA_TYPES:
            dest_doc['profile'] = None
        get_resource_service(ARCHIVE).post([dest_doc])
        insert_into_versions(doc=dest_doc)
        build_custom_hateoas(custom_hateoas, dest_doc)
        superdesk.item_fetched.send(self, item=dest_doc,
                                    ingest_item=ingest_doc)
        # reflect the created item back into the request payload
        doc.update(dest_doc)
    if kwargs.get('notify', True):
        # NOTE(review): uses the last loop iteration's ingest_doc/dest_doc/doc
        ingest_doc.update({'task': dest_doc.get('task')})
        push_item_move_notification(ingest_doc, doc, 'item:fetch')
    return id_of_fetched_items
def create(self, docs, **kwargs):
    """Fetch items as archive content, recursing into package references.

    Items found in ingest are copied into the archive; items already in the
    archive are duplicated onto the destination desk instead.

    :param docs: payloads naming the item (``guid``), the destination ``desk``
        and optionally the containing package (``PACKAGE``)
    :return: ids of the created archive items
    :raises SuperdeskApiError.notFoundError: when the item is in neither
        ingest nor archive
    :raises InvalidStateTransitionError: when the workflow transition is invalid
    """
    new_guids = []
    for doc in docs:
        ingest_doc = superdesk.get_resource_service('ingest').find_one(req=None, _id=doc.get('guid'))
        if not ingest_doc:
            # see if it is in archive, if it is duplicate it
            archived_doc = superdesk.get_resource_service(ARCHIVE).find_one(req=None, _id=doc.get('guid'))
            if archived_doc:
                send_to(archived_doc, doc.get('desk'))
                new_guid = superdesk.get_resource_service('archive').duplicate_content(archived_doc)
                new_guids.append(new_guid)
            else:
                msg = 'Fail to found ingest item with guid: %s' % doc.get('guid')
                raise SuperdeskApiError.notFoundError(msg)
        else:
            # We are fetching from ingest
            if not is_workflow_state_transition_valid('fetch_as_from_ingest', ingest_doc[config.CONTENT_STATE]):
                raise InvalidStateTransitionError()
            # mark the ingest item as archived
            archived = utcnow()
            superdesk.get_resource_service('ingest').patch(ingest_doc.get('_id'), {'archived': archived})
            doc['archived'] = archived
            # build the archive copy with its own identity
            dest_doc = dict(ingest_doc)
            new_id = generate_guid(type=GUID_TAG)
            new_guids.append(new_id)
            dest_doc['_id'] = new_id
            dest_doc['guid'] = new_id
            generate_unique_id_and_name(dest_doc)
            dest_doc[config.VERSION] = 1
            send_to(dest_doc, doc.get('desk'))
            dest_doc[config.CONTENT_STATE] = STATE_FETCHED
            # keep the link back to the ingest item
            dest_doc[INGEST_ID] = ingest_doc['_id']
            dest_doc[FAMILY_ID] = ingest_doc['_id']
            remove_unwanted(dest_doc)
            # repoint package references at the archive collection
            for ref in [ref for group in dest_doc.get('groups', [])
                        for ref in group.get('refs', []) if 'residRef' in ref]:
                ref['location'] = ARCHIVE
                ref['guid'] = ref['residRef']
            set_original_creator(dest_doc)
            if doc.get(PACKAGE):
                links = dest_doc.get(LINKED_IN_PACKAGES, [])
                links.append({PACKAGE: doc.get(PACKAGE)})
                dest_doc[LINKED_IN_PACKAGES] = links
            # recurse into the package's referenced items, then rewrite the
            # references to the ids the recursion created
            desk = doc.get('desk')
            refs = [{'guid': ref.get('residRef'), 'desk': desk, PACKAGE: dest_doc.get('_id')}
                    for group in dest_doc.get('groups', [])
                    for ref in group.get('refs', []) if 'residRef' in ref]
            if refs:
                new_ref_guids = self.create(refs)
                count = 0
                for ref in [ref for group in dest_doc.get('groups', [])
                            for ref in group.get('refs', []) if 'residRef' in ref]:
                    ref['residRef'] = ref['guid'] = new_ref_guids[count]
                    count += 1
            superdesk.get_resource_service(ARCHIVE).post([dest_doc])
            insert_into_versions(dest_doc.get('guid'))
            push_notification('item:fetch', item=str(ingest_doc.get('_id')))
    return new_guids
def on_create(self, docs):
    """Assign a fresh guid and record the creator on each new doc."""
    for item in docs:
        item['guid'] = generate_guid(type=GUID_NEWSML)
        set_original_creator(item)