def step_impl_when_we_duplicate_event(context, event_id):
    """Duplicate the event with ``event_id`` and POST it as a new event.

    Strips internal and ingest/versioning metadata from a deep copy of the
    original, links it back via ``duplicate_from``, gives it fresh dates and
    identifying fields, POSTs it to ``/events`` and stores the new id under
    the ``DUPLICATE_EVENT_ID`` placeholder.
    """
    with context.app.test_request_context(context.app.config['URL_PREFIX']):
        events_service = get_resource_service('events')
        original_event = events_service.find_one(req=None, _id=event_id)
        duplicate_event = deepcopy(original_event)

        # Drop internal fields (leading underscore) so the POST is treated
        # as a brand-new resource, not an update of the original.
        for key in list(duplicate_event.keys()):
            if key.startswith('_'):
                duplicate_event.pop(key, None)

        # Drop workflow/ingest metadata that must be regenerated server-side.
        for key in [
            'state',
            'firstcreated',
            'versioncreated',
            'ingest_provider',
            'guid'
        ]:
            duplicate_event.pop(key, None)

        duplicate_event['duplicate_from'] = event_id
        # BUG FIX: the original assigned 'start' twice; the second
        # assignment was intended to set the end date, otherwise the copy
        # keeps the source event's end date, which precedes the new start.
        duplicate_event['dates']['start'] = "2099-01-02"
        duplicate_event['dates']['end'] = "2099-01-03"
        duplicate_event['unique_id'] = 456
        duplicate_event['definition_short'] = 'duplicate'
        duplicate_event['name'] = 'duplicate'

        context.text = json.dumps(duplicate_event)
        item = post_data(context, '/events')
        set_placeholder(context, 'DUPLICATE_EVENT_ID', item['_id'])
def then_we_store_assignment_id(context, tag, index):
    """Store the ``coverage_id`` of coverage *index* under placeholder *tag*.

    Reads the coverages list from the last JSON response; *index* arrives as
    a string from the step definition and is coerced to int.
    """
    index = int(index)
    response = get_json_data(context.response)
    # ROBUSTNESS FIX: the original asserted len(response.get('coverages')),
    # which raises TypeError (len(None)) when the key is absent, hiding the
    # intended assertion message. A truthiness check covers both the
    # missing-key and empty-list cases with the proper AssertionError.
    assert response.get('coverages'), 'Coverage are not defined.'
    coverage = response.get('coverages')[index]
    coverage_id = coverage.get('coverage_id')
    set_placeholder(context, tag, coverage_id)
def then_versioned_file_exists(context, path):
    """Assert that *path* (after placeholder substitution) is an existing
    JSON file and stash its parsed contents under the file's basename."""
    resolved = apply_placeholders(context, path)
    assert os.path.isfile(resolved), '{} is not a file'.format(resolved)
    with open(resolved, 'r') as fh:
        payload = json.load(fh)
    set_placeholder(context, os.path.basename(resolved), payload)
def step_impl_fetch_from_provider_ingest(context, provider_name, guid):
    """Fetch a fixture item by *guid* from the named ingest provider, parse it
    with the provider's feed parser, ingest it, and record the ingested ids.

    After ingest, the provider's last-update timestamp is set and each item's
    database id is stored under the placeholder ``<provider_name>.<guid>``.
    """
    with context.app.test_request_context(context.app.config['URL_PREFIX']):
        ingest_provider_service = get_resource_service('ingest_providers')
        provider = ingest_provider_service.find_one(name=provider_name, req=None)
        provider_service = get_feeding_service(provider['feeding_service'])
        # Fixture files live under the provider's configured 'path'.
        file_path = os.path.join(provider.get('config', {}).get('path', ''), guid)
        feeding_parser = provider_service.get_feed_parser(provider)
        if isinstance(feeding_parser, XMLFeedParser):
            # XML parsers take a parsed element, not a file path.
            with open(file_path, 'rb') as f:
                # NOTE(review): despite the name, this is the parsed XML
                # element returned by fromstring, not a string.
                xml_string = etree.etree.fromstring(f.read())
            parsed = feeding_parser.parse(xml_string, provider)
        else:
            parsed = feeding_parser.parse(file_path, provider)
        # Parsers may return a single item or a list; normalise to a list.
        items = [parsed] if not isinstance(parsed, list) else parsed
        for item in items:
            item['versioncreated'] = utcnow()
            # Short expiry keeps test data from lingering between scenarios.
            item['expiry'] = utcnow() + timedelta(minutes=20)
        failed = context.ingest_items(items, provider, provider_service)
        assert len(failed) == 0, failed
        # Re-fetch the provider before updating so system_update sees the
        # current document, then stamp the last-item-update time.
        provider = ingest_provider_service.find_one(name=provider_name, req=None)
        ingest_provider_service.system_update(provider['_id'], {LAST_ITEM_UPDATE: utcnow()}, provider)
        for item in items:
            set_placeholder(context, '{}.{}'.format(provider_name, item['guid']), item['_id'])
def then_we_store_assignment_id(context, tag, index, coverage_index):
    """Store the assignment id of scheduled update *index* of coverage
    *coverage_index* under placeholder *tag*.

    Both indices arrive as strings from the step definition and are coerced
    to int before indexing into the last JSON response.
    """
    index = int(index)
    coverage_index = int(coverage_index)
    response = get_json_data(context.response)
    coverage = (response.get('coverages') or [])[coverage_index]
    # ROBUSTNESS FIX: the original asserted len(coverage.get('scheduled_updates')),
    # which raises TypeError (len(None)) when the key is absent, hiding the
    # intended assertion message. A truthiness check covers both the
    # missing-key and empty-list cases with the proper AssertionError.
    assert coverage.get('scheduled_updates'), 'scheduled_updates are not defined.'
    scheduled_update = coverage['scheduled_updates'][index]
    set_placeholder(context, tag, scheduled_update.get('assigned_to', {}).get('assignment_id'))