def export(resource):
    """Export every document of *resource* as a CSV attachment.

    Responds 401 without valid credentials and 404 when the resource
    yields no items.  If the first page is smaller than the total,
    re-queries with ``max_results`` raised to the total so the export
    is complete.
    """
    token = request.headers.get("Authorization", None)
    authorized = app.auth.check_auth(token, None, None, "POST")
    domain = app.config.get('DOMAIN')
    schema = domain.get(resource, {})
    fieldnames = schema.get('schema', {}).keys()
    if not authorized:
        # BUG FIX: previously returned HTTP 400 while the payload said 401.
        status_code = 401
        data = {
            "_error": {"code": 401, "message": "Please provide proper credentials"},
            "_status": "ERR",
        }
        return jsonify(data), status_code
    q = get_internal(resource)
    res = q[0] if len(q) > 0 else {}
    # Guard: the former res.get('_meta', None).get(...) chain crashed with
    # AttributeError whenever the response carried no '_meta' key.
    meta = res.get('_meta') or {}
    total_resources = meta.get('total')
    results = []
    # if more items exist than initial request, reset max_results to total
    # to get a full export
    if total_resources is not None and total_resources > meta.get('max_results', 0):
        set_args = request.args.copy()
        set_args['max_results'] = total_resources
        request.args = set_args
        q_all = get_internal(resource)
        results = q_all[0].get('_items', None)
    else:
        results = res.get('_items', None)
    if results and len(results) > 0:
        for item in results:
            item.pop('_links', None)  # internal HATEOAS links are not exported
        csvfile = io.BytesIO()
        # Python 2 semantics: dict.keys() returns a list, so '+' concatenates.
        writer = csv.DictWriter(csvfile,
                                fieldnames=list(set(results[0].keys() + fieldnames)))
        writer.writeheader()
        for item in results:
            writer.writerow({
                k: v.encode('ascii', 'ignore') if isinstance(v, basestring) else v
                for k, v in item.iteritems()
            })
        csvfile.seek(0)
        return send_file(csvfile, attachment_filename="export.csv")
    else:
        status_code = 404
        data = {
            "_error": {"code": 404, "message": "Resource not Found"},
            "_status": "ERR",
        }
        return jsonify(data), status_code
def students_dormant():
    """Return the 'users' response restricted to dormant students."""
    resource = 'users'
    students_resp = list(get_internal(resource, **{'role': 'student'}))
    all_students = students_resp[0][config.ITEMS]
    cls_resp, *_ = get_internal('classes_students')
    cls_items = cls_resp[config.ITEMS]
    # Keep only students that qualify as dormant against the class roster.
    dormant = [s for s in all_students if _dormant_students(cls_items, s)]
    students_resp[0][config.ITEMS] = dormant
    return send_response(resource, students_resp)
def add_file(items):
    """Upload each item's file to Trimble, extract IFC features and
    persist derived entities/features.

    Side effects per item: downloads the file, uploads it to the Trimble
    folder of the item's project, sets TrimbleVersionID / ThumbnailUrl /
    Entities on the item and posts 'entity' and 'feature' documents.
    """
    project_info = getitem_internal('project', **{'_id': items[0]['ProjectID']})[0]
    trimble_folder_id = project_info['TrimbleFolderID']
    for item in items:
        # download file
        file = IO()
        file_path = file.save_file(item['Url'])
        # upload to trimble
        token = get_internal('lastToken')[0]['_items']['token']
        headers = {"Authorization": "Bearer " + token}
        # BUG FIX: the file handle was opened and never closed (leak);
        # the context manager guarantees it is released after the upload.
        with open(file_path, 'rb') as fh:
            files = {'file': fh}
            r = requests.post(trimble_url + 'files?parentId=' + trimble_folder_id,
                              files=files, headers=headers)
        trimble_data = r.json()[0]
        TrimbleVersionID = trimble_data['versionId']
        item['TrimbleVersionID'] = TrimbleVersionID
        # extract features from ifc file
        ifc = IFC(file_path)
        entityList, entities, data = ifc.parse_geometry()
        file.remove_file(file_path)
        bim = Model(data=data, model_id=TrimbleVersionID)
        features = bim.get_features()
        item['ThumbnailUrl'] = process_thumbnail(TrimbleVersionID, headers)
        item['Entities'] = entityList
        for entity in entities:
            entity['TrimbleVersionID'] = TrimbleVersionID
        post_internal('entity', entities)
        post_internal('feature', features)
def acl_activity_roles(activity_id):
    """List function types (roles) aggregated across clubs for an activity."""
    org_lookup = {
        'type_id': {'$in': [6, 2, 19]},
        'main_activity.id': activity_id,
    }
    clubs, _, _, status, _ = get_internal('organizations_process', **org_lookup)
    if status == 200:
        club_ids = list({club['id'] for club in clubs['_items']})
        resource = 'functions_types_activity_count'
        datasource = app.config['DOMAIN'][resource]['datasource']
        aggregation = datasource.get('aggregation')
        if aggregation:
            # Restrict the aggregation's first $match stage to the clubs found.
            aggregation['pipeline'][0]['$match']['active_in_org_id']['$in'] = club_ids
            functions, _, _, agg_status, _ = _perform_aggregation(
                resource, aggregation['pipeline'], aggregation['options'])
            if agg_status == 200:
                funcs = []
                for f in functions['_items']:
                    funcs.append({
                        'type_id': f['_id'].get('type_id', 0),
                        'name': f['_id'].get('name', ''),
                    })
                return eve_response(funcs, status)
    return eve_response([], status)
def whitelist_admin(_id=None):
    """Admin endpoint for the 'whitelist' collection.

    POST creates a document and returns it; PUT applies a $set patch to
    the document with the given _id; PUT and GET both return the full
    collection listing.
    """
    if request.method == 'POST':
        return dumps(post_internal('whitelist', request.json)[0])
    elif request.method == 'PUT':
        # update_one replaces the deprecated Collection.update(); single-doc
        # semantics are identical to the old call without multi=True.
        app.data.driver.db['whitelist'].update_one({"_id": ObjectId(_id)},
                                                   {"$set": request.json})
    return dumps(get_internal('whitelist')[0])
def student_classes_admin(_id=None):
    """Admin endpoint for the 'student_classes' collection.

    POST creates a document and returns it; PUT applies a $set patch to
    the document with the given _id; PUT and GET both return the full
    collection listing.
    """
    if request.method == 'POST':
        return dumps(post_internal('student_classes', request.json)[0])
    elif request.method == 'PUT':
        # update_one replaces the deprecated Collection.update(); single-doc
        # semantics are identical to the old call without multi=True.
        app.data.driver.db['student_classes'].update_one({"_id": ObjectId(_id)},
                                                         {"$set": request.json})
    return dumps(get_internal('student_classes')[0])
def acl_clubs():
    """Return ids of all organizations of club/federation/section type."""
    lookup = {'type_id': {'$in': [6, 2, 19]}}
    clubs, _, _, status, _ = get_internal('organizations', **lookup)
    if status != 200:
        return eve_response([], status)
    return eve_response([club['id'] for club in clubs['_items']])
def test_search_audit_creation(client, app):
    """Searching via the internal endpoint records an audit entry."""
    fixtures = [
        {"_id": "5ab03a87bdd78169bb6d0785",
         "body_html": "Once upon a time there was a fish who could swim"},
        {"body_html": "Once upon a time there was a aardvark that could not swim"},
    ]
    app.data.insert('items', fixtures)
    ctx = app.test_request_context(
        query_string='q=fish&include_fields=body_html', path='/news')
    with ctx:
        g.user = company_id
        response = get_internal('news/search')
        # Only the 'fish' document matches the query.
        assert len(response[0]['_items']) == 1
        audit_check('5ab03a87bdd78169bb6d0785')
def _update_person(item):
    """Refresh competences, licenses and functions for a person, then
    broadcast the change to every org found in the person's memberships.

    :param item: dict carrying at least the person 'id'.
    """
    lookup = {'person_id': item['id']}
    competences, _, _, c_status, _ = get_internal(RESOURCE_COMPETENCES_PROCESS, **lookup)
    if c_status == 200:
        on_competence_post(competences.get('_items', []))
    licenses, _, _, l_status, _ = get_internal(RESOURCE_LICENSES_PROCESS, **lookup)
    if l_status == 200:
        on_license_post(licenses.get('_items', []))
    functions, _, _, f_status, _ = get_internal(RESOURCE_FUNCTIONS_PROCESS, **lookup)
    app.logger.debug('Functions\n{}'.format(functions))
    if f_status == 200:
        on_function_post(functions.get('_items', []))
    # BUG FIX: 'person' was unbound in the except handler whenever
    # getitem_internal itself raised, turning the log line into a NameError.
    person = None
    try:
        # getitem_internal returns: response, last_modified, etag, status
        person, _, _, p_status = getitem_internal(RESOURCE_PERSONS_PROCESS,
                                                  **{'id': item['id']})
        if p_status == 200:
            memberships = person['memberships']
            # Broadcast all: every activity, discipline and club the person is in
            broadcast({
                'entity': 'person',
                'entity_id': item['id'],
                'orgs': list(set(
                    [x['activity'] for x in memberships] +
                    [x['discipline'] for x in memberships] +
                    [x['club'] for x in memberships]))
            })
    except Exception as e:
        print('[ERR]', e)
        print(person)
def acl_activities_clubs(activity_id):
    """Return unique ids of clubs whose main activity matches *activity_id*."""
    lookup = {
        'type_id': {'$in': [6, 2, 19]},
        'main_activity.id': activity_id,
    }
    clubs, _, _, status, _ = get_internal('organizations_process', **lookup)
    if status != 200:
        return eve_response([], status)
    unique_ids = {club['id'] for club in clubs['_items']}
    return eve_response(list(unique_ids), status)
def _get_internal_item(resource, _id=None, _id_field=None, default=None):
    """Fetch one document of *resource*, looked up by _id_field (or '_id').

    Returns the first matching item, or *default* when nothing matched.
    """
    # NOTE! get_internal returns: response, last_modified, etag, status, headers
    # response, status and headers seem ok, but etag and last_modified are None.
    if _id_field is not None:
        lookup = {_id_field: _id}
    elif _id is not None:
        lookup = {'_id': _id}
    else:
        lookup = {}
    logger.debug('Calling get_internal(resource=%s, lookup=%s)' % (resource, lookup))
    response, last_modified, etag, status, headers = get_internal(resource, **lookup)
    logger.debug(
        'Calling get_internal returned: response=%s, last_modified=%s, etag=%s, status=%s, headers=%s'
        % (str(response)[:11], last_modified, etag, status, headers))
    items = response.get('_items')
    return items[0] if len(items) > 0 else default
def add_project(items):
    """Create a Trimble project for each item and store the remote ids."""
    for item in items:
        token = get_internal('lastToken')[0]['_items']['token']
        headers = {
            "Content-Type": "application/json",
            "Authorization": "Bearer " + token,
        }
        body = json.dumps({
            'name': item['Name'],
            'description': item['Description'],
        })
        resp = requests.post(trimble_url + 'projects', data=body, headers=headers)
        remote = resp.json()
        # Remember both the project and its root folder on the item.
        item['TrimbleFolderID'] = remote['rootId']
        item['TrimbleProjectID'] = remote['id']
def get_students():
    """For the current tutor, list recent (last 12h) attendances together
    with the students not yet registered on each attendance.
    """
    attendances, *_ = get_internal('attendances')
    attendances = attendances[config.ITEMS]
    attendances_students, *_ = get_internal('attendances_students')
    attendances_students = attendances_students[config.ITEMS]
    lookup = {
        config.DATE_CREATED: {
            '$gte': (utc_now - timedelta(hours=12))
        },
        'tutor': app.auth.get_request_auth_value()
    }
    attendances_tutors, *_ = get_internal('attendances_tutors', **lookup)
    attendances_tutors = attendances_tutors[config.ITEMS]
    # Resolve attendance ids to full documents via a one-pass index
    # (replaces the former O(n*m) nested scan).
    attendance_by_id = {a[config.ID_FIELD]: a for a in attendances}
    for v in attendances_tutors:
        if v['attendance'] in attendance_by_id:
            v['attendance'] = attendance_by_id[v['attendance']]
    for v in attendances_tutors:
        lookup = {'class': v['attendance']['class']}
        students, *_ = get_internal('classes_students', **lookup)
        students = students[config.ITEMS]
        # Keep only students not already present on this attendance.
        v['students'] = [
            s for s in students
            if not_in_attendance_students(attendances_students, v, s)
        ]
    attendances_tutors = [v for v in attendances_tutors if len(v['students']) > 0]
    return jsonify({config.ITEMS: attendances_tutors})
def acl_roles():
    """Return all function types that carry a usable name."""
    functions, _, _, status, _ = get_internal('functions_types_count')
    if status != 200:
        return eve_response([], status)
    funcs = []
    for f in functions['_items']:
        entry = {
            'type_id': f['_id'].get('type_id', 0),
            'name': f['_id'].get('name', False),
        }
        # Drop entries whose name is missing (defaulted to False above).
        if entry['name'] is not False:
            funcs.append(entry)
    return eve_response(funcs, status)
def _get_org(org_id) -> dict:
    """Get org from organizations internal

    :param org_id: Organization id
    :type org_id: int
    :return org: Returns the organization, or {} when not found
    :rtype: dict
    """
    org, _, _, status, _ = get_internal('organizations', **{'id': org_id})
    if status == 200:
        items = org.get('_items', [])
        if len(items) == 1:
            return items[0]
    return {}
def get_files(data):
    """Backfill missing thumbnails on returned file documents."""
    items = data['_items']
    if not items:
        return
    for item in items:
        if item['ThumbnailUrl'] != "":
            continue
        token = get_internal('lastToken')[0]['_items']['token']
        auth = {"Authorization": "Bearer " + token}
        thumbnail = process_thumbnail(item['TrimbleVersionID'], auth)
        if thumbnail == "":
            continue
        item['ThumbnailUrl'] = thumbnail
        # Persist the regenerated thumbnail so the next request has it.
        patch_internal('file', {"ThumbnailUrl": thumbnail}, **{'_id': item['_id']})
def clara_responses_download():
    """Stream all clara_responses as a semicolon-delimited CSV download.

    Emits two header rows (main scale and item ids) followed by one row
    of response values per stored response.
    """
    items = (get_internal('clara_responses')[0])['_items']

    def generate():
        data = StringIO()
        w = csv.writer(data, delimiter=';')
        # BUG FIX: items[0] below raised IndexError mid-stream when the
        # resource was empty; the filename handler already anticipated
        # the empty case, the stream did not.
        if not items:
            return
        for item in ['main_scale', 'clara_item']:
            header = [res['clara_item'][item] for res in items[0]['clara_items']]
            # write header
            w.writerow(header)
            yield data.getvalue()
            data.seek(0)
            data.truncate(0)
        # write each log item
        for item in items:
            row = [res['response_option']['response_value']
                   for res in item['clara_items']]
            w.writerow(row)
            yield data.getvalue()
            data.seek(0)
            data.truncate(0)

    # add a filename
    headers = Headers()
    try:
        filename = '{}.csv'.format(items[0]['student_class']['student_class'])
    except IndexError:
        filename = 'empty.csv'
    headers.set('Content-Disposition', 'attachment', filename=filename)
    # stream the response as the data is generated
    return Response(stream_with_context(generate()),
                    mimetype='text/csv',
                    headers=headers)
def _get_functions_types(type_id) -> dict:
    """Get a function type from the functions_types internal resource.

    (Docstring previously described _get_org by mistake.)

    :param type_id: Function type id
    :type type_id: int
    :return: Returns the matching function type document, or {} when the
        lookup fails or does not yield exactly one item
    :rtype: dict
    """
    function_type, _, _, status, _ = get_internal('functions_types',
                                                  **{'id': type_id})
    if status == 200:
        # Only trust an unambiguous, single-item result.
        if '_items' in function_type and len(function_type['_items']) == 1:
            return function_type['_items'][0]
    return {}
def get_within_delay(_id, event_type='ors_reminder', persons=None):
    """Return the subset of *persons* that already received a notification
    of *event_type* for *_id* within the reminder delay window.

    :param _id: source event id
    :param event_type: notification type to match
    :param persons: recipient ids to check; defaults to an empty list
    :return: unique recipient ids found
    """
    if persons is None:
        persons = []  # BUG FIX: was a shared mutable default argument
    lookup = {
        'event_from_id': _id,
        # 'recepient' is the stored (misspelled) field name — keep as-is.
        'recepient': {'$in': persons},
        'type': event_type,
        'event_created': {
            '$gte': datetime.datetime.utcnow() -
                    datetime.timedelta(seconds=REMINDER_DELTA)
        }
    }
    response, _, _, status, _ = get_internal('notifications', **lookup)
    return list({notification['recepient']
                 for notification in response['_items']})
def _get_person(person_id) -> dict:
    """Get person from persons internal

    :param person_id: Person id
    :type person_id: int
    :return: Returns the person given, or {} when missing
    :rtype: dict
    """
    if person_id is None:
        return {}
    person, _, _, status, _ = get_internal('persons', **{'id': person_id})
    if status != 200:
        return {}
    items = person.get('_items', [])
    return items[0] if len(items) == 1 else {}
def export(resource):
    """Export every document of *resource* as a CSV attachment.

    Responds 401 without valid credentials and 404 when the resource
    has no items.
    """
    token = request.headers.get("Authorization", None)
    authorized = app.auth.check_auth(token, None, None, "POST")
    domain = app.config.get('DOMAIN')
    schema = domain.get(resource, {})
    fieldnames = schema.get('schema', {}).keys()
    if not authorized:
        # BUG FIX: status was 400 while the payload advertised 401.
        status_code = 401
        data = {
            "_error": {
                "code": 401,
                "message": "Please provide proper credentials"
            },
            "_status": "ERR",
        }
        return jsonify(data), status_code
    q = get_internal(resource)
    res = q[0] if len(q) > 0 else {}
    results = res.get('_items', None)
    if results and len(results) > 0:
        for item in results:
            item.pop('_links', None)  # internal HATEOAS links are not exported
        csvfile = io.BytesIO()
        # Python 2 semantics: dict.keys() is a list, '+' concatenates.
        writer = csv.DictWriter(csvfile, fieldnames=list(
            set(results[0].keys() + fieldnames)))
        writer.writeheader()
        for item in results:
            writer.writerow(item)
        csvfile.seek(0)
        return send_file(csvfile, attachment_filename="export.csv")
    else:
        status_code = 404
        data = {
            "_error": {
                "code": 404,
                "message": "Resource not Found"
            },
            "_status": "ERR",
        }
        return jsonify(data), status_code
def search():
    """Run the media releases search and wrap its response."""
    resource = 'media_releases_search'
    return send_response(resource, get_internal(resource))
def search():
    """Run the wire search and wrap its response."""
    resource = 'wire_search'
    return send_response(resource, get_internal(resource))
def clara_responses_admin():
    """Return the raw clara_responses payload as a JSON string."""
    payload, *_ = get_internal('clara_responses')
    return dumps(payload)
def search():
    """Run the agenda search and wrap its response."""
    resource = 'agenda'
    return send_response(resource, get_internal(resource))
def get_viewer_data(item):
    """Attach the Trimble project id and a fresh API token to *item*."""
    lookup = {'_id': item['ProjectID']}
    project_info = getitem_internal('project', **lookup)[0]
    item['TrimbleProjectID'] = project_info['TrimbleProjectID']
    latest_token = get_internal('lastToken')[0]
    item['token'] = latest_token['_items']['token']
def search():
    """Run the monitoring search and wrap its response."""
    resource = 'monitoring_search'
    return send_response(resource, get_internal(resource))
def _acl_from_functions(person_id):
    """Build ACL entries from a person's active, non-license functions.

    Only functions held in org types 6 (club), 2 (særforbund),
    19 (seksjon) or 14 (gren) whose activity appears in NLF_ORG produce
    an entry.

    :param person_id: person to resolve
    :return: (status, list-of-acl-dicts)
    """
    function_acl = []
    lookup = {
        'person_id': person_id,
        'is_deleted': False,
        'is_passive': False,
        'type_is_license': False,
        # Still valid: either no end date, or an end date in the future.
        '$or': [{
            'to_date': {
                # BUG FIX: was datetime.now() — local time stamped with a
                # 'Z' (UTC) suffix; utcnow() makes the comparison truly UTC.
                '$gt': '{}Z'.format(datetime.utcnow().isoformat())
            }
        }, {
            'to_date': {
                '$exists': False
            }
        }]
    }
    functions, _, _, status, _ = get_internal('functions_process', **lookup)
    if status == 200:
        for f in functions['_items']:
            org, _, _, fstatus = getitem_internal(
                'organizations', **{'id': f['active_in_org_id']})
            # 2 særforbund, 19 seksjon, 14 er gren
            if fstatus == 200 and org.get('type_id', 0) in [6, 2, 19, 14]:
                for activity in [
                        v['id'] for v in org.get('activities', [{'id': 27}])
                ]:
                    # BUG FIX: was a bare `except: pass`, which also swallowed
                    # SystemExit/KeyboardInterrupt; narrowed to the lookup
                    # errors that can plausibly occur here.
                    try:
                        if activity in list(NLF_ORG.keys()):
                            function_acl.append({
                                'activity': activity,
                                'org': f['active_in_org_id'],
                                'role': f['type_id'],
                                'name': f.get('type_name', 'ukjent'),
                                'func': f['id'],
                                'type': org.get('type_id')
                            })
                    except (KeyError, TypeError):
                        pass
        return status, function_acl
    return status, function_acl
def search():
    """Run the configured search endpoint and wrap its response."""
    result = get_internal(search_endpoint_name)
    return send_response(search_endpoint_name, result)
def get_atom():
    """Render the published news items as an Atom feed.

    Requires an authorized request; otherwise returns the auth challenge.
    Builds one <entry> per item from the 'news/search' internal resource,
    rewrites embedded editor images to Newshub asset URLs, and attaches
    featuremedia as Media-RSS elements.  Per-item failures are logged and
    skipped so one bad item does not break the whole feed.
    """

    def _format_date(date):
        # Naive datetimes are assumed UTC and suffixed with 'Z';
        # tz-aware ones already carry an offset.
        iso8601 = date.isoformat()
        if date.tzinfo:
            return iso8601
        return iso8601 + 'Z'

    def _format_update_date(date):
        # Second-precision UTC timestamp for <updated> elements.
        DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
        return date.strftime(DATETIME_FORMAT) + 'Z'

    auth = app.auth
    if not auth.authorized([], None, flask.request.method):
        return auth.authenticate()

    XML_ROOT = '<?xml version="1.0" encoding="UTF-8"?>'

    _message_nsmap = {
        None: 'http://www.w3.org/2005/Atom',
        'dcterms': 'http://purl.org/dc/terms/',
        'media': 'http://search.yahoo.com/mrss/',
        'mi': 'http://schemas.ingestion.microsoft.com/common/'
    }

    # feed = etree.Element('feed', attrib={'lang': 'en-us'}, nsmap=_message_nsmap)
    feed = etree.Element('feed', nsmap=_message_nsmap)
    SubElement(feed, 'title').text = etree.CDATA('{} Atom Feed'.format(
        app.config['SITE_NAME']))
    SubElement(feed, 'updated').text = _format_update_date(utcnow())
    SubElement(SubElement(feed, 'author'),
               'name').text = app.config['SITE_NAME']
    SubElement(feed, 'id').text = flask.url_for('atom.get_atom', _external=True)
    SubElement(feed,
               'link',
               attrib={
                   'href': flask.url_for('atom.get_atom', _external=True),
                   'rel': 'self'
               })

    response = get_internal('news/search')
    # req = ParsedRequest()
    # req.args = {'include_fields': 'abstract'}
    # response = superdesk.get_resource_service('news/search').get(req=req, lookup=None)
    for item in response[0].get('_items'):
        try:
            # Re-fetch the full document; the search result is a projection.
            complete_item = superdesk.get_resource_service('items').find_one(
                req=None, _id=item.get('_id'))

            # If featuremedia is not allowed for the company don't add the item
            if ((complete_item.get('associations') or {}).get('featuremedia')
                    or {}).get('renditions'):
                if not check_association_permission(complete_item):
                    continue

            entry = SubElement(feed, 'entry')

            # If the item has any parents we use the id of the first, this should
            # be constant throught the update history
            if complete_item.get('ancestors') and len(
                    complete_item.get('ancestors')):
                SubElement(entry, 'id').text = complete_item.get('ancestors')[0]
            else:
                SubElement(entry, 'id').text = complete_item.get('_id')

            SubElement(entry, 'title').text = etree.CDATA(
                complete_item.get('headline'))
            SubElement(entry, 'published').text = _format_date(
                complete_item.get('firstpublished'))
            SubElement(entry, 'updated').text = _format_update_date(
                complete_item.get('versioncreated'))
            SubElement(entry,
                       'link',
                       attrib={
                           'rel': 'self',
                           'href': flask.url_for('news/item.get_item',
                                                 item_id=item.get('_id'),
                                                 format='TextFormatter',
                                                 _external=True)
                       })
            if complete_item.get('byline'):
                SubElement(SubElement(entry, 'author'),
                           'name').text = complete_item.get('byline')

            # dcterms:valid communicates the validity window downstream.
            if complete_item.get('pubstatus') == 'usable':
                SubElement(entry, etree.QName(_message_nsmap.get('dcterms'), 'valid')).text = \
                    'start={}; end={}; scheme=W3C-DTF'.format(_format_date(utcnow()),
                                                              _format_date(utcnow() + datetime.timedelta(days=30)))
            else:
                # in effect a kill set the end date into the past
                SubElement(entry, etree.QName(_message_nsmap.get('dcterms'), 'valid')).text = \
                    'start={}; end={}; scheme=W3C-DTF'.format(_format_date(utcnow()),
                                                              _format_date(utcnow() - datetime.timedelta(days=30)))

            categories = [{
                'name': s.get('name')
            } for s in complete_item.get('service', [])]
            for category in categories:
                SubElement(entry,
                           'category',
                           attrib={'term': category.get('name')})

            SubElement(entry, 'summary').text = etree.CDATA(
                complete_item.get('description_text', ''))

            # If there are any image embeds then reset the source to a Newshub asset
            html_updated = False
            regex = r' EMBED START Image {id: \"editor_([0-9]+)'
            root_elem = lxml_html.fromstring(complete_item.get('body_html', ''))
            comments = root_elem.xpath('//comment()')
            for comment in comments:
                if 'EMBED START Image' in comment.text:
                    m = re.search(regex, comment.text)
                    # Assumes the sibling of the Embed Image comment is the
                    # figure tag containing the image
                    figure_elem = comment.getnext()
                    if figure_elem is not None and figure_elem.tag == "figure":
                        imgElem = figure_elem.find("./img")
                        if imgElem is not None and m and m.group(1):
                            embed_id = "editor_" + m.group(1)
                            src = complete_item.get("associations").get(
                                embed_id).get("renditions").get("16-9")
                            if src:
                                imgElem.attrib["src"] = flask.url_for(
                                    'assets.get_item',
                                    asset_id=src.get('media'),
                                    _external=True)
                                html_updated = True
            if html_updated:
                complete_item["body_html"] = to_string(root_elem, method="html")

            SubElement(entry, 'content',
                       attrib={
                           'type': 'html'
                       }).text = etree.CDATA(complete_item.get('body_html', ''))

            # Attach featuremedia (16-9 rendition) as Media-RSS, including
            # the focal-point region for downstream croppers.
            if ((complete_item.get('associations') or {}).get('featuremedia')
                    or {}).get('renditions'):
                image = ((complete_item.get('associations')
                          or {}).get('featuremedia')
                         or {}).get('renditions').get("16-9")
                metadata = ((complete_item.get('associations')
                             or {}).get('featuremedia') or {})
                url = flask.url_for('assets.get_item',
                                    _external=True,
                                    asset_id=image.get('media'))
                media = SubElement(entry,
                                   etree.QName(_message_nsmap.get('media'),
                                               'content'),
                                   attrib={
                                       'url': url,
                                       'type': image.get('mimetype'),
                                       'medium': 'image'
                                   })
                SubElement(media,
                           etree.QName(_message_nsmap.get('media'),
                                       'credit')).text = metadata.get('byline')
                SubElement(
                    media,
                    etree.QName(
                        _message_nsmap.get('media'),
                        'title')).text = metadata.get('description_text')
                SubElement(media,
                           etree.QName(
                               _message_nsmap.get('media'),
                               'text')).text = metadata.get('body_text')
                focr = SubElement(
                    media, etree.QName(_message_nsmap.get('mi'),
                                       'focalRegion'))
                # NOTE(review): x1/x2 (and y1/y2) are both set from the same
                # poi coordinate, producing a zero-size region — looks
                # intentional as a point marker, but confirm.
                SubElement(focr,
                           etree.QName(_message_nsmap.get('mi'),
                                       'x1')).text = str(
                                           image.get('poi').get('x'))
                SubElement(focr,
                           etree.QName(_message_nsmap.get('mi'),
                                       'x2')).text = str(
                                           image.get('poi').get('x'))
                SubElement(focr,
                           etree.QName(_message_nsmap.get('mi'),
                                       'y1')).text = str(
                                           image.get('poi').get('y'))
                SubElement(focr,
                           etree.QName(_message_nsmap.get('mi'),
                                       'y2')).text = str(
                                           image.get('poi').get('y'))
        except Exception as ex:
            logger.exception('processing {} - {}'.format(item.get('_id'), ex))

    return flask.Response(
        XML_ROOT + etree.tostring(feed, pretty_print=True).decode('utf-8'),
        mimetype='application/atom+xml')
def search():
    """Run the AM news search and wrap its response."""
    resource = 'am_news_search'
    return send_response(resource, get_internal(resource))
def test_get_all_company_products_audit_creation(client, app):
    """Listing account products records an audit entry."""
    with app.test_request_context(path='/account/products/'):
        g.user = '******'
        payload, *_rest = get_internal('account/products')
        # Exactly one product is expected for this fixture user.
        assert len(payload['_items']) == 1
        audit_check('5ab03a87bdd78169bb6d0783')
def search():
    """Run the factcheck search and wrap its response."""
    resource = 'factcheck_search'
    return send_response(resource, get_internal(resource))
def acl_simple_all(): functions, _, _, status, _ = get_internal('functions') if status == 200: pass