def org_user(org):
    """Sync GitHub organization membership for the users in the request payload.

    Entities flagged deleted are removed from the organization; all others
    are checked for membership and invited when not yet members. Returns a
    small JSON status object.
    """
    ## Validating env vars
    config = VariablesConfig(required_env_vars, optional_env_vars)
    if not config.validate():
        sys.exit(1)

    request_data = request.get_data()
    json_data = json.loads(str(request_data.decode("utf-8")))

    for element in json_data:
        invi_username = element["username"]
        if element['deleted'] == True:
            data = requests.delete(
                f"{config.github_base_url}/orgs/{org}/memberships/{invi_username}",
                auth=(username, token))
            # BUG FIX: these checks must be mutually exclusive. The original
            # used separate if's, so a successful 204 also fell through to the
            # else branch and logged a spurious warning.
            if data.status_code == 204:
                logger.info('User has been removed from organization')
            elif data.status_code == 403:
                logger.info(
                    'Not allowed to remove user from organization. Status code: 403'
                )
            else:
                logger.warning(
                    f'Failing with content: {data.content} and status code: {data.status_code}'
                )
        else:
            data = requests.get(
                f"{config.github_base_url}/orgs/{org}/members/{invi_username}",
                auth=(username, token))
            if data.status_code == 204:
                # GitHub answers 204 (no body) when the user is a member.
                logger.info('User already part of organization')
            elif data.status_code == 404:
                logger.info('User not part of organization')
                logger.info(f'Trying to add user: {invi_username}')
                invi_response = requests.put(
                    f"{config.github_base_url}/orgs/{org}/memberships/{invi_username}",
                    auth=(username, token))
                if invi_response.status_code == 200:
                    decoded_data = json.loads(
                        invi_response.content.decode('utf-8-sig'))
                    # "pending" -> invitation sent; "active" -> already member.
                    if decoded_data.get('state') == "pending":
                        logger.info(
                            f"Organization invitation sent to username: {invi_username}"
                        )
                    elif decoded_data.get('state') == "active":
                        logger.info(
                            f"User with username: {invi_username}, already part of organization"
                        )
                elif invi_response.status_code in (422, 403):
                    logger.warning(
                        f"Organization invitation could not be sent to username: {invi_username}"
                    )
                    logger.warning(
                        f"Failed with error code: {invi_response.status_code}")
    return jsonify({'Steve reporting': "work complete..."})
def fylke_data():
    """Fetch all county (fylke) features from the geodata FeatureServer,
    paging 5000 records at a time, and stream them out as a JSON response.
    """
    config = VariablesConfig(required_env_vars, optional_env_vars)
    if not config.validate():
        sys.exit(1)
    logger.info(f"The geodata-connector is running")

    valid_response = None
    exceed_limit = True
    result_offset = 0
    return_object = []
    result_record_count = 5000

    if valid_response is None:
        logger.info("Requesting access token...")
        valid_response = get_token(config)
        token = {'Authorization': 'Bearer ' + valid_response['token']}
    if valid_response['expires'] <= 10:
        logger.info("Refreshing access token...")
        valid_response = get_token(config)
        token = {'Authorization': 'Bearer ' + valid_response['token']}

    # exceed_limit doubles as the loop sentinel: the service reports
    # exceededTransferLimit while more pages remain; it is absent (-> None)
    # on the last page.
    while exceed_limit is not None:
        try:
            # BUG FIX: the original URL was missing '=' after
            # resultRecordCount, so the page size was never applied.
            request_url = f"{config.base_url}/rest/services/Geomap_UTM33_EUREF89/GeomapMatrikkel/FeatureServer/4/query?where=fylkeid={config.fylke_id}&f=pjson&outFields={config.attributes}&returnExceededLimitFeatures=True&resultOffset={str(result_offset)}&resultRecordCount={str(result_record_count)}"
            data = requests.get(request_url, headers=token)
            # Check the HTTP status before parsing; the original parsed first,
            # so an error response raised KeyError into the catch-all below.
            if not data.ok:
                logger.error(
                    f"Unexpected response status code: {data.content}")
                return f"Unexpected error : {data.content}", 500
            decoded_data = json.loads(data.content.decode('utf-8-sig'))
            return_object.extend(decoded_data['features'])
            logger.info(
                f"extending result as exceed page limit is still {exceed_limit}"
            )
            exceed_limit = decoded_data.get("exceededTransferLimit")
            if exceed_limit is not None:
                result_offset += int(result_record_count)
                logger.info(f"Result offset is now {result_offset}")
        except Exception as e:
            # BUG FIX: the original left exceed_limit unchanged here, so a
            # persistent failure spun forever; stop and return what we have.
            logger.warning(
                f"Service not working correctly. Failing with error : {e}")
            break

    logger.info("Returning objects...")
    return Response(stream_json(return_object), mimetype='application/json')
def get():
    """Look up the Dekode user_id for the actor id ('aktorid') in the first
    incoming entity and return a small transform entity with the result.
    """
    ## Validating env vars
    config = VariablesConfig(required_env_vars, optional_env_vars)
    if not config.validate():
        sys.exit(1)

    request_data = request.get_data()
    json_data = json.loads(str(request_data.decode("utf-8")))

    # Helpers
    user_id = None
    # BUG FIX: actor_id is used when building the response below, but was
    # only assigned inside the try — an early failure raised NameError.
    actor_id = None
    headers = {
        "Accept": "application/json",
        "Authorization": f"{password}",
        "Content-type": "application/json"
    }

    try:
        actor_id = json_data[0].get('aktorid')
        data = requests.get(f"{active_users_base_url}?actor_id={actor_id}",
                            headers=headers)
        decoded_data = json.loads(data.content.decode('utf-8-sig'))
        if decoded_data.get('success') == True:
            user_id = decoded_data.get('user_id')
        else:
            logger.info(
                'Setting user_id to None, as user was not found in Dekode.')
            user_id = None
    except Exception as e:
        logger.warning(
            f"Could not get aktorid from SESAM. Failed with error: {e}")

    transform_response = []
    if json_data[0].get("_id"):
        return_dictionary = {
            "_id": f"{json_data[0].get('_id')}",
            "actor_id": actor_id,
            "user_id": user_id
        }
        transform_response.append(return_dictionary)
    else:
        # Entities without an _id cannot be returned to SESAM; skip them.
        logger.error(f"No _id provided in payload... Skipping entity")
    return Response(stream_json(transform_response),
                    mimetype='application/json')
def post():
    """Create or update users in the target system.

    Each incoming entity carries a 'properties' value selecting the endpoint:
    a path containing '/' means update an existing user, the literal "users"
    means create a new one; anything else is skipped.
    """
    ## Validating env vars
    config = VariablesConfig(required_env_vars, optional_env_vars)
    if not config.validate():
        sys.exit(1)

    headers = {
        "Accept": "application/json",
        "Authorization": f"{password}",
        "Content-type": "application/json"
    }
    request_data = request.get_data()
    json_data = json.loads(str(request_data.decode("utf-8")))

    for element in json_data:
        function = element['properties']
        # The routing hint must not be sent along in the request body.
        del element['properties']
        # BUG FIX: the three cases are alternatives. The original used two
        # separate if's, so the update path also hit the final else and
        # logged 'Nothing to do...'.
        if '/' in function:
            logger.info('trying to update user')
            update_response = requests.post(f"{base_url}/{function}",
                                            headers=headers,
                                            data=json.dumps(element))
            if update_response.status_code == 200:
                logger.info(f"User has been updated!")
            else:
                logger.error(
                    f"Failed to update user, with error: {update_response.content}"
                )
        elif function == "users":
            logger.info('trying to create user')
            create_response = requests.post(f"{base_url}/{function}",
                                            headers=headers,
                                            data=json.dumps(element))
            if create_response.status_code == 201:
                logger.info(f"User has been created!")
            else:
                logger.error(
                    f"Failed to create user, with error: {create_response.content}"
                )
        else:
            logger.info('Nothing to do...')
    return jsonify({'Steve reporting': "work complete..."})
from requests.exceptions import Timeout
from sesamutils import sesam_logger, VariablesConfig
from sesamutils.flask import serve
from collections import OrderedDict

# NOTE(review): Flask and os are used below but not imported here —
# presumably imported earlier in the file; verify.
app = Flask(__name__)
logger = sesam_logger('zendesk', app=app, timestamp=True)

# Default values can be given to optional environment variables by the use of tuples
required_env_vars = ["USER", "TOKEN","SUBDOMAIN"]
optional_env_vars = [("DEBUG","false"),("LOG_LEVEL", "INFO"),"DUMMY_URL"]

# Fail hard at startup when a required environment variable is missing.
config = VariablesConfig(required_env_vars, optional_env_vars=optional_env_vars)
if not config.validate():
    logger.error("Environment variables do not validate. Exiting system.")
    os.sys.exit(1)

# Zendesk credentials: API-token auth uses "<user>/token" as the username.
USER = config.USER
TOKEN = config.TOKEN
ZEN_AUTH = (USER+'/token', TOKEN)
SUBDOMAIN = config.SUBDOMAIN
DUMMY_URL = False
DEBUG = config.DEBUG in ["true","True","yes"]
if hasattr(config, 'DUMMY_URL'):
    DUMMY_URL = config.DUMMY_URL
# NOTE(review): if DUMMY_URL is not set, ZENURL is the boolean False and the
# subscript below raises TypeError — looks like DUMMY_URL is expected to
# always be configured; confirm.
ZENURL = DUMMY_URL
if (ZENURL[-1] == '/'):
def get_data():
    """For each coordinate pair in the incoming payload, query the geodata
    MapServer for matching cadastre (matrikkel) attributes and return the
    merged results as a streamed JSON response.
    """
    config = VariablesConfig(required_env_vars)
    if not config.validate():
        sys.exit(1)
    logger.info(f"The geodata-connector is running")

    request_data = request.get_data()
    json_data = json.loads(str(request_data.decode("utf-8")))
    valid_response = None
    return_object = []

    for element in json_data[0].get("payload"):
        # Acquire an access token on the first iteration, refresh it when
        # it is about to expire.
        if valid_response is None:
            logger.info("Requesting access token...")
            valid_response = get_token(config)
            token = {'Authorization': 'Bearer ' + valid_response['token']}
        if valid_response['expires'] <= 10:
            logger.info("Refreshing access token...")
            valid_response = get_token(config)
            token = {'Authorization': 'Bearer ' + valid_response['token']}
        try:
            ## Query parameters for dynamic fetching
            wkid = str(element.get("wkid"))
            x = str(element.get('x_coordinate'))
            y = str(element.get('y_coordinate'))
            # BUG FIX: the original condition was "'~f' in x or y", which
            # parses as "('~f' in x) or bool(y)" and was true for any
            # non-empty y. The intent is clearly to strip a '~f' type
            # prefix from either coordinate.
            if '~f' in x or '~f' in y:
                x = x.strip('~f')
                y = y.strip('~f')
            logger.info(
                f"The x, y and wkid respectively '{x}', '{y}', '{wkid}'")
            if not x or not y:
                logger.warning(
                    f"The x or y coordinates '{x}', '{y}' are not provided in the right format"
                )
            geometry_query = {
                "x": x,
                "y": y,
                "spatialReference": {
                    "wkid": wkid
                }
            }
            ## Requesting geo data
            request_url = f"{config.base_url}/rest/services/Geomap_UTM33_EUREF89/GeomapMatrikkel/MapServer/5/query?geometry={geometry_query}&geometryType=esriGeometryPoint&inSR={wkid}&spatialRel=esriSpatialRelIntersects&relationParam=&outFields=kommunenr%2Cgardsnr%2Cbruksnr&returnGeometry=false&returnTrueCurves=false&returnIdsOnly=false&returnCountOnly=false&returnZ=false&returnM=false&returnDistinctValues=false&f=pjson"
            geo_data = requests.get(request_url, headers=token)
            if not geo_data.ok:
                logger.error(
                    f"Unexpected response status code: {geo_data.content}")
                # (A dead 'raise' after this return was removed.)
                return f"Unexpected error : {geo_data.content}", 500
            # Fall back to the default response when the service returns no
            # feature (IndexError) or an unexpected shape (KeyError).
            try:
                geo_transform = geo_data.json()['features'][0]
                geo_transform["geodata"] = geo_transform.pop("attributes")
            except IndexError as e:
                logger.error(f"exiting with error {e}")
                geo_transform = default_response
            except KeyError as e:
                logger.error(f"exiting with error {e}")
                geo_transform = default_response
            sesam_dict = dict_merger(dict(element), dict(geo_transform))
            return_object.append(sesam_dict)
        except Exception as e:
            # Best-effort per element: log and continue with the next one.
            logger.warning(
                f"Service not working correctly. Failing with error : {e}")

    transform_response = []
    if json_data[0].get("_id"):
        return_dictionary = {
            "_id": f"{json_data[0].get('_id')}",
            "geo_response": return_object
        }
        transform_response.append(return_dictionary)
    else:
        logger.info(f"No _id provided in payload...")
        return_dictionary = {"geo_response": return_object}
        transform_response.append(return_dictionary)
    return Response(stream_json(transform_response),
                    mimetype='application/json')
from sesamutils.flask import serve
import os

# NOTE(review): Flask, VariablesConfig, sys, json and sesam_logger are used
# below but not imported in this section — presumably imported earlier in
# the file; verify.
app = Flask(__name__)

# set GOOGLE_APPLICATION_CREDENTIALS envvar for GCP authentication
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'service_account_key.json'

# GOOGLE_APPLICATION_CREDENTIALS is listed as required, but it is set just
# above, so validation effectively only checks the other two variables.
required_env_vars = [
    "GOOGLE_APPLICATION_CREDENTIALS", "GOOGLE_APPLICATION_CREDENTIALS_CONTENT",
    "QUERY_CONFIGS"
]
optional_env_vars = [("DEFAULT_PAGE_SIZE", 100)]
env_config = VariablesConfig(required_env_vars, optional_env_vars)
if not env_config.validate():
    sys.exit(1)

logger = sesam_logger('google-bigquery', app=app)
# Env vars arrive as strings; parse the JSON query config and coerce the
# page size to int before use.
env_config.QUERY_CONFIGS = json.loads(env_config.QUERY_CONFIGS)
env_config.DEFAULT_PAGE_SIZE = int(env_config.DEFAULT_PAGE_SIZE)
logger.info('started up with\n\tQUERY_CONFIGS:{}'.format(
    env_config.QUERY_CONFIGS))

# write out service config from env var to known file
with open(env_config.GOOGLE_APPLICATION_CREDENTIALS, "wb") as out_file:
    out_file.write(env_config.GOOGLE_APPLICATION_CREDENTIALS_CONTENT.encode())
def test_validate_missing_variable():
    """Validation must fail when the required environment variables are unset."""
    unset_vars = ["var1", "var3"]
    missing_config = VariablesConfig(unset_vars)
    assert missing_config.validate() == False
import time

# Activate logging
logger = sesam_logger("sap-odata-source")

# Get env.vars: SERVICE_URL is mandatory; AUTH_TYPE defaults to "basic".
required_env_vars = ["SERVICE_URL"]
optional_env_vars = [
    "LOG_LEVEL",
    ("AUTH_TYPE", "basic"),
    "USERNAME",
    "PASSWORD",
    "TOKEN_URL",
    "TOKEN_REQUEST_HEADERS",
    "TOKEN_REQUEST_BODY",
]
env_vars = VariablesConfig(required_env_vars,
                           optional_env_vars=optional_env_vars)

# Abort startup unless every required variable is supplied.
if not env_vars.validate():
    sys.exit(1)

# Verify authentication: only basic and token auth are implemented.
supported_auth_types = ["basic", "token"]
if env_vars.AUTH_TYPE.lower() in supported_auth_types:
    logger.info(f"Using {env_vars.AUTH_TYPE.lower()} authentication")
else:
    logger.error(f"Unsupported authentication type: {env_vars.AUTH_TYPE}")
    sys.exit(1)

# Start the service
app = Flask(__name__)
def create_embedded_data():
    """Rewrite a pipe's config so its source becomes conditional: the original
    source for "prod" and embedded, representative test entities for "test".

    Reads ``pipe_id`` and ``entities`` (sample size) from the query string,
    fetches the pipe config and its dataset entities from the SESAM node,
    builds test entities shaped by the pipe's cleaned JSON schema, and PUTs
    the updated config back. Returns a JSON status message either way.
    """
    logger.info(f"Test data service is ready to do your bidding..")
    ## Validating env vars
    config = VariablesConfig(required_env_vars)
    if not config.validate():
        sys.exit(1)
    pipe_id = request.args.get('pipe_id')
    max_entities = int(request.args.get('entities'))
    header = {
        'Authorization': f'Bearer {config.jwt}',
        "content-type": "application/json"
    }
    try:
        # Current pipe configuration; reused as the "prod" alternative below.
        sesam_config_request = requests.get(
            f"{config.base_url}/pipes/{pipe_id}/config",
            headers=header,
            verify=False)
        json_config_response = json.loads(
            sesam_config_request.content.decode('utf-8-sig'))
        # Live entities of the pipe's dataset (no deleted/history entries).
        sesam_entity_request = requests.get(
            f"{config.base_url}/datasets/{pipe_id}/entities?deleted=False&history=False",
            headers=header,
            verify=False)
        json_entity_response = json.loads(
            sesam_entity_request.content.decode('utf-8-sig'))
        # NOTE(review): same response decoded a second time — the "schema" is
        # derived from the first entity, not from a schema endpoint.
        json_schema_response = json.loads(
            sesam_entity_request.content.decode('utf-8-sig'))
        json_mapping_schema = cleaning_json_schema(pipe_id,
                                                   json_schema_response[:1])
        # Flatten every entity, then draw a representative sample of at most
        # max_entities of them.
        flattened_entities = []
        for entity in json_entity_response:
            flattened_entities.append(flatten_json(entity))
        json_entity_response = draw_representative_values(flattened_entities,
                                                          k=max_entities)
        embedded_entities = []
        for response_elements in json_entity_response:
            new_entity = {}
            for response_key, response_value in response_elements.items():
                for schema_elements in json_mapping_schema:
                    # Direct schema match, with and without the namespace
                    # prefix before ':'.
                    if response_key in schema_elements:
                        new_entity[response_key] = response_value
                    if response_key.split(':', 1)[-1] in schema_elements:
                        new_entity[response_key.split(':', 1)[-1]] = response_value
                    if response_key not in schema_elements:
                        # Otherwise try to place the value inside nested
                        # schema structures; any failure (e.g. missing parent
                        # key) is deliberately swallowed — best effort.
                        try:
                            for schema_key, schema_value in schema_elements.items(
                            ):
                                if type(schema_value) is dict:
                                    for nested_key, nested_value in schema_value.items(
                                    ):
                                        if response_key.split(
                                                ':')[-1] in nested_key:
                                            if schema_key not in new_entity:
                                                new_entity[schema_key] = {}
                                            new_entity[schema_key][
                                                nested_key] = response_value
                                        # List-valued nested schema entries are
                                        # copied over wholesale.
                                        if type(nested_value) is list:
                                            if nested_key not in new_entity[
                                                    schema_key]:
                                                new_entity[schema_key][
                                                    nested_key] = []
                                            for nested_dicts in nested_value:
                                                new_entity[schema_key][
                                                    nested_key].append(
                                                        nested_dicts)
                                        # Dict-valued nested entries are copied
                                        # as-is. NOTE(review): the second
                                        # guard's nesting is ambiguous in the
                                        # original formatting — confirm intent.
                                        if type(nested_value) is dict:
                                            if response_key.split(
                                                    ':')[-1] in nested_key:
                                                if nested_key not in new_entity[
                                                        schema_key]:
                                                    new_entity[schema_key][
                                                        nested_key] = {}
                                                new_entity[schema_key][
                                                    nested_key] = nested_value
                                            if nested_key not in new_entity[
                                                    schema_key]:
                                                new_entity[schema_key][
                                                    nested_key] = {}
                                                new_entity[schema_key][
                                                    nested_key] = nested_value
                                if type(schema_value) is list:
                                    if schema_key not in new_entity:
                                        new_entity[schema_key] = []
                                    for nested_dicts in schema_value:
                                        new_entity[schema_key].append(
                                            nested_dicts)
                        except Exception:
                            pass
            embedded_entities.append(new_entity)
        # New conditional source: original source under "prod", embedded test
        # entities under "test", switched by the node-env environment variable.
        new_source = {
            "_id": json_config_response["_id"],
            "type": json_config_response["type"],
            "source": {
                "type": "conditional",
                "alternatives": {
                    "prod": json_config_response["source"],
                    "test": {
                        "type": "embedded",
                        "entities": embedded_entities
                    }
                },
                "condition": "$ENV(node-env)"
            },
            "transform": json_config_response["transform"]
        }
        check_response = requests.put(
            f"{config.base_url}/pipes/{pipe_id}/config?force=True",
            headers=header,
            data=json.dumps(new_source),
            verify=False)
        if not check_response.ok:
            return_msg = f"Unexpected error : {check_response.content}", 500
        else:
            return_msg = f"Your pipe with id : {pipe_id} has been updated with test data"
    except Exception as e:
        return_msg = f"Your pipe with id : {pipe_id} could unfortunately not be updated... I failed with the following error : {e}"
    return jsonify({"Response": f"{return_msg}"})