def check_cert_validity(certfile, validity_interval=1):
    """
    Check whether an x509 certificate (PEM file) is currently valid.

    Parses the "Validity" section of `openssl x509 -text` output and
    compares the Not Before / Not After dates against the current time.

    :param certfile: path to the certificate file
    :param validity_interval: grace period (hours) subtracted from "now"
        when checking expiration, so a cert about to expire still counts
    :return: tuple (valid, not_before, not_after)
    """
    args = ["x509", "-in", certfile, "-text"]

    # TODO: change the openssl bash command with the pyOpenSSL API
    # if so we may remove 'plumbum' from requirements of rapydo-http repo
    from plumbum import local

    # Pattern in plumbum library for executing a shell command
    command = local["openssl"]
    log.verbose("Executing command openssl {}", command, args)
    # BUGFIX: the previous `except ProcessExecutionError as e: raise e`
    # re-raised the exception unchanged, equivalent to no try/except at all
    output = command(args)

    # Match the two date lines as printed by openssl
    pattern = re.compile(
        r"Validity.*\n\s*Not Before: (.*)\n\s*Not After *: (.*)")
    validity = pattern.search(output).groups()

    not_before = dateutil.parser.parse(validity[0])
    not_after = dateutil.parser.parse(validity[1])
    now = datetime.now(pytz.utc)

    valid = (not_before < now) and (
        not_after > now - timedelta(hours=validity_interval))

    return valid, not_before, not_after
def retry(self, retry_interval=3, max_retries=-1):
    """
    Repeatedly attempt self.custom_connection() until it succeeds.

    :param retry_interval: seconds between attempts (doubled every 3 tries)
    :param max_retries: maximum number of attempts; any value <= 0 means
        retry forever
    :return: the connection object, or None if retries were exhausted
    """
    import time

    retry_count = 0
    # BUGFIX: initialize so exhausting the loop cannot raise NameError
    obj = None

    # Get the exception which will signal a missing connection
    exceptions = self.set_connection_exception()
    if exceptions is None:
        exceptions = (BaseException,)

    # BUGFIX: the original condition `max_retries != 0 or retry_count <
    # max_retries` was always True for any non-zero max_retries, so the
    # limit was never enforced; non-positive now means "unlimited"
    while max_retries <= 0 or retry_count < max_retries:
        retry_count += 1
        if retry_count > 1:
            log.verbose("testing again in {} secs", retry_interval)
            # BUGFIX: the wait announced by the log line above was missing
            time.sleep(retry_interval)
        try:
            obj = self.custom_connection()
        except exceptions as e:
            log.error("Catched: {}({})", e.__class__.__name__, e)
            log.exit("Service '{}' not available", self.name)
        else:
            break

        # Increment sleeps time if doing a lot of retries
        if retry_count % 3 == 0:
            log.debug("Incrementing interval")
            retry_interval += retry_interval

    return obj
def get_models(self):
    """ Read models from base/custom yaml files """

    # BASE definitions are mandatory: abort on failure
    base_path = os.path.join(ABS_RESTAPI_PATH, MODELS_DIR)
    try:
        data = load_yaml_file('swagger.yaml', path=base_path)
    except AttributeError as e:
        log.exit(e)

    # EXTENDED definitions, if any
    extended_models = None
    if EXTENDED_PACKAGE != EXTENDED_PROJECT_DISABLED:
        ext_path = os.path.join(os.curdir, EXTENDED_PACKAGE, MODELS_DIR)
        # NOTE: with logger=False I skip the warning if this file doesn't exist
        try:
            extended_models = load_yaml_file('swagger.yaml', path=ext_path)
        except AttributeError as e:
            log.verbose(e)

    # CUSTOM definitions (optional: fall back to an empty mapping)
    custom_path = os.path.join(os.curdir, CUSTOM_PACKAGE, MODELS_DIR)
    try:
        custom_models = load_yaml_file('swagger.yaml', path=custom_path)
    except AttributeError as e:
        log.verbose(e)
        custom_models = {}

    # Merge order: base <- extended (when present) <- custom
    if extended_models is None:
        return mix(data, custom_models)

    return mix(mix(data, extended_models), custom_models)
def save_token(self, user, token, jti, token_type=None):
    """
    Persist a token entry (with client IP, resolved hostname and
    expiration) into the database, linked to the emitting user.
    Rolls back the session on any database error.
    """
    ip = self.get_remote_ip()
    ip_loc = self.localize_ip(ip)

    if token_type is None:
        token_type = self.FULL_TOKEN

    now = datetime.now()

    token_entry = self.db.Token(
        jti=jti,
        token=token,
        token_type=token_type,
        creation=now,
        last_access=now,
        expiration=now + timedelta(seconds=self.shortTTL),
        IP=ip,
        hostname=ip_loc,
    )
    token_entry.emitted_for = user

    try:
        self.db.session.add(token_entry)
        # Save user updated in profile endpoint
        self.db.session.add(user)
        self.db.session.commit()
        log.verbose("Token stored inside the DB")
    except BaseException as e:
        log.error("DB error ({}), rolling back", e)
        self.db.session.rollback()
def get_smtp_client(smtp_host, smtp_port, username=None, password=None):
    """
    Build a connected (and optionally authenticated) SMTP client.

    :param smtp_host: SMTP server hostname
    :param smtp_port: SMTP server port (str or int); 465 selects SSL
    :param username: optional login user (requires password too)
    :param password: optional login password
    :return: a ready smtplib client, or None on connection/auth errors
    """
    ###################
    # https://stackabuse.com/how-to-send-emails-with-gmail-using-python/
    # BUGFIX: compare as string so an integer port 465 also selects SSL
    if str(smtp_port) == '465':
        smtp = SMTP_SSL(smtp_host)
    else:
        smtp = SMTP(smtp_host)
        # if this is 587 we might need also
        # smtp.starttls()
    ###################
    smtp.set_debuglevel(0)
    log.verbose("Connecting to {}:{}", smtp_host, smtp_port)
    try:
        smtp.connect(smtp_host, smtp_port)
        smtp.ehlo()
    except socket.gaierror as e:
        # DNS resolution failure
        log.error(str(e))
        return None

    if username is not None and password is not None:
        log.verbose("Authenticating SMTP")
        try:
            smtp.login(username, password)
        except SMTPAuthenticationError as e:
            log.error(str(e))
            return None
    return smtp
def load_yaml_file(file, path, keep_order=False):
    """
    Load the first document of a YAML file.

    :param file: file name
    :param path: directory containing the file
    :param keep_order: preserve mapping order via the OrderedDict loader
    :raises AttributeError: if the file is missing, empty or unparsable
    :return: the first YAML document
    """
    filepath = os.path.join(path, file)
    log.verbose("Reading file {}", filepath)

    if not os.path.exists(filepath):
        raise AttributeError("YAML file does not exist: {}".format(filepath))

    with open(filepath) as fh:
        try:
            if keep_order:
                OrderedLoader.add_constructor(
                    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                    construct_mapping)
                docs = list(yaml.load_all(fh, OrderedLoader))
            else:
                docs = list(yaml.load_all(fh, yaml.loader.Loader))

            if len(docs) == 0:
                raise AttributeError("YAML file is empty: {}".format(filepath))

            # Only the first document is relevant
            return docs[0]
        except Exception as e:
            # # IF dealing with a strange exception string (escaped)
            # import codecs
            # error, _ = codecs.getdecoder("unicode_escape")(str(error))
            raise AttributeError("Failed to read file {}: {}".format(
                filepath, e))
def custom_connection(self, **kwargs):
    """
    Connect to MongoDB: first to the authentication database, then to
    the configured service database; return a holder object exposing
    the raw connection.
    """
    ##################
    # mix kwargs with variables (kwargs win)
    variables = self.variables
    for name, val in kwargs.items():
        variables[name] = val

    ##################
    # connect for authentication if required
    auth_uri = "mongodb://{}:{}/{}".format(
        variables.get('host'), variables.get('port'), AUTH_DB)
    mongodb.connect(auth_uri, alias=AUTH_DB)

    ##################
    # connect to the actual service database
    db = variables.get('database', 'UNKNOWN')
    db_uri = "mongodb://{}:{}/{}".format(
        variables.get('host'), variables.get('port'), db)
    mongodb.connect(db_uri, alias=db)
    link = mongodb._get_connection(alias=db)
    log.verbose("Connected to db {}", db)

    class obj:
        connection = link

    return obj
def load_variables(service, enable_var=None, prefix=None):
    """
    Collect environment variables belonging to a service.

    Variables whose lowercased name starts with the service prefix are
    returned with the prefix stripped and surrounding quotes removed.
    The key 'external' is set to True when the detected host does not
    end with 'dockerized.io'.
    """
    if prefix is None:
        _, prefix = Detector.prefix_name(service)

    variables = {}
    host = None

    for name, value in os.environ.items():
        # the enabling variable itself is not a configuration value
        if enable_var is not None and name == enable_var:
            continue
        name = name.lower()

        # This is the case when a variable belongs to a service 'prefix'
        if not name.startswith(prefix):
            continue

        # Fix key and value before saving
        key = name[len(prefix):]
        # One thing that we must avoid is any quote around our value
        value = value.strip('"').strip("'")
        variables[key] = value
        if key == 'host':
            host = value

    # Verify if service is EXTERNAL
    variables['external'] = False
    if isinstance(host, str) and not host.endswith('dockerized.io'):
        variables['external'] = True
        log.verbose("Service {} detected as external: {}", service, host)

    return variables
def get_authentication_module(auth_service):
    """ Load the python module implementing the requested auth backend """
    module_name = "services.authentication.{}".format(auth_service)
    log.verbose("Loading auth extension: {}", module_name)
    return Meta.get_module_from_string(
        modulestring=module_name, prefix_package=True, exit_on_fail=True)
def test_ticket(self, path):
    """
    Probe whether the current ticket grants read access to `path`:
    True when the data object can be opened, False otherwise.
    """
    # self.ticket_supply(code)
    try:
        with self.prc.data_objects.open(path, 'r') as handle:
            log.verbose(handle.__class__.__name__)
    except iexceptions.SYS_FILE_DESC_OUT_OF_RANGE:
        return False
    return True
def init_parameters(self):
    """
    Reset request-parsing state and register, on a fresh RequestParser,
    the query parameters defined in swagger for the current endpoint.
    """
    # Make sure you can parse arguments at every call
    self._args = {}
    self._json_args = {}
    self._params = {}

    # Query parameters
    self._parser = reqparse.RequestParser()

    # use self to get the classname
    classname = self.myname()
    # use request to recover uri and method
    uri = str(request.url_rule)
    method = request.method.lower()

    # FIXME: this works only for 'query' parameters
    # recover from the global mem parameters query parameters
    current_params = (mem.customizer._query_params.get(classname, {}).get(
        uri, {}).get(method, {}))

    if len(current_params) > 0:

        # Basic options
        basevalue = str  # Python3
        # basevalue = unicode  #Python2
        loc = ['headers', 'values']  # multiple locations
        trim = True

        for param, data in current_params.items():

            # FIXME: Add a method to convert types swagger <-> flask
            tmptype = data.get('type', 'string')
            # BUGFIX: these used to be two independent `if`s, so the
            # second one's `else` clobbered mytype=bool for booleans
            if tmptype == 'boolean':
                mytype = bool
            elif tmptype == 'number':
                mytype = int
            else:
                mytype = basevalue

            # BUGFIX: `act` was set once before the loop, so a single
            # 'select' parameter turned all later parameters into lists
            act = 'store'  # store is normal, append is a list
            # TO CHECK: I am creating an option to handle arrays
            if tmptype == 'select':
                act = 'append'

            self._parser.add_argument(
                param,
                type=mytype,
                default=data.get('default', None),
                required=data.get('required', False),
                trim=trim,
                action=act,
                location=loc,
            )
            log.verbose("Accept param '{}' type {}", param, mytype)
def __get_channel(self):
    """
    Return a usable channel, creating one when missing and recreating
    it when the previous one was closed.
    """
    current = self.__channel
    if current is None:
        log.verbose('Creating new channel.')
        self.__channel = self.__connection.channel()
    elif current.is_closed:
        log.verbose('Recreating channel.')
        self.__channel = self.__connection.channel()
    return self.__channel
def get_instance(self, **kwargs):
    """
    Return a (possibly cached) service connection.

    Outside a request context the connection is created and stored on
    self; inside a context, a previously stored connection is looked up
    (keyed by the remaining kwargs) and reused unless it outlived
    `cache_expiration` seconds.

    Keyword options consumed here (popped before keying the cache):
    - global_instance: cache on self instead of the flask context
    - authenticator: skip the cache lookup entirely
    - cache_expiration: seconds after which a cached object is dropped
    """

    # Parameters
    global_instance = kwargs.pop('global_instance', False)
    isauth = kwargs.pop('authenticator', False)
    cache_expiration = kwargs.pop('cache_expiration', None)
    # pinit = kwargs('project_initialization', False)

    # Variables
    obj = None
    ctx = stack.top
    ref = self
    # remaining kwargs identify this particular connection
    unique_hash = str(sorted(kwargs.items()))

    # When not using the context, this is the first connection
    if ctx is None:
        # First connection, before any request
        obj = self.connect()
        if obj is None:
            return None
        # self.initialization(obj=obj)
        self.set_object(obj=obj, ref=ref)
        log.verbose("First connection for {}", self.name)
    else:
        if not isauth:
            if not global_instance:
                # cache on the per-request context, not on self
                ref = ctx
            obj = self.get_object(ref=ref, key=unique_hash)

        # Drop the cached object when it is older than cache_expiration
        if obj is not None and cache_expiration is not None:
            now = datetime.now()
            exp = timedelta(seconds=cache_expiration)
            if now < obj.connection_time + exp:
                log.verbose("Cache is still valid for {}", self)
            else:
                log.info("Cache expired for {}", self)
                obj = None

        # Cache miss (or expired): open a fresh connection and store it
        if obj is None:
            obj = self.connect(**kwargs)
            if obj is None:
                return None
            self.set_object(obj=obj, ref=ref, key=unique_hash)
        else:
            pass

    # Always re-inject models on the returned object
    obj = self.set_models_to_service(obj)
    return obj
def set_models_to_service(self, obj):
    """
    Attach every model class owned by this service as an attribute of
    the connection object, plus the whole mapping as obj.models.
    """
    if len(self.models) < 1 and self.__class__.__name__ == 'NeoModel':
        raise Exception()

    for model_name, model_class in self.models.items():
        # Save attribute inside class with the same name
        log.verbose("Injecting model '{}'", model_name)
        setattr(obj, model_name, model_class)

    obj.models = self.models
    return obj
def wrapper(*args, **kwargs):
    """
    Authentication wrapper applied to an endpoint method.

    Extracts the token from the request, verifies scheme, token and
    (optionally) roles before calling the wrapped endpoint. Closure
    variables from the enclosing decorator: self, func,
    allow_access_token_parameter, roles, required_roles.
    """

    # Recover the auth object
    auth_type, token = self.get_authorization_token(
        allow_access_token_parameter=allow_access_token_parameter)

    # Base header for errors
    headers = {HTTPAUTH_AUTH_HEADER: self.authenticate_header()}

    # Internal API 'self' reference
    decorated_self = Meta.get_self_reference_from_args(*args)

    if auth_type is None or auth_type.lower() != self._scheme.lower():
        # Wrong authentication string
        msg = (
            "Missing credentials in headers, e.g. {}: '{} TOKEN'".
            format(HTTPAUTH_AUTH_FIELD, HTTPAUTH_DEFAULT_SCHEME))
        log.info("Unauthorized request: missing credentials")
        return decorated_self.force_response(
            errors=msg,
            code=hcodes.HTTP_BAD_UNAUTHORIZED,
            headers=headers,
        )

    # Handling OPTIONS forwarded to our application:
    # ignore headers and let go, avoid unwanted interactions with CORS
    if request.method != 'OPTIONS':

        # Check authentication
        token_fn = decorated_self.auth.verify_token
        if not self.authenticate(token_fn, token):
            # Clear TCP receive buffer of any pending data
            log.verbose(request.data)
            # Mimic the response from a normal endpoint
            # To use the same standards
            log.info("Invalid token received '{}'", token)
            return decorated_self.force_response(
                errors="Invalid token received",
                code=hcodes.HTTP_BAD_UNAUTHORIZED,
                headers=headers)

        # Check roles
        if len(roles) > 0:
            roles_fn = decorated_self.auth.verify_roles
            if not self.authenticate_roles(roles_fn, roles, required_roles):
                log.info("Unauthorized request: missing privileges")
                return decorated_self.force_response(
                    errors="You are not authorized: missing privileges",
                    code=hcodes.HTTP_BAD_UNAUTHORIZED,
                )

    return func(*args, **kwargs)
def load_classes(self):
    """
    Load a service class for every enabled service in the configuration,
    set its variables and (optionally) its models, and cache it into
    self.services_classes.

    :raises KeyError: when no service class could be loaded at all
    :return: the mapping of service name -> class
    """

    for service in self.services_configuration:

        name, _ = self.prefix_name(service)

        # Skip services that were not detected as available
        if not self.available_services.get(name):
            continue
        log.verbose("Looking for class {}", name)

        variables = service.get('variables')
        ext_name = service.get('class')

        # Get the existing class
        try:
            MyClass = self.load_class_from_module(ext_name, service=service)

            # Passing variables
            MyClass.set_variables(variables)

            # Import models from backend, extended and custom packages;
            # only the backend ones are mandatory (exit_on_fail=True)
            if service.get('load_models'):

                base_models = self.meta.import_models(
                    name, BACKEND_PACKAGE, exit_on_fail=True
                )
                if EXTENDED_PACKAGE == EXTENDED_PROJECT_DISABLED:
                    extended_models = {}
                else:
                    extended_models = self.meta.import_models(
                        name, EXTENDED_PACKAGE, exit_on_fail=False
                    )
                custom_models = self.meta.import_models(
                    name, CUSTOM_PACKAGE, exit_on_fail=False
                )

                MyClass.set_models(base_models, extended_models, custom_models)

        except AttributeError as e:
            log.error(str(e))
            log.exit('Invalid Extension class: {}', ext_name)

        # Save
        self.services_classes[name] = MyClass
        log.debug("Got class definition for {}", MyClass)

    if len(self.services_classes) < 1:
        raise KeyError("No classes were recovered!")

    return self.services_classes
def get(self):
    """
    Return the profile of the currently logged-in user: identity,
    roles, group membership, admin flags and optional custom fields.
    """
    current_user = self.get_current_user()
    data = {
        'uuid': current_user.uuid,
        'status': "Valid user",
        'email': current_user.email,
    }

    # roles = []
    roles = {}
    for role in current_user.roles:
        # roles.append(role.name)
        roles[role.name] = role.description
    data["roles"] = roles

    # Group membership may be missing or unsupported by the backend;
    # failures here are not fatal for the profile response
    try:
        for g in current_user.belongs_to.all():
            data["group"] = {
                "uuid": g.uuid,
                "shortname": g.shortname,
                "fullname": g.fullname,
            }
    except BaseException as e:
        log.verbose(e)

    data["isAdmin"] = self.auth.verify_admin()
    data["isLocalAdmin"] = self.auth.verify_local_admin()

    # Optional per-backend user attributes
    if hasattr(current_user, 'privacy_accepted'):
        data["privacy_accepted"] = current_user.privacy_accepted

    if hasattr(current_user, 'name'):
        data["name"] = current_user.name

    if hasattr(current_user, 'surname'):
        data["surname"] = current_user.surname

    if self.auth.SECOND_FACTOR_AUTHENTICATION is not None:
        data['2fa'] = self.auth.SECOND_FACTOR_AUTHENTICATION

    # Allow the project to customize the profile payload
    obj = meta.get_customizer_class('apis.profile', 'CustomProfile')
    if obj is not None:
        try:
            data = obj.manipulate(ref=self, user=current_user, data=data)
        except BaseException as e:
            log.error("Could not custom manipulate profile:\n{}", e)

    return self.force_response(data)
def get_dn_from_cert(cls, certdir, certfilename, ext='pem'):
    """
    Compute the Distinguished Name string ("/K=V/K=V/...") from a PEM
    certificate stored under cls._dir/certdir/.
    """
    cpath = os.path.join(
        cls._dir, certdir, "{}.{}".format(certfilename, ext))

    with open(cpath) as fh:
        content = fh.read()

    cert = crypto.load_certificate(crypto.FILETYPE_PEM, content)
    subject = cert.get_subject()

    # Components are (bytes, bytes) pairs
    fragments = []
    for key, value in subject.get_components():
        fragments.append('/' + key.decode() + '=' + value.decode())
    dn = ''.join(fragments)

    log.verbose("Host DN computed is {}", dn)
    return dn
def get_input(self, forcing=True, single_parameter=None, default=None):
    """
    Recover parameters from current requests.

    Note that we talk about JSON only when having a PUT method,
    while there is URL encoding for GET, DELETE
    and Headers encoding with POST.

    Non-JSON Parameters are already parsed at this point,
    while JSON parameters may be already saved from another previous call

    :param forcing: passed to request.get_json(force=...)
    :param single_parameter: when given, return only this parameter
    :param default: fallback when single_parameter is missing
    :return: the merged args dict, or a single value
    """

    self.parse()

    # TODO: study how to apply types in swagger not only for query params
    # so we can use them for validation

    # if is an upload in streaming, I must not consume
    # request.data or request.json, otherwise it get lost
    if len(self._json_args) < 1 and request.mimetype != 'application/octet-stream':
        try:
            self._json_args = request.get_json(force=forcing)
        except Exception as e:
            log.verbose("Error retrieving input parameters, {}", e)

        # json payload and formData cannot co-exist
        if len(self._json_args) < 1:
            self._json_args = request.form

        # NOTE: if JSON all parameters are just string at the moment...
        for key, value in self._json_args.items():

            if value is None:
                continue
            # TODO: remove and check
            # how to fix the `request.form` emptiness
            # on name clash, store the JSON value under a suffixed key
            if key in self._args and self._args[key] is not None:
                key += '_json'
            self._args[key] = value

    if single_parameter is not None:
        return self._args.get(single_parameter, default)

    if len(self._args) > 0:
        log.verbose("Parameters {}", obfuscate_dict(self._args))
    return self._args
def tests(wait, core, file, folder):
    """Compute tests and coverage"""

    # Optionally block until all services are up
    if wait:
        while starting_up():
            log.debug('Waiting service startup')
            time.sleep(5)
        mywait()

    log.debug("Starting unit tests: {}", pretty_errors)

    # launch unittests and also compute coverage
    log.warning("Running all tests and computing coverage.\n" +
                "This may take some minutes.")

    parameters = []
    if core:
        parameters.append(current_package)
    elif file is not None:
        if not os.path.isfile(os.path.join("tests", file)):
            log.exit("File not found: {}", file)
        else:
            parameters.extend(["default", file])
    elif folder is not None:
        if not os.path.isdir(os.path.join("tests", folder)):
            log.exit("Folder not found: {}", folder)
        else:
            parameters.extend(["default", folder])

    try:
        # TODO: convert the `pyunittests` script from the docker image into python
        # Pattern in plumbum library for executing a shell command
        from plumbum import local

        command = local["pyunittests"]
        log.verbose("Executing command pyunittests {}", parameters)
        output = command(parameters)
    except Exception as e:
        log.error(str(e))
        raise e

    log.info("Completed:\n{}", output)
def enable_inheritance(self, path, zone=None):
    """
    Set the 'inherit' ACL flag on an iRODS path so that children
    inherit its permissions.

    :return: True on success
    :raises IrodsException: when the flag cannot be set
    """
    if zone is None:
        zone = self.get_current_zone()

    access_name = 'inherit'
    ACL = iRODSAccess(access_name=access_name, path=path, user_zone=zone)

    try:
        self.prc.permissions.set(ACL)  # , recursive=False)
        log.verbose("Enabled {} to {}", access_name, path)
    except iexceptions.CAT_INVALID_ARGUMENT:
        # Distinguish a missing path from a generic failure
        if not self.is_collection(path) and not self.is_dataobject(path):
            raise IrodsException("Cannot set Inherit: path not found")
        raise IrodsException("Cannot set Inherit")
        # NOTE: unreachable (both branches above raise) — kept from original
        return False
    else:
        return True
def copy(
    self,
    sourcepath,
    destpath,
    recursive=False,
    force=False,
    compute_checksum=False,
    compute_and_verify_checksum=False,
):
    """
    Copy a single data object to another path by streaming its content.

    :param sourcepath: existing data object path
    :param destpath: destination path (created; pre-existing destination
        only tolerated when force=True)
    :param recursive: not implemented; logged as error and ignored
    :param force: passed as ignore_existing when creating the destination
    :param compute_checksum: unsupported, raises IrodsException
    :param compute_and_verify_checksum: unsupported, raises IrodsException
    :raises IrodsException: on unsupported options, identical src/dst,
        collections as source, or missing paths/permissions
    """

    if recursive:
        log.error("Recursive flag not implemented for copy")

    if self.is_collection(sourcepath):
        raise IrodsException("Copy directory not supported")

    if compute_checksum:
        raise IrodsException("Compute_checksum not supported in copy")

    if compute_and_verify_checksum:
        raise IrodsException(
            "Compute_and_verify_checksum not supported in copy")

    if sourcepath == destpath:
        raise IrodsException("Source and destination path are the same")
    try:
        log.verbose("Copy {} into {}", sourcepath, destpath)
        source = self.prc.data_objects.get(sourcepath)
        # Create the destination first, then stream into it
        self.create_empty(destpath, directory=False, ignore_existing=force)
        target = self.prc.data_objects.get(destpath)
        # NOTE(review): line-based iteration assumes text-like content;
        # verify behaviour on binary data objects
        with source.open('r+') as f:
            with target.open('w') as t:
                for line in f:
                    # if t.writable():
                    t.write(line)
    except iexceptions.DataObjectDoesNotExist:
        raise IrodsException(
            "DataObject not found (or no permission): {}".format(
                sourcepath))
    except iexceptions.CollectionDoesNotExist:
        raise IrodsException(
            "Collection not found (or no permission): {}".format(
                sourcepath))
def verify_token(self, token, raiseErrors=False, token_type=None):
    """
    Validate a token: decode it, check its type, resolve the user and
    run backend-specific checks. On success caches user, token and jti
    on self and returns True; otherwise returns False.
    """
    # Force token cleaning
    self._user = None

    if token is None:
        return False

    # Decode the current token
    payload = self.unpack_token(token, raiseErrors=raiseErrors)
    if payload is None:
        return False

    # The token type embedded in the payload must match the expected one
    payload_type = payload.get("t", self.FULL_TOKEN)
    expected_type = self.FULL_TOKEN if token_type is None else token_type
    if expected_type != payload_type:
        log.error("Invalid token type {}, required: {}",
                  payload_type, expected_type)
        return False

    # Get the user from payload
    self._user = self.get_user_object(payload=payload)
    if self._user is None:
        return False

    if not self.verify_token_custom(
            user=self._user, jti=payload['jti'], payload=payload):
        return False

    # e.g. for graph: verify the (token <- user) link
    if not self.refresh_token(payload['jti']):
        return False

    log.verbose("User authorized")

    self._token = token
    self._jti = payload['jti']
    return True
def wait_socket(host, port, service_name):
    """
    Block until a TCP connection to host:port succeeds, logging a
    warning every 20 failed attempts.
    """
    import time
    import errno
    import socket

    sleep_time = 1
    timeout = 1

    log.verbose("Waiting for {} ({}:{})", service_name, host, port)

    attempts = 0
    while True:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(timeout)
            try:
                result = sock.connect_ex((host, port))
            except socket.gaierror:
                # name resolution failed: report a non-zero result
                result = errno.ESRCH

            if result == 0:
                log.info("Service {} is reachable", service_name)
                break

            attempts += 1
            if attempts % 20 == 0:
                log.warning(
                    "'{}' service ({}:{}) still unavailable after {} seconds",
                    service_name,
                    host,
                    port,
                    (sleep_time + timeout) * attempts,
                )
            else:
                log.debug("Not reachable yet: {} ({}:{})",
                          service_name, host, port)

            time.sleep(sleep_time)
def set_models(cls, base_models, extended_models, custom_models):
    """
    Merge base, extended and custom models into cls.models.
    (Join models as described by issue #16)

    Extended/custom entries override a base model only when they
    subclass it; unrelated keys are simply added.

    BUGFIX: cls.models is now a copy of base_models; the original code
    aliased the caller's dict and mutated it in place.
    """
    cls.models = dict(base_models)

    for overrides in [extended_models, custom_models]:
        for key, model in overrides.items():

            # Verify if overriding a base model
            if key in base_models.keys():
                original_model = base_models[key]
                # Override only when the new model subclasses the old one
                if issubclass(model, original_model):
                    log.verbose("Overriding model {}", key)
                    cls.models[key] = model
                    continue

            # Otherwise just append
            cls.models[key] = model

    if len(cls.models) > 0:
        log.verbose("Loaded models")
def wrapper(self, *args, **kwargs):
    """
    Run the wrapped method inside a neomodel transaction, joining an
    already-open one when it exists (commit/rollback are then left to
    the outer owner). Closure variable: func (the wrapped method).
    """
    from neomodel import db as transaction

    # Whether THIS wrapper owns the transaction (and must close it)
    transaction_open = True

    try:
        try:
            transaction.begin()
            log.verbose("Neomodel transaction BEGIN2")
        except SystemError:
            # begin() failed: a transaction is already in progress
            transaction_open = False
            log.debug("Neomodel transaction is already in progress")

        out = func(self, *args, **kwargs)

        if transaction_open:
            transaction.commit()
            log.verbose("Neomodel transaction COMMIT2")
        else:
            log.debug("Skipping neomodel transaction commit")

        return out
    except Exception as e:
        if not transaction_open:
            log.debug("Skipping neomodel transaction rollback")
        else:
            try:
                log.verbose("Neomodel transaction ROLLBACK")
                transaction.rollback()
            except Exception as sub_ex:
                # rollback itself can fail; log and keep the original error
                log.warning("Exception raised during rollback: {}", sub_ex)
        raise e
def get_debug_instance(MyClass):
    """
    Obtain a debug instance from any flask ext we have in the app

    e.g.
    from restapi.flask_ext import get_debug_instance
    from restapi.flask_ext.flask_celery import CeleryExt
    obj = get_debug_instance(CeleryExt)
    """

    #######
    # NOTE: impors are needed here for logging to work correctly
    from restapi.services.detect import detector

    log.verbose("Detector imported: {}", detector)  # avoid PEP complaints
    # FIXME: e.g. importing-programmatically
    # docs.python.org/3/library/importlib.html
    #######

    instance = MyClass()
    connection = instance.connect()
    return instance.set_models_to_service(connection)
def connect(self, **kwargs):
    """
    Open a connection for this service: run the pre-connection hook,
    connect (directly with kwargs, or via retries), then run the
    post-connection hook and timestamp the connection.
    """
    obj = None

    # BEFORE
    if not self.pre_connection(**kwargs):
        log.critical("Unable to make preconnection for {}", self.name)
        return obj

    # Try until it's connected
    if kwargs:
        obj = self.custom_connection(**kwargs)
    else:
        obj = self.retry()

    log.verbose("Connected! {}", self.name)

    # AFTER
    self.post_connection(obj, **kwargs)

    # Timestamp used by the cache-expiration logic
    obj.connection_time = datetime.now()
    return obj
def get_customizer_class(self, module_relpath, class_name, args=None):
    """
    Instantiate an optional customizer class from the custom package.

    :return: the instance, or None when the class is unavailable or
        fails to build
    """
    abspath = "{}.{}".format(CUSTOM_PACKAGE, module_relpath)
    MyClass = self.get_class_from_string(
        class_name,
        Meta.get_module_from_string(abspath),
        skip_error=True,
    )

    if args is None:
        args = {}

    instance = None
    if MyClass is None:
        log.verbose("No customizer available for {}", class_name)
    else:
        try:
            instance = MyClass(**args)
        except BaseException as e:
            log.error("Errors during customizer: {}", e)
        else:
            log.debug("Customizer called: {}", class_name)

    return instance
def wrapper(self, *args, **kwargs):
    """
    Run the wrapped method inside a fresh neomodel transaction:
    commit on success, roll back (best effort) and re-raise on failure.
    Closure variable: func (the wrapped method).
    """
    from neomodel import db as transaction

    try:
        transaction.begin()
        log.verbose("Neomodel transaction BEGIN")

        out = func(self, *args, **kwargs)

        transaction.commit()
        log.verbose("Neomodel transaction COMMIT")

        return out
    except Exception as e:
        log.verbose("Neomodel transaction ROLLBACK")
        try:
            transaction.rollback()
        except Exception as sub_ex:
            # rollback itself can fail; log and keep the original error
            log.warning("Exception raised during rollback: {}", sub_ex)
        raise e