Example #1
File: utils.py Project: rj919/voxmachina
def compile_map(folder_path, file_suffix='', json_model=False, pythonic=False):

    from os import path
    from labpack.records.settings import load_settings

    file_map = {}

    file_list = compile_list(folder_path, file_suffix)
    for file_path in file_list:
        file_details = load_settings(file_path)
        file_key = path.split(file_path)[1].replace(file_suffix, '')
        if pythonic:
            file_key = file_key.replace(' ', '_').replace('-', '_').lower()
        if json_model:
            from jsonmodel.validators import jsonModel
            file_map[file_key] = jsonModel(file_details)
            # add any schema in metadata
            if 'schema' in file_map[file_key].metadata.keys():
                metadata_details = file_map[file_key].metadata
                metadata_key = file_key + '-metadata'
                if pythonic:
                    metadata_key = metadata_key.replace(' ', '_').replace('-', '_').lower()
                file_map[metadata_key] = jsonModel(metadata_details)
        else:
            file_map[file_key] = file_details

    return file_map
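
A minimal usage sketch (the folder name, file name and record key below are hypothetical, not from the source project):

# map every .json file in a hypothetical ./models folder to a jsonModel validator
models = compile_map('models/', file_suffix='.json', json_model=True, pythonic=True)
config_model = models['config']  # built from a hypothetical models/config.json
record = config_model.validate({'token': 'abc123'})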
Example #2
File: bot.py Project: rj919/buttonBot
    def __init__(self, message_schema, case_schema, request_schema, response_schema, question_sequence):

        self.messageModel = jsonModel(message_schema)
        self.caseModel = jsonModel(case_schema)
        self.requestModel = jsonModel(request_schema)
        self.responseModel = jsonModel(response_schema)
        self.questions = question_sequence
Example #3
def compile_model(command_schema, cli_schema):
    '''
        a method to create a jsonmodel object for command fields with cli metadata
         
    :param command_schema: dictionary with jsonmodel valid schema for command arguments
    :param cli_schema: dictionary with jsonmodel valid schema for command line interface metadata
    :return: jsonmodel object for command fields
    '''

    # validate model structure
    if not 'components' in command_schema.keys():
        command_schema['components'] = {}
    if not 'metadata' in command_schema.keys():
        command_schema['metadata'] = {}

    # construct cli_model
    cli_model = jsonModel(cli_schema)

    # inject cli fields into the metadata for each field
    for key, value in command_schema['schema'].items():
        field_key = '.%s' % key
        if not field_key in command_schema['components'].keys():
            command_schema['components'][field_key] = {}
        if not 'field_metadata' in command_schema['components'][field_key].keys():
            command_schema['components'][field_key]['field_metadata'] = {}
        cli_fields = command_schema['components'][field_key]['field_metadata']
        command_schema['components'][field_key]['field_metadata'] = cli_model.ingest(**cli_fields)

    # inject cli fields into metadata field
    command_schema['metadata'] = cli_model.ingest(**command_schema['metadata'])

    return jsonModel(command_schema)
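
A hedged sketch of calling compile_model (both schema dictionaries below are illustrative; a jsonmodel schema is a dictionary with a 'schema' key, as in the other examples on this page):

# illustrative inputs: command fields plus a cli metadata model with position/comments fields
command_schema = {
    'schema': {'service_name': '', 'verbose': False},
    'components': {},
    'metadata': {}
}
cli_schema = {
    'schema': {'position': 0, 'comments': ''}
}
command_model = compile_model(command_schema, cli_schema)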
Example #4
    def __init__(self, client_id, client_secret, auth_endpoint, token_endpoint, redirect_uri, request_mimetype='', status_endpoint='', requests_handler=None, error_map=None):

        ''' the initialization method for oauth2 client class

        :param client_id: string with client id registered for app with service
        :param client_secret: string with client secret registered for app with service
        :param auth_endpoint: string with service endpoint for authorization code requests
        :param token_endpoint: string with service endpoint for token post requests
        :param redirect_uri: string with url for redirect callback registered with service
        :param request_mimetype: [optional] string with mimetype for token post requests
        :param status_endpoint: [optional] string with service endpoint to retrieve status of token
        :param requests_handler: [optional] callable that handles requests errors
        :param error_map: [optional] dictionary with key value strings for service error msgs
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct class object models
        object_models = {}
        for key, value in self._class_objects.items():
            object_models[key] = jsonModel(value)
        from labpack.compilers.objects import _method_constructor
        self.objects = _method_constructor(object_models)

    # validate inputs
        input_fields = {
            'client_id': client_id,
            'client_secret': client_secret,
            'auth_endpoint': auth_endpoint,
            'token_endpoint': token_endpoint,
            'redirect_uri': redirect_uri,
            'request_mimetype': request_mimetype,
            'status_endpoint': status_endpoint,
            'error_map': error_map
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct class properties
        self.client_id = client_id
        self.client_secret = client_secret
        self.auth_endpoint = auth_endpoint
        self.token_endpoint = token_endpoint
        self.redirect_uri = redirect_uri
        self.request_mimetype = request_mimetype
        self.status_endpoint = status_endpoint

    # construct handlers
        self.requests_handler = requests_handler
        self.error_map = error_map
Example #5
def retrieve_settings(model_path, file_path, secret_key=''):

    # validate input
    title = 'retrieve_settings'
    from jsonmodel.validators import jsonModel
    model_details = load_settings(model_path)
    settings_model = jsonModel(model_details)

    # try to load settings from file
    file_settings = {}
    try:
        file_settings = load_settings(file_path, secret_key)
    except:
        pass

    # retrieve environmental variables
    environ_var = ingest_environ()

    #  construct settings details from file and environment
    settings_details = settings_model.ingest(**{})
    for key in settings_model.schema.keys():
        if key.upper() in environ_var.keys():
            settings_details[key] = environ_var[key.upper()]
        elif key in file_settings.keys():
            settings_details[key] = file_settings[key]

    return settings_details
Example #6
    def __init__(self, access_token, service_scope, usage_client=None, requests_handler=None):

        ''' initialization method for moves client class

        :param access_token: string with access token for user provided by moves oauth
        :param service_scope: dictionary with service type permissions
        :param usage_client: callable that records usage data
        :param requests_handler: callable that handles requests errors
        '''
        
        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct client attributes
        object_title = '%s(access_token=%s)' % (title, str(access_token))
        self.access_token = self.fields.validate(access_token, '.access_token', object_title)
        object_title = '%s(service_scope=[...])' % title
        self.service_scope = self.fields.validate(service_scope, '.service_scope', object_title)
        self.endpoint = self.fields.schema['api_endpoint']

    # construct handlers
        self.moves_handler = movesHandler(usage_client)
        self.requests_handler = requests_handler
Example #7
    def __init__(self, api_key, allow_fees=False, usage_client=None, requests_handler=None):

        '''
            a method to initialize the mandrill client class
            
        :param api_key: string with api key generated by mandrill
        :param allow_fees: [optional] boolean to allow additional fees 
        :param usage_client: callable that records usage data
        :param requests_handler: callable that handles requests errors
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'api_key': api_key
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct class properties
        self.api_endpoint = self.fields.schema['api_endpoint']
        self.api_key = api_key
        self.allow_fees = allow_fees

    # construct method handlers
        self.service_handler = mandrillHandler(usage_client)
        self.requests_handler = requests_handler
Example #8
    def __init__(self, keyspace_name, table_name, record_schema, cassandra_session, replication_strategy=None):

        title = '%s.__init__' % self.__class__.__name__
    
    # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'keyspace_name': keyspace_name,
            'table_name': table_name,
            'record_schema': record_schema,
            'replication_strategy': replication_strategy
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # validate cassandra session
        from sys import path as sys_path
        sys_path.append(sys_path.pop(0))
        from cassandra.cluster import Session
        sys_path.insert(0, sys_path.pop())
        if not isinstance(cassandra_session, Session):
            raise ValueError('%s(cassandra_session) must be a cassandra.cluster.Session datatype.' % title)
        self.session = cassandra_session
Example #9
    def __init__(self, virtualbox_name='', verbose=False):

        '''
            a method to initialize the dockerClient class

        :param virtualbox_name: [optional] string with name of virtualbox image
        :param verbose: [optional] boolean to enable process messages
        :return: dockerClient object
        '''

        title = '%s.__init__' % self.__class__.__name__
    
    # construct super
        super(dockerClient, self).__init__()

    # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)
    
    # validate inputs
        input_fields = {
            'virtualbox_name': virtualbox_name
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct properties
        self.vbox = virtualbox_name
        self.verbose = verbose
        
    # construct localhost
        from labpack.platforms.localhost import localhostClient
        self.localhost = localhostClient()

    # verbosity
        if self.verbose:
            print('Checking docker installation...', end='', flush=True)

    # validate docker installation
        self._validate_install()
        if self.verbose:
            print('.', end='', flush=True)

    # validate virtualbox installation
        self.vbox_running = self._validate_virtualbox()
        if self.verbose:
            print('.', end='', flush=True)
    
    # set virtualbox variables
        if self.vbox_running:
            self._set_virtualbox()
            if self.verbose:
                print('.', end='', flush=True)

        if self.verbose:
            print(' done.')
Example #10
def ingest_environ(model_path=''):

    ''' a method to convert environment variables to a python dictionary

    :param model_path: [optional] string with path to jsonmodel of data to ingest
    :return: dictionary with environmental variables

    NOTE:   if a model is provided, then only those fields in the model will be
            added to the output and the value of any environment variable which
            matches the uppercase name of each field in the model will be added
            to the dictionary if its value is valid according to the model. if
            a value is not valid, the method will throw an InputValidationError
    '''

# convert environment variables into json typed data
    from os import environ, path
    typed_dict = {}
    environ_variables = dict(environ)
    for key, value in environ_variables.items():
        if value.lower() == 'true':
            typed_dict[key] = True
        elif value.lower() == 'false':
            typed_dict[key] = False
        elif value.lower() == 'null':
            typed_dict[key] = None
        elif value.lower() == 'none':
            typed_dict[key] = None
        else:
            try:
                try:
                    typed_dict[key] = int(value)
                except:
                    typed_dict[key] = float(value)
            except:
                typed_dict[key] = value

# feed environment variables through model
    if model_path:
        if not path.exists(model_path):
            raise ValueError('%s is not a valid file path.' % model_path)
        model_dict = load_settings(model_path)
        from jsonmodel.validators import jsonModel
        model_object = jsonModel(model_dict)
        default_dict = model_object.ingest(**{})
        for key in default_dict.keys():
            if key.upper() in typed_dict:
                valid_kwargs = {
                    'input_data': typed_dict[key.upper()],
                    'object_title': 'Environment variable %s' % key.upper(),
                    'path_to_root': '.%s' % key
                }
                default_dict[key] = model_object.validate(**valid_kwargs)
        return default_dict

    return typed_dict
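
A short illustration of the type coercion described in the NOTE above (the environment variable names are made up):

# with made-up variables DEBUG='true', PORT='5000' and RATIO='0.5' in the environment,
# ingest_environ() would return them as True, 5000 and 0.5 respectively
environ_map = ingest_environ()
if environ_map.get('DEBUG') is True:
    print('debug mode enabled on port %s' % environ_map.get('PORT'))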
Example #11
    def __init__(self, access_token, collection_name=''):
        
        '''
            a method to initialize the driveClient class
            
        :param access_token: string with oauth2 access token for users account
        :param collection_name: [optional] string with name of collection for import
        '''    

        title = '%s.__init__' % self.__class__.__name__
    
    # construct input validation model
        self.fields = jsonModel(self._class_fields)
        
    # validate inputs
        input_fields = {
            'access_token': access_token,
            'collection_name': collection_name
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    
    # construct access token
        self.access_token = access_token
        
    # construct drive client
        import httplib2
        from googleapiclient import discovery
        from oauth2client.client import AccessTokenCredentials
        google_credentials = AccessTokenCredentials(self.access_token, 'my-user-agent/1.0')
        google_http = httplib2.Http()
        google_http = google_credentials.authorize(google_http)
        google_drive = discovery.build('drive', 'v3', http=google_http)
        self.drive = google_drive.files()
    
    # construct collection properties
        self.permissions_write = True
        self.permissions_content = True
        self.drive_space = 'drive'
        self.space_id = ''
        if collection_name:
            self.collection_name = collection_name
        else:
            self.collection_name = 'My Drive'
    
    # construct file object
        from labpack import __module__
        from jsonmodel.loader import jsonLoader
        drive_rules = jsonLoader(__module__, 'storage/google/drive-rules.json')
        self.object_file = drive_rules['file_object']
        
    # validate access token
        self._validate_token()
Example #12
def inject_defaults(command_schema, default_schema):

    default_model = jsonModel(default_schema)
    for key, value in default_model.schema.items():
        if not key in command_schema['schema'].keys():
            command_schema['schema'][key] = value
    for key, value in default_model.components.items():
        if not key in command_schema['components'].keys():
            command_schema['components'][key] = value

    return command_schema
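
A hedged sketch of the merge behaviour (the field names are invented):

# invented schemas: any field or component missing from command_schema is copied from default_schema
command_schema = {'schema': {'service_name': ''}, 'components': {}}
default_schema = {'schema': {'service_name': 'default', 'verbose': False}, 'components': {}}
merged = inject_defaults(command_schema, default_schema)
# merged['schema'] keeps 'service_name' as '' and gains 'verbose': False from the defaults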
Example #13
    def __init__(self, collection_name='', prod_name='', org_name='', root_path=''):

        ''' initialization method of appdata client class

        :param collection_name: [optional] string with name of collection to store records
        :param prod_name: [optional] string with name of application product
        :param org_name: [optional] string with name of organization behind product
        :param root_path: [optional] string with path to root of collections (defaults to user home)
        '''

        title = '%s.__init__' % self.__class__.__name__
        
    # add localhost property to class
        self.localhost = localhostClient()

    # construct input validation model
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        if not collection_name:
            collection_name = 'User Data'
        else:
            collection_name = self.fields.validate(collection_name, '.collection_name')
        if not org_name:
            org_name = __team__
        else:
            org_name = self.localhost.fields.validate(org_name, '.org_name')
        if not prod_name:
            prod_name = __module__
        else:
            prod_name = self.localhost.fields.validate(prod_name, '.prod_name')
    
    # construct collection name
        from copy import deepcopy
        self.collection_name = deepcopy(collection_name)

    # construct app folder
        if not root_path:
            self.app_folder = self.localhost.app_data(org_name=org_name, prod_name=prod_name)
        else:
            root_path = self.fields.validate(root_path, '.root_path')
            if os.path.exists(root_path):
                if not os.path.isdir(root_path):
                    raise ValueError('%s(root_path="%s") is an existing file.' % (title, root_path))
            self.app_folder = os.path.abspath(root_path)

    # validate existence of file data folder in app data (or create)
        if self.localhost.os in ('Linux', 'FreeBSD', 'Solaris'):
            collection_name = collection_name.replace(' ', '-').lower()
        self.collection_folder = os.path.join(self.app_folder, collection_name)
        self.fields.validate(self.collection_folder, '.record_key_path')
        if not os.path.exists(self.collection_folder):
            os.makedirs(self.collection_folder)
Example #14
    def __init__(self, usage_client=None):

        ''' initialization method for dropbox handler class

        :param usage_client: callable that records usage data
        '''

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct initial methods
        self.rate_limits = self.fields.schema['rate_limits']
        self.usage_client = usage_client
Example #15
    def __init__(self, client_id, client_secret, retrieve_details=True, sandbox=False, requests_handler=None, usage_client=None):
        
        ''' the initialization method for the capital one client

        :param client_id: string with client id registered for app with service
        :param client_secret: string with client secret registered for app with service
        :param retrieve_details: boolean to automatically retrieve, store and refresh account details
        :param sandbox: boolean to send requests to test sandbox
        :param requests_handler: callable that handles requests errors
        :param usage_client: callable that records usage data
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'client_id': client_id,
            'client_secret': client_secret,
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct class properties
        self.client_id = client_id
        self.client_secret = client_secret
        self.sandbox = sandbox
        self.token_endpoint = 'https://api.capitalone.com/oauth2/token'
        self.deposits_endpoint = 'https://api.capitalone.com/deposits/'
        if sandbox:
            self.token_endpoint = 'https://api-sandbox.capitalone.com/oauth2/token'
            self.deposits_endpoint = 'https://api-sandbox.capitalone.com/deposits/'
        self._access_token = None
        self.expires_at = 0

    # construct handlers
        self.requests_handler = requests_handler
        self.response_handler = depositsHandler(usage_client)

    # retrieve access token
        self.retrieve_details = retrieve_details
        self.products = None
        if retrieve_details:
            self.access_token()
            self._get_products()
Example #16
def requestDeconstructor(request_object, request_schema):

    request_details = {}
    request_model = jsonModel(request_schema)

    try:
        request_details = request_object.get_json(silent=True)
    except:
        return request_details

    if not request_details:
        return request_details

    request_details = request_model.ingest(**request_details)

    return request_details
Example #17
    def __init__(self, regex_schema, override=False):

        '''
            instantiates class with a regular expression dictionary

        :param regex_schema: dictionary with regular expression name, pattern key-pairs
        :param override: boolean to ignore value errors raised from regex name conflicts
        '''

        class_name = self.__class__.__name__

    # construct class method validator
        self.fields = jsonModel(self._class_methods)

    # validate inputs
        object_title = '%s.__init__(regex_schema={...})' % class_name
        regex_schema = self.fields.validate(regex_schema, '.__init__.regex_schema', object_title)

    # construct builtin list to differentiate custom methods
        self.builtins = []
        for item in self.__dir__():
            self.builtins.append(item)

    # construct a method for each regex pattern in input
        for key, value in regex_schema.items():
            if key in self.builtins:
                if not override:
                    raise ValueError('\nRegex key %s is the name of a %s builtin method.' % (key, class_name))
            else:
                pro_char = re.compile(r'[^\w]')
                first_char = re.compile('[a-zA-Z]')
                method_name = pro_char.sub('_', key)
                if first_char.match(method_name):
                    try:
                        getattr(self, method_name)
                        if not override:
                            raise ValueError('\nThe method for regex key %s will conflict with another key.' % key)
                    except:
                        pass
                    sub_methods = {
                        'pattern': re.compile(value),
                        'name': key
                    }
                    method_object = _method_constructor(sub_methods)
                    setattr(self, method_name, method_object)
                elif not override:
                    raise ValueError('\nRegex key %s must begin with a letter.' % key)
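
A hedged instantiation sketch (the class name regexCompiler and the pattern names are placeholders for whatever class this __init__ belongs to):

# placeholder patterns; each valid key becomes an attribute with .pattern and .name sub-methods
regex_schema = {
    'email_address': r'[^@\s]+@[^@\s]+\.[a-z]+',
    'us_zipcode': r'\d{5}(-\d{4})?'
}
parser = regexCompiler(regex_schema)
matches = parser.email_address.pattern.findall('support: admin@example.com')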
Example #18
File: utils.py Project: rj919/voxmachina
def ingest_environ(model_path=''):

    # convert environment variables into json typed data
    from os import environ, path
    typed_dict = {}
    environ_variables = dict(environ)
    for key, value in environ_variables.items():
        if value.lower() == 'true':
            typed_dict[key] = True
        elif value.lower() == 'false':
            typed_dict[key] = False
        elif value.lower() == 'null':
            typed_dict[key] = None
        elif value.lower() == 'none':
            typed_dict[key] = None
        else:
            try:
                try:
                    typed_dict[key] = int(value)
                except:
                    typed_dict[key] = float(value)
            except:
                typed_dict[key] = value

    # feed environment variables through model
    if model_path:
        from labpack.records.settings import load_settings
        if not path.exists(model_path):
            raise ValueError('%s is not a valid file path.' % model_path)
        model_dict = load_settings(model_path)
        from jsonmodel.validators import jsonModel
        model_object = jsonModel(model_dict)
        default_dict = model_object.ingest(**{})
        for key in default_dict.keys():
            if key.upper() in typed_dict:
                valid_kwargs = {
                    'input_data': typed_dict[key.upper()],
                    'object_title': 'Environment variable %s' % key.upper(),
                    'path_to_root': '.%s' % key
                }
                default_dict[key] = model_object.validate(**valid_kwargs)
        return default_dict

    return typed_dict
Example #19
    def __init__(self, access_id, secret_key, region_name, owner_id, user_name, verbose=True, usage_client=None):

        '''
            a method for initializing the connection to AWS Polly
            
        :param access_id: string with access_key_id from aws IAM user setup
        :param secret_key: string with secret_access_key from aws IAM user setup
        :param region_name: string with name of aws region
        :param owner_id: string with aws account id
        :param user_name: string with name of user access keys are assigned to
        :param verbose: boolean to enable process messages
        :param usage_client: callable object to track resource usage
        '''
        
        title = '%s.__init__' % self.__class__.__name__

    # initialize model
        from labpack import __module__
        from jsonmodel.loader import jsonLoader
        from jsonmodel.validators import jsonModel
        class_fields = jsonLoader(__module__, 'speech/aws/polly-rules.json')
        self.fields = jsonModel(class_fields)

    # construct iam connection
        from labpack.authentication.aws.iam import iamClient
        self.iam = iamClient(access_id, secret_key, region_name, owner_id, user_name, verbose)

    # construct polly client connection
        client_kwargs = {
            'service_name': 'polly',
            'region_name': self.iam.region_name,
            'aws_access_key_id': self.iam.access_id,
            'aws_secret_access_key': self.iam.secret_key
        }
        self.connection = boto3.client(**client_kwargs)
        self.verbose = verbose
        self.usage_client = usage_client
    
    # construct range of polly options
        self.voice_ids = self.fields.components['.voice_id']['discrete_values']
        self.output_formats = self.fields.components['.output_format']['discrete_values']
        
    # construct pythonic conversion method
        from labpack.parsing.conversion import camelcase_to_lowercase
        self.ingest = camelcase_to_lowercase
Example #20
def compile_map(folder_path, file_suffix='', json_model=False):

    from os import path
    from labpack.records.settings import load_settings

    file_map = {}

    file_list = compile_list(folder_path, file_suffix)
    for file_path in file_list:
        file_details = load_settings(file_path)
        file_key = path.split(file_path)[1].replace(file_suffix, '')
        if json_model:
            from jsonmodel.validators import jsonModel
            file_map[file_key] = jsonModel(file_details)
        else:
            file_map[file_key] = file_details

    return file_map
Example #21
    def __init__(self, access_token, collection_name=''):
        
        '''
            a method to initialize the dropboxClient class
            
        :param access_token: string with oauth2 access token for users account
        :param collection_name: [optional] string with name of collection
        '''

        title = '%s.__init__' % self.__class__.__name__
    
    # construct input validation model
        self.fields = jsonModel(self._class_fields)
        
    # validate inputs
        input_fields = {
            'access_token': access_token,
            'collection_name': collection_name
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    
    # workaround for module namespace conflict
        from sys import path as sys_path
        sys_path.append(sys_path.pop(0))
        from dropbox import Dropbox
        from dropbox.files import FileMetadata, WriteMode, DeleteArg
        from dropbox.exceptions import ApiError
        sys_path.insert(0, sys_path.pop())
    
    # construct dropbox client
        from labpack.compilers.objects import _method_constructor
        self.dropbox = Dropbox(oauth2_access_token=access_token)
    
    # construct dropbox objects
        self.objects = _method_constructor({
            'FileMetadata': FileMetadata,
            'ApiError': ApiError,
            'WriteMode': WriteMode,
            'DeleteArg': DeleteArg
        })
    
    # construct collection name
        self.collection_name = collection_name
Example #22
    def __init__(self, group_name, server_url='ml.internalpositioning.com', password=''):

        '''
            a method to initialize a findClient class object

        :param group_name: string with name of group 
        :param server_url: string with url for FIND server
        :param password: [optional] string with password to mosquitto server

        # https://www.internalpositioning.com/api
        '''

        title = '%s.__init__' % self.__class__.__name__
        
    # construct fields
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)
    
    # validate inputs
        input_fields = {
            'group_name': group_name,
            'server_url': server_url,
            'password': password
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
        
    # construct class properties
        self.server_url = server_url
        self.endpoint = 'https://%s' % server_url
        self.endpoint_public = 'http://%s' % server_url
        self.group_name = group_name
        self.password = password
        self.positions = {}
        self.locations = {}

    # add regex patterns
        import re
        self.user_pattern = re.compile('location/(.*)$')
Example #23
    def __init__(self, api_key, email_key, account_domain, usage_client=None, requests_handler=None):

        ''' 
            initialization method for mailgun client class

        :param api_key: string with api key provided by mailgun
        :param email_key: string with email validation key provided by mailgun
        :param account_domain: string with domain from which to send email
        :param usage_client: callable that records usage data
        :param requests_handler: callable that handles requests errors
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'api_key': api_key,
            'email_key': email_key,
            'account_domain': account_domain
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct client properties
        self.api_endpoint = self.fields.schema['api_endpoint']
        self.account_domain = account_domain
        self.api_key = api_key
        self.email_key = email_key

    # construct handlers
        self.service_handler = mailgunHandler(usage_client)
        self.requests_handler = requests_handler
Example #24
    def __init__(self, service_username, service_password, requests_handler=None, magic_file=''):

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        for key, value in self._class_fields['schema']['audio_extensions'].items():
            self._class_fields['components']['.audio_mimetype']['discrete_values'].append(value['mimetype'])
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'service_username': service_username,
            'service_password': service_password
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # define class properties
        self.username = service_username
        self.password = service_password

    # construct watson client
        self.client = SpeechToTextV1(username=self.username, password=self.password)

    # construct handlers
        self.requests_handler = requests_handler

    # construct magic format checker
        self.magic = None
        if magic_file:
            from labpack.parsing.magic import labMagic
            self.magic = labMagic(magic_file)

    # import validate extension
        from labpack.parsing.regex import validate_extension
        self._validate_extension = validate_extension
Example #25
File: heroku.py Project: AvdN/pocketLab
    def __init__(self,
                 account_email,
                 account_password,
                 app_subdomain,
                 verbose=False):
        ''' a method to initialize the herokuClient class '''

        title = '%s.__init__' % self.__class__.__name__

        # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

        # validate inputs
        input_fields = {
            'account_email': account_email,
            'account_password': account_password,
            'app_subdomain': app_subdomain
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
        self.email = account_email
        self.password = account_password
        self.subdomain = app_subdomain

        # construct class properties
        self.verbose = verbose

        # construct localhost
        from labpack.platforms.localhost import localhostClient
        self.localhost = localhostClient()

        # validate installation
        self._validate_install()

        # validate access
        self._validate_access()
Example #26
    def __init__(self, usage_client=None):

        '''
            initialization method for deposits handler class
            
        :param usage_client: callable that records usage data
        '''

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct initial methods
        self.rate_limits = self.fields.schema['rate_limits']
        self.usage_client = usage_client
    
    # construct error map
        self.error_map = { 
            429: "The request has been rejected because of rate limiting -- you've sent too many requests in a given amount of time.",
            500: "General server error.",
            502: "Internal connection failure.",
            503: "The server is unavailable due to heavy traffic or maintenance."
        }
Example #27
    def __init__(self, account_email, auth_token, verbose=True):
        
        ''' a method to initialize the herokuClient class '''

        title = '%s.__init__' % self.__class__.__name__
    
    # initialize super
        super(herokuClient, self).__init__(verbose=verbose)
        
    # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'account_email': account_email,
            'auth_token': auth_token
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    
    # construct properties
        self.email = account_email
        self.token = auth_token
        self.subdomain = ''
        self.apps = []
        
    # construct localhost
        from labpack.platforms.localhost import localhostClient
        self.localhost = localhostClient()
    
    # validate installation
        self._validate_install()
    
    # validate access
        self._validate_login()
Example #28
    def __init__(self, bot_id, access_token, requests_handler=None):

        ''' initialization method for telegram bot client class

        :param bot_id: integer with telegram id number for bot
        :param access_token: string with access token for bot provided by telegram botfather
        :param requests_handler: callable that handles requests errors
        '''

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct client attributes
        object_title = '%s.__init__(bot_id=%s)' % (self.__class__.__name__, str(bot_id))
        self.bot_id = self.fields.validate(bot_id, '.bot_id', object_title)
        object_title = '%s.__init__(access_token=%s)' % (self.__class__.__name__, str(access_token))
        self.access_token = self.fields.validate(access_token, '.access_token', object_title)
        self.api_endpoint = '%s%s:%s' % (self.fields.schema['api_endpoint'], self.bot_id, self.access_token)
        self.file_endpoint = '%s%s:%s/' % (self.fields.schema['file_endpoint'], self.bot_id, self.access_token)

    # construct handlers
        self.requests_handler = requests_handler
        self.telegram_handler = telegramBotHandler()
Example #29
def start(service_list, verbose=True, virtualbox='default'):

    title = 'start'

# validate inputs
    if isinstance(service_list, str):
        if service_list:
            service_list = [service_list]
    input_fields = {
        'service_list': service_list,
        'verbose': verbose,
        'virtualbox': virtualbox
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

# validate installation of docker
    from pocketlab.methods.docker import dockerClient
    docker_client = dockerClient(virtualbox_name=virtualbox, verbose=verbose)

# verbosity
    if verbose:
        print('Checking service configurations...', end='', flush=True)
        
# construct list of paths to services
    from pocketlab.methods.service import retrieve_services
    start_list, msg_insert = retrieve_services(service_list)

# construct lab list
    lab_list = []
    
# validate lab files
    from os import path
    from pocketlab.methods.validation import validate_lab
    from pocketlab import __module__
    from jsonmodel.loader import jsonLoader
    from jsonmodel.validators import jsonModel
    lab_model = jsonModel(jsonLoader(__module__, 'models/lab-config.json'))
    for details in start_list:
        file_path = path.join(details['path'], 'lab.yaml')
        service_name = details['name']
        lab_details = validate_lab(lab_model, file_path, service_name)
        details['config'] = lab_details
        lab_list.append(details)
        
# retrieve list of docker images

# validate lab config image exists
        
# retrieve list of docker containers

# validate lab config container doesn't already exist
# TODO resolve name conflicts on deploy updating
        
# validate mount paths exist
        
        if verbose:
            print('.', end='', flush=True)
        
# end validations
    if verbose:
        print(' done.')

# retrieve system ip
    system_ip = docker_client.ip()

# instantiate containers
    exit_msg = ''
    port_list = []
    for service in lab_list:
        service_config = service['config']
        service_name = service['name']
        service_root = service['path']

    # construct environment variables
        container_envvar = { 'SYSTEM_IP_ADDRESS': system_ip }
        if service_config['docker_environment_variables']:
            for key, value in service_config['docker_environment_variables'].items():
                container_envvar[key.upper()] = '"%s"' % value
    
    # construct sys command
        container_name = service_config['docker_container_alias']
        sys_command = 'docker run --name %s' % container_name
        for key, value in container_envvar.items():
            sys_command += ' -e %s=%s' % (key, value)
        if service_config['docker_mount_volumes']:
            for key, value in service_config['docker_mount_volumes'].items():
                sys_command += ' -v "%s":"%s"' % (key, value)
        sys_command += ' -it -d'
        port_count = 0
        port_value = ''
        if service_config['docker_port_mapping']:
            for key, value in service_config['docker_port_mapping'].items():
                port_value = value
                if value in port_list:
                    from copy import deepcopy
                    port_value = deepcopy(value)
                    for i in range(1000):
                        port_value += 1
                        if not port_value in port_list:
                            break
                port_list.append(port_value)
                port_count += 1
                sys_command += ' -p %s:%s' % (str(port_value), key)
        sys_command += ' %s' % service_config['docker_image_name'] 
    
    # determine port message
        port_msg = ''
        if port_count == 1:
            port_msg = ' at %s:%s' % (system_ip, port_value)
            
    # run command
        from subprocess import check_output
        docker_response = check_output(sys_command).decode('utf-8')
        service_msg = 'Container "%s" started%s' % (container_name, port_msg)
        if len(lab_list) > 1:
            if verbose:
                print(service_msg)
        else:
            exit_msg = service_msg

    # TODO consider ROLLBACK options
    
    if len(lab_list) > 1:
        exit_msg = 'Finished starting %s' % msg_insert
        
    return exit_msg

# # import dependencies
#     from os import path
#     from copy import deepcopy
#     from pocketlab.importers.config_file import configFile
#     from pocketlab.clients.localhost_client import localhostClient
#     from pocketlab.clients.docker_session import dockerSession
#     from pocketlab.validators.config_model import configModel
#     from pocketlab.validators.absolute_path import absolutePath
#     from pocketlab.validators.available_image import availableImage
#     from pocketlab.compilers.docker_run import dockerRun
#
# # ingest verbose options
#     verbose = kwargs['verbose']
#
# # determine system properties
#     localhost = localhostClient()
#
# # ingest & validate component file
#     component_file = kwargs['componentFile']
#     comp_details = configFile(component_file, kwargs)
#     comp_details = configModel(comp_details, 'rules/lab-component-model.json', kwargs, 'component settings')
#
# # determine component root from component file
#     root_path, file_name = path.split(path.abspath(component_file))
#
# # construct path details to mounted volumes
#     mounted_volumes = {}
#     for volume in comp_details['mounted_volumes']:
#         host_path = absolutePath(volume, root_path, kwargs, 'mounted volume')
#         system_path = host_path.replace('\\','/').replace('C:','//c')
#         absolute_path = deepcopy(host_path)
#         docker_path = absolute_path.replace(root_path,'')
#         container_path = docker_path.replace('\\','/')
#         mounted_volumes[system_path] = container_path
#
# # ingest & validate virtualbox property
#     vbox_name = kwargs['virtualbox']
#     if not localhost.os in ('Windows','Mac'):
#         vbox_name = ''
#
# # check for docker installation
#     docker_session = dockerSession(kwargs, vbox_name)
#
# # check that docker image is available locally
#     image_list = docker_session.images()
#     availableImage(comp_details['docker_image'], comp_details['image_tag'], image_list, kwargs)
#
# # retrieve list of active container aliases & busy ports
#     container_list = docker_session.ps()
#     busy_ports = []
#     active_containers = []
#     for container in container_list:
#         active_containers.append(container['NAMES'])
#         container_settings = docker_session.inspect(container_alias=container['NAMES'])
#         container_synopsis = docker_session.synopsis(container_settings)
#         if container_synopsis['mapped_ports']:
#             for key in container_synopsis['mapped_ports'].keys():
#                 busy_ports.append(key)
#
# # check that alias name is available
#     if comp_details['container_alias'] in active_containers:
#         from pocketlab.exceptions.lab_exception import labException
#         header_list = [ 'NAMES', 'STATUS', 'IMAGE', 'PORTS']
#         error = {
#             'kwargs': kwargs,
#             'message': 'Container "%s" already in use. Containers currently active:' % comp_details['container_alias'],
#             'tprint': { 'headers': header_list, 'rows': container_list },
#             'error_value': comp_details['container_alias'],
#             'failed_test': 'unavailable_resource'
#         }
#         raise labException(**error)
#
# # construct port mappings for mapped ports
#     mapped_ports = {}
#     for port in comp_details['exposed_ports']:
#         open_port = int(deepcopy(port))
#         while str(open_port) in busy_ports:
#             open_port += 1
#         mapped_ports[str(open_port)] = str(port)
#
# # add system_ip to injected variables
#     system_ip = docker_session.ip()
#     injected_variables = {
#         'SYSTEM_LOCALHOST': system_ip
#     }
#     for key, value in comp_details['injected_variables'].items():
#         injected_variables[key] = value
#
# # compile docker run script from settings
#     run_details = {
#         'name': comp_details['container_alias'],
#         'injected_variables': injected_variables,
#         'mounted_volumes': mounted_volumes,
#         'mapped_ports': mapped_ports,
#         'docker_image': comp_details['docker_image'],
#         'image_tag': comp_details['image_tag'],
#         'run_command': comp_details['run_command']
#     }
#     run_script = dockerRun(run_details)
#
# # start container
#     container_id = docker_session.run(run_script)
#     if verbose:
#         start_text = 'Sweet! Container "%s" started' % comp_details['container_alias']
#         if run_details['mapped_ports']:
#             start_text += ' on port'
#             if len(run_details['mapped_ports'].keys()) > 1:
#                 start_text += 's'
#             previous_port = False
#             for key in run_details['mapped_ports'].keys():
#                 if previous_port:
#                     start_text += ','
#                 start_text += ' %s:%s' % (system_ip, key)
#                 previous_port = True
#             start_text += '.'
#         print(start_text)
#
#     container_details = {
#         'mapped_ports': mapped_ports,
#         'container_alias': comp_details['container_alias'],
#         'container_id': container_id
#     }
#
#     return container_details
Example #30
File: stop.py Project: AvdN/pocketLab
def stop(service_list, verbose=True, virtualbox='default'):
    
    title = 'stop'

# validate inputs
    if isinstance(service_list, str):
        if service_list:
            service_list = [service_list]
    input_fields = {
        'service_list': service_list,
        'verbose': verbose,
        'virtualbox': virtualbox
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

# validate installation of docker
    from pocketlab.methods.docker import dockerClient
    docker_client = dockerClient(virtualbox_name=virtualbox, verbose=verbose)

# verbosity
    if verbose:
        print('Checking service configurations...', end='', flush=True)
        
# construct list of paths to services
    from pocketlab.methods.service import retrieve_services
    stop_list, msg_insert = retrieve_services(service_list)

# construct lab list
    lab_list = []

# validate lab files
    from os import path
    from pocketlab.methods.validation import validate_lab
    from pocketlab import __module__
    from jsonmodel.loader import jsonLoader
    from jsonmodel.validators import jsonModel
    lab_model = jsonModel(jsonLoader(__module__, 'models/lab-config.json'))
    for details in stop_list:
        file_path = path.join(details['path'], 'lab.yaml')
        service_name = details['name']
        lab_details = validate_lab(lab_model, file_path, service_name)
        details['config'] = lab_details
        lab_list.append(details)

        # retrieve list of docker containers

        # validate lab config container exists
        # TODO resolve name discovery when namespace transmutes

        if verbose:
            print('.', end='', flush=True)

# end validations
    if verbose:
        print(' done.')

# stop containers
    exit_msg = ''
    for service in lab_list:
        container_alias = service['config']['docker_container_alias']
        docker_status = docker_client.rm(container_alias)
        if docker_status == container_alias:
            service_msg = 'Container "%s" terminated.' % docker_status
            if len(lab_list) > 1:
                if verbose:
                    print(service_msg)
            else:
                exit_msg = service_msg
        else:
            raise ValueError(docker_status)
    
    if len(lab_list) > 1:
        exit_msg = 'Finished terminating %s' % msg_insert
        
    return exit_msg
Example #31
def retrieve_oauth2_configs(folder_path=''):

    ''' a method to retrieve oauth2 configuration details from files or envvar '''

# define oauth2 model
    oauth2_fields = {
        "schema": {
            "oauth2_app_name": "My App",
            "oauth2_developer_name": "Collective Acuity",
            "oauth2_service_name": "moves",
            "oauth2_auth_endpoint": "https://api.moves-app.com/oauth/v1/authorize",
            "oauth2_token_endpoint": "https://api.moves-app.com/oauth/v1/access_token",
            "oauth2_client_id": "ABC-DEF1234ghijkl-567MNOPQRST890uvwxyz",
            "oauth2_client_secret": "abcdefgh01234456789_IJKLMNOPQrstuv-wxyz",
            "oauth2_redirect_uri": "https://collectiveacuity.com/authorize/moves",
            "oauth2_service_scope": "activity location",
            "oauth2_response_type": "code",
            "oauth2_request_mimetype": "",
            "oauth2_service_logo": "https://pbs.twimg.com/profile_images/3/d_400x400.png",
            "oauth2_service_description": "",
            "oauth2_service_setup": 0.0
        }
    }

# retrieve keys, value pairs from config files in cred folder
    if folder_path:
        envvar_details = {}
        import os
        from labpack.records.settings import load_settings
        file_list = []
        for suffix in ['.yaml', '.yml', '.json']:
            for file_name in os.listdir(folder_path):
                file_path = os.path.join(folder_path, file_name)
                if os.path.isfile(file_path):
                    if file_name.find(suffix) > -1:
                        file_list.append(file_path)

        for file_path in file_list:
            file_details = load_settings(file_path)
            envvar_details.update(**file_details)

# or ingest environmental variables
    else:
        from labpack.records.settings import ingest_environ
        envvar_details = ingest_environ()

# map oauth2 variables
    import re
    oauth2_map = {}
    for key in oauth2_fields['schema'].keys():
        key_pattern = '%s$' % key[6:]
        key_regex = re.compile(key_pattern)
        for k, v in envvar_details.items():
            if key_regex.findall(k.lower()):
                service_name = key_regex.sub('',k.lower())
                if not service_name in oauth2_map.keys():
                    oauth2_map[service_name] = {}
                oauth2_map[service_name][key] = v

# ingest models
    from jsonmodel.validators import jsonModel
    oauth2_model = jsonModel(oauth2_fields)
    oauth2_services = {}
    for key, value in oauth2_map.items():
        valid_oauth2 = {}
        try:
            valid_oauth2 = oauth2_model.validate(value)
        except:
            pass
        if valid_oauth2:
            oauth2_services[key] = valid_oauth2

    return oauth2_services
Example #32
File: init.py Project: AvdN/pocketLab
                    from jsonmodel.loader import jsonLoader
                    from pocketlab.methods.config import compile_yaml
                    config_schema = jsonLoader(__module__, value['schema_path'])
                    config_text = compile_yaml(config_schema)
                    with open(config_path, 'wt') as f:
                        f.write(config_text)
                        f.close()
                    _printer(config_path)
                            
    # add readme file
        readme_path = 'README.md'
        if not path.exists(readme_path):
            from pocketlab.methods.config import construct_readme
            readme_text = construct_readme(vcs_service=vcs_service)
            with open(readme_path, 'wt', encoding='utf-8') as f:
                f.write(readme_text)
                f.close()
            _printer(readme_path)

        exit_msg = 'Lab framework setup in current directory.'

    return exit_msg

if __name__ == "__main__":

    from labpack.records.settings import load_settings
    from jsonmodel.validators import jsonModel
    config_path = '../models/lab-config.json'
    config_model = jsonModel(load_settings(config_path))
    print(config_model.ingest())
Example #33
def compile_settings(model_path, file_path, ignore_errors=False):

    ''' a method to compile configuration values from different sources

        NOTE:   method searches the environment variables, a local
                configuration path and the default values for a jsonmodel
                object for valid configuration values. if an environmental
                variable or key inside a local config file matches the key
                for a configuration setting declared in the jsonmodel schema,
                its value will be added to the configuration file as long
                as the value is model valid. SEE jsonmodel module.

        NOTE:   the order of assignment:
                    first:  environment variable
                    second: configuration file
                    third:  default value
                    fourth: empty value

        NOTE:   method is guaranteed to produce a full set of top-level keys

    :param model_path: string with path to jsonmodel valid model data
    :param file_path: string with path to local configuration file
    :param ignore_errors: [optional] boolean to ignore any invalid values
    :return: dictionary with settings
    '''

# construct configuration model and default details
    from jsonmodel.validators import jsonModel
    from labpack.records.settings import load_settings, ingest_environ
    config_model = jsonModel(load_settings(model_path))
    default_details = config_model.ingest(**{})

# retrieve environmental variables and file details
    environ_details = ingest_environ()

    try:
        file_details = load_settings(file_path)
    except:
        file_details = {}

# construct config details from (first) envvar, (second) file, (third) default
    config_details = {}
    for key in default_details.keys():
        test_file = True
        test_default = True
        if key.upper() in environ_details.keys():
            test_file = False
            test_default = False
            try:
                config_details[key] = config_model.validate(environ_details[key.upper()], '.%s' % key)
            except:
                if ignore_errors:
                    test_file = True
                    test_default = True
                else:
                    raise
        if key in file_details.keys() and test_file:
            test_default = False
            try:
                config_details[key] = config_model.validate(file_details[key], '.%s' % key)
            except:
                if ignore_errors:
                    test_default = True
                else:
                    raise
        if test_default:
            config_details[key] = default_details[key]

    return config_details
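A brief usage sketch of the precedence described in the docstring; the model path, config path and field names are hypothetical:

import os
os.environ['SYSTEM_ENVIRONMENT'] = 'dev'  # hypothetical envvar matching a model key
settings = compile_settings('models/service-config.json', 'config/service.yaml')
# 'system_environment' is taken from the environment variable (first),
# keys found only in config/service.yaml come from the file (second),
# and any remaining keys fall back to the model defaults or empty values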
Example #36
0
def compile_yaml(config_schema, yaml_path=''):

    '''
        a method to compile a yaml file with top-level comments from a json model
        
    :param config_schema: dictionary with json model schema architecture 
    :param yaml_path: [optional] string with path to user yaml file
    :return: string with yaml formatted text
    '''

# construct config model
    from jsonmodel.validators import jsonModel
    config_model = jsonModel(config_schema)
    
# construct order dict
    config_list = []
    config_details = config_model.ingest()
    for key, value in config_details.items():
        details = {
            'key': key,
            'value': value,
            'position': 0,
            'comments': ''
        }
        comp_key = '.%s' % key
        if comp_key in config_model.keyMap.keys():
            if 'field_metadata' in config_model.keyMap[comp_key].keys():
                metadata = config_model.keyMap[comp_key]['field_metadata']
                if 'position' in metadata.keys():
                    if isinstance(metadata['position'], int):
                        details['position'] = metadata['position']
                if 'comments' in metadata.keys():
                    if isinstance(metadata['comments'], str):
                        details['comments'] = metadata['comments']
        config_list.append(details)
    config_list.sort(key=lambda k: k['position'])

# construct config text
    config_text = ''
    if 'comments' in config_model.metadata.keys():
        if isinstance(config_model.metadata['comments'], str):
            comment_lines = config_model.metadata['comments'].splitlines()
            for comment in comment_lines:
                config_text += '# %s\n' % comment
    for item in config_list:
        comment_stub = '\n'
        if item['comments']:
            comment_stub = ' # %s\n' % item['comments']
        try:
            float(item['key'])
            item_key = "'%s'" % item['key']
        except:
            item_key = item['key']
        if isinstance(item['value'], dict):
            line_text = '%s:%s' % (item_key, comment_stub)
            for key, value in item['value'].items():
                try:
                    float(key)
                    key_text = "'%s'" % key
                except:
                    key_text = key
                value_text = value
                if isinstance(value, str):
                    try:
                        float(value)
                        value_text = "'%s'" % value
                    except:
                        pass              
                line_text += '  %s: %s\n' % (key_text, value_text)
        elif isinstance(item['value'], list):
            line_text = '%s:%s' % (item_key, comment_stub)
            for i in range(len(item['value'])):
                value_text = item['value'][i]
                if isinstance(value_text, str):
                    try:
                        float(value_text)
                        value_text = "'%s'" % value_text
                    except:
                        pass
                line_text += '  - %s\n' % value_text
        else:
            line_text = '%s: %s%s' % (item_key, str(item['value']), comment_stub)
        config_text += line_text

# update user config
    if yaml_path:
        from os import path
        if not path.exists(yaml_path):
            raise ValueError('%s is not a valid path.' % yaml_path)
        import ruamel.yaml
        user_text = open(yaml_path).read()
        user_code = ruamel.yaml.load(user_text, ruamel.yaml.RoundTripLoader)
        user_len = len(user_code.keys())
        count = 0
        for item in config_list:
            if item['key'] not in user_code.keys():
                insert_row = user_len + count
                insert_kwargs = {
                    'pos': insert_row,
                    'key': item['key'],
                    'value': item['value']
                }
                if item['comments']:
                    insert_kwargs['comment'] = item['comments']
                user_code.insert(**insert_kwargs)
                count += 1
        config_text = ruamel.yaml.dump(user_code, Dumper=ruamel.yaml.RoundTripDumper)

    return config_text
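A rough sketch of the text this method produces for a small schema; the schema below is invented for illustration and uses default_value qualifiers so the ingested values are non-empty:

config_schema = {
    'schema': {'server_port': 5000, 'server_environment': 'dev'},
    'components': {
        '.server_port': {'default_value': 5000, 'field_metadata': {'position': 1, 'comments': 'port to bind'}},
        '.server_environment': {'default_value': 'dev', 'field_metadata': {'position': 2}}
    },
    'metadata': {'comments': 'service configuration'}
}
print(compile_yaml(config_schema))
# expected to resemble:
# # service configuration
# server_port: 5000 # port to bind
# server_environment: dev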
Example #37
0
    def __init__(self, bucket_name, database_url, document_schema=None, verbose=False, public=False, configs=None):

        ''' the initialization method for syncGatewayAdmin class '''
        
        # https://developer.couchbase.com/documentation/mobile/1.5/guides/sync-gateway/config-properties/index.html
        # https://developer.couchbase.com/documentation/mobile/current/guides/sync-gateway/sync-function-api-guide/index.html
        
        title = '%s.__init__' % self.__class__.__name__

    # import default sync function
        from os import path
        from importlib.util import find_spec
        module_path = find_spec(__module__).submodule_search_locations[0]
        sync_path = path.join(module_path, 'databases/models/sync_function.js')
        sync_text = open(sync_path).read()
        sync_text = self._update_js(sync_text, document_schema)

    # construct fields
        self._class_fields['components']['.configs.sync']['default_value'] = sync_text
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)
    
    # validate inputs
        input_fields = {
            'bucket_name': bucket_name,
            'database_url': database_url,
            'document_schema': document_schema,
            'configs': configs
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)
        
    # test connection to db
        self.admin_access = True
        try:
            response = requests.get(database_url)
            response = response.json()
            if not 'ADMIN' in response.keys():
                self.admin_access = False
        except:
            raise Exception('%s(database_url="%s") is not a valid couchbase url.' % (title, database_url))
    
    # construct class properties
        from os import path
        self.bucket_name = bucket_name
        self.database_url = database_url
        self.bucket_url = path.join(database_url, bucket_name)
        
    # construct verbose method
        self.printer_on = True
        def _printer(msg, flush=False):
            if verbose and self.printer_on:
                if flush:
                    print(msg, end='', flush=True)
                else:
                    print(msg)
        self.printer = _printer
    
    # construct document model
        from jsonmodel.validators import jsonModel
        self.public = public
        self.model = None
        if document_schema:
            self.model = jsonModel(document_schema)
        elif not self.public:
            self.model = jsonModel({'schema': { 'uid': 'abc012XYZ789' }, 'components': {'.': { 'extra_fields': True}}})
        if self.model and document_schema:
            if not 'uid' in self.model.schema.keys():
                document_schema['schema']['uid'] = 'abc012XYZ789'
            if not self.model.components:
                document_schema['components'] = { '.': { 'extra_fields': True } }
            elif not '.' in self.model.components.keys():
                document_schema['components']['.'] = { 'extra_fields': True }
            elif not 'extra_fields' in self.model.components['.'].keys():
                document_schema['components']['.']['extra_fields'] = True
            self.model = jsonModel(document_schema)

    # construct configs
        default_fields = self.fields.ingest(**{})
        self.configs = default_fields['configs']
        self.configs['bucket'] = self.bucket_name
        self.configs['name'] = self.bucket_name
        if self.public:
            from jsonmodel.loader import jsonLoader
            read_only_configs = jsonLoader(__module__, 'databases/models/sync-gateway-public.json')
            for key, value in read_only_configs.items():
                self.configs[key] = value
        if configs:
            for key, value in configs.items():
                self.configs[key] = value
        
    # construct lab record generator
        from labpack.records.id import labID
        self.labID = labID
    
    # create db (update config) if none exists
        self._update_bucket()
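A hedged usage sketch for the initialization above; the bucket name, admin url and document schema are placeholders (the sync gateway admin API conventionally listens on port 4985):

document_schema = {
    'schema': {'uid': 'abc012XYZ789', 'title': ''},
    'components': {'.': {'extra_fields': True}}
}
gateway = syncGatewayAdmin(
    bucket_name='test_bucket',
    database_url='http://localhost:4985',
    document_schema=document_schema,
    verbose=True
)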
Example #38
0
File: init.py Project: rj919/lyvia
# construct sql tables
from labpack.databases.sql import sqlClient
sql_tables = {}
for key, value in object_map.items():
    table_name = key.replace('-', '_')
    sql_kwargs = {
        'table_name': table_name,
        'database_url': 'sqlite:///../data/records.db',
        'record_schema': value
    }
    sql_tables[table_name] = sqlClient(**sql_kwargs)

# construct data object models
api_model = load_settings('models/api.json')
telemetry_model = jsonModel(load_settings('models/telemetry-post.json'))

# construct email client
from labpack.email.mailgun import mailgunClient
from labpack.handlers.requests import handle_requests
mailgun_cred = load_settings('../cred/mailgun.yaml')
mailgun_kwargs = {
    'api_key': mailgun_cred['mailgun_api_key'],
    'email_key': mailgun_cred['mailgun_email_key'],
    'account_domain': mailgun_cred['mailgun_spf_route'],
    'requests_handler': handle_requests
}
email_client = mailgunClient(**mailgun_kwargs)

if __name__ == "__main__":
Example #39
0
File: resources.py Project: rj919/fishBase
class resourceModel(object):

    def __init__(self, json_model):
        if not isinstance(json_model, jsonModel):
            raise TypeError('resourceModel input must be a jsonModel object.')
        self.model = json_model
        self.request = requestModel(self.model)
        self.database = databaseModel(self.model)

# import json object files
operationFile = json.loads(open('models/operation-model.json').read())
catchFile = json.loads(open('models/catch-model.json').read())
specimenFile = json.loads(open('models/specimen-model.json').read())

# construct object models from files
operationModel = jsonModel(operationFile)
catchModel = jsonModel(catchFile)
specimenModel = jsonModel(specimenFile)

# construct resource objects from object models
operationResource = resourceModel(operationModel)
catchResource = resourceModel(catchModel)
specimenResource = resourceModel(specimenModel)

if __name__ == '__main__':
    print(operationResource.request.model.schema)



Example #40
0
            raise TypeError('databaseModel input must be a jsonModel object.')
        self.model = json_model


class resourceModel(object):
    def __init__(self, json_model):
        if not isinstance(json_model, jsonModel):
            raise TypeError('resourceModel input must be a jsonModel object.')
        self.model = json_model
        self.request = requestModel(self.model)
        self.database = databaseModel(self.model)


# import json object files
operationFile = json.loads(open('models/operation-model.json').read())
catchFile = json.loads(open('models/catch-model.json').read())
specimenFile = json.loads(open('models/specimen-model.json').read())

# construct object models from files
operationModel = jsonModel(operationFile)
catchModel = jsonModel(catchFile)
specimenModel = jsonModel(specimenFile)

# construct resource objects from object models
operationResource = resourceModel(operationModel)
catchResource = resourceModel(catchModel)
specimenResource = resourceModel(specimenModel)

if __name__ == '__main__':
    print(operationResource.request.model.schema)
Example #41
0
    def __init__(self):

        ''' a method to initialize a client class to interact with the localhost '''

    # construct class field input validation property
        self.fields = jsonModel(self._class_fields)

    # retrieve operating system from localhost
        self.os = osClient()

    # TODO: determine file system and parameters
    # TODO: request latest info from
    # https://en.wikipedia.org/wiki/Comparison_of_file_systems#Limits

    # retrieve IP from system
        self.os.nodename = gethostname()
        self.ip = gethostbyname(self.os.nodename)

    # retrieve environment variables from system
        self.environ = dict(os.environ.items())

    # retrieve path to user home
        self.home = ''
        if self.os.sysname == 'Windows':
            env_username = os.environ.get('USERNAME')
            from re import compile
            xp_pattern = compile('^C:\\Documents and Settings')
            app_data = ''
            if os.environ.get('APPDATA'):
                app_data = os.environ.get('APPDATA')
            if xp_pattern.findall(app_data):
                self.home = 'C:\\Documents and Settings\\%s' % env_username
            else:
                self.home = 'C:\\Users\\%s' % env_username
        elif self.os.sysname in ('Linux', 'FreeBSD', 'Solaris', 'Darwin'):
            self.home = os.path.expanduser('~')

    # retrieve path to shell configs
        self.bash_config = ''
        self.sh_config = ''
        if self.os.sysname == 'Windows':
            bash_config = '.bash_profile'
            sh_config = ''
        else:
            bash_config = '.bashrc'
            sh_config = '.cshrc'
        if bash_config:
            self.bash_config = os.path.join(self.home, bash_config)
        if sh_config:
            self.sh_config = os.path.join(self.home, sh_config)
    # TODO check different terminal protocols

    # construct file record model property
        file_model = {
            'schema': {
                'file_name': 'test.json',
                'file_path': '/home/user/.config/collective-acuity-labpack/user-data/test.json',
                'file_size': 678,
                'create_date': 1474509314.419702,
                'update_date': 1474509314.419702,
                'access_date': 1474509314.419702
            },
            'components': {
                '.file_size': {
                    'integer_data': True
                }
            }
        }
        self.file_model = jsonModel(file_model)
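A brief usage sketch, assuming this __init__ belongs to the localhostClient class imported in a later example; the printed values depend on the host:

from labpack.platforms.localhost import localhostClient
localhost_client = localhostClient()
print(localhost_client.ip)           # e.g. '192.168.1.10'
print(localhost_client.home)         # e.g. '/home/user' or 'C:\\Users\\user'
print(localhost_client.bash_config)  # path to the shell config resolved above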
Example #42
0
    def unitTests(self, valid_input, valid_query):

        # print(self.keyMap)

        # test model fields
        assert isinstance(self.title, str)
        assert isinstance(self.description, str)
        assert isinstance(self.url, str)
        assert isinstance(self.metadata, dict)
        assert isinstance(self.maxSize, int)

        # test declarative fields in model keyMap
        assert self.keyMap["."]["value_datatype"]
        assert self.keyMap[".userID"]["required_field"]
        assert self.keyMap[".address.region"]["declared_value"]
        assert self.keyMap[".rating"]["default_value"]
        assert self.keyMap[".emoticon"]["byte_data"]
        assert self.keyMap[".rating"]["integer_data"]
        assert self.keyMap[".userID"]["min_length"]
        assert self.keyMap[".comments[0]"]["max_length"]
        assert self.keyMap[".rating"]["min_value"]
        assert self.keyMap[".rating"]["max_value"]
        assert self.keyMap[".comments"]["min_size"]
        assert self.keyMap[".comments"]["max_size"]
        assert self.keyMap[".comments"]["unique_values"]
        assert self.keyMap[".userID"]["must_not_contain"]
        assert self.keyMap[".comments[0]"]["must_contain"]
        assert self.keyMap[".address.region"]["contains_either"]
        assert self.keyMap[".address.country_code"]["discrete_values"]
        assert self.keyMap[".emoticon"]["example_values"]
        assert self.keyMap[".address.region"]["field_title"]
        assert self.keyMap[".userID"]["field_description"]
        assert self.keyMap[".emoticon"]["field_metadata"]

        # TODO: "identical_to": ".similar_string",
        # TODO: "lambda_function": "",
        # TODO: "validation_url": "",

        # test declared values for empty values
        assert isinstance(self.keyMap[".address.country_code"]["declared_value"], int)
        assert isinstance(self.keyMap[".address.postal_code"]["declared_value"], str)

        # test integers in key name exception
        try:
            test_schema = {"schema": {0: "value"}}
            jsonModel(test_schema)
        except ModelValidationError as err:
            assert str(err).find("Model declaration is invalid")

        # test validation with empty path to root
        v_input = deepcopy(valid_input)
        assert self.validate(v_input)

        # test validation with dot-path to root
        v_input = deepcopy(valid_input)
        assert self.validate(v_input, ".")

        # test individual component validation
        v_input = deepcopy(valid_input)
        assert self.validate(v_input["datetime"], ".datetime") == v_input["datetime"]
        v_input = deepcopy(valid_input)
        assert self.validate(v_input["userID"], ".userID") == v_input["userID"]
        v_input = deepcopy(valid_input)
        assert not self.validate(v_input["active"], ".active")
        v_input = deepcopy(valid_input)
        assert self.validate(v_input["comments"], ".comments") == v_input["comments"]
        v_input = deepcopy(valid_input)
        assert self.validate(v_input["address"], ".address") == v_input["address"]

        # test non-existent path to root exception
        v_input = deepcopy(valid_input)
        try:
            self.validate(v_input, ".not_a_path")
        except ModelValidationError as err:
            assert str(err).find("Model declaration is invalid")

        # test path to root not a string
        v_input = deepcopy(valid_input)
        try:
            self.validate(v_input, [".datetime"])
        except ModelValidationError as err:
            assert str(err).find("Model declaration is invalid")

        # test invalid input type
        invalid_list = []
        try:
            self.validate(invalid_list)
        except InputValidationError as err:
            assert err.error["model_schema"]
            assert err.error["failed_test"] == "value_datatype"

        # test object title in error message
        try:
            self.validate(invalid_list, object_title="List input")
        except InputValidationError as err:
            assert err.error["object_title"]
            assert str(err).find("input is invalid.")
            assert str(err).find("Value []")

        # test non-json valid object exception
        invalid_object = jsonModel({"schema": {"test": "object"}})
        try:
            self.validate(invalid_object, object_title="jsonModel input")
        except InputValidationError as err:
            assert str(err).find("Value jsonModel")

        # test json structure of error message
        try:
            self.validate(invalid_list)
        except InputValidationError as err:
            assert json.dumps(err.error)
        try:
            self.validate(invalid_object)
        except InputValidationError as err:
            assert json.dumps(err.error)

        # test invalid input data type
        try:
            self.validate("1449179763.312077", ".datetime")
        except InputValidationError as err:
            assert err.error["failed_test"] == "value_datatype"
            assert json.dumps(err.error)

        # test non-json valid input datatype
        try:
            self.validate(invalid_object, ".comments[0]")
        except InputValidationError as err:
            assert err.error["failed_test"] == "value_datatype"
            assert json.dumps(err.error)

        # test key_datatype exception
        integer_keyname_error = deepcopy(valid_input)
        integer_keyname_error[2] = "integer key name"
        try:
            self.validate(integer_keyname_error)
        except InputValidationError as err:
            assert err.error["error_value"] == 2

        # test extra_fields exception
        extra_key_input = deepcopy(valid_input)
        extra_key_input["extraKey"] = "string"
        try:
            self.validate(extra_key_input)
        except InputValidationError as err:
            assert err.error["failed_test"] == "extra_fields"
            assert not err.error["object_title"]

        # test required_field exception
        missing_key_input = deepcopy(valid_input)
        del missing_key_input["active"]
        try:
            self.validate(missing_key_input, object_title="Required field exception")
        except InputValidationError as err:
            assert err.error["failed_test"] == "required_field"
            assert err.error["object_title"]

        # test required_field false in dictionaries
        optional_key = deepcopy(valid_input)
        del optional_key["comments"]
        assert not "comments" in self.validate(optional_key).keys()

        # test default_value insertion
        default_rating = deepcopy(valid_input)
        new_default_rating = self.validate(default_rating)
        assert new_default_rating["rating"] == 5

        # test min_size exception
        short_list = deepcopy(valid_input)
        short_list["comments"] = []
        try:
            self.validate(short_list)
        except InputValidationError as err:
            assert err.error["failed_test"] == "min_size"
            assert not err.error["object_title"]

        # test max_size exception
        long_list = deepcopy(valid_input)
        long_list["comments"].append("pewter")
        try:
            self.validate(long_list, object_title="Max size exception")
        except InputValidationError as err:
            assert err.error["failed_test"] == "max_size"
            assert err.error["object_title"]

        # test value_datatype exception
        mixed_list = deepcopy(valid_input)
        mixed_list["comments"][1] = 100
        try:
            self.validate(mixed_list)
        except InputValidationError as err:
            assert err.error["failed_test"] == "value_datatype"

        # test unique_values exception
        duplicate_list = deepcopy(valid_input)
        duplicate_list["comments"][2] = "gold"
        try:
            self.validate(duplicate_list)
        except InputValidationError as err:
            assert err.error["failed_test"] == "unique_values"

        # test integer_data exception
        integers_only = deepcopy(valid_input)
        integers_only["rating"] = 3.5
        try:
            self.validate(integers_only, object_title="Integer data exception")
        except InputValidationError as err:
            assert err.error["failed_test"] == "integer_data"
            assert err.error["object_title"]

        # test min_value exception
        min_number = deepcopy(valid_input)
        min_number["rating"] = 0
        try:
            self.validate(min_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "min_value"
            assert not err.error["object_title"]

        # test max_value exception
        max_number = deepcopy(valid_input)
        max_number["rating"] = 11
        try:
            self.validate(max_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "max_value"

        # test greater_than exception for numbers
        greater_number = deepcopy(valid_input)
        greater_number["datetime"] = 0.1
        try:
            self.validate(greater_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "greater_than"

        # test less_than exception for numbers
        less_number = deepcopy(valid_input)
        less_number["datetime"] = 2000000000.1
        try:
            self.validate(less_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "less_than"

        # test min_value for strings exception
        low_string = deepcopy(valid_input)
        low_string["userID"] = "0000000000000"
        try:
            self.validate(low_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "min_value"

        # test max_value for strings exception
        high_string = deepcopy(valid_input)
        high_string["userID"] = "zzzzzzzzzzzzz"
        try:
            self.validate(high_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "max_value"

        # test greater_than exception for strings
        greater_string = deepcopy(valid_input)
        greater_string["address"]["region"] = "AA"
        try:
            self.validate(greater_string, object_title="Greater than exception")
        except InputValidationError as err:
            assert err.error["failed_test"] == "greater_than"
            assert err.error["object_title"]

        # test less_than exception for strings
        less_string = deepcopy(valid_input)
        less_string["address"]["region"] = "Zzzzzzzzzzzzzzzzzzzzzzzz"
        try:
            self.validate(less_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "less_than"
            assert not err.error["object_title"]

        # test excluded_values for strings exception
        excluded_string = deepcopy(valid_input)
        excluded_string["emoticon"] = "c2Fk"
        try:
            self.validate(excluded_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "excluded_values"

        # test excluded_values for strings exception
        excluded_number = deepcopy(valid_input)
        excluded_number["rating"] = 7
        try:
            self.validate(excluded_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "excluded_values"

        # test discrete_values exception
        discrete_string = deepcopy(valid_input)
        discrete_string["address"]["city"] = "Boston"
        try:
            self.validate(discrete_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "discrete_values"

        # test discrete_values exception
        discrete_number = deepcopy(valid_input)
        discrete_number["address"]["country_code"] = 20
        try:
            self.validate(discrete_number)
        except InputValidationError as err:
            assert err.error["failed_test"] == "discrete_values"

        # test byte_data exception
        byte_string = deepcopy(valid_input)
        byte_string["emoticon"] = "happy"
        try:
            self.validate(byte_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "byte_data"

        # test max_length
        max_string = deepcopy(valid_input)
        max_string["userID"] = "LongAlphaNumericID"
        try:
            self.validate(max_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "max_length"

        # test min_length exception
        min_string = deepcopy(valid_input)
        min_string["userID"] = "ShortID"
        try:
            self.validate(min_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "min_length"

        # test must_not_contain exception
        prohibited_string = deepcopy(valid_input)
        prohibited_string["userID"] = "6nPb/9gTwLz3f"
        try:
            self.validate(prohibited_string)
        except InputValidationError as err:
            assert err.error["failed_test"] == "must_not_contain"

        # test must_contain exception
        required_words = deepcopy(valid_input)
        required_words["comments"][0] = "a"
        try:
            self.validate(required_words)
        except InputValidationError as err:
            assert err.error["failed_test"] == "must_contain"

        # test contains_either exception
        optional_words = deepcopy(valid_input)
        optional_words["address"]["region"] = "N1"
        try:
            self.validate(optional_words)
        except InputValidationError as err:
            assert err.error["failed_test"] == "contains_either"

        # test empty list
        empty_list = deepcopy(valid_input)
        empty_list["comments"] = []
        input_value = self.keyMap[".comments"]["min_size"]
        self.keyMap[".comments"]["min_size"] = 0
        assert self.validate(empty_list)
        self.keyMap[".comments"]["min_size"] = input_value

        # test list reconstruction
        assert isinstance(self._reconstruct(".comments"), list)

        # test dict reconstruction
        assert isinstance(self._reconstruct(".address"), dict)

        # test nested reconstruction
        assert isinstance(self._reconstruct(".address.country_code"), int)
        assert isinstance(self._reconstruct(".comments[0]"), str)

        # test ingest valid input
        ingest_input = deepcopy(valid_input)
        self.ingest(**ingest_input)

        # test malformed dictionary datatype input ingestion
        malformed_datatype = deepcopy(valid_input)
        malformed_datatype["address"] = "my home"
        valid_output = self.ingest(**malformed_datatype)
        assert not valid_output["address"]["region"]

        # test missing default input injection
        missing_default = deepcopy(valid_input)
        assert not "rating" in missing_default.keys()
        valid_output = self.ingest(**missing_default)
        assert valid_output["rating"] == 5

        # test malformed default input replacement
        malformed_default = deepcopy(valid_input)
        malformed_default["rating"] = "5"
        valid_output = self.ingest(**malformed_default)
        assert valid_output["rating"] == 5

        # test invalid default input replacement
        invalid_default = deepcopy(valid_input)
        invalid_default["rating"] = 11
        valid_output = self.ingest(**invalid_default)
        assert valid_output["rating"] == 5

        # test missing input null injection
        missing_string = deepcopy(valid_input)
        del missing_string["userID"]
        valid_output = self.ingest(**missing_string)
        assert isinstance(valid_output["userID"], str)
        assert not valid_output["userID"]

        # test malformed input null injection
        malformed_string = deepcopy(valid_input)
        malformed_string["userID"] = {"key": "value"}
        valid_output = self.ingest(**malformed_string)
        assert isinstance(valid_output["userID"], str)
        assert not valid_output["userID"]

        # test invalid input null injection
        invalid_string = deepcopy(valid_input)
        invalid_string["userID"] = "tooShort"
        valid_output = self.ingest(**invalid_string)
        assert isinstance(valid_output["userID"], str)
        assert not valid_output["userID"]

        # test strip extra field input
        ingest_input = deepcopy(extra_key_input)
        valid_output = self.ingest(**ingest_input)
        assert "extraKey" in ingest_input.keys()
        assert not "extraKey" in valid_output.keys()

        # test tag along of extra fields in input
        self.keyMap["."]["extra_fields"] = True
        ingest_input = deepcopy(extra_key_input)
        valid_output = self.ingest(**ingest_input)
        assert "extraKey" in ingest_input.keys()
        assert "extraKey" in valid_output.keys()
        self.keyMap["."]["extra_fields"] = False

        # test mass injection of defaults and nulls
        valid_output = self.ingest(**{})
        for key in self.schema.keys():
            assert key in valid_output.keys()
        assert valid_output["rating"] == 5
        assert not valid_output["userID"]
        assert not valid_output["comments"]
        assert valid_output["address"]
        ex_int = 0
        assert valid_output["address"]["country_code"].__class__ == ex_int.__class__

        # test nested default injection
        ingest_input = deepcopy(valid_input)
        assert not "city" in ingest_input["address"].keys()
        valid_output = self.ingest(**ingest_input)
        assert valid_output["address"]["city"] == "New York"

        # test max list length ingestion
        long_list = deepcopy(valid_input)
        long_list["comments"].insert(0, "pewter")
        assert len(long_list["comments"]) == 4
        valid_output = self.ingest(**long_list)
        assert len(valid_output["comments"]) == 3
        assert "bronze" not in valid_output["comments"]

        # print(self.validate(valid_input))
        print(self.ingest(**valid_input))
        # print(self.ingest(**{}))

        test_model = {
            "schema": self.schema,
            "components": self.components,
            "metadata": self.metadata,
            "title": self.title,
            "description": self.description,
            "max_size": self.maxSize,
            "url": self.url,
        }

        # test list ingestion of dictionaries
        new_model = deepcopy(test_model)
        new_model["schema"]["test_list"] = [{"test": "me"}]
        list_model = jsonModel(new_model)
        test_input = {"test_list": [{"test": "me"}, {"test": "you"}]}
        valid_output = list_model.ingest(**test_input)
        assert valid_output["test_list"]

        # test json valid structure of model components
        assert json.dumps(test_model)

        # test . use in key names
        dot_key_names = deepcopy(test_model)
        dot_key_names["schema"]["."] = {".": ""}
        dot_key_names["components"][".."] = {"required_field": False}
        dot_model = jsonModel(dot_key_names)
        dot_ingested = dot_model.ingest(**{".": {".": "test"}})
        assert dot_ingested["."]["."] == "test"

        # test null value in model
        null_schema = deepcopy(test_model)
        null_schema["test"] = None
        assert jsonModel(null_schema)

        # test empty schema exception
        empty_schema = deepcopy(test_model)
        empty_schema["schema"] = {}
        try:
            jsonModel(empty_schema)
        except ModelValidationError as err:
            assert err

        # test invalid json data exception
        object_model = deepcopy(test_model)
        object_model["schema"]["not_json"] = list_model
        try:
            jsonModel(object_model)
        except ModelValidationError as err:
            assert str(err).find(".not_json") > 0
        object_model["schema"]["not_json"] = [list_model]
        try:
            jsonModel(object_model)
        except ModelValidationError as err:
            assert str(err).find(".not_json[0]") > 0

        # test wrong datatype qualifier keys in components exception
        integer_criteria_error = deepcopy(test_model)
        integer_criteria_error["components"][".rating"]["must_not_contain"] = ["\\w"]
        try:
            jsonModel(integer_criteria_error)
        except ModelValidationError as err:
            assert str(err).find(".rating") > 0

        # test wrong datatype qualifier values in components exception
        contains_either_error = deepcopy(test_model)
        contains_either_error["components"][".address.region"]["contains_either"].append(2)
        try:
            jsonModel(contains_either_error)
        except ModelValidationError as err:
            assert str(err).find(".address.region") > 0

        # test wrong datatype qualifier values in components exception
        min_size_error = deepcopy(test_model)
        min_size_error["components"][".comments"]["min_size"] = -2
        try:
            jsonModel(min_size_error)
        except ModelValidationError as err:
            assert str(err).find(".comments") > 0

        # test wrong datatype qualifier values in components exception
        max_value_error = deepcopy(test_model)
        max_value_error["components"][".rating"]["max_value"] = "10"
        try:
            jsonModel(max_value_error)
        except ModelValidationError as err:
            assert str(err).find(".rating") > 0

        # test wrong datatype qualifier values in components exception
        field_description_error = deepcopy(test_model)
        field_description_error["components"][".userID"]["field_description"] = []
        try:
            jsonModel(field_description_error)
        except ModelValidationError as err:
            assert str(err).find(".userID") > 0

        # test conflicting byte data and range criteria exception
        byte_range_error = deepcopy(test_model)
        byte_range_error["components"][".emoticon"]["min_value"] = "Ng=="
        try:
            jsonModel(byte_range_error)
        except ModelValidationError as err:
            assert str(err).find(".emoticon") > 0

        # test conflicting greater_than and contains_either exception
        value_contains_error = deepcopy(test_model)
        value_contains_error["components"][".address.region"]["greater_than"] = "1B"
        try:
            jsonModel(value_contains_error)
        except ModelValidationError as err:
            assert str(err).find(".address.region") > 0

        # test conflicting declared and excluded values in schema exception
        excluded_value_error = deepcopy(test_model)
        excluded_value_error["components"][".address.city"]["excluded_values"] = ["New Orleans"]
        try:
            jsonModel(excluded_value_error)
        except ModelValidationError as err:
            assert str(err).find(".address.city") > 0

        # test conflicting discrete and excluded values in schema exception
        discrete_value_error = deepcopy(test_model)
        discrete_value_error["components"][".address.city"]["excluded_values"] = ["Miami"]
        try:
            jsonModel(discrete_value_error)
        except ModelValidationError as err:
            assert str(err).find(".address.city") > 0

        # test item designator pattern used in schema keys
        item_designator_error = deepcopy(test_model)
        item_designator_error["schema"]["[1]"] = ""
        try:
            jsonModel(item_designator_error)
        except ModelValidationError as err:
            assert str(err).find(".[1]") > 0

        # test query rules input
        assert jsonModel(test_model, self.queryRules)

        # test query rules json file
        query_rules = json.loads(open("../models/query-rules.json").read())
        assert jsonModel(test_model, query_rules)

        # test query rules extra field exception
        query_rules_field = deepcopy(self.queryRules)
        query_rules_field[".none_fields"] = {}
        try:
            jsonModel(test_model, query_rules_field)
        except ModelValidationError as err:
            assert str(err).find(".string_fields") > 0

        # test query rules extra qualifier exception
        query_rules_qualifier = deepcopy(self.queryRules)
        query_rules_qualifier[".string_fields"]["field_title"] = "not a qualifier"
        try:
            jsonModel(test_model, query_rules_qualifier)
        except ModelValidationError as err:
            assert str(err).find(".string_fields") > 0

        # test query rules qualifier value exception
        query_rules_value = deepcopy(self.queryRules)
        query_rules_value[".string_fields"]["min_value"] = 0.0
        try:
            jsonModel(test_model, query_rules_value)
        except ModelValidationError as err:
            assert str(err).find(".string_fields") > 0

        # test internal walk method
        metals_list = ["gold", "silver", "bronze"]
        metal_list = [{"metal": "gold"}, {"metal": "silver"}, {"metal": "bronze"}]
        test_input = {"metals": deepcopy(metals_list)}
        results = self._walk(".metals[0]", test_input)
        assert len(results) == 3
        test_input = {"metals": deepcopy(metal_list)}
        results = self._walk(".metals[0].metal", test_input)
        assert len(results) == 3
        for group in test_input["metals"]:
            group["metal"] = deepcopy(metals_list)
        results = self._walk(".metals[0].metal[0]", test_input)
        assert len(results) == 9
        for group in test_input["metals"]:
            group["metal"] = deepcopy(metal_list)
        results = self._walk(".metals[0].metal[0].metal", test_input)
        assert len(results) == 9
        for metals in test_input["metals"]:
            for metal in metals["metal"]:
                metal["metal"] = deepcopy(metals_list)
        results = self._walk(".metals[0].metal[0].metal[0]", test_input)
        assert len(results) == 27
        for metals in test_input["metals"]:
            for metal in metals["metal"]:
                metal["metal"] = deepcopy(metal_list)
        # print(test_input)
        results = self._walk(".metals[0].metal[0].metal[0].metal", test_input)
        assert len(results) == 27
        # print(results)

        # test evaluate valid input
        truth_table = []
        for key, value in valid_query.items():
            truth_table.append(self._evaluate_field(valid_input, key, value))
        assert True in truth_table
        assert False in truth_table

        # print(valid_input)
        # print(valid_query)
        # print(truth_table)

        # test evaluate query field missing in input
        test_query = deepcopy(valid_query)
        test_input = deepcopy(valid_input)
        eval_kwargs = {
            "record_dict": test_input,
            "field_name": ".address.country_code",
            "field_criteria": test_query[".address.country_code"],
        }
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome

        # test evaluate query field missing with value_exists: false
        eval_kwargs["field_criteria"]["value_exists"] = False
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert eval_outcome

        # test evaluate query field exists in input
        eval_kwargs["field_name"] = ".datetime"
        eval_kwargs["field_criteria"] = test_query[".datetime"]
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert eval_outcome

        # test evaluate query field exists with value_exists: false
        eval_kwargs["field_criteria"]["value_exists"] = False
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["value_exists"]

        # test evaluate maximum size query failure
        eval_kwargs["field_name"] = ".comments"
        eval_kwargs["field_criteria"] = test_query[".comments"]
        eval_kwargs["field_criteria"]["max_size"] = 2
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["max_size"]

        # test evaluate unique values query failure
        eval_kwargs["record_dict"]["comments"].append("gold")
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        eval_kwargs["record_dict"]["comments"].pop()

        # test evaluate min length query failure
        eval_kwargs["field_name"] = ".userID"
        eval_kwargs["field_criteria"] = test_query[".userID"]
        eval_kwargs["field_criteria"]["min_length"] = 14
        del eval_kwargs["field_criteria"]["min_value"]
        eval_kwargs["field_criteria"]["max_length"] = 14
        del eval_kwargs["field_criteria"]["max_value"]
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome

        # test evaluate max value query failure
        eval_kwargs["field_criteria"]["min_length"] = 12
        eval_kwargs["field_criteria"]["max_length"] = 14
        eval_kwargs["field_criteria"]["max_value"] = "2222222222222"
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome

        # test evaluate less than query failure
        eval_kwargs["field_name"] = ".datetime"
        eval_kwargs["field_criteria"] = test_query[".datetime"]
        eval_kwargs["field_criteria"]["less_than"] = 200000.0
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        eval_kwargs["field_criteria"]["less_than"] = 2000000000.0

        # test evaluate integer only query failure
        eval_kwargs["field_criteria"]["integer_data"] = True
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["integer_data"]

        # test evaluate excluded values query failure
        eval_kwargs["field_name"] = (".emoticon",)
        eval_kwargs["field_criteria"] = test_query[".emoticon"]
        eval_kwargs["field_criteria"]["excluded_values"].append("aGFwcHk=")
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        eval_kwargs["field_criteria"]["excluded_values"].pop()

        # test evaluate discrete value query failure
        eval_kwargs["field_name"] = (".address.region",)
        eval_kwargs["field_criteria"] = test_query[".address.region"]
        eval_kwargs["field_criteria"]["discrete_values"] = ["CA", "MA"]
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["discrete_values"]

        # test evaluate byte data query failure
        eval_kwargs["field_criteria"]["byte_data"] = True
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["byte_data"]

        # test evaluate contains either query failure
        eval_kwargs["field_criteria"]["contains_either"][0] = "[A-Z]{3}"
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        eval_kwargs["field_criteria"]["contains_either"][0] = "[A-Z]{2}"

        # test evaluate must contain query failure
        eval_kwargs["field_name"] = (".address.country",)
        eval_kwargs["field_criteria"] = {"must_contain": ["America"]}
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome
        del eval_kwargs["field_criteria"]["must_contain"]

        # test evaluate must not contain query failure
        eval_kwargs["field_criteria"] = {"must_not_contain": ["States"]}
        eval_outcome = self._evaluate_field(**eval_kwargs)
        assert not eval_outcome

        # test sample empty query
        assert self.query(valid_query)
        assert isinstance(self.query(valid_query), bool)

        # test query criteria invalid qualifier exception
        query_qualifier_error = deepcopy(valid_query)
        query_qualifier_error[".address.region"]["required_field"] = False
        try:
            self.query(query_qualifier_error)
        except QueryValidationError as err:
            assert str(err).find(".address.region") > 0

        # test query criteria value exists exception
        query_value_exists = deepcopy(valid_query)
        query_value_exists[".address.region"]["value_exists"] = True
        self.query(query_value_exists)
        query_value_exists[".address.region"]["value_exists"] = False
        try:
            self.query(query_value_exists)
        except QueryValidationError as err:
            assert str(err).find("value_exists:") > 0

        # test query method with valid query on valid input
        assert not self.query(valid_query, valid_input)
        assert isinstance(self.query(valid_query, valid_input), bool)

        # test query method with non-existent field
        assert self.query({".rating": {"value_exists": False}}, valid_input)
        assert not self.query({".rating": {"value_exists": True}}, valid_input)

        # test query method with number field queries
        assert self.query({".datetime": {"value_exists": True}}, valid_input)
        assert not self.query({".datetime": {"value_exists": False}}, valid_input)
        assert self.query({".datetime": {"min_value": 1.1}}, valid_input)
        assert not self.query({".datetime": {"min_value": 1500000000}}, valid_input)
        assert self.query({".datetime": {"max_value": 1500000000}}, valid_input)
        assert not self.query({".datetime": {"max_value": 1.1}}, valid_input)
        assert self.query({".datetime": {"integer_data": False}}, valid_input)
        assert not self.query({".datetime": {"integer_data": True}}, valid_input)
        test_input = deepcopy(valid_input)
        test_input["datetime"] = 50
        assert not self.query({".datetime": {"integer_data": False}}, test_input)
        assert self.query({".datetime": {"integer_data": True}}, test_input)
        assert not self.query({".datetime": {"greater_than": 1449179763.312077}}, valid_input)
        assert not self.query({".datetime": {"less_than": 1449179763.312077}}, valid_input)
        assert self.query({".datetime": {"discrete_values": [1449179763.312077]}}, valid_input)
        assert not self.query({".datetime": {"excluded_values": [1449179763.312077]}}, valid_input)

        # test query method with string field queries
        assert self.query({".userID": {"value_exists": True}}, valid_input)
        assert not self.query({".userID": {"value_exists": False}}, valid_input)
        assert self.query({".userID": {"min_length": 2}}, valid_input)
        assert not self.query({".userID": {"min_length": 14}}, valid_input)
        assert not self.query({".userID": {"max_length": 2}}, valid_input)
        assert self.query({".userID": {"max_length": 14}}, valid_input)
        assert self.query({".userID": {"min_value": "11111111111"}}, valid_input)
        assert not self.query({".userID": {"min_value": "zzzzzzzzzzz"}}, valid_input)
        assert not self.query({".userID": {"max_value": "11111111111"}}, valid_input)
        assert self.query({".userID": {"max_value": "zzzzzzzzzzz"}}, valid_input)
        assert not self.query({".userID": {"greater_than": "6nPbM9gTwLz3f"}}, valid_input)
        assert self.query({".userID": {"greater_than": "6nPbM9gTwLz"}}, valid_input)
        assert not self.query({".userID": {"less_than": "6nPbM9gTwLz3f"}}, valid_input)
        assert self.query({".userID": {"less_than": "6nPbM9gTwLz3g"}}, valid_input)
        assert self.query({".userID": {"discrete_values": ["6nPbM9gTwLz3f"]}}, valid_input)
        assert not self.query({".userID": {"discrete_values": ["6nPbM9gTwLz"]}}, valid_input)
        assert not self.query({".userID": {"excluded_values": ["6nPbM9gTwLz3f"]}}, valid_input)
        assert self.query({".userID": {"excluded_values": ["6nPbM9gTwLz"]}}, valid_input)
        assert self.query({".userID": {"must_contain": ["6nPbM9gTwLz"]}}, valid_input)
        assert not self.query({".userID": {"must_contain": ["/"]}}, valid_input)
        assert not self.query({".userID": {"must_not_contain": ["6nPbM9gTwLz"]}}, valid_input)
        assert self.query({".userID": {"must_not_contain": ["/"]}}, valid_input)
        assert self.query({".userID": {"contains_either": ["6nPbM9gTwLz", "/"]}}, valid_input)
        assert not self.query({".userID": {"contains_either": [":", "/"]}}, valid_input)
        assert self.query({".emoticon": {"byte_data": True}}, valid_input)
        assert not self.query({".userID": {"byte_data": True}}, valid_input)
        assert self.query({".userID": {"byte_data": False}}, valid_input)
        assert not self.query({".emoticon": {"byte_data": False}}, valid_input)

        # test query method with list field queries
        assert self.query({".comments": {"value_exists": True}}, valid_input)
        assert not self.query({".comments": {"value_exists": False}}, valid_input)
        assert self.query({".comments": {"min_size": 2}}, valid_input)
        assert not self.query({".comments": {"min_size": 4}}, valid_input)
        assert not self.query({".comments": {"max_size": 2}}, valid_input)
        assert self.query({".comments": {"max_size": 4}}, valid_input)
        assert self.query({".comments": {"unique_values": True}}, valid_input)
        assert not self.query({".comments": {"unique_values": False}}, valid_input)
        test_input = deepcopy(valid_input)
        test_input["comments"].append("gold")
        assert not self.query({".comments": {"unique_values": True}}, test_input)
        assert self.query({".comments": {"unique_values": False}}, test_input)

        # test items in list field queries
        assert self.query({".comments[0]": {"value_exists": True}}, valid_input)
        assert not self.query({".comments[0]": {"value_exists": False}}, valid_input)
        assert self.query({".comments[0]": {"min_length": 3}}, valid_input)
        assert not self.query({".comments[0]": {"min_length": 5}}, valid_input)
        assert self.query({".comments[0]": {"must_contain": [".{2}"]}}, valid_input)
        assert not self.query({".comments[0]": {"must_contain": ["g.{2}"]}}, valid_input)
        assert self.query({".comments[0]": {"contains_either": ["l", "o"]}}, valid_input)
        assert not self.query({".comments[0]": {"contains_either": ["r", "e"]}}, valid_input)

        return self
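A condensed sketch of the query criteria format exercised by the tests above; the dot-paths assume the same model fields (.datetime, .userID, .comments) and the values are illustrative:

# each top-level key is a dot-path into the record and each value is a
# dictionary of qualifier criteria evaluated against that field
sample_query = {
    '.datetime': {'min_value': 1.1, 'integer_data': False},
    '.userID': {'must_contain': ['[a-zA-Z0-9]{13}'], 'byte_data': False},
    '.comments': {'max_size': 4, 'unique_values': True}
}
# model.query(sample_query) validates the criteria against the model, while
# model.query(sample_query, record) returns True only if every criterion
# evaluates True against the record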
Example #43
0
    def __init__(self, instance_id, pem_file, access_id, secret_key, region_name, owner_id, user_name, login_name='', verbose=True):

        '''
            a method for initializing the SSH connection parameters to the EC2 instance

        :param instance_id: string with AWS id of instance
        :param pem_file: string with path to keypair pem file
        :param access_id: string with access_key_id from aws IAM user setup
        :param secret_key: string with secret_access_key from aws IAM user setup
        :param region_name: string with name of aws region
        :param owner_id: string with aws account id
        :param user_name: string with name of user access keys are assigned to
        :param login_name: [optional] string with name of login user
        :param verbose: boolean to enable process messages
        '''

        title = '%s.__init__' % self.__class__.__name__

    # initialize model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # construct localhost client
        from labpack.platforms.localhost import localhostClient
        self.localhost = localhostClient()

    # validate credentials and construct ec2 method
        from labpack.platforms.aws.ec2 import ec2Client
        self.ec2 = ec2Client(access_id, secret_key, region_name, owner_id, user_name, verbose)

    # validate inputs
        input_fields = {
            'instance_id': instance_id,
            'pem_file': pem_file
        }
        for key, value in input_fields.items():
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.ec2.fields.validate(value, '.%s' % key, object_title)

    # construct class properties
        self.instance_id = instance_id

    # verify pem file exists
        from os import path
        if not path.exists(pem_file):
            raise Exception('%s is not a valid path.' % pem_file)
        self.pem_file = path.abspath(pem_file)

    # verify user has privileges
        try:
            self.ec2.iam.printer_on = False
            self.ec2.list_keypairs()
        except AWSConnectionError as err:
            if str(err).find('Could not connect') > -1:
                raise
            raise AWSConnectionError(title, 'You must have privileges to access EC2 to use sshClient')

    # verify instance exists
        instance_list = self.ec2.list_instances()
        if instance_id not in instance_list:
            raise Exception('%s does not exist in this region or permission scope.' % instance_id)

    # verify instance has public ip
        instance_details = self.ec2.read_instance(instance_id)
        if not instance_details['public_ip_address']:
            raise Exception('%s requires a public IP address to access through ssh.' % instance_id)
        self.instance_ip = instance_details['public_ip_address']

    # retrieve login name from tag
        self.login_name = ''
        input_fields = {
            'login_name': login_name
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)
                self.login_name = login_name
        if not self.login_name:
            for tag in instance_details['tags']:
                if tag['key'] == 'UserName':
                    self.login_name = tag['value']
        if not self.login_name:
            raise Exception('SSH access to %s requires a login_name argument or UserName tag' % instance_id)

    # verify local and remote pem file names match
        pem_absolute = path.abspath(pem_file)
        pem_root, pem_ext = path.splitext(pem_absolute)
        pem_path, pem_name = path.split(pem_root)
        if not instance_details['key_name'] == pem_name:
            raise Exception('%s does not match name of keypair %s for instance %s.' % (pem_name, instance_details['key_name'], instance_id))

    # verify instance is ready
        self.ec2.check_instance_status(instance_id)

    # verify security group allows ssh
        group_list = []
        for group in instance_details['security_groups']:
            group_list.append(group['group_id'])
        if not group_list:
            raise Exception('SSH access to %s requires a security group attached to instance.' % instance_id)
        port_22_list = []
        for group_id in group_list:
            group_details = self.ec2.read_security_group(group_id)
            for permission in group_details['ip_permissions']:
                if permission['from_port'] == 22:
                    port_22_list.extend(permission['ip_ranges'])
        if not port_22_list:
            raise Exception('SSH access to %s requires a security group with inbound permissions for port 22.' % instance_id)
        from labpack.records.ip import get_ip
        current_ip = get_ip()
        ssh_access = False
        for ip_range in port_22_list:
            if ip_range['cidr_ip'].find('0.0.0.0/0') > -1 or ip_range['cidr_ip'].find(current_ip) > -1:
                ssh_access = True
                break
        if not ssh_access:
            raise Exception('SSH access to %s requires your IP %s to be added to port 22 on its security group.' % (instance_id, current_ip))
        
    # verify pem file has access
        try:
            self.script('ls -a')
        except:
            raise AWSConnectionError(title, '%s does not have access to instance %s.' % (pem_name, instance_id))

    # initialize scp flag
        self.scp = False
        
    # turn printer back on
        self.ec2.iam.printer_on = True
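
A hedged usage sketch for this constructor follows; the import path labpack.platforms.aws.ssh and the placeholder credential values are assumptions, not taken from the snippet above.

# minimal sketch, assuming sshClient is importable from labpack.platforms.aws.ssh
from labpack.platforms.aws.ssh import sshClient

ssh_kwargs = {
    'instance_id': 'i-0123456789abcdef0',   # hypothetical instance id
    'pem_file': 'path/to/keypair.pem',       # local keypair matching the instance key_name
    'access_id': 'AKIA...',                  # IAM access key id (placeholder)
    'secret_key': '...',                     # IAM secret access key (placeholder)
    'region_name': 'us-east-1',
    'owner_id': '123456789012',
    'user_name': 'deploy-user',
    'login_name': 'ubuntu'                   # optional; otherwise read from the UserName tag
}
ssh_client = sshClient(**ssh_kwargs)
ssh_client.script('ls -a')                   # the constructor above uses script() to verify access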
Example #44
def positional_filter(positional_filters, title=''):

    ''' 
        a method to construct a conditional filter function to test positional arguments

    :param positional_filters: dictionary or list of dictionaries with query criteria
    :param title: [optional] string with name of calling function to use in error messages
    :return: callable for filter_function 

    NOTE:   query criteria architecture

            each item in the positional_filters argument must be a dictionary
            composed of integer key names that represent the index of the
            positional segment to test and key values containing the
            dictionary of conditional operators used to test the string
            value in the indexed field of the record.

            eg. positional_filters = [ { 0: { 'must_contain': [ '^lab' ] } } ]

            this example filter looks at the first segment of each key string
            in the collection for a string value which starts with the
            characters 'lab'. as a result, it will match both the following:
                lab/unittests/1473719695.2165067.json
                laboratory20160912.json

    NOTE:   the filter function uses a query filters list structure to represent
            the disjunctive normal form of a logical expression. a record is
            added to the results list if any query criteria dictionary in the
            list evaluates to true. within each query criteria dictionary, all
            declared conditional operators must evaluate to true.

            in this way, the positional_filters represents a boolean OR operator and
            each criteria dictionary inside the list represents a boolean AND
            operator between all keys in the dictionary.

            each query criteria uses the architecture of query declaration in
            the jsonModel.query method
    
    NOTE:   the filter function will lazy load a dictionary input

    positional_filters:
    [ { 0: { conditional operators }, 1: { conditional_operators }, ... } ]

    conditional operators:
        "byte_data": false,
        "discrete_values": [ "" ],
        "excluded_values": [ "" ],
        "equal_to": "",
        "greater_than": "",
        "less_than": "",
        "max_length": 0,
        "max_value": "",
        "min_length": 0,
        "min_value": "",
        "must_contain": [ "" ],
        "must_not_contain": [ "" ],
        "contains_either": [ "" ]
    '''

# define help text    
    if not title:
        title = 'positional_filter'
    filter_arg = '%s(positional_filters=[...])' % title

# construct path_filter model
    filter_schema = {
        'schema': {
            'byte_data': False,
            'discrete_values': [ '' ],
            'excluded_values': [ '' ],
            'equal_to': '',
            'greater_than': '',
            'less_than': '',
            'max_length': 0,
            'max_value': '',
            'min_length': 0,
            'min_value': '',
            'must_contain': [ '' ],
            'must_not_contain': [ '' ],
            'contains_either': [ '' ]
        },
        'components': {
            '.discrete_values': {
                'required_field': False
            },
            '.excluded_values': {
                'required_field': False
            },
            '.must_contain': {
                'required_field': False
            },
            '.must_not_contain': {
                'required_field': False
            },
            '.contains_either': {
                'required_field': False
            }
        }
    }
    from jsonmodel.validators import jsonModel
    filter_model = jsonModel(filter_schema)

# lazy load path dictionary
    if isinstance(positional_filters, dict):
        positional_filters = [ positional_filters ]
        
# validate input
    if not isinstance(positional_filters, list):
        raise TypeError('%s must be a list.' % filter_arg)
    for i in range(len(positional_filters)):
        if not isinstance(positional_filters[i], dict):
            raise TypeError('%s item %s must be a dictionary.' % (filter_arg, i))
        for key, value in positional_filters[i].items():
            _key_name = '%s : {...}' % key
            if not isinstance(key, int):
                raise TypeError('%s key name must be an int.' % filter_arg.replace('...', _key_name))
            elif not isinstance(value, dict):
                raise TypeError('%s key value must be a dictionary.' % filter_arg.replace('...', _key_name))
            filter_model.validate(value)

# construct segment value model
    segment_schema = { 'schema': { 'segment_value': 'string' } }
    segment_model = jsonModel(segment_schema)

# construct filter function
    def filter_function(*args):
        max_index = len(args) - 1
        for filter_criteria in positional_filters:
            criteria_match = True
            for key, value in filter_criteria.items():
                if key > max_index:
                    criteria_match = False
                    break
                segment_criteria = { '.segment_value': value }
                segment_data = { 'segment_value': args[key] }
                if not segment_model.query(segment_criteria, segment_data):
                    criteria_match = False
                    break
            if criteria_match:
                return True
        return False

    return filter_function
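
A short usage sketch follows, based on the filter architecture described in the docstring; the record keys are hypothetical and positional_filter is assumed to be in scope as defined above.

# minimal sketch: match any record key whose first segment starts with 'lab'
filter_function = positional_filter([{0: {'must_contain': ['^lab']}}])

record_keys = [
    'lab/unittests/1473719695.2165067.json',
    'laboratory20160912.json',
    'archive/2016/results.json'
]
for record_key in record_keys:
    segments = record_key.split('/')
    if filter_function(*segments):
        print('%s matches' % record_key)   # the first two keys match, the third does not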
Example #45
    def __init__(self, hostname, port=9042, username='', password='', cert_path=''):

        title = '%s.__init__' % self.__class__.__name__

    # construct fields model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # ingest hostname
        self_hostname = hostname
        if isinstance(hostname, str):
            if hostname:
                self_hostname = [ hostname ]

    # validate inputs
        input_fields = {
            'hostname': self_hostname,
            'port': port,
            'username': username,
            'password': password,
            'cert_path': cert_path
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct endpoint
        self.hostname = self_hostname
        self.port = port
        self.username = username
        self.password = password
        self.cert_path = cert_path

    # construct cluster
        cluster_kwargs = {
            'contact_points': self.hostname,
            'port': self.port
        }
        if self.username and self.password:
            from sys import path as sys_path
            sys_path.append(sys_path.pop(0))
            from cassandra.auth import PlainTextAuthProvider
            sys_path.insert(0, sys_path.pop())
            cluster_kwargs['auth_provider'] = PlainTextAuthProvider(
                username=username, 
                password=password
            )
        if cert_path:
            from os import path
            import ssl
            if not path.exists(cert_path):
                raise ValueError('%s(cert_path="%s") is not a valid file path.' % (title, cert_path))
            cluster_kwargs['ssl_options'] = {
                'ca_certs': cert_path,
                'cert_reqs': ssl.CERT_REQUIRED,
                'ssl_version': ssl.PROTOCOL_TLSv1
            }
        from cassandra.cluster import Cluster
        self.cluster = Cluster(**cluster_kwargs)

    # construct session
        self.session = self.cluster.connect()
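
For reference, the bare cassandra-driver calls wrapped by this constructor look roughly like the sketch below; the contact point, credentials, and query are placeholders.

# minimal sketch of the underlying cassandra-driver usage, assuming default cluster settings
from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider

auth_provider = PlainTextAuthProvider(username='cassandra', password='cassandra')
cluster = Cluster(contact_points=['127.0.0.1'], port=9042, auth_provider=auth_provider)
session = cluster.connect()
rows = session.execute('SELECT release_version FROM system.local')
print(rows.one().release_version)
cluster.shutdown()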
Example #46
    def __init__(self, magic_file=''):

        ''' initialization method for labMagic class

        :param magic_file: [optional] string with local path to magic.mgc file
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'magic_file': magic_file
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct magic method
        magic_kwargs = {
            'mime': True,
            'uncompress': True
        }
        from labpack.platforms.localhost import localhostClient
        sys_name = localhostClient().os.sysname
        if sys_name == 'Windows':
            if not magic_file:
                raise IndexError('%s(magic_file="...") is required on Windows systems.' % title)
        import os
        if magic_file:
            if not os.path.exists(magic_file):
                raise ValueError('%s(magic_file=%s) is not a valid file path.' % (title, magic_file))
            magic_kwargs['magic_file'] = magic_file
        try:
        # workaround for module namespace conflict
            from sys import path as sys_path
            sys_path.append(sys_path.pop(0))
            import magic
            sys_path.insert(0, sys_path.pop())
            self.magic = magic.Magic(**magic_kwargs)
        except:
            raise Exception('\nmagiclab requires the python-magic module. try: pip install python-magic\npython-magic requires the C library libmagic. See documentation in labpack.parsing.magic.')

    # construct mimetypes method
        import mimetypes
        self.mimetypes = mimetypes.MimeTypes()

    # retrieve updates to mimetypes
        mimetype_urls = self.fields.schema['mimetype_urls']
        from labpack.storage.appdata import appdataClient
        mime_collection = appdataClient('Mime Types')
        mime_filter = mime_collection.conditional_filter([{-1:{'must_contain': ['mime.types']}}])
        mime_list = mime_collection.list(mime_filter)
        for key in mimetype_urls.keys():
            file_path = os.path.join(mime_collection.collection_folder, key)
            if key not in mime_list:
                file_dir = os.path.split(file_path)[0]
                if not os.path.exists(file_dir):
                    os.makedirs(file_dir)
                import requests
                try:
                    response = requests.get(mimetype_urls[key])
                except Exception:
                    from labpack.handlers.requests import handle_requests
                    request_kwargs = {'url': mimetype_urls[key]}
                    response_details = handle_requests(requests.Request(**request_kwargs))
                    print('magiclab attempted to retrieve latest mimetype registry resource at %s but ran into this non-fatal error: %s' % (mimetype_urls[key], response_details['error']))
                    break
                with open(file_path, 'wb') as f:
                    f.write(response.content)
                    f.close()
            ext_map = mimetypes.read_mime_types(file_path)
            for file_ext, mime_type in ext_map.items():
                self.mimetypes.add_type(mime_type, file_ext)
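
The python-magic calls wrapped above can be exercised directly, as in the sketch below; the file path is a placeholder, and the magic_file argument is only required on Windows, as noted in the constructor.

# minimal sketch of the underlying python-magic usage
import magic

mime_magic = magic.Magic(mime=True, uncompress=True)
mime_type = mime_magic.from_file('path/to/archive.tar.gz')   # hypothetical file path
print(mime_type)   # e.g. 'application/x-tar' once the gzip layer is uncompressed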
Example #47
File: update.py Project: AvdN/pocketLab
    def _apply_update(root_path, service_name=''):

    # construct message
        msg_insert = 'local service'
        if service_name:
            msg_insert = 'service "%s"' % service_name

    # update vcs ignore
        import hashlib
        from os import path
        from pocketlab.methods.vcs import merge_ignores
        vcs_files = {
            'git': {
                'path': path.join(root_path, '.gitignore'),
                'name': '.gitignore'
            },
            'mercurial': {
                'path': path.join(root_path, '.hgignore'),
                'name': '.hgignore'
            }
        }
        for key, value in vcs_files.items():
            if path.exists(value['path']):
                old_text = open(value['path']).read()
                old_hash = hashlib.sha1(old_text.encode('utf-8')).hexdigest()
                new_text = merge_ignores(old_text, vcs_templates[key])
                new_hash = hashlib.sha1(new_text.encode('utf-8')).hexdigest()
                if old_hash != new_hash:
                    with open(value['path'], 'wt') as f:
                        f.write(new_text)
                        f.close()
                    if verbose:
                        print('%s file for %s updated.' % (value['name'], msg_insert))

    # update lab yaml
        from pocketlab import __module__
        from jsonmodel.loader import jsonLoader
        from jsonmodel.validators import jsonModel
        from labpack.records.settings import save_settings, load_settings
        config_schema = jsonLoader(__module__, 'models/lab-config.json')
        config_model = jsonModel(config_schema)
        template_config = config_model.ingest()
        config_path = path.join(root_path, 'lab.yaml')
        if path.exists(config_path):
            try:
                old_config = load_settings(config_path)
                template_config.update(**old_config)
                if old_config != template_config:
                    from pocketlab.methods.config import compile_yaml
                    config_text = compile_yaml(config_schema, config_path)
                    with open(config_path, 'wt') as f:
                        f.write(config_text)
                        f.close()
                    if verbose:
                        print('lab.yaml file for %s updated.' % msg_insert)
            except:
                print('lab.yaml file for %s is corrupted. Skipped.' % msg_insert)

    # update setup.py
        setup_path = path.join(root_path, 'setup.py')
        if path.exists(setup_path):
            from pocketlab.methods.config import update_setup
            old_text = open(setup_path).read()
            old_hash = hashlib.sha1(old_text.encode('utf-8')).hexdigest()
            new_text = update_setup(old_text)
            new_hash = hashlib.sha1(new_text.encode('utf-8')).hexdigest()
            if old_hash != new_hash:
                with open(setup_path, 'wt', encoding='utf-8') as f:
                    f.write(new_text)
                    f.close()
                if verbose:
                    print('setup.py file for %s updated.' % msg_insert)
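
The update steps above share a common pattern: hash the existing text, regenerate it, and only rewrite the file when the hash changes. A generic sketch of that pattern, with a hypothetical transform function, is shown below.

import hashlib

def update_file_if_changed(file_path, transform, verbose=True):
    ''' a sketch of the hash-compare-and-write pattern used by _apply_update '''
    old_text = open(file_path).read()
    old_hash = hashlib.sha1(old_text.encode('utf-8')).hexdigest()
    new_text = transform(old_text)   # hypothetical merge/update function
    new_hash = hashlib.sha1(new_text.encode('utf-8')).hexdigest()
    if old_hash != new_hash:
        with open(file_path, 'wt') as f:
            f.write(new_text)
        if verbose:
            print('%s updated.' % file_path)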