Example No. 1
def remove(service_name):

    title = 'remove'

    # validate inputs
    input_fields = {'service_name': service_name}
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

    # construct registry client
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data',
                                    prod_name=__module__)

    # search for service name
    exit_msg = ''
    service_key = '%s.yaml' % service_name
    search_condition = [{0: {'discrete_values': [service_key]}}]
    search_filter = registry_client.conditional_filter(search_condition)
    search_results = registry_client.list(search_filter)
    if not search_results:
        raise ValueError('"%s" does not exist in project registry.' %
                         service_name)
    else:
        registry_client.delete(search_results[0])
        exit_msg = '"%s" removed from project registry.' % service_name

    return exit_msg
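
A minimal usage sketch for the command above (hypothetical service name; assumes fields_model is the module-level jsonModel validator used by the pocketlab commands):

if __name__ == '__main__':
    try:
        print(remove('my-service'))
    except ValueError as err:
        # raised when the service name is not found in the project registry
        print(err)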
Example No. 2
def retrieve_service_root(service_name, command_context=''):

# construct registry client
    from os import path
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)

# validate service name exists in registry
    file_name = '%s.yaml' % service_name
    filter_function = registry_client.conditional_filter([{0:{'discrete_values':[file_name]}}])
    service_list = registry_client.list(filter_function=filter_function)
    if file_name not in service_list:
        error_msg = '"%s" not found in the registry.' % service_name
        if command_context:
            error_msg += ' %s' % command_context
        raise ValueError(error_msg)

# retrieve root path to service
    service_details = registry_client.read(file_name)
    if 'service_root' not in service_details:
        error_msg = 'Record for project "%s" has been corrupted.' % service_name
        if command_context:
            error_msg += ' %s' % command_context
        raise ValueError(error_msg)
    service_root = service_details['service_root']
    if not path.exists(service_root):
        error_msg = 'Path %s to project "%s" no longer exists.' % (service_root, service_name)
        if command_context:
            error_msg += ' %s' % command_context
        raise ValueError(error_msg)

    return service_root
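
A short usage sketch (hypothetical service name; the optional command_context string is appended to any error message):

if __name__ == '__main__':
    try:
        service_root = retrieve_service_root('my-service', 'Try running "lab home my-service" first.')
        print(service_root)
    except ValueError as err:
        # not registered, record corrupted, or path no longer exists
        print(err)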
Example No. 3
def retrieve_services(service_list=None, all=False):
    
    '''
        a method to generate the root path for one or more services

    :param service_list: list of strings with names of services
    :param all: boolean to retrieve all service paths in the registry
    :return: list of dictionaries, string with exit message insert
    '''
    
# define default returns
    path_list = []
    msg_insert = 'local service'
    
# add named service to service list
    if service_list:
        msg_insert = ''
        for i in range(len(service_list)):
            service = service_list[i]
            if msg_insert:
                if i + 1 == len(service_list):
                    msg_insert += ' and '
                else:
                    msg_insert += ', '
            msg_insert += '"%s"' % service
            service_root = retrieve_service_root(service)
            service_details = {
                'name': service,
                'path': service_root
            }
            path_list.append(service_details)

# add all services in registry to service list
    elif all:
        msg_insert = 'all services'
        from pocketlab import __module__
        from labpack.storage.appdata import appdataClient
        registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
        from labpack.records.settings import load_settings
        for file_path in registry_client.localhost.walk(registry_client.collection_folder):
            try:
                details = load_settings(file_path)
                service_details = {
                    'name': details['service_name'],
                    'path': details['service_root']
                }
                path_list.append(service_details)
            except:
                # skip records that are missing fields or cannot be parsed
                pass

# add local path to service list
    else:
        path_list.append({'name': '', 'path': './'})
    
    return path_list, msg_insert
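
A usage sketch under the same assumptions (retrieve_service_root defined as in Example No. 2):

if __name__ == '__main__':
    # no arguments: default to the current working directory
    path_list, msg_insert = retrieve_services()
    print(msg_insert, path_list)
    # all=True: walk every record in the lab registry
    path_list, msg_insert = retrieve_services(all=True)
    print(msg_insert, path_list)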
Example No. 4
def compile_collections(collection_list,
                        prod_name,
                        org_name,
                        data_path='../data',
                        s3_config=None):

    record_collections = {}

    if s3_config:
        if s3_config['aws_s3_access_key_id']:

            import re
            prod_name = re.sub(
                r'[^\w]', '',
                prod_name.lower().replace(' ', '-').replace('_', '-'))
            org_name = re.sub(
                r'[^\w]', '',
                org_name.lower().replace(' ', '-').replace('_', '-'))

            from labpack.storage.aws.s3 import s3Client
            for collection in collection_list:
                collection_name = re.sub(
                    r'[^\w]', '',
                    collection.lower().replace(' ', '-').replace('_', '-'))
                record_collections[collection] = s3Client(
                    access_id=s3_config['aws_s3_access_key_id'],
                    secret_key=s3_config['aws_s3_secret_access_key'],
                    region_name=s3_config['aws_s3_default_region'],
                    owner_id=s3_config['aws_s3_owner_id'],
                    user_name=s3_config['aws_s3_user_name'],
                    collection_name=collection_name,
                    prod_name=prod_name,
                    org_name=org_name)

    if not record_collections:
        from labpack.storage.appdata import appdataClient
        for collection in collection_list:
            record_collections[collection] = appdataClient(collection,
                                                           root_path=data_path)

    return record_collections
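
A hedged usage sketch (hypothetical names; without an s3_config every collection falls back to local appdataClient storage under data_path):

if __name__ == '__main__':
    record_collections = compile_collections(
        collection_list=['User Data', 'Logs'],
        prod_name='My App',
        org_name='My Org',
        data_path='../data')
    # each value is a storage client keyed by its original collection name
    print(record_collections['Logs'].collection_folder)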
Example No. 5
def monitor_telegram(telegram_config):

    from time import time
    from labpack.storage.appdata import appdataClient
    telegram_data_client = appdataClient('Telegram', prod_name='achesBot')
    from labpack.messaging.telegram import telegramBotClient
    init_kwargs = {
        'access_token': telegram_config['telegram_access_token'],
        'bot_id': telegram_config['telegram_bot_id']
    }
    admin_id = 'telegram_%s' % telegram_config['telegram_admin_id']
    telegram_bot_client = telegramBotClient(**init_kwargs)
    update_key = 'last-update.yaml'
    update_record = telegram_data_client.read(update_key)
    last_update = update_record['last_update']
    updates_details = telegram_bot_client.get_updates(last_update)
    update_list = []
    if updates_details['json']['result']:
        update_list = sorted(updates_details['json']['result'],
                             key=lambda k: k['update_id'])
        offset_details = {'last_update': update_list[-1]['update_id']}
        telegram_data_client.create(update_key, offset_details)
    for update in update_list:
        user_id = update['message']['from']['id']
        contact_id = 'telegram_%s' % user_id
        record_key = 'incoming/%s/%s.json' % (contact_id, str(time()))
        telegram_data_client.create(record_key, update)

        # analyze message
        response_details = analyze_message(update, user_id,
                                           telegram_bot_client)
        if response_details['function'] == 'send_message':
            telegram_bot_client.send_message(**response_details['kwargs'])
        elif response_details['function'] == 'send_photo':
            telegram_bot_client.send_photo(**response_details['kwargs'])

        # save response
        record_key = 'outgoing/%s/%s.json' % (contact_id, str(time()))
        telegram_data_client.create(record_key, response_details)

    return True
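
A usage sketch with a hypothetical telegram_config drawn from environment variables (the keys match those read inside the function; a last-update.yaml record is assumed to already exist in the collection):

if __name__ == '__main__':
    from os import environ
    telegram_config = {
        'telegram_access_token': environ['TELEGRAM_ACCESS_TOKEN'],
        'telegram_bot_id': int(environ['TELEGRAM_BOT_ID']),
        'telegram_admin_id': int(environ['TELEGRAM_ADMIN_ID'])
    }
    monitor_telegram(telegram_config)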
Example No. 6
def clean(verbose=True):

    # construct registry client
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data',
                                    prod_name=__module__)

    # remove each file in registry without a valid path
    from os import path
    from labpack.records.settings import load_settings, remove_settings
    for file_path in registry_client.localhost.walk(
            registry_client.collection_folder):
        remove_file = False
        try:
            details = load_settings(file_path)
            service_name = details['service_name']
            service_root = details['service_root']
            if not path.exists(service_root):
                remove_file = True
        except:
            # treat unreadable or incomplete records as broken
            remove_file = True
        if remove_file:
            if verbose:
                file_root, file_ext = path.splitext(file_path)
                file_dir, file_name = path.split(file_root)
                print('Broken service "%s" removed from lab registry.' %
                      file_name)
            remove_settings(file_path)

# TODO remove docker containers with exit 1 status

# TODO remove docker images with ^none name

    exit_msg = 'Lab environment has been cleaned up.'

    return exit_msg
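
A one-line usage sketch; clean() only removes registry records whose service_root path no longer exists:

if __name__ == '__main__':
    print(clean(verbose=True))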
Example No. 7
def monitor_telegram():

    from os import environ
    from time import time
    from labpack.storage.appdata import appdataClient
    telegram_data_client = appdataClient('Telegram', prod_name='beenServedBot')

    # construct bot client
    from labpack.messaging.telegram import telegramBotClient
    init_kwargs = {
        'access_token': environ['TELEGRAM_ACCESS_TOKEN'],
        'bot_id': int(environ['TELEGRAM_BOT_ID'])
    }
    admin_id = 'telegram_%s' % int(environ['TELEGRAM_ADMIN_ID'])
    telegram_bot_client = telegramBotClient(**init_kwargs)

    # retrieve update record
    update_key = 'last-update.yaml'
    update_filter = telegram_data_client.conditional_filter([{
        0: {
            'must_contain': ['%s$' % update_key]
        }
    }])
    if not telegram_data_client.list(update_filter):
        telegram_data_client.create(update_key, {'last_update': 0})
    update_record = telegram_data_client.read(update_key)

    # get updates from telegram
    last_update = update_record['last_update']
    updates_details = telegram_bot_client.get_updates(last_update)
    update_list = []
    if updates_details['json']['result']:
        update_list = sorted(updates_details['json']['result'],
                             key=lambda k: k['update_id'])
        offset_details = {'last_update': update_list[-1]['update_id']}
        telegram_data_client.create(update_key, offset_details)


# process updates
    from pprint import pprint
    for update in update_list:
        pprint(update)
        user_id = update['message']['from']['id']
        contact_id = 'telegram_%s' % user_id
        record_key = 'incoming/%s/%s.json' % (contact_id, str(time()))
        telegram_data_client.create(record_key, update)

        # analyze message
        analyze_kwargs = {
            'update_details': update,
            'user_id': user_id,
            'telegram_bot_client': telegram_bot_client,
            'telegram_data_client': telegram_data_client
        }
        response_details = analyze_message(**analyze_kwargs)
        if response_details['function'] == 'send_message':
            telegram_bot_client.send_message(**response_details['kwargs'])
        elif response_details['function'] == 'send_photo':
            telegram_bot_client.send_photo(**response_details['kwargs'])

        # save response
        record_key = 'outgoing/%s/%s.json' % (contact_id, str(time()))
        telegram_data_client.create(record_key, response_details)

    return True
Example No. 8
def home(service_name, print_path=False, service_path='', overwrite=False):
    '''
        a method to manage the local path information for a service

    :param service_name: string with name of service to add to registry
    :param print_path: [optional] boolean to retrieve local path of service from registry
    :param service_path: [optional] string with path to service root
    :param overwrite: [optional] boolean to overwrite existing service registration
    :return: string with local path to service
    '''

    title = 'home'

    # validate inputs
    input_map = {'service_name': service_name, 'service_path': service_path}
    for key, value in input_map.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

# validate requirements
# TODO boolean algebra method to check not both inputs

# resolve print path request
    if print_path:

        # retrieve service root
        from pocketlab.methods.service import retrieve_service_root
        command_context = 'Try running "lab home %s" first from its root.' % service_name
        service_root = retrieve_service_root(service_name, command_context)

        # return root path to bash command
        import sys
        exit_msg = 'Transport to "%s" underway.;%s' % (service_name,
                                                       service_root)
        print(exit_msg)
        sys.exit()

# resolve service request

# validate existence of home alias
    from os import path
    from labpack.platforms.localhost import localhostClient
    localhost_client = localhostClient()
    home_alias = "alias home='function _home(){ lab_output=\"$(lab home --print $1)\"; IFS=\";\" read -ra LINES <<< \"$lab_output\"; echo \"${LINES[0]}\"; cd \"${LINES[1]}\"; };_home'"
    config_list = [localhost_client.bash_config, localhost_client.sh_config]
    for i in range(len(config_list)):
        if config_list[i]:
            if not path.exists(config_list[i]):
                with open(config_list[i], 'wt') as f:
                    f.write('# alias for pocketlab home command\n')
                    f.write(home_alias)
                    f.close()
            else:
                import re
                home_pattern = re.compile(r'alias home\=')
                lab_pattern = re.compile(
                    r"alias home\='function _home\(\)\{\slab_output")
                home_match = False
                lab_match = False
                with open(config_list[i], 'rt') as f:
                    for line in f:
                        line = line.partition('#')[0]
                        line = line.rstrip()
                        if home_pattern.findall(line):
                            home_match = True
                        if lab_pattern.findall(line):
                            lab_match = True
                if not home_match:
                    with open(config_list[i], 'a') as f:
                        f.write('\n\n# alias for pocketlab home command\n')
                        f.write(home_alias)
                        f.close()
                elif not lab_match:
                    raise ValueError(
                        'the "home" alias is being used by another program.')
                else:
                    pass
    # TODO allow declaration of different alias
    # TODO check system path for home command

# construct registry client
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data',
                                    prod_name=__module__)

    # validate service name is not already in registry
    file_name = '%s.yaml' % service_name
    filter_function = registry_client.conditional_filter([{
        0: {
            'discrete_values': [file_name]
        }
    }])
    service_list = registry_client.list(filter_function=filter_function)
    if file_name in service_list:
        if not overwrite:
            suggest_msg = 'Add -f to overwrite.'
            raise ValueError('"%s" already exists in the registry. %s' %
                             (service_name, suggest_msg))


# add service to registry
    service_root = './'
    if service_path:
        if not path.exists(service_path):
            raise ValueError('"%s" is not a valid path.' % service_path)
        elif not path.isdir(service_path):
            raise ValueError('"%s" is not a valid directory.' % service_path)
        service_root = service_path
    file_details = {
        'service_name': service_name,
        'service_root': path.abspath(service_root)
    }
    registry_client.create(file_name, file_details)

    exit_msg = '"%s" added to registry. To return to workdir, run "home %s"' % (
        service_name, service_name)
    return exit_msg
Example No. 9
    return exit_msg

if __name__ == '__main__':

    # add dependencies
    try:
        import pytest
    except:
        print(
            'pytest module required to perform unittests. try: pip install pytest'
        )
        exit()
    from time import time
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data',
                                    prod_name=__module__)
    unittest_service = 'unittest_service_name_%s' % str(time()).replace(
        '.', '')

    # test invalid name exception
    from jsonmodel.exceptions import InputValidationError
    with pytest.raises(InputValidationError):
        home('not valid')

# test new service
    assert home(unittest_service).find(unittest_service) > -1

    # test existing service exception
    with pytest.raises(ValueError):
        home(unittest_service)
Example No. 10
    def deploy_app(self, site_folder, runtime_type=''):

        ''' a method to deploy a local app folder to heroku '''

        title = '%s.deploy_app' % self.__class__.__name__

    # validate inputs
        input_fields = {
            'site_folder': site_folder,
            'runtime_type': runtime_type
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # verify app subdomain
        if not self.subdomain:
            raise Exception('You must access a subdomain before you can deploy to heroku. Try: %s.access()' % self.__class__.__name__)
                    
    # validate existence of site folder
        from os import path
        if not path.exists(site_folder):
            raise ValueError('%s is not a valid path on localhost.' % site_folder)
    
    # validate existence of proper runtime file
        runtime_file = 'index.html'
        static_build = False
        if runtime_type == 'php':
            runtime_file = 'index.php'
        elif runtime_type in ('ruby', 'java', 'python', 'jingo'):
            runtime_file = 'Procfile'
        elif runtime_type == 'node':
            runtime_file = 'package.json'
        else:
            runtime_type = 'html'
            static_build = True
        build_file = path.join(site_folder, runtime_file)
        if not path.exists(build_file):
            raise Exception('%s must contain an %s file to build a %s app.' % (site_folder, runtime_file, runtime_type))
        if runtime_type == 'python':
            req_file = path.join(site_folder, 'requirements.txt')
            if not path.exists(req_file):
                raise Exception('%s must contain a requirements.txt file to build a python app.' % site_folder)
        if runtime_type == 'jingo':
            req_file = path.join(site_folder, 'package.json')
            if not path.exists(req_file):
                raise Exception('%s must contain a package.json file to build a jingo app.' % site_folder)

    # validate container plugin
        from os import devnull
        from subprocess import check_output
        self.printer('Checking heroku plugin requirements ... ', flush=True)
        sys_command = 'heroku plugins'
        heroku_plugins = check_output(sys_command, shell=True, stderr=open(devnull, 'wb')).decode('utf-8')
        if heroku_plugins.find('heroku-builds') == -1:
            self.printer('ERROR')
            raise Exception(
                'heroku builds plugin required. Try: heroku plugins:install heroku-builds')
        self.printer('done.')
    
    # construct temporary folder
        self.printer('Creating temporary files ... ', flush=True)
        from shutil import copytree, move, ignore_patterns
        from os import makedirs
        from time import time
        from labpack import __module__
        from labpack.storage.appdata import appdataClient
        client_kwargs = {
            'collection_name': 'TempFiles',
            'prod_name': __module__
        }
        tempfiles_client = appdataClient(**client_kwargs)
        temp_folder = path.join(tempfiles_client.collection_folder, 'heroku%s' % time())
    
    # define cleanup function
        def _cleanup_temp():
            self.printer('Cleaning up temporary files ... ', flush=True)
            from shutil import rmtree
            rmtree(temp_folder, ignore_errors=True)
            self.printer('done.')

    # copy site to temporary folder
        try:
            makedirs(temp_folder)
            site_root, site_name = path.split(path.abspath(site_folder))
            build_path = path.join(temp_folder, site_name)
            copytree(site_folder, build_path, ignore=ignore_patterns('*node_modules/*','*.lab/*'))
            if static_build:
                index_path = path.join(build_path, 'index.html')
                home_path = path.join(build_path, 'home.html')
                compose_path = path.join(build_path, 'compose.json')
                php_path = path.join(build_path, 'index.php')
                with open(compose_path, 'wt') as f:
                    f.write('{}')
                    f.close()
                with open(php_path, 'wt') as f:
                    f.write('<?php include_once("home.html"); ?>')
                    f.close()
                move(index_path, home_path)
        except:
            self.printer('ERROR')
            _cleanup_temp()
            raise
        self.printer('done.')

    # deploy site to heroku
        self.printer('Deploying %s to heroku ... ' % site_folder, flush=True)
        try:
            sys_command = 'cd %s; heroku builds:create -a %s' % (temp_folder, self.subdomain)
            self._handle_command(sys_command, print_pipe=True)
        except:
            self.printer('ERROR')
            raise
        finally:
            _cleanup_temp()

        self.printer('Deployment complete.')
        
        return True
Example No. 11
def analyze_message(update_details, user_id, telegram_bot_client):

    from os import environ
    from server.feedback import get_kitten
    from labpack.storage.appdata import appdataClient
    telegram_data_client = appdataClient('Telegram', prod_name='achesBot')
    user_key = 'users/telegram_%s.yaml' % user_id
    user_record = telegram_data_client.read(user_key)

    # parse message
    message_string = ''
    if 'voice' in update_details['message'].keys():
        voice_id = update_details['message']['voice']['file_id']
        details = telegram_bot_client.get_route(voice_id)
        file_route = details['json']['result']['file_path']
        file_buffer = telegram_bot_client.get_file(
            file_route, file_name='voice_telegram_%s' % user_id)
        file_data = file_buffer.getvalue()
        file_name = file_buffer.name

        from server.bluemix import bluemix_speech2text, bluemix_token
        bluemix_username = environ['bluemix_speech2text_username'.upper()]
        bluemix_password = environ['bluemix_speech2text_password'.upper()]
        token_details = bluemix_token(bluemix_username, bluemix_password)
        if token_details['json']:
            auth_token = token_details['json']['token']
            speech_details = bluemix_speech2text(file_data, file_name,
                                                 auth_token)
            if speech_details['json']:
                if 'results' in speech_details['json'].keys():
                    transcript_results = speech_details['json']['results']
                    if transcript_results:
                        alternative_list = transcript_results[0][
                            'alternatives']
                        # use the highest-confidence transcript
                        sorted_results = sorted(alternative_list,
                                                key=lambda k: k['confidence'],
                                                reverse=True)
                        message_string = sorted_results[0]['transcript']

    elif 'text' in update_details['message'].keys():
        if update_details['message']['text']:
            message_string = update_details['message']['text']

# define default response
    print(message_string)
    response_details = {
        'function': 'send_message',
        'kwargs': {
            'user_id': user_id,
            'message_text': 'Thanks. Your symptoms have been recorded.'
        }
    }

    # handle navigation
    if message_string.lower() in ('start', '/start', 'help', '/help', 'about',
                                  '/about'):
        response_details['kwargs'][
            'message_text'] = 'Aches & Pains bot is your personal health journal. To create an entry, simply type or speak into the app about how you feel. Your entry will be logged, coded and added to your medical records. \n\nYou can also type the following commands:\n\t__/help__ : for this message\n\t__/feedback__ : for selection of feedback options\n\t__/history__ : for last three entries'
        response_details['kwargs']['message_style'] = 'markdown'

# update feedback types
    elif message_string.lower() == 'kittens':
        user_details = {'feedback_type': 'kittens'}
        telegram_data_client.create(user_key, user_details, overwrite=True)
        response_details['kwargs'][
            'message_text'] = 'Sweet! Your feedback type has been updated to cute kittens.'
    elif message_string.lower() == 'text':
        user_details = {'feedback_type': 'text'}
        telegram_data_client.create(user_key, user_details, overwrite=True)
        response_details['kwargs'][
            'message_text'] = 'Sweet! Your feedback type has been updated to normal text.'
    elif message_string.lower() in ('feedback', '/feedback'):
        response_details['kwargs'][
            'message_text'] = 'Select a type of feedback:'
        response_details['kwargs']['button_list'] = ['Text', 'Kittens']

# retrieve history
    elif message_string.lower() in ('history', '/history'):
        from server.nlp.nlp_engine import nlp_engine, engine
        # Check to see that database file is written if needed
        # assert os.path.isfile(nlp.database_file)
        n1 = nlp_engine()
        report_string = n1.extract('janedoe')
        import re
        entry_pattern = re.compile(r'\n\s\s\s\sSun,.*')
        entry_search = entry_pattern.findall(report_string)
        message_text = 'Your last three entries:\n'
        for i in [-3, -2, -1]:
            message_text += '\n%s' % entry_search[i]
        response_details['kwargs']['message_text'] = message_text
    elif message_string == '.':
        response_details['function'] = 'pass'


# add entry to record
    else:
        if not message_string and 'voice' in update_details['message'].keys():
            response_details['kwargs'][
                'message_text'] = 'Transcription failed. Can you type that out instead?'
        else:
            from server.nlp.nlp_engine import nlp_engine, engine
            # Check to see that database file is written if needed
            # assert os.path.isfile(nlp.database_file)
            n1 = nlp_engine()
            token_list = []
            sentences_list = message_string.split('.')
            for sentence in sentences_list:
                word_list = sentence.split(' ')
                token_list.append(word_list)
            # n1.slurp("janedoe", [["My", "leg", "hurts"], ["I", "took", "an", "aspirin"]])
            n1.slurp('janedoe', token_list)
            if user_record['feedback_type'] == 'kittens':
                api_key = environ['CATAPI_API_KEY']
                kitten_details = get_kitten(api_key)
                if kitten_details['json']:
                    if 'src' in kitten_details['json'].keys():
                        response_details = {
                            'function': 'send_photo',
                            'kwargs': {
                                'user_id': user_id,
                                'photo_url': kitten_details['json']['src']
                            }
                        }
            print(n1.extract('janedoe'))

    return response_details
Example No. 12
        'client_secret': oauth2_config['oauth_client_secret'],
        'auth_endpoint': oauth2_config['oauth_auth_endpoint'],
        'token_endpoint': oauth2_config['oauth_token_endpoint'],
        'redirect_uri': oauth2_config['oauth_redirect_uri'],
        'request_mimetype': oauth2_config['oauth_request_mimetype']
    }
    oauth2_client = oauth2Client(**oauth2_kwargs)
    url_kwargs = {
        'state_value': random_characters(ascii_lowercase, 48)
    }
    if oauth2_config['oauth_service_scope']:
        url_kwargs['service_scope'] = oauth2_config['oauth_service_scope'].split()
    auth_url = oauth2_client.generate_url(**url_kwargs)

# retrieve access token
    service_name = oauth2_config['oauth_service_name']
    log_client = appdataClient(collection_name='Logs', prod_name='Fitzroy')
    path_filters = [{
        0: {'discrete_values': ['knowledge']},
        1: {'discrete_values': ['tokens']},
        2: {'discrete_values': [service_name]}}
    ]
    import yaml
    token_list = log_client.list(log_client.conditional_filter(path_filters), reverse_search=True)
    token_data = log_client.load(token_list[0])
    token_details = yaml.safe_load(token_data.decode())

# # test access token renewal
#     new_details = oauth2_client.renew_token(token_details['refresh_token'])
#     print(new_details['json'])
#     token_details.update(**new_details['json'])
#     new_key = 'knowledge/tokens/%s/%s/%s.yaml' % (service_name, token_details['user_id'], token_details['expires_at'])
#     log_client.create(new_key, token_details)
Example No. 13
    drive_client.save(record_key, record_data)
    drive_client.save(drep_key, drep_data)
    assert drive_client.exists(drep_key)
    assert not drive_client.exists('notakey')

# test list with different filters
    data_filter = { 2:{'must_contain': ['ogg$']}}
    filter_function = drive_client.conditional_filter(data_filter)
    data_search = drive_client.list(filter_function=filter_function, max_results=3)
    prefix_search = drive_client.list(prefix='lab/dev', delimiter='drep', max_results=3)
    print(prefix_search)

# test import and export method
    try:
        from labpack.storage.appdata import appdataClient
        export_client = appdataClient(collection_name='Test Export')
        drive_client.export(export_client)
        export_status = drive_client.export(export_client, overwrite=False)
        print(export_status)
        export_list = export_client.list(filter_function=filter_function, max_results=3)
        print(export_list)
        import_status = export_client.export(drive_client)
        print(import_status)
        export_client.remove()
    except Exception as err:
        print(err)

# test load method
    new_data = drive_client.load(data_search[0], secret_key=secret_key)
    new_hash = md5(new_data).digest()
    assert old_hash == new_hash
Example No. 14
    def get(self, remote_path, local_path='', overwrite=False, synopsis=True):

        '''
            a method to copy a folder or file from AWS instance to local device

        :param remote_path: string with path to copy contents on remote host
        :param local_path: [optional] string with path to folder or file on local host
        :param overwrite: [optional] boolean to enable file overwrite on remote host
        :param synopsis: [optional] boolean to simplify progress messages to one line
        :return: string with response
        '''

        title = '%s.get' % self.__class__.__name__

    # validate inputs
        input_fields = {
            'local_path': local_path,
            'remote_path': remote_path
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # verify remote path exists
        from os import path
        self.ec2.iam.printer_on = False
        try:
            test_path_cmd = 'ls %s' % remote_path.replace(' ', '\\ ')
            self.script(test_path_cmd)
        except:
            raise ValueError('%s does not exist on remote host. Canceling transfer.' % remote_path)
        self.ec2.iam.printer_on = True
        remote_path_root, remote_path_node = path.split(remote_path)
        if not remote_path_root:
            remote_path_root = '~/'
        remote_path = remote_path.replace(' ', '\\ ')
        remote_path_root = remote_path_root.replace(' ', '\\ ')

    # verify local root exists
        if local_path:
            local_path_root, local_path_node = path.split(local_path)
            if not local_path_root:
                local_path_root = './'
                local_path = path.join(local_path_root, local_path_node)
            if not path.exists(local_path_root):
                raise ValueError('%s folder does not exist on localhost. Canceling transfer.' % local_path_root)
        else:
            from copy import deepcopy
            local_path_node = deepcopy(remote_path_node)
            local_path = path.join('./', local_path_node)
            local_path_root = './'
        remote_path_node = remote_path_node.replace(' ', '\\ ')
    
    # verify local path does not exist
        if not overwrite:
            if path.exists(local_path):
                raise ValueError('%s already exists on localhost. Canceling transfer.' % local_path)

    # verify installation of scp on remote host
        self.ec2.iam.printer_on = False
        if not self.scp:
            try:
                self.script('scp')
                self.scp = True
            except Exception as err:
                if str(err).find('usage: scp') > -1:
                    self.scp = True
                else:
                    raise Exception('SCP needs to be installed on remote host. Canceling transfer.\nOn remote host, try: sudo yum install -y git')

    # determine sudo privileges
        sudo_insert = 'sudo '
        user_abs = self.script('readlink -f ~/')
        remote_abs = self.script('readlink -f %s' % remote_path_root)
        if remote_abs.find(user_abs) > -1:
            sudo_insert = ''
        self.ec2.iam.printer_on = True

    # initiate transfer process
        remote_host = '[%s@%s]' % (self.login_name, self.instance_ip)
        if synopsis:
            self.ec2.iam.printer('Transferring %s:%s to %s ... ' % (remote_host, remote_path, local_path), flush=True)
            self.ec2.iam.printer_on = False
        self.ec2.iam.printer('Initiating transfer of %s:%s to %s ... ' % (remote_host, remote_path, local_path))

    # construct temporary file folder on localhost
        from labpack import __module__
        from labpack.storage.appdata import appdataClient
        client_kwargs = {
            'collection_name': 'TempFiles',
            'prod_name': __module__
        }
        tempfiles_client = appdataClient(**client_kwargs)

    # make remote folder into a tar file
        from time import time
        tar_file = 'temp%s.tar.gz' % str(time())
        local_tar_path = path.join(tempfiles_client.collection_folder, tar_file)
        remote_tar_path = path.join('~/', tar_file)
        self.ec2.iam.printer('Creating temporary file %s:%s ... ' % (remote_host, remote_tar_path))
        create_command = '%star -czf %s -C %s %s' % (sudo_insert, remote_tar_path, remote_path_root, remote_path_node)
        self.script(create_command) 

    # define cleanup functions
        def _cleanup_local(tar_file, local_tar_path):
            self.ec2.iam.printer('Cleaning up temporary file %s ... ' % local_tar_path, flush=True)
            tempfiles_client.delete(tar_file)
            self.ec2.iam.printer('done.')
        def _cleanup_remote(remote_host, tar_path):
            self.ec2.iam.printer('Cleaning up temporary file %s:%s ... ' % (remote_host, tar_path))
            self.script('rm %s' % tar_path)
            
    # initiate scp transfer of tar file
        copy_msg = 'Copying %s:%s to %s ... ' % (remote_host, remote_tar_path, local_tar_path)
        error_msg = 'Failure copying file %s:%s to localhost.' % (remote_host, remote_tar_path)

    # use paramiko on windows systems
        # paramiko and scp are imported at module scope in the original source
        if self.localhost.os.sysname == 'Windows':
            ssh_key = paramiko.RSAKey.from_private_key_file(self.pem_file)
            client = paramiko.SSHClient()
            client.load_system_host_keys()
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            client.connect(hostname=self.instance_ip, username=self.ec2.iam.user_name, pkey=ssh_key)
            scp_transport = scp.SCPClient(client.get_transport())
            self.ec2.iam.printer(copy_msg, flush=True)
            try:
                response = scp_transport.get(remote_tar_path, local_tar_path)
            except:
                self.ec2.iam.printer('ERROR.')
                _cleanup_remote(remote_host, remote_tar_path)
                raise Exception(error_msg)
            client.close()
            self.ec2.iam.printer('done.')

    # use scp on other systems
        else:
            self.ec2.iam.printer(copy_msg, flush=True)
            sys_command = 'scp -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o IdentityFile="%s" %s@%s:%s "%s"' % (self.pem_file, self.login_name, self.instance_ip, remote_tar_path, local_tar_path)
            from subprocess import check_output, CalledProcessError, STDOUT
            try:
                response = check_output(sys_command, shell=True, stderr=STDOUT).decode('utf-8')
                self.ec2.iam.printer('done.')
            except CalledProcessError as err:
                self.ec2.iam.printer('ERROR.')
                _cleanup_remote(remote_host, remote_tar_path)
                print(err)
                raise Exception(error_msg)

    # extract tar file to local path and rename directory
        try:
            import tarfile
            with tarfile.open(local_tar_path, 'r:gz') as tar:
                tar.extractall(local_path_root)
                tar.close()
            if local_path_node.replace(' ', '\\ ') != remote_path_node:
                from shutil import move
                src = path.join(local_path_root, remote_path_node)
                dst = path.join(local_path_root, local_path_node)
                move(src, dst)
        except Exception as err:
            self.ec2.iam.printer('ERROR.')
            _cleanup_remote(remote_host, remote_tar_path)
            _cleanup_local(tar_file, local_tar_path)
            print(err)
            raise Exception(error_msg)
            
    # cleanup temporary files
        _cleanup_remote(remote_host, remote_tar_path)
        _cleanup_local(tar_file, local_tar_path)

    # return response
        self.ec2.iam.printer('Transfer of %s:%s to %s complete.' % (remote_host, remote_path, local_path))
        if synopsis:
            self.ec2.iam.printer_on = True
            self.ec2.iam.printer('done.')

        return response
Example No. 15
    def __init__(self, magic_file=''):

        ''' initialization method for labMagic class

        :param magic_file: [optional] string with local path to magic.mgc file
        '''

        title = '%s.__init__' % self.__class__.__name__

    # construct class field model
        from jsonmodel.validators import jsonModel
        self.fields = jsonModel(self._class_fields)

    # validate inputs
        input_fields = {
            'magic_file': magic_file
        }
        for key, value in input_fields.items():
            if value:
                object_title = '%s(%s=%s)' % (title, key, str(value))
                self.fields.validate(value, '.%s' % key, object_title)

    # construct magic method
        magic_kwargs = {
            'mime': True,
            'uncompress': True
        }
        from labpack.platforms.localhost import localhostClient
        sys_name = localhostClient().os.sysname
        if sys_name == 'Windows':
            if not magic_file:
                raise IndexError('%s(magic_file="...") is required on Windows systems.' % title)
        import os
        if magic_file:
            if not os.path.exists(magic_file):
                raise ValueError('%s(magic_file=%s) is not a valid file path.' % (title, magic_file))
            magic_kwargs['magic_file'] = magic_file
        try:
        # workaround for module namespace conflict
            from sys import path as sys_path
            sys_path.append(sys_path.pop(0))
            import magic
            sys_path.insert(0, sys_path.pop())
            self.magic = magic.Magic(**magic_kwargs)
        except:
            raise Exception('\nmagiclab requires the python-magic module. try: pip install python-magic\npython-magic requires the C library libmagic. See documentation in labpack.parsing.magic.')

    # construct mimetypes method
        import mimetypes
        self.mimetypes = mimetypes.MimeTypes()

    # retrieve updates to mimetypes
        mimetype_urls = self.fields.schema['mimetype_urls']
        from labpack.storage.appdata import appdataClient
        mime_collection = appdataClient('Mime Types')
        mime_filter = mime_collection.conditional_filter([{-1:{'must_contain': ['mime.types']}}])
        mime_list = mime_collection.list(mime_filter)
        for key in mimetype_urls.keys():
            file_path = os.path.join(mime_collection.collection_folder, key)
            if key not in mime_list:
                file_dir = os.path.split(file_path)[0]
                if not os.path.exists(file_dir):
                    os.makedirs(file_dir)
                import requests
                try:
                    response = requests.get(mimetype_urls[key])
                except Exception:
                    from labpack.handlers.requests import handle_requests
                    request_kwargs = {'url': mimetype_urls[key]}
                    response_details = handle_requests(requests.Request(**request_kwargs))
                    print('magiclab attempted to retrieve latest mimetype registry resource at %s but ran into this non-fatal error: %s' % (mimetype_urls[key], response_details['error']))
                    break
                with open(file_path, 'wb') as f:
                    f.write(response.content)
                    f.close()
            ext_map = mimetypes.read_mime_types(file_path)
            for ext, mime_type in ext_map.items():
                self.mimetypes.add_type(mime_type, ext)
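
A construction sketch based on the docstring above (the labMagic class lives in labpack.parsing.magic, per the exception text in the constructor; python-magic and libmagic must be installed):

if __name__ == '__main__':
    from labpack.parsing.magic import labMagic
    lab_magic = labMagic()   # on Windows, pass magic_file='path/to/magic.mgc'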
Example No. 16
def list(resource_type, paginate=False):

    title = 'list'

    # validate inputs
    input_fields = {'resource_type': resource_type}
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

# retrieve window size
    from os import popen
    console_rows, console_columns = popen('stty size', 'r').read().split()
    console_rows = int(console_rows)
    console_columns = int(console_columns)

    # list projects
    exit_msg = ''
    if resource_type == 'services':

        # construct registry client
        from pocketlab import __module__
        from labpack.storage.appdata import appdataClient
        registry_client = appdataClient(collection_name='Registry Data',
                                        prod_name=__module__)

        # walk registry to compile list of projects
        from tabulate import tabulate
        from labpack.records.settings import load_settings
        service_list = []
        left_width = 0
        table_headers = ['Service', 'Path']
        for file_path in registry_client.localhost.walk(
                registry_client.collection_folder):
            try:
                details = load_settings(file_path)
                service_name = details['service_name']
                service_root = details['service_root']
                if len(service_name) > left_width:
                    left_width = len(service_name)
                service_list.append([service_name, service_root])
            except:
                # skip records that cannot be parsed
                pass

    # format list of projects
        formatted_rows = []
        for row in service_list:
            row_width = left_width + 2 + len(row[1])
            path_text = row[1]
            if row_width > console_columns:
                cut_char = row_width - console_columns
                left_index = (len(row[1]) - cut_char - 10) * -1
                if left_index > -1:
                    path_text = '%s...' % row[1]
                else:
                    path_text = '%s...%s' % (row[1][0:7], row[1][left_index:])
            formatted_rows.append([row[0], path_text])

    # print out list
        if paginate and len(formatted_rows) + 5 > console_rows:
            page_rows = []
            for i in range(len(formatted_rows)):
                page_rows.append(formatted_rows[i])
                if len(page_rows) + 4 == console_rows:
                    table_text = tabulate(page_rows, headers=table_headers)
                    table_text += '\n[press any key for more]'
                    print(table_text)
                    page_rows = []
                    input()
                elif i + 1 == len(formatted_rows):
                    table_text = tabulate(page_rows, headers=table_headers)
                    if len(page_rows) + 5 == console_rows:
                        table_text += '\n[press any key for more]'
                    print(table_text)
                    if len(page_rows) + 5 == console_rows:
                        input()
        else:
            table_text = tabulate(formatted_rows, headers=table_headers)
            print(table_text)

# list images
    elif resource_type == 'images':
        pass

    return exit_msg
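
A usage sketch (assumes fields_model as in the other pocketlab commands; the window-size lookup uses stty, so a POSIX terminal is required):

if __name__ == '__main__':
    # print a table of all registered services without pagination
    list('services', paginate=False)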
Example No. 17
# test list record
    record_list = s3_client.list(prefix='testing/', delimiter='.yaml')
    record_filter = { 2: {'must_contain': ['json$']}}
    filter_function = s3_client.conditional_filter(record_filter)
    filter_list = s3_client.list(prefix='testing/', filter_function=filter_function)
    assert record_list[0] == filter_list[0]

# test load record
    record_load = s3_client.load(record_key, secret_key=secret_key)
    record_details = json.loads(record_load.decode())
    assert record_details == main_kwargs

# test export collection
    from labpack.storage.appdata import appdataClient
    appdata_client = appdataClient(collection_name='Unittest Local')
    print(s3_client.export(appdata_client))
    export_data = appdata_client.load(record_key, secret_key)
    assert json.loads(export_data.decode()) == main_kwargs
    print(appdata_client.export(s3_client, overwrite=False))
    export_list = appdata_client.list()
    print(export_list)
    appdata_client.remove()

# test delete record
    print(s3_client.delete(record_key))

# remove collection
    s3_client.remove()

try:
    import pytest
except:
    print('pytest module required to perform unittests. try: pip install pytest')
    exit()

from labpack.storage.appdata import appdataClient
from labpack.performance import performlab
from jsonmodel.exceptions import InputValidationError

if __name__ == '__main__':
    
    from time import time, sleep
    from copy import deepcopy
    
# initialize client
    appdata_client = appdataClient(collection_name='Unit Tests')
    export_client = appdataClient(collection_name='Test Export')

# construct test records
    import json
    from hashlib import md5
    from labpack.compilers import drep
    secret_key = 'upside'
    test_record = {
        'dt': 1474509314.419702,
        'deviceID': '2Pp8d9lpsappm8QPv_Ps6cL0'
    }
    test_data = open('../data/test_voice.ogg', 'rb').read()
    data_key = 'lab/voice/unittest.ogg'
    record_data = json.dumps(test_record).encode('utf-8')
    record_key = 'lab/device/unittest.json'