def retrieve_flightplan(access_token, root_path='../data/'):

    ''' a method to retrieve the flightplan details for an account

    :param access_token: string with oauth2 access token for the account
    :param root_path: [optional] string with path to folder of account data
    :return: dictionary with flightplan details (empty if account not found)
    '''

    from os import listdir, path
    from hashlib import md5
    from labpack.records.settings import load_settings

    flightplan_details = {}

    # account folders are named by the md5 hash of the account access token
    account_dir = listdir(root_path)
    hash_string = md5(access_token.encode('utf-8')).hexdigest()
    if hash_string in account_dir:

        # BUG FIX: path was previously hardcoded to '../data/%s/flightplan.json',
        # silently ignoring the root_path argument
        flightplan_path = path.join(root_path, hash_string, 'flightplan.json')
        flightplan_details = load_settings(flightplan_path)

        # construct image paths for each waypoint
        for waypoint in flightplan_details['waypoints']:
            base_dir_path = 'public/images/%s/%s%s' % (hash_string, waypoint['lat'], waypoint['lon'])
            for elevation in ('3', '10', '17'):
                image_dir_path = '%s/%s' % (base_dir_path, elevation)
                if path.exists(image_dir_path):
                    for image in listdir(image_dir_path):
                        # image file name is the photo date
                        image_name, image_ext = path.splitext(image)
                        image_path = '/%s/%s' % (image_dir_path, image)
                        if 'photos' not in waypoint:
                            waypoint['photos'] = []
                        photo_details = {
                            'date': image_name,
                            'elevation': int(elevation),
                            'src': image_path
                        }
                        waypoint['photos'].append(photo_details)

    return flightplan_details
def compile_map(folder_path, file_suffix='', json_model=False, pythonic=False):

    ''' a method to compile the files in a folder into a map of key, value pairs

    :param folder_path: string with path to folder of settings files
    :param file_suffix: [optional] string with file suffix to strip from keys
    :param json_model: [optional] boolean to wrap each file in a jsonModel object
    :param pythonic: [optional] boolean to normalize keys to snake_case
    :return: dictionary with one key, value pair per settings file
    '''

    from os import path
    from labpack.records.settings import load_settings

    file_map = {}
    file_list = compile_list(folder_path, file_suffix)
    for file_path in file_list:
        file_details = load_settings(file_path)
        # key is the file name with the suffix stripped
        file_key = path.split(file_path)[1].replace(file_suffix, '')
        if pythonic:
            file_key = file_key.replace(' ', '_').replace('-', '_').lower()
        if json_model:
            from jsonmodel.validators import jsonModel
            file_map[file_key] = jsonModel(file_details)
            # add any schema found in the model metadata as its own entry
            if 'schema' in file_map[file_key].metadata:
                metadata_details = file_map[file_key].metadata
                metadata_key = file_key + '-metadata'
                if pythonic:
                    # BUG FIX: previously assigned to the misspelled name
                    # 'metdata_key', so the normalized key was discarded
                    metadata_key = metadata_key.replace(' ', '_').replace('-', '_').lower()
                file_map[metadata_key] = jsonModel(metadata_details)
        else:
            file_map[file_key] = file_details

    return file_map
def read_account(account_id='', account_email='', account_token=''):

    ''' a method to retrieve the details of an account from local records

    :param account_id: [optional] string with id of account
    :param account_email: [optional] string with email address of account
    :param account_token: [optional] string with token of account
    :return: dictionary with account details (empty if no record found)
    '''

    # import dependencies
    from labpack.records.settings import load_settings
    from labpack.platforms.localhost import localhostClient

    # construct default response
    account_details = {}
    localhost_client = localhostClient()

    # determine the record file name and index folder to search
    file_name = ''
    list_root = ''
    if account_id:
        file_name = '%s.yaml' % account_id
        list_root = '../data/accounts/id'
    elif account_email:
        import hashlib
        email_hash = hashlib.sha256(account_email.encode('utf-8')).hexdigest()
        file_name = '%s.yaml' % email_hash
        list_root = '../data/accounts/email'
    elif account_token:
        file_name = '%s.yaml' % account_token
        list_root = '../data/accounts/token'

    # search the index for a matching record
    account_results = []
    if file_name:
        record_query = [{'.file_name': {'discrete_values': [file_name]}}]
        record_filter = localhost_client.conditional_filter(record_query)
        account_results = localhost_client.list(record_filter, list_root=list_root, max_results=1)

    # load the first matching record
    if account_results:
        account_details = load_settings(account_results[0])

    return account_details
def read_state(state_value):

    ''' a method to retrieve the details of an oauth2 state record

    :param state_value: string with value of the state token
    :return: dictionary with state details (empty if no record exists)
    '''

    from os import path
    from labpack.records.settings import load_settings

    # state records are stored as yaml files keyed by their state value
    state_path = '../data/oauth2/state/%s.yaml' % state_value
    if not path.exists(state_path):
        return {}

    return load_settings(state_path)
def retrieve_services(service_list=None, all=False):

    ''' a method to generate the root path for one or more services

    :param service_list: list of strings with name of services
    :param all: boolean to indicate the retrieve all paths in registry
    :return: list of dictionaries, string with exit message insert
    '''

    # define default returns
    path_list = []
    msg_insert = 'local service'

    # add each named service to the service list
    if service_list:
        msg_insert = ''
        last_index = len(service_list) - 1
        for i, service in enumerate(service_list):
            if msg_insert:
                # join names with commas and a final 'and'
                msg_insert += ' and ' if i == last_index else ', '
            msg_insert += '"%s"' % service
            path_list.append({
                'name': service,
                'path': retrieve_service_root(service)
            })

    # add all services in registry to service list
    elif all:
        msg_insert = 'all services'
        from pocketlab import __module__
        from labpack.storage.appdata import appdataClient
        registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)
        from labpack.records.settings import load_settings
        for file_path in registry_client.localhost.walk(registry_client.collection_folder):
            try:
                details = load_settings(file_path)
                path_list.append({
                    'name': details['service_name'],
                    'path': details['service_root']
                })
            except:
                # skip registry records which cannot be parsed
                pass

    # fall back to the current working directory
    else:
        path_list.append({'name': '', 'path': './'})

    return path_list, msg_insert
def validate_heroku(heroku_model, file_path, service_name=''):

    ''' a method to validate heroku.yaml configuration

    :param heroku_model: jsonModel object with heroku config schema
    :param file_path: string with path to heroku.yaml file for service
    :param service_name: [optional] string with name of service
    :return: dictionary with heroku configuration
    '''

    # construct message inserts for error reporting
    msg_insert = 'working directory'
    if service_name:
        msg_insert = 'root directory for "%s"' % service_name
    msg_insert_2 = 'cred sub-folder of %s' % msg_insert

    # validate heroku yaml exists
    from os import path
    if not path.exists(file_path):
        raise ValueError('heroku.yaml does not exist in %s.\nTry: "lab init --heroku" in %s.' % (msg_insert_2, msg_insert))

    # validate heroku yaml is valid
    from labpack.records.settings import load_settings
    try:
        heroku_details = load_settings(file_path)
    except:
        raise ValueError('heroku.yaml file in %s corrupted.\nTry deleting and running again in %s: "lab init --heroku"' % (msg_insert_2, msg_insert))

    # validate each top-level field against the model
    from jsonmodel.exceptions import InputValidationError
    for key, value in heroku_details.items():
        object_title = 'Field %s in heroku.yaml in %s' % (key, msg_insert_2)
        try:
            heroku_model.validate(value, '.%s' % key, object_title)
        except InputValidationError as err:
            # a None value for a map field is tolerated; anything else re-raises
            error_msg = "Value None for field .%s failed test 'value_datatype': map" % key
            if err.message.find(error_msg) == -1:
                raise

    return heroku_details
def ingest_environ(model_path=''):

    ''' a method to convert environment variables into json typed data

    :param model_path: [optional] string with path to a jsonmodel schema file
    :return: dictionary with typed environment values; if model_path is given,
             only the model's fields are returned, validated against the model
    :raises: ValueError if model_path is not a valid file path
    '''

    from os import environ, path

    # map of case-insensitive keywords to their typed equivalents
    keyword_map = {
        'true': True,
        'false': False,
        'null': None,
        'none': None
    }

    # convert environment variables into json typed data
    typed_dict = {}
    for key, value in dict(environ).items():
        # lower the value once instead of once per keyword comparison
        keyword = value.lower()
        if keyword in keyword_map:
            typed_dict[key] = keyword_map[keyword]
        else:
            # fall back from int to float to the raw string
            try:
                typed_dict[key] = int(value)
            except ValueError:
                try:
                    typed_dict[key] = float(value)
                except ValueError:
                    typed_dict[key] = value

    # feed environment variables through model
    if model_path:
        from labpack.records.settings import load_settings
        if not path.exists(model_path):
            raise ValueError('%s is not a valid file path.' % model_path)
        model_dict = load_settings(model_path)
        from jsonmodel.validators import jsonModel
        model_object = jsonModel(model_dict)
        default_dict = model_object.ingest(**{})
        # override each model default with its uppercase environment value
        for key in default_dict.keys():
            if key.upper() in typed_dict:
                valid_kwargs = {
                    'input_data': typed_dict[key.upper()],
                    'object_title': 'Environment variable %s' % key.upper(),
                    'path_to_root': '.%s' % key
                }
                default_dict[key] = model_object.validate(**valid_kwargs)
        return default_dict

    return typed_dict
def inject_envvar(folder_path):

    ''' a method to create environment variables from file key-value pairs '''

    import os
    from labpack.records.settings import load_settings

    # gather every settings file found in the folder
    settings_paths = []
    for suffix in ('.yaml', '.yml', '.json'):
        settings_paths.extend(compile_list(folder_path, suffix))

    # export each top-level field as an uppercase environment variable
    for settings_path in settings_paths:
        settings_details = load_settings(settings_path)
        for field, field_value in settings_details.items():
            os.environ[field.upper()] = str(field_value)  # TODO: walk lists and dicts

    return True
def compile_map(folder_path, file_suffix='', json_model=False):

    ''' a method to compile the files in a folder into a map of key, value pairs '''

    from os import path
    from labpack.records.settings import load_settings

    file_map = {}
    for settings_path in compile_list(folder_path, file_suffix):
        settings_details = load_settings(settings_path)
        # key is the file name with the suffix stripped
        settings_key = path.split(settings_path)[1].replace(file_suffix, '')
        if not json_model:
            file_map[settings_key] = settings_details
        else:
            from jsonmodel.validators import jsonModel
            file_map[settings_key] = jsonModel(settings_details)

    return file_map
def clean(verbose=True):

    ''' a method to remove broken service records from the lab registry '''

    # construct registry client
    from pocketlab import __module__
    from labpack.storage.appdata import appdataClient
    registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)

    # remove each file in registry without a valid path
    from os import path
    from labpack.records.settings import load_settings, remove_settings
    for file_path in registry_client.localhost.walk(registry_client.collection_folder):
        # a record is broken if it cannot be parsed, is missing its name or
        # root fields, or its root folder no longer exists
        remove_file = True
        try:
            details = load_settings(file_path)
            service_name = details['service_name']
            service_root = details['service_root']
            if path.exists(service_root):
                remove_file = False
        except:
            pass
        if remove_file:
            if verbose:
                file_root, file_ext = path.splitext(file_path)
                file_dir, file_name = path.split(file_root)
                print('Broken service "%s" removed from lab registry.' % file_name)
            remove_settings(file_path)

    # TODO remove docker containers with exit 1 status
    # TODO remove docker images with ^none name

    exit_msg = 'Lab environment has been cleaned up.'
    return exit_msg
self.printer('ERROR') _cleanup_temp() raise self.printer('done.') # deploy site to heroku self.printer('Deploying %s to heroku ... ' % site_folder, flush=True) try: sys_command = 'cd %s; heroku builds:create -a %s' % (temp_folder, self.subdomain) self._handle_command(sys_command, print_pipe=True) except: self.printer('ERROR') raise finally: _cleanup_temp() self.printer('Deployment complete.') return True if __name__ == '__main__': from labpack.records.settings import load_settings heroku_config = load_settings('../../../cred/heroku.yaml') heroku_kwargs = { 'account_email': heroku_config['heroku_account_email'], 'auth_token': heroku_config['heroku_auth_token'], 'verbose': True } heroku_client = herokuClient(**heroku_kwargs) heroku_client.access(heroku_config['heroku_app_subdomain'])
__author__ = 'rcj1492'
__created__ = '2017.05'
__license__ = 'MIT'

from pocketlab.utils import inject_defaults, compile_model, compile_commands, compile_arguments

if __name__ == '__main__':

    # compile the home command model from the cli schema with injected defaults
    from pocketlab import __module__, __order__
    from pocketlab.commands.home import _home_schema as home_schema
    from labpack.records.settings import load_settings
    folder_path = '../pocketlab/commands/'
    cli_schema = load_settings('../pocketlab/models/lab-cli.json')
    default_schema = load_settings('../pocketlab/models/lab-defaults.json')
    home_schema = inject_defaults(home_schema, default_schema)
    home_model = compile_model(home_schema, cli_schema)
    # a well-formed model validates its own schema
    home_model.validate(home_model.schema)

    # compile the command list from the commands folder
    command_list = compile_commands(folder_path, cli_schema, __module__, __order__)
    assert command_list

    # compile argparse argument groups from the home model
    def_args, pos_args, opt_args, exc_args = compile_arguments(home_model)
    assert def_args
    assert pos_args
    assert opt_args

    # verify that the default '-q' option was injected into the optional args
    defaults_injected = False
    for argument in opt_args:
        if argument['args'][0] == '-q':
            defaults_injected = True
    assert defaults_injected
from jsonmodel.loader import jsonLoader from pocketlab.methods.config import compile_yaml config_schema = jsonLoader(__module__, value['schema_path']) config_text = compile_yaml(config_schema) with open(config_path, 'wt') as f: f.write(config_text) f.close() _printer(config_path) # add readme file readme_path = 'README.md' if not path.exists(readme_path): from pocketlab.methods.config import construct_readme readme_text = construct_readme(vcs_service=vcs_service) with open(readme_path, 'wt', encoding='utf-8') as f: f.write(readme_text) f.close() _printer(readme_path) exit_msg = 'Lab framework setup in current directory.' return exit_msg if __name__ == "__main__": from labpack.records.settings import load_settings from jsonmodel.validators import jsonModel config_path = '../models/lab-config.json' config_model = jsonModel(load_settings(config_path)) print(config_model.ingest())
scheduler_kwargs.update(**scheduler_update) flask_scheduler = GeventScheduler(**scheduler_kwargs) flask_scheduler.start() # import authorization methods from labpack.handlers.requests import handle_requests from labpack.authentication.oauth2 import oauth2Client from labpack.parsing.flask import extract_request_details from server.utils import read_state, delete_state, create_token, construct_response # define landing kwargs from labpack.records.settings import load_settings landing_kwargs = { 'landing_page': True, 'id_verified': False, 'page_details': load_settings('assets/copy/lab-main.json') } @flask_app.route('/') def landing_page(): ''' the landing page ''' return render_template('landing.html', **landing_kwargs), 200 @flask_app.route('/authorize/<service_name>') def authorize_service_route(service_name=''): ''' a method to handle the oauth2 callback ''' # ingest request request_details = extract_request_details(request) flask_app.logger.debug(request_details)
# TODO create out of wallet answer method def wallet_answers(self, application_id, customer_ip, answer_dict): pass # TODO create application details method def application_details(self): pass if __name__ == '__main__': # construct client from labpack.records.settings import load_settings from labpack.handlers.requests import handle_requests capitalone_cred = load_settings('../../../cred/capitalone.yaml') deposits_kwargs = { 'client_id': capitalone_cred['capitalone_client_id'], 'client_secret': capitalone_cred['capitalone_client_secret'], 'sandbox': True, 'requests_handler': handle_requests, 'retrieve_details': True } deposits_client = depositsClient(**deposits_kwargs) print(deposits_client.products) # # test access token # deposits_client.access_token() # assert deposits_client._access_token # # # test account products
__author__ = 'rcj1492' __created__ = '2018.04' __license__ = '©2018 Collective Acuity' # retrieve system environment from os import environ system_environment = environ.get('SYSTEM_ENVIRONMENT', 'dev') # retrieve credentials from labpack.records.settings import load_settings flask_config = load_settings('../cred/flask.yaml') scheduler_config = {} # construct flask app object from flask import Flask flask_kwargs = { 'import_name': __name__, 'static_folder': 'public', 'template_folder': 'views' } app = Flask(**flask_kwargs) # define flask environments class flaskDev(object): LAB_SECRET_KEY = flask_config['flask_secret_key'] LAB_SERVER_PROTOCOL = 'http' LAB_SERVER_DOMAIN = 'localhost' LAB_SERVER_PORT = 5001 LAB_SERVER_LOGGING = 'DEBUG' LAB_SQL_SERVER = 'sqlite:///../data/records.db'
'css_assets', 'styles/bootstrap.css', 'styles/icomoon.css', 'styles/simple-line-icons.css', 'styles/project.min.css' ] # css_assets.extend(css_bundle) assets.register(*css_assets) # import speech client and databases from server.utils import construct_response, parse_query, get_attestations, post_attestation, synthesize_attestation, synthesize_attestations from server.init import speech_client, producers_map, attesters_map # define jinja content from labpack.records.settings import load_settings main_details = load_settings('copy/main.json') menu_details = load_settings('copy/menu.json') landing_kwargs = { 'menu': menu_details } landing_kwargs.update(**main_details) @app.route('/') def landing_page(): ''' landing page route ''' return render_template('dashboard.html', **landing_kwargs), 200 @app.route('/query', methods=['POST']) def query_route(): ''' query route '''
raise TimeoutError(timeout_msg) response_time = t2 - t1 if 3 - response_time > 0: delay = 3 - response_time else: delay = 0 sleep(delay) if __name__ == '__main__': # retrieve credentials from labpack.records.settings import load_settings from labpack.platforms.aws.ec2 import ec2Client pem_folder = '../../../keys' cred_path = '../../../../cred/awsLab.yaml' aws_cred = load_settings(cred_path) # determine active test instance properties client_kwargs = { 'access_id': aws_cred['aws_access_key_id'], 'secret_key': aws_cred['aws_secret_access_key'], 'region_name': aws_cred['aws_default_region'], 'owner_id': aws_cred['aws_owner_id'], 'user_name': aws_cred['aws_user_name'], 'verbose': False } ec2_client = ec2Client(**client_kwargs) instance_list = ec2_client.list_instances(tag_values=['test']) if not instance_list: raise Exception('There are no test instances running.') instance_id = instance_list[0]
__author__ = 'rcj1492' __created__ = '2017.07' __license__ = 'MIT' from labpack.storage.google.drive import driveClient if __name__ == '__main__': # initialize client import pytest from pprint import pprint from labpack.records.settings import load_settings google_tokens = load_settings('../../cred/tokens/google-drive.yaml') access_token = google_tokens['google_drive_access_token'] drive_client = driveClient(access_token, 'Unit Test') # prevent accidental use assert drive_client.drive_space == 'appDataFolder' count = 0 for id, name, mimetype in drive_client._list_directory(): count += 1 break assert not count # construct test records import json from hashlib import md5 from labpack.compilers import drep secret_key = 'upside' test_record = { 'dt': 1474509314.419702,
if not isinstance(cassandra_session, Session): raise ValueError('%s(cassandra_session) must be a cassandra.cluster.Session datatype.' % title) self.session = cassandra_session # test, create or update keyspace # test, create or update table if __name__ == '__main__': # test client init (with auth and ssl) test_public = False from labpack.records.settings import load_settings cass_cred = load_settings('../../../cred/cassandra-account.yaml') cert_path = '../../keys/root.cass.20180220.crt' cass_hostname = '127.0.0.1' cass_port = 9042 if test_public: cass_hostname = cass_cred['cassandra_database_hostname'] cass_port = cass_cred['cassandra_database_port'] cassandra_session = cassandraSession( hostname=cass_hostname, port=cass_port, username=cass_cred['cassandra_account_username'], password=cass_cred['cassandra_account_password'], cert_path=cert_path ) print(cassandra_session.session) print(cassandra_session.session.__class__)
__author__ = 'rcj1492' __created__ = '2017.07' __license__ = 'MIT' from labpack.storage.dropbox import dropboxClient if __name__ == '__main__': # initialize client import pytest from pprint import pprint from labpack.records.settings import load_settings dropbox_tokens = load_settings('../../cred/tokens/dropbox.yaml') access_token = dropbox_tokens['dropbox_access_token'] dropbox_client = dropboxClient(access_token, 'Unit Test') # construct test records import json from hashlib import md5 from labpack.compilers import drep secret_key = 'upside' test_record = { 'dt': 1474509314.419702, 'deviceID': '2Pp8d9lpsappm8QPv_Ps6cL0' } test_data = open('../data/test_voice.ogg', 'rb').read() data_key = 'lab/voice/unittest.ogg' record_data = json.dumps(test_record).encode('utf-8') record_key = 'lab/device/unittest.json' drep_data = drep.dump(test_record, secret_key) drep_key = 'lab/device/unittest.drep'
__author__ = 'rcj1492' __created__ = '2016.11' __license__ = 'MIT' from labpack.activity.moves import * if __name__ == '__main__': # import dependencies & configs from pprint import pprint from time import time, sleep from labpack.records.settings import load_settings from labpack.handlers.requests import handle_requests moves_config = load_settings('../../cred/moves.yaml') # test oauth construction from labpack.authentication.oauth2 import oauth2Client oauth_kwargs = { 'client_id': moves_config['oauth_client_id'], 'client_secret': moves_config['oauth_client_secret'], 'redirect_uri': moves_config['oauth_redirect_uri'], 'auth_endpoint': moves_config['oauth_auth_endpoint'], 'token_endpoint': moves_config['oauth_token_endpoint'], 'request_mimetype': moves_config['oauth_request_mimetype'], 'requests_handler': handle_requests } moves_oauth = oauth2Client(**oauth_kwargs) # test generate url url_kwargs = { 'service_scope': moves_config['oauth_service_scope'].split(),
raise IndexError('%s(...) requires either a photo_path, photo_id or photo_url argument' % title) # send request response_details = self._post_request(**request_kwargs) return response_details def send_voice(self, user_id, voice_id='', voice_path='', voice_url='', caption_text='', button_list=None, small_buttons=True, persist_buttons=False): return True if __name__ == '__main__': from labpack.records.settings import load_settings, save_settings from labpack.handlers.requests import handle_requests telegram_config = load_settings('../../../cred/telegram.yaml') photo_url = 'https://pbs.twimg.com/profile_images/479475632158408704/Zelyz-xr_400x400.png' photo_id = 'AgADAQADsKcxG4RH3Q85DF_-VgGr___A5y8ABVzwsrRBb8xF-wEAAQI' photo_path = '../../data/test_photo.png' file_path = '../../data/test_voice.ogg' update_path = '../../data/telegram-update.json' update_id = load_settings(update_path)['last_update'] bot_id = telegram_config['telegram_bot_id'] access_token = telegram_config['telegram_access_token'] user_id = telegram_config['telegram_admin_id'] telegram_bot = telegramBotClient(bot_id, access_token, requests_handler=handle_requests) details = telegram_bot.get_me() assert details['json']['result']['id'] == bot_id updates_details = telegram_bot.get_updates() if updates_details['json']['result']: update_list = sorted(updates_details['json']['result'], key=lambda k: k['update_id'])
__author__ = 'rcj1492' __created__ = '2018.05' __license__ = '©2018 Collective Acuity' # retrieve system environment from os import environ system_environment = environ.get('SYSTEM_ENVIRONMENT', 'dev') # retrieve credentials from labpack.records.settings import load_settings flask_config = load_settings('../cred/flask.yaml') postgres_config = load_settings('../cred/aws-postgres.yaml') polly_config = load_settings('../cred/aws-polly.yaml') scheduler_config = {} # scheduler_config = load_settings('../cred/scheduler.yaml') # mailgun_config = load_settings('../cred/mailgun.yaml') # construct postgres database url postgres_url = '' if postgres_config['aws_postgres_username']: postgres_url = 'postgres://%s:%s@%s:%s/%s' % ( postgres_config['aws_postgres_username'], postgres_config['aws_postgres_password'], postgres_config['aws_postgres_hostname'], postgres_config['aws_postgres_port'], postgres_config['aws_postgres_dbname']) # TODO construct cassandra database url and ssl cert # construct flask app object from flask import Flask
def construct_index(module_name):

    ''' a method to construct the text of an index.md file for a module

    :param module_name: string with name of module to substitute into template
    :return: string with index file text
    '''

    # retrieve index text
    file_text = retrieve_template('models/index.md.txt')

    # replace module name
    file_text = file_text.replace('pocketlab', module_name)

    return file_text

if __name__ == '__main__':

    # test yaml compilation from the standard config schema
    user_path = '../../tests/testservice/lab.yaml'
    standard_path = '../models/lab-config.json'
    from labpack.records.settings import load_settings
    standard_schema = load_settings(standard_path)
    text = compile_yaml(standard_schema, user_path)
    # print(text)

    # test injection of init fields into setup kwargs
    init_path = '../__init__.py'
    readme_path = '../../README.rst'
    setup_kwargs = inject_init(init_path, readme_path, {})
    # print(setup_kwargs)

    # test update of an existing setup.py text
    setup_text = open('../../setup.py').read()
    new_text = update_setup(setup_text)
    # print(new_text)

    # test construction of a new setup.py text
    module_name = 'newmodule'
    setup_text = construct_setup(module_name)
    # print(setup_text)
if updates_details['json']['result']: update_list = sorted(updates_details['json']['result'], key=lambda k: k['update_id']) offset_details = {'last_update': update_list[-1]['update_id']} telegram_data_client.create(update_key, offset_details) for update in update_list: user_id = update['message']['from']['id'] contact_id = 'telegram_%s' % user_id record_key = 'incoming/%s/%s.json' % (contact_id, str(time())) telegram_data_client.create(record_key, update) # analyze message response_details = analyze_message(update, user_id, telegram_bot_client) if response_details['function'] == 'send_message': telegram_bot_client.send_message(**response_details['kwargs']) elif response_details['function'] == 'send_photo': telegram_bot_client.send_photo(**response_details['kwargs']) # save response record_key = 'outgoing/%s/%s.json' % (contact_id, str(time())) telegram_data_client.create(record_key, response_details) return True if __name__ == '__main__': from labpack.records.settings import load_settings telegram_config = load_settings('../cred/telegram.yaml') monitor_telegram(telegram_config)
import paho.mqtt.publish as mqtt_publish mqtt_publish.single( topic=topic_string, payload=fingerprint_string, auth={ 'username': self.group_name, 'password': self.password }, hostname=self.server_url, port=port ) return True if __name__ == '__main__': from time import time from labpack.records.settings import load_settings find_cred = load_settings('../../../cred/find.yaml') find_client = findClient( group_name=find_cred['find_mqtt_group'], password=find_cred['find_mqtt_password'] ) # test password password = find_client.get_password() assert password == find_cred['find_mqtt_password'] # test locations locations = find_client.get_locations() print(locations) # test update model assert find_client.update_model()
# read audio stream if not stream_response: response_details['audio_stream'] = response_details['audio_stream'].read() # TODO usage client methods / format if self.usage_client: self.usage_client.update('polly', 'synthesis', response_details) return response_details if __name__ == '__main__': # test client initialization from labpack.records.settings import load_settings aws_cred = load_settings('../../../../cred/aws-polly.yaml') client_kwargs = { 'access_id': aws_cred['aws_polly_access_key_id'], 'secret_key': aws_cred['aws_polly_secret_access_key'], 'region_name': aws_cred['aws_polly_default_region'], 'owner_id': aws_cred['aws_polly_owner_id'], 'user_name': aws_cred['aws_polly_user_name'] } polly_client = pollyClient(**client_kwargs) # test speech synthesis test_text = 'Hopefully this works well enough to discern the accent.' test_speech = '../../../data/test_speech.mp3' with open(test_speech, 'wb') as f: response = polly_client.synthesize(test_text, voice_id='Emma') f.write(response['audio_stream'])
'code': response.status_code, 'url': response.url, 'error': '', 'json': None, 'headers': response.headers } # handle different codes if details['code'] == 200: details['json'] = response.json() else: details['error'] = response.content.decode() return details if __name__ == '__main__': from labpack.records.settings import load_settings bluemix_config = load_settings('../cred/bluemix.yaml') username = bluemix_config['bluemix_speech2text_username'] password = bluemix_config['bluemix_speech2text_password'] token_details = bluemix_token(username, password) auth_token = token_details['json']['token'] file_name = 'watson_test' file_path = '../data/%s.ogg' % file_name file_data = open(file_path, 'rb') transcribed_text = bluemix_speech2text(file_data, file_path, auth_token) print(transcribed_text)
__author__ = 'rcj1492' __created__ = '2018.04' # construct postgres url from labpack.records.id import labID from jsonmodel.validators import jsonModel from labpack.records.settings import load_settings postgres_config = load_settings('../cred/aws-postgres.yaml') postgres_url = 'postgres://%s:%s@%s:%s/%s' % ( postgres_config['aws_postgres_username'], postgres_config['aws_postgres_password'], postgres_config['aws_postgres_hostname'], postgres_config['aws_postgres_port'], postgres_config['aws_postgres_dbname']) # construct object map object_map = { 'users': load_settings('models/users.json'), 'telemetry': load_settings('models/telemetry.json') } # construct sql tables from labpack.databases.sql import sqlClient sql_tables = {} for key, value in object_map.items(): table_name = key.replace('-', '_') sql_kwargs = { 'table_name': table_name, 'database_url': 'sqlite:///../data/records.db', 'record_schema': value }
# construct request_kwargs request_kwargs = { 'url': '%s/address/validate' % self.api_endpoint, 'params': { 'address': email_address } } # send request response_details = self._get_request(**request_kwargs) return response_details if __name__ == '__main__': from labpack.records.settings import load_settings mailgun_cred = load_settings('../../../cred/mailgun.yaml') # construct client from labpack.handlers.requests import handle_requests mailgun_kwargs = { 'api_key': mailgun_cred['mailgun_api_key'], 'email_key': mailgun_cred['mailgun_email_key'], 'account_domain': mailgun_cred['mailgun_spf_route'], 'requests_handler': handle_requests } mailgun_client = mailgunClient(**mailgun_kwargs) # test validation email_address = '*****@*****.**' response_details = mailgun_client.validate_email(email_address) assert response_details['json']['is_valid']
__author__ = 'rcj1492'
__created__ = '2017.04'
__license__ = '©2017 Collective Acuity'

from labpack.records.settings import load_settings
from watson_developer_cloud import DocumentConversionV1

if __name__ == '__main__':

    # convert a test pdf into answer units with watson document conversion
    file_path = '../../media/test-pdf-4.pdf'
    save_path = '../../media/test-pdf-4.json'
    watson_config = load_settings('../../cred/watson.yaml')
    username = watson_config['watson_conversion_username']
    password = watson_config['watson_conversion_password']
    document_conversion = DocumentConversionV1(username=username, password=password, version='2015-12-15')
    convert_config = {'conversion_target': 'ANSWER_UNITS'}
    file_data = open(file_path, "rb")
    response = document_conversion.convert_document(document=file_data, config=convert_config)
    print(response.status_code)

    # save the json response to disk
    with open(save_path, 'wt', encoding='utf-8', errors='ignore') as save_file:
        import json
        save_file.write(json.dumps(response.json(), indent=2))
        save_file.close()
def list(resource_type, paginate=False):

    ''' a method to print a list of lab resources of a given type

    :param resource_type: string with type of resource to list
    :param paginate: [optional] boolean to page output to the console height
    :return: string with exit message
    '''

    title = 'list'

    # validate inputs
    input_fields = {'resource_type': resource_type}
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            fields_model.validate(value, '.%s' % key, object_title)

    # retrieve window size
    # NOTE(review): 'stty size' requires an attached tty; presumably this
    # command is never run with redirected stdin -- confirm before reuse
    from os import popen
    console_rows, console_columns = popen('stty size', 'r').read().split()
    console_rows = int(console_rows)
    console_columns = int(console_columns)

    # list projects
    exit_msg = ''
    if resource_type == 'services':

        # construct registry client
        from pocketlab import __module__
        from labpack.storage.appdata import appdataClient
        registry_client = appdataClient(collection_name='Registry Data', prod_name=__module__)

        # walk registry to compile list of project
        from tabulate import tabulate
        from labpack.records.settings import load_settings
        service_list = []
        left_width = 0
        table_headers = ['Service', 'Path']
        for file_path in registry_client.localhost.walk(registry_client.collection_folder):
            try:
                details = load_settings(file_path)
                service_name = details['service_name']
                service_root = details['service_root']
                # track the widest service name for row width calculations
                if len(service_name) > left_width:
                    left_width = len(service_name)
                service_list.append([service_name, service_root])
            except:
                pass

        # format list of projects
        # paths too wide for the console are shortened with an ellipsis
        formatted_rows = []
        for row in service_list:
            row_width = left_width + 2 + len(row[1])
            path_text = row[1]
            if row_width > console_columns:
                cut_char = row_width - console_columns
                left_index = (len(row[1]) - cut_char - 10) * -1
                if left_index > -1:
                    # NOTE(review): appending '...' makes this row longer, not
                    # shorter -- looks like a truncation bug; confirm intent
                    path_text = '%s...' % row[1]
                else:
                    path_text = '%s...%s' % (row[1][0:7], row[1][left_index:])
            formatted_rows.append([row[0], path_text])

        # print out list
        if paginate and len(formatted_rows) + 5 > console_rows:
            # emit one console page at a time, waiting for a key between pages
            page_rows = []
            for i in range(len(formatted_rows)):
                page_rows.append(formatted_rows[i])
                if len(page_rows) + 4 == console_rows:
                    table_text = tabulate(page_rows, headers=table_headers)
                    table_text += '\n[press any key for more]'
                    print(table_text)
                    page_rows = []
                    input()
                elif i + 1 == len(formatted_rows):
                    # final partial page
                    table_text = tabulate(page_rows, headers=table_headers)
                    if len(page_rows) + 5 == console_rows:
                        table_text += '\n[press any key for more]'
                    print(table_text)
                    if len(page_rows) + 5 == console_rows:
                        input()
        else:
            table_text = tabulate(formatted_rows, headers=table_headers)
            print(table_text)

    # list images
    elif resource_type == 'images':
        pass

    return exit_msg
def retrieve_oauth2_configs(folder_path=''):

    ''' a method to retrieve oauth2 configuration details from files or envvar

    :param folder_path: [optional] string with path to a folder of config files;
        when empty, key/value pairs are ingested from environment variables
    :return: dictionary with validated oauth2 config maps keyed by service name
    '''

    # define oauth2 model (jsonmodel schema used to validate each mapped service)
    oauth2_fields = {
        "schema": {
            "oauth2_app_name": "My App",
            "oauth2_developer_name": "Collective Acuity",
            "oauth2_service_name": "moves",
            "oauth2_auth_endpoint": "https://api.moves-app.com/oauth/v1/authorize",
            "oauth2_token_endpoint": "https://api.moves-app.com/oauth/v1/access_token",
            "oauth2_client_id": "ABC-DEF1234ghijkl-567MNOPQRST890uvwxyz",
            "oauth2_client_secret": "abcdefgh01234456789_IJKLMNOPQrstuv-wxyz",
            "oauth2_redirect_uri": "https://collectiveacuity.com/authorize/moves",
            "oauth2_service_scope": "activity location",
            "oauth2_response_type": "code",
            "oauth2_request_mimetype": "",
            "oauth2_service_logo": "https://pbs.twimg.com/profile_images/3/d_400x400.png",
            "oauth2_service_description": "",
            "oauth2_service_setup": 0.0
        }
    }

    # retrieve key, value pairs from config files in cred folder
    if folder_path:
        envvar_details = {}
        import os
        from labpack.records.settings import load_settings
        file_list = []
        # FIX: match the suffix as a true file extension (endswith) instead of
        # anywhere in the name (find), which also picked up names such as
        # 'config.yaml.bak'; the suffix iteration order is preserved so files
        # loaded for a later suffix still override earlier ones on key collision
        for suffix in ('.yaml', '.yml', '.json'):
            for file_name in os.listdir(folder_path):
                file_path = os.path.join(folder_path, file_name)
                if os.path.isfile(file_path) and file_name.endswith(suffix):
                    file_list.append(file_path)
        for file_path in file_list:
            file_details = load_settings(file_path)
            envvar_details.update(**file_details)

    # or ingest environmental variables
    else:
        from labpack.records.settings import ingest_environ
        envvar_details = ingest_environ()

    # map key, value pairs to services by matching the oauth2 field suffixes
    import re
    oauth2_map = {}
    for key in oauth2_fields['schema'].keys():
        key_pattern = '%s$' % key[6:]  # e.g. '_client_id$' from 'oauth2_client_id'
        key_regex = re.compile(key_pattern)
        for k, v in envvar_details.items():
            if key_regex.findall(k.lower()):
                # whatever precedes the suffix is treated as the service name
                service_name = key_regex.sub('', k.lower())
                if service_name not in oauth2_map:
                    oauth2_map[service_name] = {}
                oauth2_map[service_name][key] = v

    # validate each mapped service, silently discarding incomplete ones
    from jsonmodel.validators import jsonModel
    oauth2_model = jsonModel(oauth2_fields)
    oauth2_services = {}
    for key, value in oauth2_map.items():
        valid_oauth2 = {}
        try:
            valid_oauth2 = oauth2_model.validate(value)
        except Exception:
            # best-effort: configs that fail validation are skipped
            pass
        if valid_oauth2:
            oauth2_services[key] = valid_oauth2

    return oauth2_services
# verbosity if self.verbose: print('Building docker image...') # validate dockerfile if not path.exists('Dockerfile'): raise Exception( 'heroku requires a Dockerfile in working directory to deploy using Docker.' ) # build docker image sys_command = 'heroku container:push %s --app %s' % (docker_image, self.subdomain) self._request_command(sys_command, pipe=True) return True if __name__ == '__main__': from labpack.records.settings import load_settings heroku_config = load_settings('../../../cred/heroku.yaml') heroku_kwargs = { 'account_email': heroku_config['heroku_account_email'], 'account_password': heroku_config['heroku_account_password'], 'app_subdomain': heroku_config['heroku_app_subdomain'], 'verbose': True } heroku_client = herokuClient(**heroku_kwargs)
if max_instances: scheduler_job_defaults['max_instances'] = max_instances if scheduler_job_defaults: scheduler_configs['SCHEDULER_JOB_DEFAULTS'] = scheduler_job_defaults # adjust executor settings # scheduler_executors = {} # if scheduler_settings['scheduler_executors_type']: # scheduler_executors['type'] = scheduler_settings['scheduler_executors_type'] # if scheduler_settings['scheduler_executors_max_workers']: # scheduler_executors['max_workers'] = scheduler_settings['scheduler_executors_max_workers'] # if scheduler_executors: # scheduler_configs['SCHEDULER_EXECUTORS'] = scheduler_executors return scheduler_configs if __name__ == '__main__': # test config scheduler import os os.environ['SCHEDULER_JOB_STORE_PASS'] = '******' model_path = '../models/scheduler-model.json' from labpack.records.settings import load_settings, ingest_environ settings_model = load_settings(model_path) assert settings_model['schema']['scheduler_job_store_user'] == 'postgres' env_settings = ingest_environ(model_path) assert env_settings['scheduler_job_store_pass'] == 'test_pass' example_settings = settings_model['schema'] scheduler_config = config_scheduler(example_settings) assert scheduler_config['SCHEDULER_JOB_DEFAULTS']['coalesce']
__author__ = 'rcj1492' __created__ = '2017.07' __license__ = 'MIT' from labpack.storage.aws.s3 import _s3Client, s3Client if __name__ == '__main__': # test instantiation from pprint import pprint from labpack.records.settings import load_settings aws_cred = load_settings('../../cred/awsLab.yaml') _client_kwargs = { 'access_id': aws_cred['aws_access_key_id'], 'secret_key': aws_cred['aws_secret_access_key'], 'region_name': aws_cred['aws_default_region'], 'owner_id': aws_cred['aws_owner_id'], 'user_name': aws_cred['aws_user_name'] } _s3_client = _s3Client(**_client_kwargs) # test list bucket and verify unittesting is clean bucket_list = _s3_client.list_buckets() bucket_name = 'collective-acuity-labpack-unittest-main' log_name = 'collective-acuity-labpack-unittest-log' assert bucket_name not in bucket_list assert log_name not in bucket_list for bucket in (bucket_name, log_name): _s3_client.delete_bucket(bucket) # test create buckets
__author__ = 'rcj1492' __created__ = '2018.02' __license__ = 'MIT' from labpack.platforms.docker import dockerClient docker_client = dockerClient() from labpack.records.settings import load_settings docker_config = load_settings('../data/test_docker.yaml') docker_client.enter(docker_config['container_alias']) docker_client.rm(docker_config['container_alias'])
del details['expires_in'] return token_details if __name__ == '__main__': from labpack.storage.appdata import appdataClient from labpack.records.settings import load_settings from labpack.randomization.randomlab import random_characters from string import ascii_lowercase config_paths = [ '../../../cred/dropbox.yaml', '../../../cred/moves.yaml', '../../../cred/meetup.yaml' ] for path in config_paths: oauth2_config = load_settings(path) oauth2_kwargs = { 'client_id': oauth2_config['oauth_client_id'], 'client_secret': oauth2_config['oauth_client_secret'], 'auth_endpoint': oauth2_config['oauth_auth_endpoint'], 'token_endpoint': oauth2_config['oauth_token_endpoint'], 'redirect_uri': oauth2_config['oauth_redirect_uri'], 'request_mimetype': oauth2_config['oauth_request_mimetype'] } oauth2_client = oauth2Client(**oauth2_kwargs) url_kwargs = { 'state_value': random_characters(ascii_lowercase, 48) } if oauth2_config['oauth_service_scope']: url_kwargs['service_scope'] = oauth2_config['oauth_service_scope'].split() auth_url = oauth2_client.generate_url(**url_kwargs)
def _apply_update(root_path, service_name=''):

    ''' a method to refresh the vcs ignore, lab.yaml and setup.py files of a service

    :param root_path: string with path to the root folder of the service
    :param service_name: [optional] string with name of the service (for messages)
    :return: None

    NOTE(review): relies on module globals ``vcs_templates`` and ``verbose``
    defined elsewhere in this file -- confirm before relocating this function
    '''

    # construct message insert for console output
    msg_insert = 'local service'
    if service_name:
        msg_insert = 'service "%s"' % service_name

    # update vcs ignore files
    import hashlib
    from os import path
    from pocketlab.methods.vcs import merge_ignores
    vcs_files = {
        'git': {
            'path': path.join(root_path, '.gitignore'),
            'name': '.gitignore'
        },
        'mercurial': {
            'path': path.join(root_path, '.hgignore'),
            'name': '.hgignore'
        }
    }
    for key, value in vcs_files.items():
        if path.exists(value['path']):
            # FIX: read through a context manager so the handle is closed
            with open(value['path']) as f:
                old_text = f.read()
            old_hash = hashlib.sha1(old_text.encode('utf-8')).hexdigest()
            new_text = merge_ignores(old_text, vcs_templates[key])
            new_hash = hashlib.sha1(new_text.encode('utf-8')).hexdigest()
            # only rewrite the file when the merged text actually differs
            if old_hash != new_hash:
                # FIX: dropped redundant f.close() -- the with block closes the file
                with open(value['path'], 'wt') as f:
                    f.write(new_text)
                if verbose:
                    print('%s file for %s updated.' % (value['name'], msg_insert))

    # update lab yaml
    from pocketlab import __module__
    from jsonmodel.loader import jsonLoader
    from jsonmodel.validators import jsonModel
    from labpack.records.settings import load_settings
    config_schema = jsonLoader(__module__, 'models/lab-config.json')
    config_model = jsonModel(config_schema)
    template_config = config_model.ingest()
    config_path = path.join(root_path, 'lab.yaml')
    if path.exists(config_path):
        # best-effort: a corrupt lab.yaml is reported and skipped, not fatal
        try:
            old_config = load_settings(config_path)
            template_config.update(**old_config)
            if old_config != template_config:
                from pocketlab.methods.config import compile_yaml
                config_text = compile_yaml(config_schema, config_path)
                with open(config_path, 'wt') as f:
                    f.write(config_text)
                if verbose:
                    print('lab.yaml file for %s updated.' % msg_insert)
        # FIX: narrowed bare except so KeyboardInterrupt/SystemExit are not swallowed
        except Exception:
            print('lab.yaml file for %s is corrupted. Skipped.' % msg_insert)

    # update setup.py
    setup_path = path.join(root_path, 'setup.py')
    if path.exists(setup_path):
        from pocketlab.methods.config import update_setup
        with open(setup_path) as f:
            old_text = f.read()
        old_hash = hashlib.sha1(old_text.encode('utf-8')).hexdigest()
        new_text = update_setup(old_text)
        new_hash = hashlib.sha1(new_text.encode('utf-8')).hexdigest()
        if old_hash != new_hash:
            with open(setup_path, 'wt', encoding='utf-8') as f:
                f.write(new_text)
            if verbose:
                print('setup.py file for %s updated.' % msg_insert)
def analyze_web(request_details):

    ''' a method to map a web request string to a list of UI component methods

    :param request_details: dictionary with request fields; expects
        request_details['json']['details']['string'] and, optionally,
        request_details['json']['details']['prompt']
    :return: list of dictionaries with 'function' and 'kwargs' keys
    '''

    from labpack.records.settings import load_settings

    method_list = []
    string_request = request_details['json']['details']['string']

    # a pending prompt short-circuits the request with a placeholder exchange
    prompt_text = ''
    if 'prompt' in request_details['json']['details'].keys():
        prompt_text = request_details['json']['details']['prompt']
    if prompt_text:
        prompt_placeholder = {
            'function': 'messageExchange',
            'kwargs': {
                'input_prompt': 'Didn\'t catch that, What is your Email?',
                'input_type': 'text',
                'input_options': [],
                'input_map': {}
            }
        }
        method_list.append(prompt_placeholder)
        return method_list

    # dispatch on the requested action
    if string_request == 'open profile':
        profile_view = {
            'function': 'contentProfile',
            'kwargs': load_settings('assets/copy/lab-profile.json')
        }
        method_list.append(profile_view)
    # FIX: 'open herd' was a separate `if`, so any request other than
    # 'open herd' (including a successful 'open profile') also fell through
    # to the chain's else branch and appended a spurious error message
    elif string_request == 'open herd':
        herd_view = {
            'function': 'herdService',
            'kwargs': {
                'navigation_details': load_settings('assets/copy/herd-navigation.json')
            }
        }
        method_list.append(herd_view)
    elif string_request == 'open map':
        google_config = load_settings('../cred/google.yaml')
        map_view = {
            'function': 'mapView',
            'kwargs': {
                'app_subtitle': 'Map View',
                'page_title': 'Map View',
                'page_label': 'A Test Map using Google Maps API',
                'action_button': {
                    'icon': 'icon-plus',
                    'name': 'Marker',
                    'label': 'Add Marker',
                    'onclick': 'dummy()'
                },
                'action_options': [],
                'google_api_key': google_config['javascript_map_key'],
                'latitude': 40.733507,
                'longitude': -73.990028,
                'zoom': 15,
                'map_type': 'roadmap'
            }
        }
        method_list.append(map_view)
    elif string_request == 'open controller':
        import re
        # collapse indentation newlines so the html ships as a single line
        newline_pattern = re.compile(r'\n\s+')
        # FIX: read through a context manager so the handle is closed
        with open('views/controller.html') as f:
            controller_text = f.read()
        controller_text = newline_pattern.sub('', controller_text)
        client_view = {
            'function': 'controllerView',
            'kwargs': {
                'app_subtitle': 'Controller View',
                'page_title': 'Controller View',
                'page_label': 'A Socket Client Controller',
                'action_button': {},
                'action_options': [
                    { 'name': 'List of Options' },
                    {
                        'name': 'Disconnect',
                        'label': 'Remove Connection',
                        'onclick': 'toggleConnection()'
                    }
                ],
                'html_text': controller_text
            }
        }
        method_list.append(client_view)
    elif string_request == 'display the lab mission':
        mission_dialog = {
            'function': 'blockquoteDialog',
            'kwargs': load_settings('assets/copy/lab-mission.json')
        }
        method_list.append(mission_dialog)
    elif string_request == 'display the lab protocols':
        protocols_dialog = {
            'function': 'itemizedDialog',
            'kwargs': load_settings('assets/copy/lab-protocols.json')
        }
        method_list.append(protocols_dialog)
    else:
        # unknown action: log a console error on the client
        error_message = {
            'function': 'logConsole',
            'kwargs': {'message': 'requested action "%s" does not exist.' % string_request}
        }
        method_list.append(error_message)

    return method_list
send_time = labDT.fromEpoch(delivery_time).isoformat().replace('T', ' ')[0:19] request_kwargs['json']['send_at'] = send_time else: raise ValueError('%s(delivery_time=%s) must be a future date.' % (title, str(delivery_time))) else: raise PermissionError('%s(delivery_time) requires allow_fees=True' % title) # send request response_details = self._post_request(**request_kwargs) return response_details if __name__ == '__main__': from labpack.records.settings import load_settings mandrill_cred = load_settings('../../../cred/mandrill.yaml') # construct client from labpack.handlers.requests import handle_requests mandrill_kwargs = { 'api_key': mandrill_cred['mandrill_api_key'], 'requests_handler': handle_requests } mandrill_client = mandrillClient(**mandrill_kwargs) # test send email from time import time email_address = '*****@*****.**' send_kwargs = { 'recipient_list': [ email_address ], 'sender_email': '*****@*****.**',