def get_json(uuid, project_id, project_key, credentials_user,
             credentials_service, timezone, days, hours, values):
  """Assemble a StarThinker recipe JSON from a list of script selections.

  For each entry in values, loads the tasks for the script identified by its
  'tag' and applies that entry's 'values' as field overrides, then wraps all
  tasks with a 'setup' section describing project, schedule, and credentials.

  Args:
    uuid: Unique identifier stored under setup.uuid.
    project_id: Cloud project id stored under setup.id.
    project_key: Project key stored under setup.key.
    credentials_user: User credentials stored under setup.auth.user.
    credentials_service: Service credentials stored under setup.auth.service.
    timezone: Timezone string stored under setup.timezone.
    days: Days-of-week list stored under setup.day.
    hour: Hours list; de-duplicated and sorted before storing in setup.hour.
    values: Iterable of dicts with 'tag' (script name) and 'values' (field
      overrides) keys.

  Returns:
    Dict with 'setup' and 'tasks' keys, ready to serialize as a recipe.
  """
  tasks = []
  hours = set(hours)  # de-duplicate requested hours before sorting

  for v in values:
    ts = Script(v['tag']).get_tasks()
    json_set_fields(ts, v['values'])
    tasks.extend(ts)

  # removed long-dead commented-out "script" section that was inline here
  return {
      'setup': {
          'uuid': uuid,
          'id': project_id,
          'key': project_key,
          'timezone': timezone,
          'day': days,
          'hour': sorted(hours),
          'auth': {
              'user': credentials_user,
              'service': credentials_service,
          }
      },
      'tasks': tasks,
  }
def get_json(uuid, project_id, credentials_user, credentials_service,
             timezone, days, hours, values):
  """Assemble a StarThinker recipe JSON from a list of script selections.

  Same shape as the project_key variant but without setup.key: loads tasks
  for each selected script tag, applies field overrides, and wraps them in
  a 'setup' section with schedule and credentials.

  Args:
    uuid: Unique identifier stored under setup.uuid.
    project_id: Cloud project id stored under setup.id.
    credentials_user: User credentials stored under setup.auth.user.
    credentials_service: Service credentials stored under setup.auth.service.
    timezone: Timezone string stored under setup.timezone.
    days: Days-of-week list stored under setup.day.
    hours: Hours list; de-duplicated and sorted before storing in setup.hour.
    values: Iterable of dicts with 'tag' (script name) and 'values' (field
      overrides) keys.

  Returns:
    Dict with 'setup' and 'tasks' keys, ready to serialize as a recipe.
  """
  tasks = []
  hours = set(hours)  # de-duplicate requested hours before sorting

  for v in values:
    ts = Script(v['tag']).get_tasks()
    json_set_fields(ts, v['values'])
    tasks.extend(ts)

  # removed long-dead commented-out "script" section that was inline here
  return {
      'setup': {
          'uuid': uuid,
          'id': project_id,
          'timezone': timezone,
          'day': days,
          'hour': sorted(hours),
          'auth': {
              'user': credentials_user,
              'service': credentials_service,
          }
      },
      'tasks': tasks,
  }
def get_json(uuid, project_id, credentials_user, credentials_service,
             days, hours, values, constants):
  """Build a UI-sourced recipe JSON from selected scripts.

  Unlike the other variants, this one pulls the timezone from the constants
  dict, tags auth with source 'ui', uses each script's single task, and keeps
  the hours list exactly as given (no de-duplication or sorting).

  Args:
    uuid: Unique identifier stored under setup.uuid.
    project_id: Cloud project id stored under setup.id.
    credentials_user: User credentials stored under setup.auth.user.
    credentials_service: Service credentials stored under setup.auth.service.
    days: Days-of-week list stored under setup.day.
    hours: Hours list stored under setup.hour as-is.
    values: Iterable of dicts with 'tag' and 'values' keys.
    constants: Dict that must contain a 'timezone' entry.

  Returns:
    Dict with 'setup' and 'tasks' keys.
  """
  all_tasks = []
  for selection in values:
    task = Script(selection['tag']).get_task()
    json_set_fields(task, selection['values'])
    all_tasks.extend(task)

  setup = {
      'uuid': uuid,
      'id': project_id,
      'timezone': constants['timezone'],
      'day': days,
      'hour': hours,
      'auth': {
          'source': 'ui',
          'user': credentials_user,
          'service': credentials_service,
      }
  }

  return {'setup': setup, 'tasks': all_tasks}
def create_auto_gen_test_recipes(config_fields):
  """Generate test recipe files by filling field values into test scripts.

  Walks TEST_DIRECTORY_PATH, loads each JSON test script, injects the field
  values configured for that script name, and writes the resolved recipe to
  AUTO_GEN_TEST_FILES under the same filename.

  Args:
    config_fields: Dict mapping script name (filename without extension) to
      the field values to inject. A missing entry raises KeyError.

  Returns:
    None. Side effect: writes one recipe file per discovered test script.
  """
  for root, _dirs, files in os.walk(TEST_DIRECTORY_PATH):
    for filename in files:
      # NOTE(review): reads TEST_DIRECTORY_PATH + filename rather than
      # os.path.join(root, filename), so files in nested subdirectories
      # would not resolve — preserved as-is, confirm directory is flat.
      with open(TEST_DIRECTORY_PATH + filename, 'r') as f:
        script_json = json.load(f)

      # Set config field values into the script
      json_set_fields(script_json, config_fields[filename.split('.')[0]])

      script = json.dumps(script_json, sort_keys=True, indent=2)
      # context manager replaces manual open/close so the output file is
      # closed even if the write raises
      with open(AUTO_GEN_TEST_FILES + filename, 'w') as out:
        out.write(script)
def __init__(self, _dag_name, _script, _script_parameters=None):
  """Store a recipe for DAG generation and backfill auth/project settings.

  Saves the DAG name and recipe, applies optional parameter overrides, then
  fills setup.auth.user, setup.auth.service, and setup.id from Airflow
  connections when the recipe does not already provide them.

  Args:
    _dag_name: Name used for the generated DAG.
    _script: Recipe dict; mutated in place with auth/project defaults.
    _script_parameters: Optional field overrides applied via json_set_fields.
  """
  self.dag_name = _dag_name
  self.recipe = _script
  if _script_parameters:
    json_set_fields(self.recipe, _script_parameters)
  self.dag = None
  # Guarantee setup.auth exists so the .get() probes below cannot KeyError.
  self.recipe.setdefault('setup', {}).setdefault('auth', {})

  # If not given in recipe, try "user" auth information from connection
  if not self.recipe['setup']['auth'].get('user'):
    try:
      user_connection_extra = BaseHook.get_connection(CONNECTION_USER).extra_dejson
      # NOTE(review): bracket access (not .get) is used below, so a missing
      # extra key raises KeyError and is swallowed by the broad except —
      # the exception is effectively used as control flow here.
      if user_connection_extra['extra__google_cloud_platform__key_path']:
        self.recipe['setup']['auth']['user'] = user_connection_extra[
            'extra__google_cloud_platform__key_path']
      elif user_connection_extra['extra__google_cloud_platform__keyfile_dict']:
        self.recipe['setup']['auth']['user'] = user_connection_extra[
            'extra__google_cloud_platform__keyfile_dict']
      # Project id also borrowed from the user connection when present.
      if user_connection_extra['extra__google_cloud_platform__project']:
        self.recipe['setup']['id'] = user_connection_extra[
            'extra__google_cloud_platform__project']
    except Exception as e:
      # Best effort: connection may not exist or lack keys; leave recipe as-is.
      pass

  # If not given in recipe, try "service" auth information from connection
  if not self.recipe['setup']['auth'].get('service'):
    try:
      service_connection_extra = BaseHook.get_connection(
          CONNECTION_SERVICE).extra_dejson
      if service_connection_extra['extra__google_cloud_platform__key_path']:
        self.recipe['setup']['auth']['service'] = service_connection_extra[
            'extra__google_cloud_platform__key_path']
      elif service_connection_extra[
          'extra__google_cloud_platform__keyfile_dict']:
        self.recipe['setup']['auth']['service'] = service_connection_extra[
            'extra__google_cloud_platform__keyfile_dict']
        # Pull project id out of the service keyfile JSON when available.
        keyfile_dict_json = json.loads(service_connection_extra[
            'extra__google_cloud_platform__keyfile_dict']
        )
        if keyfile_dict_json and keyfile_dict_json.get('project_id'):
          self.recipe['setup']['id'] = keyfile_dict_json['project_id']
      # Explicit project on the connection wins over the keyfile's project_id.
      if service_connection_extra['extra__google_cloud_platform__project']:
        self.recipe['setup']['id'] = service_connection_extra[
            'extra__google_cloud_platform__project']
    except Exception as e:
      # Best effort: missing connection/keys are silently ignored.
      pass
def script_write(script, args, filepath=None):
  """Resolve a script's fields and emit it as JSON.

  Injects args into the script's fields, instructions, and description,
  then either writes the result to filepath or prints it to stdout.

  Args:
    script: Script dict; mutated in place by the json_set_* helpers.
    args: Field values to inject.
    filepath: Optional output path; falsy means print to stdout.

  Returns:
    None. Side effect: file write or stdout print.
  """
  # resolve fields, instruction variables, and description variables in place
  json_set_fields(script, args)
  json_set_instructions(script, args)
  json_set_description(script, args)

  rendered = json.dumps(script, indent=2)

  # produce output or run task
  if not filepath:
    print(rendered)
  else:
    with open(filepath, 'w') as data_file:
      data_file.write(rendered)
    print('JSON Written To: ', filepath)
def json_expand_includes(script):
  """Flatten 'include' tasks by splicing in the referenced script's tasks.

  Each task is a single-entry dict of {function_name: parameters}. A task
  named 'include' is replaced by the tasks of the script it references
  (looked up under UI_ROOT), with the include's 'parameters' injected as
  field values. All other tasks pass through unchanged.

  Args:
    script: Recipe dict with a 'tasks' list; mutated in place.

  Returns:
    The same script dict, with script['tasks'] fully expanded.
  """
  flattened = []

  for task in script['tasks']:
    name, params = next(iter(task.items()))
    if name != 'include':
      flattened.append({name: params})
    else:
      # Load the included script's tasks and apply the include's parameters.
      included = get_project(UI_ROOT + '/' + params['script'])['tasks']
      json_set_fields(included, params['parameters'])
      for sub_task in included:
        sub_name, sub_params = next(iter(sub_task.items()))
        flattened.append({sub_name: sub_params})

  script['tasks'] = flattened
  return script
def initialize_tests(scripts, tests):
  """Merge test field values into a config file and generate recipe files.

  Reads any existing field values from CONFIG_FILE, merges in fields found in
  the given test scripts (preserving previously configured values), writes the
  merged config back, then renders each selected script — with fields resolved
  and includes expanded — into RECIPE_DIRECTORY. Finally prints setup
  instructions for the developer.

  Args:
    scripts: Iterable of (filename, script_json) pairs for all test scripts.
    tests: Collection of test names to generate; falsy means generate all.

  Returns:
    List of recipe filenames written to RECIPE_DIRECTORY.
  """

  # Get old fields from the config file
  print('UPDATE CONFIG')
  old_fields = {}
  if os.path.exists(CONFIG_FILE):
    with open(CONFIG_FILE, 'r') as f:
      old_fields = json.load(f)

  # Get new fields from test files and merge in old values
  fields = {}
  for filename, script in scripts:
    script_fields = json_get_fields(script)
    script_name = filename.split('.')[0]

    for field in script_fields:
      fields.setdefault(script_name, {})
      # keep any previously configured value, falling back to the default
      fields[script_name][field['name']] = old_fields.get(
          script_name, {}).get(field['name'], field.get('default', ''))
      fields[script_name]['%s_description' % field['name']] = '(%s) %s' % (
          field.get('kind', 'string'),
          field.get('description', 'No description.'))
      if field['name'] not in old_fields.get(script_name, {}):
        print('NEW FIELD ADDED', script_name, field['name'])

  # Save field values to config file; context manager guarantees close
  if fields:
    with open(CONFIG_FILE, 'w') as f:
      f.write(json.dumps(fields, sort_keys=True, indent=2))
  else:
    print('WARNING CONFIGURATION IS EMPTY, CHECK YOUR PATHS!')

  # Create recipe directory
  print('GENERATE RECIPES')
  os.makedirs(RECIPE_DIRECTORY, exist_ok=True)

  # Create recipes from scripts
  recipes = []
  for filename, script in scripts:
    name = filename.split('.')[0]
    if tests and name not in tests:
      continue

    # Set config field values into the script
    json_set_fields(script, fields.get(name, {}))

    # Expand all includes to full recipe
    json_expand_includes(script)

    with open(RECIPE_DIRECTORY + filename, 'w') as f:
      f.write(json.dumps(script, sort_keys=True, indent=2))

    recipes.append(filename)

  # Create log directory and clear old logs
  os.makedirs(LOG_DIRECTORY, exist_ok=True)

  # Display instructions
  print('')
  print('------')
  print('------------')
  print('------------------------')
  print(
      'Some tests require custom values. Update the necessary fields for the tests you wish to run.'
  )
  print('EDIT: ' + CONFIG_FILE)
  print('------------------------')
  print(
      'Some tests require external assets. Join the following group to gain access.'
  )
  print('VISIT: https://groups.google.com/forum/#!forum/starthinker-assets')
  print('------------------------')
  print('------------')
  print('------')
  print('')
  sleep(3)

  return recipes
def __init__(self, _dag_name, _script, _script_parameters=None):
  """Hold a named recipe, applying optional parameter overrides.

  Args:
    _dag_name: Name used for the generated DAG.
    _script: Recipe dict; mutated in place when parameters are given.
    _script_parameters: Optional field overrides applied via json_set_fields.
  """
  self.dag = None
  self.dag_name = _dag_name
  self.recipe = _script
  # Overrides are injected only when a non-empty mapping is supplied.
  if _script_parameters:
    json_set_fields(self.recipe, _script_parameters)
def configure_tests(tests, runs, skips, test_run_id):
  """Initialize the starthinker_assets/tests.json variable harness.

  Read all existing tests from tests/scripts/*.json and create a dictionary of
  each script and fields. Save that dictionary to a test harness file where
  the developer can configure tests. Then read the test harness and create
  recipe files that can be run. Write those files to tests/recipes/*.json for
  execution in a later step.

  Args:
    tests: List of (filename, json) pairs containing all the tests.
    runs: List of test names that will be run, all will run if blank.
    skips: List of tests to skip.
    test_run_id: String added as a field to each test, used for namespacing.

  Returns:
    List of JSON recipes, where all fields have values from the test harness.
  """

  # Get old fields from the config file
  print('UPDATE CONFIG')
  old_fields = {}
  if os.path.exists(CONFIG_FILE):
    with open(CONFIG_FILE, 'r') as f:
      old_fields = json.load(f)

  # Get new fields from test files and merge in old values
  fields = {}
  for filename, script in tests:
    script_fields = json_get_fields(script)
    script_name = filename.split('.')[0]

    for field in script_fields:
      # test_run_id is injected in memory below, never persisted to config
      if field['name'] == 'test_run_id':
        continue
      fields.setdefault(script_name, {})
      fields[script_name][field['name']] = old_fields.get(script_name, {}).get(
          field['name'], field.get('default', ''))
      fields[script_name]['%s_description' % field['name']] = '(%s) %s' % (
          field.get('kind', 'string'),
          field.get('description', 'No description.'))
      if field['name'] not in old_fields.get(script_name, {}):
        print('NEW FIELD ADDED', script_name, field['name'])

  # Save field values to config file; context manager guarantees close
  if fields:
    with open(CONFIG_FILE, 'w') as f:
      f.write(json.dumps(fields, sort_keys=True, indent=2))

    if test_run_id:
      # Inject the test run ID to the list of field values that were read from
      # the test config file. This is done in memory only, so that concrete
      # test run values are replaced every time a test runs.
      for script in fields:
        fields[script]['test_run_id'] = test_run_id
  else:
    print('WARNING CONFIGURATION IS EMPTY, CHECK YOUR PATHS!')

  # Create recipe directory
  print('GENERATE RECIPES')
  os.makedirs(RECIPE_DIRECTORY, exist_ok=True)

  # Create recipes from tests
  recipes = []
  for filename, script in tests:
    name = filename.split('.')[0]
    if runs and name not in runs:
      continue
    if name in skips:
      continue

    # Set config field values into the script
    json_set_fields(script, fields.get(name, {}))

    # Expand all includes to full recipe
    json_expand_includes(script)

    with open(RECIPE_DIRECTORY + filename, 'w') as f:
      f.write(json.dumps(script, sort_keys=True, indent=2))

    recipes.append(filename)

  # Create log directory and clear old logs
  os.makedirs(LOG_DIRECTORY, exist_ok=True)

  # Display instructions
  print('')
  print('------')
  print('------------')
  print('------------------------')
  print(
      'Some tests require custom values. Update the necessary fields for the tests you wish to run.'
  )
  print('EDIT: ' + CONFIG_FILE)
  print('------------------------')
  print(
      'Some tests require external assets. Join the following group to gain access.'
  )
  print('VISIT: https://groups.google.com/forum/#!forum/starthinker-assets')
  print('------------------------')
  print('------------')
  print('------')
  print('')
  sleep(3)

  return recipes
def __init__(self, _dag_name, _script, _script_parameters=None):
  """Store a recipe for DAG generation, loading credentials and project id.

  Saves the DAG name and recipe, applies optional parameter overrides, then
  resolves setup.auth.user, setup.auth.service, and setup.id — preferring
  values already in the recipe, else falling back to Airflow connections —
  printing the source of each resolved value.

  Args:
    _dag_name: Name used for the generated DAG.
    _script: Recipe dict; mutated in place with auth/project values.
    _script_parameters: Optional field overrides applied via json_set_fields.
  """
  self.dag_name = _dag_name
  self.recipe = _script
  self.dag = None
  if _script_parameters:
    json_set_fields(self.recipe, _script_parameters)
  # Guarantee setup.auth exists so the .get() probes below cannot KeyError.
  self.recipe.setdefault('setup', {}).setdefault('auth', {})

  # If user credentials given in recipe, skip load from connection
  if self.recipe['setup']['auth'].get('user'):
    print('Loaded User Credentials From: RECIPE JSON')

  # If not given in recipe, try "user" auth information from connection
  else:
    try:
      user_connection_extra = BaseHook.get_connection(
          CONNECTION_USER).extra_dejson
      # NOTE(review): bracket access below means a missing extra key raises
      # KeyError, silently handled by the broad except — exception used as
      # control flow.
      if user_connection_extra[
          'extra__google_cloud_platform__key_path']:
        self.recipe['setup']['auth'][
            'user'] = user_connection_extra[
                'extra__google_cloud_platform__key_path']
        print('Loaded User Credentials From: %s, Keyfile Path' %
              CONNECTION_USER)
      elif user_connection_extra[
          'extra__google_cloud_platform__keyfile_dict']:
        self.recipe['setup']['auth'][
            'user'] = user_connection_extra[
                'extra__google_cloud_platform__keyfile_dict']
        print('Loaded User Credentials From: %s, Keyfile JSON ' %
              CONNECTION_USER)
    except Exception as e:
      # Best effort: missing connection or keys leaves auth.user unset.
      pass

  # If service credentials given in recipe, skip load from connection
  if self.recipe['setup']['auth'].get('service'):
    print('Loaded Service Credentials From RECIPE JSON')

  # If not given in recipe, try "service" auth information from connection
  else:
    try:
      service_connection_extra = BaseHook.get_connection(
          CONNECTION_SERVICE).extra_dejson
      if service_connection_extra[
          'extra__google_cloud_platform__key_path']:
        self.recipe['setup']['auth'][
            'service'] = service_connection_extra[
                'extra__google_cloud_platform__key_path']
        print('Loaded Service Credentials From: %s, Keyfile Path' %
              CONNECTION_SERVICE)
      elif service_connection_extra[
          'extra__google_cloud_platform__keyfile_dict']:
        self.recipe['setup']['auth'][
            'service'] = service_connection_extra[
                'extra__google_cloud_platform__keyfile_dict']
        print('Loaded Service Credentials From: %s, Keyfile JSON' %
              CONNECTION_SERVICE)
    except Exception as e:
      # Best effort: missing connection or keys leaves auth.service unset.
      pass

  # If project id given in recipe, skip load from connection
  if self.recipe['setup'].get('id'):
    print('Loaded Project ID From: RECIPE JSON')

  # If not given in recipe, try project id fetch from connections
  else:

    # check user
    try:
      user_connection_extra = BaseHook.get_connection(
          CONNECTION_USER).extra_dejson
      self.recipe['setup']['id'] = user_connection_extra.get(
          'extra__google_cloud_platform__project')
    except:
      # Bare except preserved: any failure falls through to the service check.
      pass

    if self.recipe['setup'].get('id'):
      print('Loaded Project ID From: %s, Project Id' % CONNECTION_USER)

    else:
      # check service
      try:
        service_connection_extra = BaseHook.get_connection(
            CONNECTION_SERVICE).extra_dejson
        self.recipe['setup']['id'] = service_connection_extra.get(
            'extra__google_cloud_platform__project')

        # check service json
        if self.recipe['setup'].get('id'):
          print('Loaded Project ID From: %s, Project Id' % CONNECTION_SERVICE)
        else:
          # Last resort: parse project_id out of the service keyfile JSON.
          self.recipe['setup']['id'] = json.loads(
              service_connection_extra[
                  'extra__google_cloud_platform__keyfile_dict']
          )['project_id']
          print('Loaded Project ID From: %s, Keyfile JSON' %
                CONNECTION_SERVICE)
      except:
        # Bare except preserved: project id may remain None/unset.
        pass
def run_tests(tests):
  """Render test recipes and execute each one in a subprocess, logging results.

  Loads field values from CONFIG_FILE, writes resolved recipes into
  RECIPE_DIRECTORY, launches one StarThinker run per recipe, then polls the
  processes until all finish, writing each job's stdout/stderr to an
  OK_*/FAILED_* log file in LOG_DIRECTORY.

  Args:
    tests: Collection of test names to run; falsy means run all (and clear
      old logs first).

  Returns:
    None. Side effects: files written under RECIPE_DIRECTORY/LOG_DIRECTORY,
    subprocesses launched, progress printed to stdout.
  """
  # Load values from config file
  fields = {}
  if (os.path.exists(CONFIG_FILE)):
    with open(CONFIG_FILE, 'r') as f:
      fields = json.load(f)

  # Create recipe directory
  print('GENERATE RECIPES')
  os.makedirs(RECIPE_DIRECTORY, exist_ok=True)

  # Create recipes from scripts
  recipes = []
  for filename, script in load_tests():
    name = filename.split('.')[0]
    if tests and name not in tests:
      continue

    # Set cal config field values into the script
    json_set_fields(script, fields.get(name, {}))

    with open(RECIPE_DIRECTORY + filename, 'w') as f:
      f.write(json.dumps(script, sort_keys=True, indent=2))

    recipes.append(filename)

  # Create log directory and clear old logs (only on a full run)
  os.makedirs(LOG_DIRECTORY, exist_ok=True)
  if not tests:
    print('CLEAR LOGS')
    for f in glob.glob(LOG_DIRECTORY + '*.log'):
      os.remove(f)

  # Create a process for each recipe execution
  jobs = []
  for recipe in recipes:
    if tests and recipe.split('.')[0] not in tests:
      continue

    # NOTE(review): with shell=False the '$STARTHINKER_*' arguments are passed
    # literally, not expanded by a shell — confirm the receiving run.py
    # expands them (or that env expansion happens elsewhere).
    command = [
        '%s/starthinker_virtualenv/bin/python' % UI_ROOT,
        '-W',
        'ignore',
        '%s/starthinker/all/run.py' % UI_ROOT,
        RECIPE_DIRECTORY + recipe,
        '-u $STARTHINKER_USER',
        '-s $STARTHINKER_SERVICE',
        '-p $STARTHINKER_PROJECT',
        '--verbose',
        '--force',
    ]

    print('LAUNCHED:', ' '.join(command))

    jobs.append({
        'recipe':
            recipe,
        'process':
            subprocess.Popen(
                command,
                shell=False,
                cwd=UI_ROOT,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
    })

  # Monitor each job for completion and write to log.
  # i walks the jobs list from the end toward 0; finished jobs are popped,
  # and when i hits 0 the sweep restarts over whatever jobs remain.
  i = len(jobs)
  while i:
    print('.', end='', flush=True)
    sleep(10)

    i = i - 1
    poll = jobs[i]['process'].poll()
    if poll is not None:
      job = jobs.pop(i)

      print('\nOK:' if poll == 0 else '\nFAILED:', job['recipe'], 'REMAINING:',
            len(jobs), [j['recipe'].replace('.json', '') for j in jobs])

      # Capture the finished process's output into an OK_/FAILED_ log file.
      output, errors = job['process'].communicate()
      with open(
          LOG_DIRECTORY + ('OK_' if poll == 0 else 'FAILED_') +
          job['recipe'].replace('.json', '.log'), 'w') as f:
        f.write(output.decode())
        f.write(errors.decode())

    # Start checking jobs from end again
    if i == 0:
      i = len(jobs)

  print("")
  print("------")
  print("------------")
  print("------------------------")
  print('TEST RESULTS: ls -1 %s*.log' % LOG_DIRECTORY)
  print("------------------------")
  print("------------")
  print("------")
  print("")