def generate(cls, args):
    """Generate a dag file or a new module inside the current afctl project.

    :param args: parsed CLI namespace. Uses ``type`` ('dag' or 'module'),
        ``n`` (name of the dag file / module) and ``m`` (optional existing
        module to place the dag in).
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        project_name, project_path = cls.validate_project()
        if project_name is None:
            cls.parser.error(colored("Invalid project.", 'red'))

        if args.type == "dag":
            # Dags live under <project_path>/<project_name>/dags[/<module>].
            path = os.path.join(project_path, project_name, "dags")
            if args.m is not None:
                path = os.path.join(path, args.m)
                if not os.path.exists(path):
                    # Fixed grammar in user-facing message ("does not exists").
                    cls.parser.error(
                        colored("The specified module does not exist", 'red'))
            Utility.generate_dag_template(project_name, args.n, path)

        elif args.type == "module":
            # A new module gets a dags package and a matching tests package.
            path = "{}/{}/dags/{}".format(project_path, project_name, args.n)
            test_path = "{}/tests/{}".format(project_path, args.n)
            mod_val = subprocess.call(['mkdir', path])
            test_val = subprocess.call(['mkdir', test_path])
            if mod_val != 0 or test_val != 0:
                cls.parser.error(colored("Unable to generate.", 'red'))

        print(colored("Generated successfully.", 'green'))
    except Exception as e:
        raise AfctlParserException(e)
def test_generate_dag_template(self):
    # Purpose: Utility.generate_dag_template must write a '<dag>_dag.py' file
    # in `path` whose content matches the canonical dag boilerplate.
    project_name = "tes_project"
    path = "/tmp"
    dag = "test"
    Utility.generate_dag_template(project_name, dag, path)
    # Expected boilerplate: owner is the project name, dag_id is the dag name.
    expected_output = """from airflow import DAG
from datetime import datetime, timedelta

default_args = {
    'owner': 'tes_project',
    # 'depends_on_past': ,
    # 'start_date': ,
    # 'email': ,
    # 'email_on_failure': ,
    # 'email_on_retry': ,
    # 'retries': 0
}

dag = DAG(dag_id='test', default_args=default_args, schedule_interval='@once')
"""
    current_output = open(os.path.join('/tmp', 'test_dag.py')).read()
    # Strip spaces on both sides so indentation differences don't matter;
    # newlines still must line up.
    expected_output = expected_output.replace(" ", "")
    current_output = current_output.replace(" ", "")
    assert expected_output == current_output
def generate_project(args, files):
    """Create the afctl project skeleton on disk.

    :param args: parsed CLI namespace; uses ``name`` ('.' means "init in the
        current directory") and ``v`` (optional airflow version).
    :param files: dict of project paths/names from get_project_file_names().
    :returns: True when nothing was generated because the target directory
        already exists; False when generation ran.
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        if args.name != '.':
            # Create a brand new project directory.
            if not os.path.exists(files['main_dir']):
                os.mkdir(files['main_dir'])
                ParserHelpers.generate_all(files)
            else:
                print("Directory already exists.")
                return True
        else:
            # Initialising project in existing directory
            project_parent_dir = Utility.find_project(os.getcwd())
            if project_parent_dir is None:
                # Not an afctl project. Generate all directories.
                ParserHelpers.generate_all(files)
            else:
                # Since its an afctl project. Just populate the config files.
                ParserHelpers.generate_config_file(files)

        # `is not None` instead of `!= None` (PEP 8 identity comparison).
        if args.v is not None:
            Utility.update_config(files['project_name'],
                                  {'global': {'airflow_version': args.v}})
        return False
    except Exception as e:
        raise AfctlParserException(e)
def add_git_config(files):
    """Read the git origin of the project repo and store it in the config.

    Falls back to `git init` (plus a hint to the user) when no origin is set.

    :param files: dict of project paths/names from get_project_file_names().
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        # Ask git for the configured remote origin of the project directory.
        origin = subprocess.run([
            'git',
            '--git-dir={}'.format(os.path.join(files['main_dir'], '.git')),
            'config', '--get', 'remote.origin.url'
        ], stdout=subprocess.PIPE)
        # BUG FIX: was `[:-1]`, which chops a real character off the URL when
        # the output has no trailing newline; rstrip only removes newlines.
        origin = origin.stdout.decode('utf-8').rstrip('\n')

        if origin == '':
            # No origin configured: make sure the repo at least exists.
            subprocess.run(['git', 'init', files['main_dir']])
            print(
                colored(
                    "Git origin is not set for this repository. Run 'afctl config global -o <origin>'",
                    'yellow'))
        else:
            print("Updating git origin.")
            Utility.update_config(files['project_name'],
                                  {'global': {'git': {'origin': origin}}})
            print("Setting origin as : {}".format(origin))
            print(
                colored(
                    "Set personal access token for Github. Run 'afctl config global -t <token>'",
                    'yellow'))
    except Exception as e:
        raise AfctlParserException(e)
def generate_project_files(files):
    """Create the project's top-level files and its directory tree.

    :param files: dict of project paths/names from get_project_file_names().
    :returns: (created files map, created top dirs map, created project dirs map).
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        main_dir = files['main_dir']
        sub_file = Utility.create_files([main_dir], files['sub_files'])
        dirs = Utility.create_dirs([main_dir], files['sub_dirs'])
        # Inner project dirs (dags/commons) hang off the dir named after the project.
        project_root = dirs[files['project_name']]
        project_dirs = Utility.create_dirs([project_root], files['project_dirs'])
        return sub_file, dirs, project_dirs
    except Exception as e:
        raise AfctlParserException(e)
def list(cls, args):
    """Print every available entry of the requested meta type
    (operators / sensors / deployment / hooks), one per line.
    """
    try:
        print(colored("Available {} :".format(args.type), 'green'))
        entries = Utility.read_meta()[args.type]
        print('\n'.join(str(entry) for entry in entries))
    except Exception as e:
        raise AfctlParserException(e)
def get_project_file_names(name):
    """Build the canonical paths and names for a project called *name*.

    :param name: project name, or '.' to target the current directory.
    :returns: dict with main_dir, project_name, config_dir, config_file,
        sub_files, sub_dirs and project_dirs.
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        pwd = os.getcwd()
        if name == '.':
            main_dir = pwd
        else:
            # Drop any surrounding slashes so the name joins cleanly.
            main_dir = os.path.join(pwd, name.lstrip('/').rstrip('/'))
        project_name = os.path.basename(main_dir)
        return {
            'main_dir': main_dir,
            'project_name': project_name,
            'config_dir': Utility.CONSTS['config_dir'],
            'config_file': Utility.project_config(project_name),
            'sub_files': ['.afctl_project', '.gitignore', 'requirements.txt'],
            'sub_dirs': [project_name, 'deployments', 'migrations',
                         'plugins', 'tests'],
            'project_dirs': ['dags', 'commons'],
        }
    except Exception as e:
        raise AfctlParserException(e)
def test_create_files(self, clean_tmp_dir):
    """Utility.create_files must create every child file under the parent
    and report each created path keyed by its name."""
    parents = ['/tmp']
    names = ['one', 'two', 'three']
    created = Utility.create_files(parents, names)
    for name in names:
        expected_path = '/tmp/{}'.format(name)
        assert created[name] == expected_path
        assert os.path.exists(expected_path) is True
def generate_dirs(cls, main_dir, project_name):
    """Write the project's local docker-compose file and record its path
    in the project config under deployment.local.compose.

    :param main_dir: absolute path of the project directory.
    :param project_name: name of the afctl project.
    :raises AfctlDeploymentException: wrapping any unexpected error.
    """
    try:
        compose_file = docker_compose_template(project_name)
        # Build the path once and reuse it (it was previously formatted
        # twice with identical arguments).
        deployment_compose_file = "{}/deployments/{}-docker-compose.yml".format(
            main_dir, project_name)
        with open(deployment_compose_file, 'w') as file:
            file.write(compose_file)
        print("Updating docker compose.")
        Utility.update_config(
            project_name,
            {'deployment': {'local': {'compose': deployment_compose_file}}})
    except Exception as e:
        raise AfctlDeploymentException(e)
def validate_project(cls):
    """Locate the afctl project that contains the current directory.

    :returns: (project_name, project_path); the parser exits via
        ``cls.parser.error`` when no project or no config file is found.
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        project_name, project_path = None, None
        pwd = os.getcwd()

        # If any parent of pwd contains .afctl_project, that dir is the project.
        project = Utility.find_project(pwd)
        if project is None:
            # Could not find .afctl_project anywhere above pwd.
            cls.parser.error(
                colored("{} is not an afctl project.".format(pwd), 'red'))
        else:
            # The dir containing .afctl_project must also carry a config file.
            project_name, project_path = project[0], project[1]
            if not os.path.exists(Utility.project_config(project_name)):
                cls.parser.error(
                    colored(
                        "Config file does not exists for {}".format(
                            project_name), 'red'))

        return project_name, project_path
    except Exception as e:
        raise AfctlParserException(e)
def deploy_project(cls, args, config_file):
    # Deploy the project's dags to Qubole.
    #
    # Returns (error_flag, message): error_flag True aborts with `message`,
    # False means the deployment command finished with status 'done'.
    # Raises AfctlDeploymentException wrapping any unexpected error.
    try:
        # -n (deployment name) is mandatory for a Qubole deploy.
        if args.n is None:
            return True, "-n is required. Check usage."

        with open(Utility.project_config(config_file)) as file:
            config = yaml.full_load(file)

        project = config_file
        origin = config['global']['git']['origin']
        # NOTE(review): assumes the 'git'/'access-token' keys always exist in
        # the config; a missing key raises and is re-wrapped below — confirm.
        token = config['global']['git']['access-token']

        if origin is None or origin == '':
            return True, "Origin is not set for the project. Run 'afctl config global -o <origin>'"

        params = QuboleUtils.generate_configs(config, args)
        # The commit to deploy is the tip of the chosen branch on origin.
        latest_commit_on_remote = QuboleUtils.fetch_latest_commit(
            origin, params['branch'])

        if token is None or token == '':
            # Without a token only public repositories can be fetched.
            print(
                "No personal access token found. The repository should be public."
            )

        if latest_commit_on_remote is None:
            return True, "Unable to read latest commit on origin. Please make sure the current branch is present on origin."

        print("Latest commit of {} on origin {} found.".format(
            params['branch'], origin))
        print("Deploying commit : {} on Qubole".format(
            latest_commit_on_remote))

        # Embed the token into the clone URL for private repositories.
        if token is not None and token != "":
            origin = QuboleUtils.create_private_repo_url(origin, token)

        # Run the deployment as a shell command on the configured cluster.
        qds_command = QuboleUtils.get_shell_command(
            project, origin, params['branch'], latest_commit_on_remote)
        command = QuboleUtils.run_qds_command(params['env'],
                                              params['cluster'],
                                              params['token'], qds_command)

        if command.status != 'done':
            return True, "Deployment failed on Qubole"

        return False, ""
    except Exception as e:
        raise AfctlDeploymentException(e)
def test_add_and_update_configs(self, create_config_file):
    # Purpose: exercise Utility.add_configs (insert under a nested key path)
    # and Utility.update_config (modify an existing nested value) against the
    # YAML config file created by the create_config_file fixture.

    # 1. Add a config under parent/child1; child2 stays null.
    add_config = {'name': {'key1': 'val1', 'key2': 'val2'}}
    Utility.add_configs(['parent', 'child1'], PROJECT_NAME, add_config)
    config_file = os.path.join(PROJECT_CONFIG_DIR, PROJECT_NAME) + '.yml'
    expected_output = """parent:
  child1:
    name:
      key1: val1
      key2: val2
  child2: null
"""
    current_output = open(config_file).read()
    # Spaces are stripped so YAML indentation width doesn't matter.
    expected_output = expected_output.replace(" ", "")
    current_output = current_output.replace(" ", "")
    assert expected_output == current_output

    # 2. Add a config under parent/child2 without disturbing child1.
    add_config = {'name': {'key3': 'val3', 'key4': 'val4'}}
    Utility.add_configs(['parent', 'child2'], PROJECT_NAME, add_config)
    expected_output = """parent:
  child1:
    name:
      key1: val1
      key2: val2
  child2:
    name:
      key3: val3
      key4: val4
"""
    current_output = open(config_file).read()
    expected_output = expected_output.replace(" ", "")
    current_output = current_output.replace(" ", "")
    assert expected_output == current_output

    # 3. Update an existing leaf (key3) in place; key4 must survive.
    update_config = {'parent': {'child2': {'name': {'key3': 'val100'}}}}
    Utility.update_config(PROJECT_NAME, update_config)
    expected_output = """parent:
  child1:
    name:
      key1: val1
      key2: val2
  child2:
    name:
      key3: val100
      key4: val4
"""
    current_output = open(config_file).read()
    expected_output = expected_output.replace(" ", "")
    current_output = current_output.replace(" ", "")
    assert expected_output == current_output
def deploy_project(cls, args, config_file):
    """Bring up the project's local docker-compose deployment.

    :param args: parsed CLI namespace; ``d`` toggles detached mode.
    :param config_file: project name whose config holds the compose path.
    :returns: (error_flag, message) — True with a message when docker is
        unavailable, otherwise (False, "").
    :raises AfctlDeploymentException: wrapping any unexpected error.
    """
    try:
        print("Deploying afctl project to local")
        with open(Utility.project_config(config_file)) as file:
            config = yaml.full_load(file)

        # `docker info` exits non-zero when the daemon is unreachable.
        if subprocess.call(['docker', 'info']) != 0:
            return True, "Docker is not running. Please start docker."

        compose_path = config['deployment']['local']['compose']
        if args.d:
            os.system("docker-compose -f {} up -d".format(compose_path))
        else:
            os.system("docker-compose -f {} up ".format(compose_path))

        return False, ""
    except Exception as e:
        raise AfctlDeploymentException(e)
def get_subparsers(cls):
    """Describe every afctl sub-command as data.

    :returns: a tuple of dicts, each with the handler ('func'), the
        sub-command name ('parser'), its help text and its argparse
        argument specs ('args': [name, options-dict] pairs).
    """
    subparsers = ({
        'func': cls.init,
        'parser': 'init',
        'help': 'Create a new Airflow project.',
        'args': [['name', {
            'help': 'Name of your airflow project'
        }], ['-v', {
            'help': 'Airflow version for your project'
        }]]
    }, {
        'func': cls.list,
        'parser': 'list',
        'help': 'Get list of operators, sensors, connectors and hooks.',
        'args': [[
            'type', {
                'choices': ['operators', 'sensors', 'deployment', 'hooks'],
                'help': 'Choose from the options.'
            }
        ]]
    }, {
        'func': cls.config,
        'parser': 'config',
        'help':
        'Setup configs for your project. Read documentation for argument types.\n'
        + 'TYPES:\n' +
        ' add - add a config for your deployment.\n' +
        ' update - update an existing config for your deployment.\n' +
        ' Arguments:\n' +
        ' -d : Deployment Type\n' +
        ' -p : Project\n' +
        DeploymentConfig.CONFIG_DETAILS +
        ' global\n' +
        ' Arguments:\n' +
        ' -p : Project\n' +
        ' -o : Set git origin for deployment\n' +
        ' -t : Set personal access token\n' +
        ' show - Show the config file on console\n' +
        ' No arguments.',
        'args': [['type', {
            'choices': ['add', 'update', 'show', 'global']
        }], ['-d', {
            'choices': ['qubole']
        }], ['-o'], ['-p'], ['-n'], ['-e'], ['-c'], ['-t'], ['-v']]
    }, {
        'func': cls.deploy,
        'parser': 'deploy',
        'help':
        'Deploy your afctl project on the preferred platform.\n' +
        'TYPES:\n' + DeploymentConfig.DEPLOY_DETAILS,
        'args': [['type', {
            'choices': Utility.read_meta()['deployment']
        }], ['-d', {
            'action': 'store_true'
        }], ['-n']]
    }, {
        'func': cls.generate,
        'parser': 'generate',
        'help':
        'Generators\n' +
        '-n : Name of the dag file or the module\n' +
        '-m : Name of module where you want to generate a dag file\n',
        'args': [['type', {
            'choices': ['dag', 'module']
        }], ['-n', {
            # BUG FIX: was the string 'True', which argparse accepted only
            # because any non-empty string is truthy; use a real boolean.
            'required': True
        }], ['-m']]
    })
    return subparsers
def test_return_project_config_file(self):
    """project_config must map a project name to <config_dir>/<name>.yml."""
    name = "test_project"
    expected = os.path.join(PROJECT_CONFIG_DIR, name) + ".yml"
    assert Utility.project_config(name) == expected
def test_find_project(self, create_project):
    """find_project must walk upward from a nested path and return the
    (name, path) of the enclosing project created by the fixture."""
    found = Utility.find_project('/tmp/one/two/three')
    assert found[0] == 'two'
    assert found[1] == '/tmp/one/two'
def act_on_configs(cls, args, project_name):
    """Dispatch the `afctl config` sub-command.

    :param args: parsed CLI namespace; ``type`` selects the action
        ('global', 'add', 'update' or 'show'), the rest are option values.
    :param project_name: name of the project whose config is edited.
    :raises AfctlParserException: wrapping any unexpected error.
    """
    try:
        # Setting global values.
        if args.type == "global":
            origin = args.o
            token = args.t
            version = args.v
            # No flags at all: fall back to interactive prompts.
            if args.o is None and args.t is None and args.v is None:
                origin = input("Git origin for deployment : ")
                token = input("Personal access token : ")
                version = input("Input airflow version : ")
            if origin != '' and origin is not None:
                Utility.update_config(
                    project_name, {'global': {'git': {'origin': origin}}})
            if token != '' and token is not None:
                Utility.update_config(
                    project_name,
                    {'global': {'git': {'access-token': token}}})
            if version != '' and version is not None:
                Utility.update_config(
                    project_name, {'global': {'airflow_version': version}})

        # If adding or updating configs.
        elif args.type == 'add' or args.type == 'update':
            if args.d is None:
                cls.parser.error(
                    colored(
                        "-d argument is required. Check usage. Run 'afctl config -h'",
                        'red'))

            # Sanitize values.
            configs, flag, msg = DeploymentConfig.validate_configs(args)
            if flag:
                cls.parser.error(colored(msg, 'red'))
            else:
                if args.type == 'update':
                    Utility.update_config(project_name, configs)
                if args.type == 'add':
                    Utility.add_configs(['deployment', args.d],
                                        project_name, configs)

        # Showing configs
        elif args.type == 'show':
            Utility.print_file(Utility.project_config(project_name))
    except Exception as e:
        # BUG FIX: the exception object was constructed but never raised,
        # which silently swallowed every error in this method.
        raise AfctlParserException(e)