def command():
    """Render all commands spec.

    Render all commands with their spec, examples, schemas etc. so far
    created for a current version of the service.
    """
    config = Config()

    commands_dir_path = os.path.join(Config.get_lily_path(), 'commands')

    # -- FIX: `makedirs(exist_ok=True)` replaces the racy
    # -- `exists()` + `mkdir()` pair
    os.makedirs(commands_dir_path, exist_ok=True)

    # -- prefer the version being prepared over the deployed one
    version = config.next_version or config.version
    commands_path = os.path.join(commands_dir_path, f'{version}.json')

    with open(commands_path, 'w') as f:
        commands = CommandsRenderer().render()
        # -- enums are serialized separately, not as a command
        enums = commands.pop('enums')

        commands = {
            name: CommandSerializer(conf).data
            for name, conf in commands.items()
        }

        f.write(
            json.dumps({
                **commands,
                'enums': enums,
            }, indent=4, sort_keys=False))

    # -- FIX: message read "rendered for to file" (typo)
    click.secho(f'Commands rendered to file {commands_path}', fg='green')
def test_exists(self):
    """`Config.exists()` flips to False once config.json is deleted."""
    assert Config.exists() is True

    config_path = str(self.tmpdir.join('.lily').join('config.json'))
    os.remove(config_path)

    assert Config.exists() is False
def command():
    """Assert that no command's query parser declares required fields.

    Reads the rendered commands file for the current version and raises
    `CommandError` for the first command whose `input_query` schema has
    any required fields. Commands listed in
    `LILY_EXCLUDE_QUERY_PARSER_ALL_OPTIONAL_ASSERTIONS` (and the special
    `enums` entry) are skipped.
    """
    config = Config()
    commands_dir = os.path.join(config.get_lily_path(), 'commands')

    excluded = (
        settings.LILY_EXCLUDE_QUERY_PARSER_ALL_OPTIONAL_ASSERTIONS or [])

    with open(os.path.join(commands_dir, f'{config.version}.json'), 'r') as f:
        commands = json.loads(f.read())

    for name, command in commands.items():
        if name in excluded or name == 'enums':
            continue

        input_query = command['schemas'].get(
            'input_query', {'schema': {'required': []}})

        # -- FIX: guard against schemas that omit the 'required' key;
        # -- the original raised KeyError instead of passing the check
        required = input_query['schema'].get('required') or []
        if required:
            fields = ','.join(required)
            raise CommandError(
                f"ERROR: query parser for '{name}' has some not optional "
                f"fields: [{fields}]")
def upgrade_version(upgrade_type):
    """Upgrade version of the repo artefacts.

    - update config.yaml file with version and last_commit_hash
    """
    repo = Repo()

    # -- refuse to proceed on a dirty working tree
    if not repo.all_changes_commited():
        raise click.ClickException(
            'Not all changes were commited! One cannot upgrade version with '
            'some changes still being not commited')

    config = Config()
    renderer = VersionRenderer()

    # -- stage the upcoming version and the commit it was cut from;
    # -- `push_upgraded_version` later promotes these two fields
    config.next_version = renderer.render_next_version(
        config.version, upgrade_type)
    config.next_last_commit_hash = repo.current_commit_hash

    logger.info(f'''
    - Next config version upgraded to: {config.next_version}
    ''')
def get_commands(self, version=None):
    """Load the rendered commands spec for `version` (default: current)."""
    version = version or Config().version

    commands_path = os.path.join(
        Config.get_lily_path(), 'commands', f'{version}.json')

    with open(commands_path, 'r') as f:
        return json.loads(f.read())
def command(v):
    """Apply the migrations plan rendered for version `v`."""
    migrations_path = os.path.join(
        Config().get_lily_path(), 'migrations', f'{v}.json')

    with open(migrations_path, 'r') as f:
        plan = json.loads(f.read())['plan']

    # -- replay the plan entry by entry in the recorded order
    for app_name, migration in plan:
        management.call_command('migrate', app_name, migration)

    click.secho(f'Migrations plan for version {v} applied.', fg='green')
def command():
    """Render the migrations plan for the upcoming (or current) version."""
    config = Config()

    migrations_dir_path = os.path.join(config.get_lily_path(), 'migrations')

    # -- FIX: `makedirs(exist_ok=True)` replaces the racy
    # -- `exists()` + `mkdir()` pair
    os.makedirs(migrations_dir_path, exist_ok=True)

    # -- prefer the version being prepared over the deployed one
    version = config.next_version or config.version
    migrations_path = os.path.join(migrations_dir_path, f'{version}.json')

    with open(migrations_path, 'w') as f:
        f.write(json.dumps(Renderer().render(), indent=4, sort_keys=False))

    # -- FIX: message read "rendered for to file" (typo)
    click.secho(
        f'Migrations plan rendered to file {migrations_path}', fg='green')
def get_meta(self, serializer):
    """Return the project-relative path and first source line of `serializer`."""
    source_path = inspect.getfile(serializer)
    relative_path = source_path.replace(Config.get_project_path(), '')

    # -- getsourcelines returns (source_lines, starting_line_number)
    first_line = inspect.getsourcelines(serializer)[1]

    return {
        'first_line': first_line,
        'path': relative_path,
    }
def test_create_empty__lily_folder_does_not_exist(self):
    """`create_empty` builds .lily/config.json from scratch."""
    self.tmpdir.join('.lily').remove()

    Config.create_empty('some_service')

    expected = {
        'last_commit_hash': '... THIS WILL BE FILLED AUTOMATICALLY ...',
        'name': '... PUT HERE NAME OF YOUR PROJECT ...',
        'repository': '... PUT HERE URL OF REPOSITORY ...',
        'src_dir': 'some_service',
        'version': '... PUT HERE INITIAL VERSION ...',
        'next_version': None,
        'next_last_commit_hash': None,
    }
    content = self.tmpdir.join('.lily').join('config.json').read()
    assert json.loads(content) == expected
def test_properties__setters(self):
    """Each setter persists its value straight to config.json."""
    def stored(prop):
        return json.loads(self.lily_dir.join('config.json').read())[prop]

    config = Config()

    # -- version
    config.version = '9.9.1'
    assert stored('version') == '9.9.1'

    # -- next_version
    config.next_version = '9.0.8'
    assert stored('next_version') == '9.0.8'

    # -- src_dir
    config.src_dir = 'entity_service'
    assert stored('src_dir') == 'entity_service'

    # -- last_commit_hash
    config.last_commit_hash = 'f7d87cd78'
    assert stored('last_commit_hash') == 'f7d87cd78'

    # -- next_last_commit_hash
    config.next_last_commit_hash = 'f7d87cd78'
    assert stored('next_last_commit_hash') == 'f7d87cd78'
def get_available_versions(self):
    """List rendered command versions, newest (semver-wise) first."""
    commands_dir_path = os.path.join(Config.get_lily_path(), 'commands')

    def as_version_tuple(version):
        # -- '1.10.2' must sort after '1.9.9', so compare numerically
        return [int(part) for part in version.split('.')]

    versions = [
        filename.replace('.json', '')
        for filename in os.listdir(commands_dir_path)
    ]
    return sorted(versions, key=as_version_tuple, reverse=True)
def command():
    """Remove the examples.json file if it is there."""
    examples_path = os.path.join(Config.get_lily_path(), 'examples.json')

    try:
        os.remove(examples_path)

    except FileNotFoundError:
        # -- already gone, nothing to clean up
        pass

    click.echo("'examples.json' was removed")
def test_properties__getters(self):
    """Getters expose the raw values stored in config.json."""
    config = Config()

    expected = {
        'name': 'hello',
        'src_dir': 'some_service',
        'repository': 'bithub',
        'version': '0.1.9',
        'last_commit_hash': '940594',
        'next_version': '0.2.1',
        'next_last_commit_hash': 'fd898fd',
    }
    for prop, value in expected.items():
        assert getattr(config, prop) == value

    # -- src_path is derived from the tmp project root + src_dir
    assert config.src_path == str(self.tmpdir.join('some_service'))
def push_upgraded_version():
    """Push Upgraded version and all of its artefacts.

    - add commit with artefacts
    - tag branch with the version of repo
    - push changes to the remote
    """
    config = Config()
    repo = Repo()

    # -- promote the staged version and clear the staging slot
    config.version = config.next_version
    config.next_version = None

    # -- promote the staged commit hash and clear the staging slot
    config.last_commit_hash = config.next_last_commit_hash
    config.next_last_commit_hash = None

    # -- commit all artefacts coming from the post upgrade step
    repo.add_all()
    repo.commit('VERSION: {}'.format(config.version))
    repo.push()

    logger.info(f'''
    - Version upgraded to: {config.version}
    ''')
def get_repository_uri(self):
    """Return a deep link to this code location in the remote repository.

    Builds a provider-specific URL (Bitbucket or GitHub) pointing at the
    file and first line stored in `self.meta`, pinned to the last commit
    hash from the config. Returns None for unrecognized providers.
    """
    config = Config()

    # -- strip the leading slash so os.path.join does not treat the
    # -- path as absolute
    relative_path = re.sub(r'^/', '', self.meta['path'])

    if 'bitbucket' in config.repository:
        return os.path.join(
            config.repository,
            'src',
            config.last_commit_hash,
            relative_path,
            '#lines-{}'.format(self.meta['first_line']))

    # -- FIX: the original tested `elif 'github':` - a constant truthy
    # -- string - so every non-bitbucket provider was treated as GitHub;
    # -- test the repository URL itself instead
    elif 'github' in config.repository:
        return os.path.join(
            config.repository,
            'blob',
            config.last_commit_hash,
            relative_path,
            '#L{}'.format(self.meta['first_line']))
def command():
    """Render Markdown Specification for all registered Commands."""
    # -- make sure that the main directory is also visible during
    # -- the search of all url patterns
    sys.path.insert(0, os.getcwd())

    urlpatterns = import_module(settings.ROOT_URLCONF).urlpatterns

    spec_path = os.path.join(Config.get_lily_path(), 'API.md')
    with open(spec_path, 'w') as f:
        f.write(MarkdownRenderer(urlpatterns).render())

    click.secho(
        'Successfully rendered Markdown Specification for all '
        'registered Commands',
        fg='green')
def get(self, request):
    """Serve the commands catalogue, optionally narrowed by the query."""
    query = request.input.query
    command_names = query['commands']
    is_private = query['is_private']
    domain_id = query['domain_id']
    version = query['version']

    config = Config()

    commands = self.get_commands(version)
    enums = commands.pop('enums')

    # -- keep only explicitly requested commands
    if command_names:
        commands = {
            name: commands[name]
            for name in command_names
        }

    # -- keep only commands matching the requested visibility
    if is_private is not None:
        commands = {
            name: command
            for name, command in commands.items()
            if command['access']['is_private'] == is_private
        }

    # -- keep only commands belonging to the requested domain
    if domain_id:
        wanted_domain = domain_id.lower()
        commands = {
            name: command
            for name, command in commands.items()
            if command['meta']['domain']['id'].lower() == wanted_domain
        }

    raise self.event.Read({
        'name': config.name,
        'version_info': {
            'deployed': config.version,
            'displayed': version or config.version,
            'available': self.get_available_versions(),
        },
        'commands': commands,
        'enums': enums,
    })
def copy_makefile(self, src_dir):
    """Render the base makefile template into .lily/lily_assistant.makefile."""
    config = Config()

    with open(self.base_makefile_path, 'r') as makefile:
        content = makefile.read()

    # -- substitute the template placeholders
    content = re.sub(r'{%\s*SRC_DIR\s*%}', src_dir, content)
    content = re.sub(r'{%\s*VERSION\s*%}', config.version, content)

    makefile_path = os.path.join(
        self.root_dir, '.lily', 'lily_assistant.makefile')
    with open(makefile_path, 'w') as f:
        f.write(content)

    click.secho(
        'copied lily_assistant makefile to {makefile_path}'.format(
            makefile_path=makefile_path),
        fg='blue')
def get_most_current_migrations(self):
    """Return the newest migration per app as (app_name, migration, module).

    Scans `<src_dir>/**/migrations/*.py`, sorts the migration files in
    reverse lexicographic order (newest numbered migration first) and
    keeps only the first hit per app.
    """
    cwd = os.getcwd()
    base_path = os.path.join(cwd, Config().src_dir)
    filenames = glob.glob(os.path.join(base_path, '**/migrations/*.py'))

    # -- keep only numbered migration files (e.g. 0003_auto.py)
    filenames = [f for f in filenames if re.search(r'\d+\w+\.py$', f)]
    filenames = sorted(filenames, reverse=True)

    most_current_migrations = []
    apps_visited = set()
    for f in filenames:
        module = f
        # -- FIX: escape the cwd before embedding it in a regex - paths
        # -- containing metacharacters (e.g. '+', '(') would otherwise
        # -- not be stripped correctly
        module = re.sub(f'{re.escape(cwd)}/', '', module)
        # -- FIX: escape the dot in '.py' so only the real extension
        # -- (not any char followed by 'py') is removed
        module = re.sub(r'(\.py$|\.\/)', '', module)
        module = re.sub(r'\/', '.', module)

        # -- <app>.migrations.<migration> -> pick app and migration
        module_parts = module.split('.')
        app_name, migration = module_parts[-3], module_parts[-1]
        if app_name not in apps_visited:
            apps_visited.add(app_name)
            most_current_migrations.append((app_name, migration, module))

    return most_current_migrations
def test_get_lily_path(self):
    """`get_lily_path` points at the project's .lily directory."""
    expected = str(self.tmpdir.join('.lily'))

    assert Config.get_lily_path() == expected
def test_get_config_path(self):
    """`get_config_path` points at .lily/config.json."""
    expected = str(self.tmpdir.join('.lily').join('config.json'))

    assert Config.get_config_path() == expected
def test_get_project_path(self):
    """`get_project_path` points at the project root directory."""
    project_path = Config.get_project_path()

    assert project_path == str(self.tmpdir)
def get_examples_filepath():
    """Return the absolute path to the examples.json file."""
    lily_path = Config.get_lily_path()

    return os.path.join(lily_path, 'examples.json')
def create_empty_config(self, src_dir):
    """Bootstrap an empty config.json unless one already exists."""
    if Config.exists():
        return

    Config.create_empty(src_dir)
def get_commands(self):
    """Load the rendered commands spec for the current config version."""
    version = Config().version
    commands_path = os.path.join(
        Config.get_lily_path(), 'commands', f'{version}.json')

    with open(commands_path, 'r') as f:
        return json.loads(f.read())
def render(self, only_build=False, include_domains=None, exclude_domains=None): root_cwd = os.getcwd() # -- save it now before all directory jumps config = Config() # -- pull newest changes to the template self.template_repo.clone() # -- copy its content to the temp directory into which client will get # -- render self.repo.clone() self.template_repo.copy_to(self.repo.base_path, self.client_prefix) self.repo.cd_to_repo() self.repo.install() # -- render domains with local_cwd(root_cwd): commands_by_domain = self.group_commands_by_domain( include_domains, exclude_domains) # -- render client self.render_client_module_ts(commands_by_domain) self.render_api_ts(commands_by_domain) self.render_api_index_ts(commands_by_domain) # -- particular domains all_commands = [] for domain, commands in commands_by_domain.items(): sorted_commands = [] for command_name in sorted(commands.keys()): sorted_commands.append(commands[command_name]) self.render_domain(domain, sorted_commands) all_commands.extend(sorted_commands) # -- create SHARED folder rel_path = './projects/client/src/shared' self.repo.create_dir(rel_path) # -- access for all commands self.render_access_ts(rel_path, all_commands) # -- enums - must be run after domains rendering since it # -- renders enums self.render_enums_ts(rel_path, all_commands) self.repo.build() if not only_build: next_version = self.repo.upgrade_version(config) self.repo.add_all() self.repo.commit(next_version) self.repo.push() return next_version
def __init__(self, fn):
    """Capture the project-relative location (file + line span) of `fn`."""
    # -- getsourcelines returns (source_lines, starting_line_number)
    source_lines, first_line = inspect.getsourcelines(fn)

    project_path = Config.get_project_path()
    self.filepath = inspect.getfile(fn).replace(project_path, '')

    self.start_line = first_line
    self.end_line = first_line + len(source_lines) - 1
def __init__(self):
    """Anchor the repo at the project root and change into it."""
    project_path = Config.get_project_path()

    self.base_path = project_path
    self.cd_to_repo()