def build_genuine(self, manifest):
    """Build a genuine (upstream) MicroPython firmware image.

    Runs ``make`` for the configured port/board with the given frozen
    manifest, then collects the build artefacts.

    :param manifest: Path passed to make as ``FROZEN_MANIFEST``.
    :return: An ``Artefact`` describing the built firmware, or ``None``
        when the artefact lookup fails (best-effort contract).
    """
    click.secho(
        f'Building {yellow(self.vendor.title())} MicroPython firmware '
        f'for {yellow(self.architecture)} with manifest file {yellow(self.manifest)}.'
    )
    with env(ESPIDF=self.espidf_path):
        # Build release.
        shell(
            f'make -j8 --directory=ports/{self.architecture} BOARD={self.board} FROZEN_MANIFEST={manifest}'
        )
        # Find release artefact.
        try:
            # TODO: Put ESP-IDF version into the release name.
            release_board = f'{self.architecture.upper()}-{self.board.replace("_", "-")}'
            # Extract the MICROPY_GIT_TAG value from the generated version header.
            release_version = shell(
                f"""cat {self.micropython_path}/ports/{self.architecture}/build-{self.board}/genhdr/mpversion.h | grep MICROPY_GIT_TAG | cut -d'"' -f2""",
                capture=True,
                silent=True).stdout.strip()
            release_label = self.label
            firmware_path = f'{self.micropython_path}/ports/{self.architecture}/build-{self.board}/firmware.bin'
            application_path = f'{self.micropython_path}/ports/{self.architecture}/build-{self.board}/application.elf'
            return Artefact.make(
                name=f'{release_board}-{release_version}-{release_label}',
                firmware=firmware_path,
                application=application_path)
        except Exception:
            # Bug fix: the original bare ``except: pass`` also swallowed
            # SystemExit/KeyboardInterrupt. Keep the best-effort behavior
            # (fall through, returning None) for ordinary errors only.
            pass
def uninstall():
    """Uninstalls all Python dependencies.

    Reads ``pip freeze`` output, extracts one package name per line, and
    pipes the list to ``pip uninstall -y`` via a temporary file.
    """
    # NOTE(review): presumably meant to pull a name out of an
    # 'egg=<name>'-style fragment; on a plain 'name==version' line it does
    # not match, so the '==' split result below is used — confirm intent.
    patt = re.compile(r'\w+=(\w+)')
    packages = []
    for line in sp.run(('pip', 'freeze'), stdout=sp.PIPE).stdout.decode().splitlines():
        if '==' in line:
            # 'name==version': keep the part before '=='.
            package, *_ = line.split('==')
            match = patt.search(line)
            if match:
                # A regex hit overrides the '==' split result.
                *_, package = match.groups()
            packages.append(package)
    # One package name per line, fed to xargs for the bulk uninstall.
    stdin = os.linesep.join(packages).encode()
    with NamedTemporaryFile() as fn:
        fn.write(stdin)
        fn.seek(0)  # rewind so 'cat' reads the content from the start
        shell(f'cat {fn.name} | xargs pip uninstall -y')
def sync_frozen(self):
    """Mirror the configured source trees into the Pycom frozen-modules path."""
    target = self.pycom_frozen_path
    click.secho(
        f'Copying sources from {yellow(str(self.sources))} to frozen path {target}.'
    )
    source_args = " ".join(self.sources)
    # Mirror sources, dropping bytecode caches and files deleted upstream.
    shell(f'rsync -auv --delete --exclude=__pycache__ {source_args} {target}')
    # Best-effort removal; ignore errors and hide output when absent.
    shell(f'rm -r {target}/MicroWebSrv2', check=False, silent=True)
def release():
    """Package and upload a release to pypi."""
    ctx = click.get_current_context()
    # Pre-release checks and housekeeping, in order.
    ctx.invoke(test_readme)
    ctx.invoke(publish_docs)
    ctx.invoke(tox)
    ctx.invoke(clean, all=True)
    # Build both distributions, then upload everything in dist/.
    shell('python setup.py sdist bdist_wheel')
    shell('twine upload dist/*')
def install(idempotent):
    """
    Install Python dependencies.
    """
    click.secho("make sure you're using pipenv for dependency management", fg='yellow')
    ctx = click.get_current_context()
    if idempotent:
        # Start from a clean slate so repeated runs converge on the same state.
        ctx.invoke(uninstall)
    shell('pip install -e .[dev]')
def clean_build():
    """Remove build artifacts."""
    # Warn first: removing the egg-info also uninstalls the editable CLI.
    click.confirm(
        'This will uninstall the {{cookiecutter.project_slug}} cli. '
        'You may need to run `pip install -e .` to reinstall it. '
        'Continue?', abort=True)
    for command in (
            'rm -fr build/',
            'rm -fr dist/',
            'rm -rf .eggs/',
            "find . -name '*.egg-info' -exec rm -fr {} +",
            "find . -name '*.egg' -exec rm -f {} +"):
        shell(command)
def test(capture, pdb, mypy):
    """
    Run tests quickly with default Python.
    """
    # capture: when falsy, pass -s so pytest does not capture output.
    # pdb: when truthy, drop into the debugger on failure (--pdb).
    # mypy: when truthy, also type-check the project and tests.
    #
    # Bug fix: the original joined '' placeholders, producing stray double
    # spaces in the command line; collect only the flags actually enabled.
    flags = []
    if not capture:
        flags.append('-s')
    if pdb:
        flags.append('--pdb')
    shell('py.test tests/' + ' ' + ' '.join(flags))
    if mypy:
        shell(
            'mypy {{ cookiecutter.project_slug }} tests/ --ignore-missing-imports'
        )
def purge_frozen(self):
    """Delete everything inside the Pycom frozen-modules directory."""
    frozen_path = self.pycom_frozen_path
    if not os.path.exists(frozen_path):
        click.secho(
            f'{red("ERROR")}: Frozen path {red(frozen_path)} does not exist.'
        )
        sys.exit(2)
    click.secho(f'Purging contents of frozen path {frozen_path}.')
    # Only run rm when the glob matches something; an unmatched shell glob
    # would otherwise make rm fail on an already-empty directory.
    if glob(f'{frozen_path}/*'):
        shell(f'rm -r {frozen_path}/*')
def generate_runner():
    """Generate a run.py script in the current directory."""
    from shell_utils import runner

    target = Path('run.py')
    # Refuse to clobber an existing runner script.
    if target.exists():
        raise EnvironmentError('run.py already exists in current directory')
    click.secho('writing content to run.py', fg='yellow')
    # Copy the bundled runner template verbatim, then make it executable.
    target.write_text(Path(runner.__file__).read_text())
    shell('chmod +x run.py')
def tests(mypy):
    """
    Run tests quickly with default Python.
    """
    with cd('tests'):
        # check=False: a failing test run should not raise here.
        shell('pytest', check=False)
    if not mypy:
        return
    result = shell('mypy shell_utils tests/ --ignore-missing-imports',
                   check=False, capture=True)
    if result.stdout:
        # Surface mypy findings in red and exit non-zero.
        raise SystemExit(click.secho(result.stdout, fg='red'))
def coverage(no_browser):
    """Check code coverage quickly with the default Python."""
    for command in (
            'coverage run --source {{ cookiecutter.project_slug }} -m pytest',
            'coverage report -m',
            'coverage html'):
        shell(command)
    # Optionally open the HTML report in the default browser (macOS `open`).
    if not no_browser:
        shell('open htmlcov/index.html')
def build_pycom(self):
    """Build a Pycom MicroPython firmware image.

    Syncs frozen modules into the Pycom tree, runs the esp32 make release
    target, then collects the release artefacts.

    :return: An ``Artefact`` describing the built firmware, or ``None``
        when the artefact lookup fails (best-effort contract).
    """
    click.secho(
        f'Building {yellow(self.vendor.title())} MicroPython firmware '
        f'for {yellow(self.architecture)} with frozen modules from {yellow(str(self.sources))}.'
    )
    # Handle frozen modules.
    self.purge_frozen()
    if self.sources:
        self.sync_frozen()
    # Evaluate Pycom VARIANT.
    # NOTE(review): the variant is hard-coded to BASE, so the PYBYTES branch
    # below never triggers; kept as a configuration hook for later.
    pycom_variant = 'BASE'
    build_dir = 'build'
    if pycom_variant == 'PYBYTES':
        build_dir = 'build-PYBYTES'
    with env(IDF_PATH=self.espidf_path):
        # Build release.
        shell(
            f'make -j8 --directory=esp32 BOARD={self.board} VARIANT={pycom_variant} FS=LFS release'
        )
        # Find release artefact.
        try:
            # Pycom lower-cases I/O/Y in board names for release filenames.
            release_board = shell(
                f"echo '{self.board}' | tr '[IOY]' '[ioy]'",
                capture=True, silent=True).stdout.strip()
            release_version = shell(
                f"""cat esp32/pycom_version.h | grep SW_VERSION_NUMBER | tail -n1 | cut -d'"' -f2""",
                capture=True, silent=True).stdout.strip()
            firmware_path = f'{self.micropython_path}/esp32/{build_dir}/{release_board}-{release_version}.tar.gz'
            application_path = f'{self.micropython_path}/esp32/{build_dir}/{self.board}/release/application.elf'
            return Artefact.make(name=f'{release_board}-{release_version}',
                                 firmware=firmware_path,
                                 application=application_path)
        except Exception:
            # Bug fix: a bare ``except: pass`` also swallowed SystemExit and
            # KeyboardInterrupt; keep the best-effort None return for
            # ordinary errors only.
            pass
def save(self, release_path):
    """Copy the firmware and application artefacts into the release layout."""
    def _place(source, subdir):
        # Copy an artefact to <release_path>/<subdir>/<name><original ext>.
        extension = os.path.splitext(source)[-1]
        shell(f'mkdir -p "{release_path}/{subdir}"', silent=True)
        shell(
            f'cp "{source}" "{release_path}/{subdir}/{self.name}{extension}"',
            silent=True)

    if self.firmware:
        _place(self.firmware, 'firmware')
    if self.application:
        _place(self.application, 'application')
def test():
    """Test db-connection. Do not forget to set environment variables for PSQL to access the database (e.g. PGPASSWORD)"""
    click.echo("Checking database connection.")
    try:
        # psql picks up connection details from the PG* environment variables.
        result = shell('psql -c "SELECT VERSION();"', capture=True)
    except Exception as e:
        click.echo(e)
        click.echo('Database connection failed')
        return False
    if result.returncode == 0:
        click.echo('Database connection was successful')
        return True
    click.echo(result.stderr)
    click.echo('Database connection failed')
    return False
def migrate(schema_file, data_file, work_directory, source_db, target_db):
    """Migrate existing database and data to a fresh database. Do not forget to set environment variables for PSQL to access the database (e.g. PGPASSWORD)"""
    click.echo(f"Migrating from {source_db} to {target_db}. Using {click.format_filename(work_directory)} as working-directory.")
    # Concatenate schema and data scripts, then substitute the placeholders.
    sql = schema_file.read() + '\n' + data_file.read()
    substitutions = {
        "<%SOURCE-DB%>": source_db,
        "<%TARGET-DB%>": target_db,
        "<%DB-USER%>": os.environ['PGUSER'],
    }
    for placeholder, value in substitutions.items():
        sql = sql.replace(placeholder, value)
    final_sql_file = store_to_file(sql, click.format_filename(work_directory))
    click.echo(f'SQL script ready for execution: {final_sql_file}')
    output = shell('psql -f ' + final_sql_file, capture=True)
    click.echo(output.stdout)
    if output.returncode == 0:
        click.echo('Migration completed successfully.')
    else:
        click.echo('Migration failed.')
def autopep8(auto_commit):
    """Autopep8 modules."""
    def _dirty():
        # True when the tree differs from HEAD (uncommitted changes).
        return shell('git diff-index --quiet HEAD --', check=False).returncode != 0

    # Refuse to auto-commit on top of unrelated pending changes.
    if auto_commit and _dirty():
        msg = click.style(
            'working directory dirty. please commit pending changes',
            fg='yellow')
        raise EnvironmentError(msg)
    shell('autopep8 -i -r {{ cookiecutter.project_slug }}/ tests/')
    # Commit only if autopep8 actually modified something.
    if auto_commit and _dirty():
        shell('git add -u')
        shell("git commit -m 'autopep8 (autocommit)'", check=False)
def test_readme():
    """Test README.rst to ensure it will render correctly in warehouse."""
    # setup.py's built-in check: -r validates the long description as
    # reStructuredText, -s turns warnings into errors.
    command = 'python setup.py check -r -s'
    shell(command)
def working_directory_dirty():
    """Return True if the git working directory is dirty."""
    # diff-index exits non-zero when the tree differs from HEAD.
    result = shell('git diff-index --quiet HEAD --', check=False)
    return result.returncode != 0
def __execute(command: str):
    """Run *command* quietly and echo its captured stdout."""
    result = shell(command, capture=True, silent=True)
    # Returning print(...)'s result (always None) matches the original
    # contract — callers get None either way.
    return print(result.stdout)
def publish_docs():
    """
    Compile docs and publish to GitHub Pages.

    Logic borrowed from `hugo <https://gohugo.io/tutorials/github-pages-blog/>`
    """
    # Abort on a dirty tree so the gh-pages commit only contains docs.
    if shell('git diff-index --quiet HEAD --', check=False).returncode != 0:
        shell('git status')
        raise EnvironmentError(
            'The working directory is dirty. Please commit any pending changes.'
        )
    if shell('git show-ref refs/heads/gh-pages', check=False).returncode != 0:
        # initialized github pages branch
        shell(
            dedent("""
            git checkout --orphan gh-pages
            git reset --hard
            git commit --allow-empty -m "Initializing gh-pages branch"
            git push gh-pages
            git checkout master
            """).strip())
        click.secho('created github pages branch', fg='green')
    # deleting old publication
    shell('rm -rf public')
    shell('mkdir public')
    shell('git worktree prune')
    shell('rm -rf .git/worktrees/public/')
    # checkout out gh-pages branch into public
    shell('git worktree add -B gh-pages public gh-pages')
    # generating docs
    context = click.get_current_context()
    context.invoke(docs, no_browser=True)
    # push to github
    with cd('public'):
        shell('git add .')
        shell('git commit -m "Publishing to gh-pages (automated)"', check=False)
        shell('git push origin gh-pages --force')
    remotes = shell('git remote -v', capture=True).stdout.decode()
    # Bug fix: the pattern is now a raw string with an escaped dot, accepts
    # both SSH (':') and HTTPS ('/') remote forms, and allows hyphens/dots
    # in user and repository names; the original '\w+' missed "my-repo".
    match = re.search(r'github\.com[:/]([\w.-]+)/([\w.-]+)\.git', remotes)
    if match:
        user, repo = match.groups()
        click.secho(
            f'Your documentation is viewable at '
            f'https://{user}.github.io/{repo}', fg='green')
def docs(no_browser):
    """
    Generate Sphinx HTML documentation, including API docs.
    """
    # Drop stale generated rst files, then regenerate the API docs.
    for command in (
            'rm -f docs/{{ cookiecutter.project_slug }}.rst',
            'rm -f docs/modules.rst',
            'rm -f docs/{{ cookiecutter.project_slug }}*',
            'sphinx-apidoc -o docs/ {{ cookiecutter.project_slug }}'):
        shell(command)
    with cd('docs'):
        shell('make clean')
        shell('make html')
    # Mirror the built site into public/ for publication.
    shell('cp -rf docs/_build/html/ public/')
    if not no_browser:
        shell('open public/index.html')
def tox():
    """Run tests in isolated environments using tox."""
    # All configuration lives in tox.ini; just delegate.
    shell('tox')
def clean_test():
    """Remove test and coverage artifacts."""
    for command in (
            'rm -fr .tox/',
            'rm -f .coverage',
            'rm -fr htmlcov/'):
        shell(command)
def clean_pyc():
    """Remove Python file artifacts."""
    # Use find so nested directories are covered as well.
    for find_command in (
            "find . -name '*.pyc' -exec rm -f {} +",
            "find . -name '*.pyo' -exec rm -f {} +",
            "find . -name '*~' -exec rm -f {} +",
            "find . -name '__pycache__' -exec rm -fr {} +"):
        shell(find_command)
def update_setup_requires():
    """Update required vendored libraries required for package installation."""
    # Re-vendor pipenv into setup_requires/ from scratch.
    target = 'setup_requires'
    shell(f'rm -rf {target}/')
    shell(f'pip install pipenv --target {target}')
def publish(username, password):
    """
    Build and publish latest version to pypi.
    """
    # NOTE(review): the password ends up on the poetry command line, where it
    # is visible in the process list; consider POETRY_PYPI_TOKEN_* env vars.
    command = (
        f'poetry publish --build --username {username} --password {password}')
    shell(command)
def dist():
    """Build source and wheel package."""
    ctx = click.get_current_context()
    # Ensure stale build artefacts don't leak into the new distribution.
    ctx.invoke(clean, build=True)
    for command in ('python setup.py sdist', 'python setup.py bdist_wheel'):
        shell(command)