Example 1
 def setUpAll(cls):
     cls.git = Git(GIT_REPO)
Example 2
 def _open_git(self) -> Git:
     return Git(str(self.path))
Example 3
class UploadAction(ConsoleAction):
    def __init__(self, args):
        folder = abspath(expanduser(args.skill_folder))
        self.entry = SkillEntry.from_folder(folder)
        skills_dir = abspath(expanduser(self.msm.skills_dir))
        if join(skills_dir, basename(folder)) != folder:
            raise MskException(
                'Skill folder, {}, not directly within skills directory, {}.'.
                format(args.skill_folder, self.msm.skills_dir))

    git = Lazy(lambda s: Git(s.entry.path))  # type: Git

    @staticmethod
    def register(parser: ArgumentParser):
        parser.add_argument('skill_folder')

    def perform(self):
        for i in listdir(self.entry.path):
            if i.lower() == 'readme.md' and i != 'README.md':
                shutil.move(join(self.entry.path, i),
                            join(self.entry.path, 'README.md'))

        creator = CreateAction(None, self.entry.name.replace('-skill', ''))
        creator.path = self.entry.path
        creator.initialize_template({'.git', '.gitignore', 'README.md'})
        self.git.add('README.md')
        creator.commit_changes()
        skill_repo = creator.create_github_repo(lambda: input('Repo name:'))
        if skill_repo:
            self.entry.url = skill_repo.html_url
            self.entry.author = self.user.login
        else:
            skill_repo = self.github.get_repo(skill_repo_name(self.entry.url))

        if not skill_repo.permissions.push:
            print(
                'Warning: You do not have write permissions to the provided skill repo.'
            )
            if ask_yes_no('Create a fork and use that instead? (Y/n)', True):
                skill_repo = self.user.create_fork(skill_repo)
                print('Created fork:', skill_repo.html_url)
                self.git.remote('rename', 'origin', 'upstream')
                self.git.remote('add', 'origin', skill_repo.html_url)

        self.entry.name = input(
            'Enter a unique skill name (ie. npr-news or grocery-list): ')

        readme_file = {i.lower(): i
                       for i in os.listdir(self.entry.path)}['readme.md']
        readme = read_file(self.entry.path, readme_file)

        last_section = None
        sections = {last_section: ''}
        for line in readme.split('\n'):
            line = line.strip()
            if line.startswith('#'):
                last_section = line.strip('# ').lower()
                sections[last_section] = ''
            else:
                sections[last_section] += '\n' + line
        del sections[None]

        if 'description' in sections:
            description = sections['description']
        else:
            description = ask_choice(
                'Which section contains the description?',
                list(sections),
                on_empty='Please create a description section in the README')

        branch = SkillData(self.entry).add_to_repo()
        self.repo.push_to_fork(branch)

        pull = create_or_edit_pr(title='Add {}'.format(self.entry.name),
                                 body=body_template.format(
                                     description=description,
                                     skill_name=self.entry.name,
                                     skill_url=skill_repo.html_url),
                                 user=self.user,
                                 branch=branch,
                                 skills_repo=self.repo.hub,
                                 repo_branch=self.branch)

        print('Created pull request: ', pull.html_url)
Example 4
    #config = Config()

    domain_obj = Domain()
    domain_obj.prompt_for_domain()
    print('domain set to: ' + domain_obj.domain)
    domain_obj.save()

    #print(domain_obj)

    #tmp hack for testing
    #File.delete_folder(path_base+domain_obj.domain_code+'/')
    File.force_folder_existance(path_base + domain_obj.domain_code + '/')

    env = 'dev'  #todo - need to move this to a config file
    git_obj = Git(domain_obj.domain_code, env)
    git_obj.run_wizard()

    # create file for nginx
    #output = UI.run('whoami')
    #print(output)
    #todo figure out this permission issue - folder is currently set to 777 as a temp workaround
    #todo - when changing port # it does not seem to overwrite
    sites_available_file = '/etc/nginx/sites-available/{}'.format(
        domain_obj.domain_code)
    if not File.exists(sites_available_file):
        File.force_existance(sites_available_file)

        text = '''
server {{
  listen 80;
Example 5
class CreateAction(ConsoleAction):
    def __init__(self, args, name: str = None):
        colorama_init()
        if name:
            self.name = name

    @staticmethod
    def register(parser: ArgumentParser):
        pass

    @Lazy
    def name(self) -> str:
        name_to_skill = {skill.name: skill for skill in self.msm.list()}
        while True:
            name = ask_input(
                'Enter a short unique skill name (ie. "siren alarm" or "pizza orderer"):',
                lambda x: re.match(r'^[a-zA-Z \-]+$', x),
                'Please use only letters and spaces.').strip(
                    ' -').lower().replace(' ', '-')
            skill = name_to_skill.get(
                name, name_to_skill.get('{}-skill'.format(name)))
            if skill:
                print('The skill {} {}already exists'.format(
                    skill.name,
                    'by {} '.format(skill.author) * bool(skill.author)))
                if ask_yes_no('Remove it? (y/N)', False):
                    rmtree(skill.path)
                else:
                    continue
            class_name = '{}Skill'.format(to_camel(name.replace('-', '_')))
            repo_name = '{}-skill'.format(name)
            print()
            print('Class name:', class_name)
            print('Repo name:', repo_name)
            print()
            alright = ask_yes_no('Looks good? (Y/n)', True)
            if alright:
                return name

    path = Lazy(lambda s: join(s.msm.skills_dir, s.name + '-skill'))
    git = Lazy(lambda s: Git(s.path))
    short_description = Lazy(lambda s: ask_input(
        'Enter a one line description for your skill (ie. Orders fresh pizzas from the store):\n-',
    ).capitalize())
    author = Lazy(lambda s: ask_input('Enter author:'))
    intent_lines = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter some example phrases to trigger your skill:', '-')
    ])
    dialog_lines = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter what your skill should say to respond:', '-')
    ])
    intent_entities = Lazy(lambda s: set(
        re.findall(r'(?<={)[a-z_A-Z]*(?=})', '\n'.join(
            i for i in s.intent_lines))))
    dialog_entities = Lazy(lambda s: set(
        re.findall(r'(?<={)[a-z_A-Z]*(?=})', '\n'.join(s.dialog_lines))))
    long_description = Lazy(
        lambda s: '\n\n'.join(ask_input_lines('Enter a long description:', '>')
                              ).strip().capitalize())
    icon = Lazy(lambda s: ask_input(
        'Go to Font Awesome ({blue}fontawesome.com/cheatsheet{reset}) and choose an icon.'
        '\nEnter the name of the icon:'.format(blue=Fore.BLUE + Style.BRIGHT,
                                               reset=Style.RESET_ALL),
        validator=lambda x: requests.get(
            "https://raw.githack.com/FortAwesome/Font-Awesome/"
            "master/svgs/solid/{x}.svg".format(x=x)).ok,
        on_fail=
        "\n\n{red}Error: The name was not found. Make sure you spelled the icon name right,"
        " and try again.{reset}\n".format(red=Fore.RED + Style.BRIGHT,
                                          reset=Style.RESET_ALL)))
    color = Lazy(lambda s: ask_input(
        "Pick a {yellow}color{reset} for your icon. Find a color that matches the color scheme at"
        " {blue}mycroft.ai/colors{reset}, or pick a color at: {blue}color-hex.com.{reset}"
        "\nEnter the color hex code (including the #):".format(
            blue=Fore.BLUE + Style.BRIGHT,
            yellow=Fore.YELLOW,
            reset=Style.RESET_ALL),
        validator=lambda hex_code: hex_code[0] == "#" and len(hex_code
                                                              ) in [4, 7],
        on_fail=
        "\n{red}Check that you entered a correct hex code, and try again.{reset}\n"
        .format(red=Fore.RED + Style.BRIGHT, reset=Style.RESET_ALL)))
    category_options = [
        'Daily', 'Configuration', 'Entertainment', 'Information', 'IoT',
        'Music & Audio', 'Media', 'Productivity', 'Transport'
    ]
    category_primary = Lazy(lambda s: ask_input(
        '\nCategories define where the skill will display in the Marketplace. It must be one of the following: \n{}. \nEnter the primary category for your skill: \n-'
        .format(', '.join(s.category_options)
                ), lambda x: x in s.category_options))
    categories_other = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter additional categories (optional):', '-')
    ])
    tags = Lazy(lambda s: [
        i.capitalize() for i in ask_input_lines(
            'Enter tags to make it easier to search for your skill (optional):',
            '-')
    ])

    manifest = Lazy(lambda s: manifest_template if ask_yes_no(
        message=
        "Does this Skill depend on Python Packages (PyPI), System Packages (apt-get/others), or other skills?"
        "\nThis will create a manifest.yml file for you to define the dependencies for your Skill."
        "\nCheck the Mycroft documentation at mycroft.ai/to/skill-dependencies to learn more about including dependencies, and the manifest.yml file, in Skills. (y/N)",
        default=False) else None)

    readme = Lazy(lambda s: readme_template.format(
        title_name=s.name.replace('-', ' ').title(),
        short_description=s.short_description,
        long_description=s.long_description,
        examples=''.join('* "{}"\n'.format(i) for i in s.intent_lines),
        credits=credits_template.format(author=s.author),
        icon=s.icon,
        color=s.color.upper(),
        category_primary=s.category_primary,
        categories_other=''.join('{}\n'.format(i) for i in s.categories_other),
        tags=''.join('#{}\n'.format(i) for i in s.tags),
    ))
    init_file = Lazy(lambda s: init_template.format(
        class_name=to_camel(s.name.replace('-', '_')),
        handler_name=s.intent_name.replace('.', '_'),
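        # build the handler body: read each intent entity from message.data, give
        # dialog-only entities empty defaults, then speak the dialog (with entity data if any)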
        handler_code='\n'.join(' ' * 8 * bool(i) + i for i in [
            "{ent} = message.data.get('{ent}')".format(ent=entity)
            for entity in sorted(s.intent_entities)
        ] + [
            "{ent} = ''".format(ent=entity)
            for entity in sorted(s.dialog_entities - s.intent_entities)
        ] + [''] * bool(
            s.dialog_entities | s.intent_entities
        ) + "self.speak_dialog('{intent}'{args})".format(
            intent=s.intent_name,
            args=", data={{\n{}\n}}".format(
                ',\n'.join("    '{ent}': {ent}".format(ent=entity)
                           for entity in s.dialog_entities | s.intent_entities)
            ) * bool(s.dialog_entities | s.intent_entities)).split('\n')),
        intent_name=s.intent_name))

    intent_name = Lazy(lambda s: '.'.join(reversed(s.name.split('-'))))

    def add_locale(self):
        makedirs(join(self.path, 'locale', self.lang))
        with open(
                join(self.path, 'locale', self.lang,
                     self.intent_name + '.intent'), 'w') as f:
            f.write('\n'.join(self.intent_lines + ['']))
        with open(
                join(self.path, 'locale', self.lang,
                     self.intent_name + '.dialog'), 'w') as f:
            f.write('\n'.join(self.dialog_lines + ['']))

    def license(self):
        """Ask user to select a license for the repo."""
        license_files = get_licenses()
        print('For uploading a skill a license is required.\n'
              'Choose one of the licenses listed below or add one later.\n')
        for num, pth in zip(range(1, 1 + len(license_files)), license_files):
            print('{}: {}'.format(num, pretty_license(pth)))
        choice = ask_input('Choose license above or press Enter to skip?')
        if choice.isdigit():
            index = int(choice) - 1
            shutil.copy(license_files[index], join(self.path, 'LICENSE.md'))
            print('\nSome of these require that you insert the project name '
                  'and/or author\'s name. Please check the license file and '
                  'add the appropriate information.\n')

    def initialize_template(self, files: set = None):
        git = Git(self.path)

        skill_template = [
            ('', lambda: makedirs(self.path)), ('locale', self.add_locale),
            ('__init__.py', lambda: self.init_file),
            ('README.md', lambda: self.readme), ('LICENSE.md', self.license),
            ('.gitignore', lambda: gitignore_template),
            ('settingsmeta.yaml', lambda: settingsmeta_template.format(
                capital_desc=self.name.replace('-', ' ').capitalize())),
            ('manifest.yml', lambda: self.manifest),
            ('.git', lambda: git.init())
        ]

        def cleanup():
            rmtree(self.path)

        if not isdir(self.path):
            atexit.register(cleanup)
        for file, handler in skill_template:
            if files and file not in files:
                continue
            if not exists(join(self.path, file)):
                result = handler()
                if isinstance(result,
                              str) and not exists(join(self.path, file)):
                    with open(join(self.path, file), 'w') as f:
                        f.write(result)
        atexit.unregister(cleanup)

    def commit_changes(self):
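        # `git rev-parse HEAD` (with exceptions suppressed) returns the literal string
        # 'HEAD' when the repo has no commits yet, so only a freshly created repo is committed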
        if self.git.rev_parse('HEAD', with_exceptions=False) == 'HEAD':
            self.git.add('.')
            self.git.commit(message='Initial commit')

    def force_push(self,
                   get_repo_name: Callable = None) -> Optional[Repository]:
        if ask_yes_no(
                'Are you sure you want to overwrite the remote github repo? '
                'This cannot be undone and you will lose your commit '
                'history! (y/N)', False):
            repo_name = (get_repo_name
                         and get_repo_name()) or (self.name + '-skill')
            repo = self.user.get_repo(repo_name)
            self.git.push('origin', 'master', force=True)
            print('Force pushed to GitHub repo:', repo.html_url)
            return repo

    def link_github_repo(self,
                         get_repo_name: Callable = None
                         ) -> Optional[Repository]:
        if 'origin' not in Git(self.path).remote().split('\n'):
            if ask_yes_no(
                    'Would you like to link an existing GitHub repo to it? (Y/n)',
                    True):
                repo_name = (get_repo_name
                             and get_repo_name()) or (self.name + '-skill')
                repo = self.user.get_repo(repo_name)
                self.git.remote('add', 'origin', repo.html_url)
                self.git.fetch()
                try:
                    self.git.pull('origin', 'master')
                except GitCommandError as e:
                    if e.status == 128:
                        raise UnrelatedGithubHistory(repo_name) from e
                    raise
                self.git.push('origin', 'master', set_upstream=True)
                print('Linked and pushed to GitHub repo:', repo.html_url)
                return repo

    def create_github_repo(self,
                           get_repo_name: Callable = None
                           ) -> Optional[Repository]:
        if 'origin' not in Git(self.path).remote().split('\n'):
            if ask_yes_no(
                    'Would you like to create a GitHub repo for it? (Y/n)',
                    True):
                repo_name = (get_repo_name
                             and get_repo_name()) or (self.name + '-skill')
                try:
                    repo = self.user.create_repo(repo_name,
                                                 self.short_description)
                except GithubException as e:
                    if e.status == 422:
                        raise GithubRepoExists(repo_name) from e
                    raise
                self.git.remote('add', 'origin', repo.html_url)
                call(['git', 'push', '-u', 'origin', 'master'],
                     cwd=self.git.working_dir)
                print('Created GitHub repo:', repo.html_url)
                return repo
        return None

    def perform(self):
        self.initialize_template()
        self.commit_changes()
        with print_error(GithubRepoExists):
            self.create_github_repo()
        print('Created skill at:', self.path)
Example 6
 def tag():
     """tag a new version of this distribution"""
     git = Git('.')
     git.pull('origin', 'master')
     git.tag(tag_name)
def main():
    """
    The algorithm is roughly:

        1. Is the current HEAD associated with a tag that looks like a release
           version?
        2. If "yes" then use that as the version
        3. If "no" then is the current branch master?
        4. If "yes" the current branch is master, then inspect the branches that
           fit the convention for a release branch and choose the latest;
           increment the minor version, append .0 to form the new version (e.g.,
           releases/v3.3 becomes 3.4.0), and append a pre-release marker
        5. If "no" the current branch is not master, then determine the most
           recent tag in history; strip any pre-release marker, increment the
           patch version, and append a new pre-release marker
    """

    repo = Repo('.')
    assert not repo.bare

    head_tag_ver = check_head_tag(repo)
    if head_tag_ver:
        return head_tag_ver

    version_loose = LooseVersion('0.0.0')
    prerelease_marker = datetime.date.today().strftime('%Y%m%d') \
            + '+git' + repo.head.commit.hexsha[:10]

    if DEBUG:
        print('Calculating release version for branch: ' +
              repo.active_branch.name)
    if repo.active_branch.name == 'master':
        version_new = {}
        # Use refs (not branches) to get local branches plus remote branches
        for ref in repo.refs:
            release_branch_match = RELEASE_BRANCH_RE.match(ref.name)
            if release_branch_match:
                # Construct a candidate version from this branch name
                version_new['major'] = int(
                    release_branch_match.group('vermaj'))
                version_new['minor'] = int(
                    release_branch_match.group('vermin')) + 1
                version_new['patch'] = 0
                version_new['prerelease'] = prerelease_marker
                new_version_loose = LooseVersion(
                    str(version_new['major']) + '.' +
                    str(version_new['minor']) + '.' +
                    str(version_new['patch']) + '-' +
                    version_new['prerelease'])
                if new_version_loose > version_loose:
                    version_loose = new_version_loose
                    if DEBUG:
                        print('Found new best version "' + str(version_loose) \
                                + '" on branch "' + ref.name + '"')

    else:
        gexc = Git('.')
        tags = gexc.execute([
            'git', 'tag', '--merged', 'HEAD', '--list', '1.*', '--sort',
            'version:refname'
        ])
        if len(tags) > 0:
            release_tag_match = RELEASE_TAG_RE.match(tags.splitlines()[-1])
            if release_tag_match:
                version_new = {}
                version_new['major'] = int(release_tag_match.group('vermaj'))
                version_new['minor'] = int(release_tag_match.group('vermin'))
                version_new['patch'] = int(
                    release_tag_match.group('verpatch')) + 1
                version_new['prerelease'] = prerelease_marker
                new_version_loose = LooseVersion(
                    str(version_new['major']) + '.' +
                    str(version_new['minor']) + '.' +
                    str(version_new['patch']) + '-' +
                    version_new['prerelease'])
                if new_version_loose > version_loose:
                    version_loose = new_version_loose
                    if DEBUG:
                        print('Found new best version "' + str(version_loose) \
                                + '" from tag "' + release_tag_match.group('ver') + '"')

    return str(version_loose)
Example 8
# -*- coding: utf-8 -*-
from pelican import signals, contents
from git import Git, Repo, InvalidGitRepositoryError
import os
from datetime import datetime
import time
from sc import fp, fs

import locale
locale.setlocale(locale.LC_ALL, 'C')

try:
    repo = Repo(os.path.abspath('.'))
    git = Git(os.path.abspath('.'))
except InvalidGitRepositoryError as e:
    repo = None

def specs(content):
    if isinstance(content, contents.Static) \
       or repo is None \
       or content.metadata.get('scep', None) is None:
        return

    # compute fingerprint
    fw = fs.fs_wrap(content.source_path)
    cv = fp.compute_visitor()
    fw.visit(cv)
    content.fp = cv.fingerprint().compact()

    scep = int(content.metadata.get('scep'))
    content.template = 'spec'
Example 9
 def _open_git(self):
     self._git = Git(str(self.path))
Example 10
 def git_remote_clone(self,
                      repo_url=None,
                      path='/tmp/kc123',
                      branch='master'):
     with Git().custom_environment(GIT_SSH_COMMAND=self.git_ssh_cmd):
         Repo.clone_from(repo_url, path, branch=branch)  # branch must be passed by keyword (third positional arg is progress)
 def __init__(self, repo_path):
     self.git = Git()
     self.repo = Repo(os.path.abspath('.'))
Example 12
 def _open_git(self) -> Git:
     self._open_repository()
     return Git(self.path)
Example 13
class UploadAction(ConsoleAction):
    def __init__(self, args):
        folder = abspath(expanduser(args.skill_folder))
        self.entry = SkillEntry.from_folder(folder)
        skills_dir = abspath(expanduser(self.msm.skills_dir))
        if join(skills_dir, basename(folder)) != folder:
            raise MskException('Skill folder, {}, not directly within skills directory, {}.'.format(
                args.skill_folder, self.msm.skills_dir
            ))
        self.skill_dir = folder

    git = Lazy(lambda s: Git(s.entry.path))  # type: Git

    @staticmethod
    def register(parser: ArgumentParser):
        pass  # Implemented in SubmitAction

    def check_valid(self):
        """Check that the skill contains all required files before uploading.
        """
        results = []
        if not (exists_in_remote(self.git, 'LICENSE.md') or
                exists_in_remote(self.git, 'LICENSE') or
                exists_in_remote(self.git, 'LICENSE.txt')):
            print('To have your Skill available for installation through the '
                  'Skills Marketplace, a license is required.\n'
                  'Please select one and add it to the skill as '
                  '`LICENSE.md.`\n'
                  'See https://opensource.org/licenses for information on '
                  'open source license options.')
            results.append(False)
        else:
            results.append(True)

        if not exists_in_remote(self.git, 'README.md'):
            print('For inclusion in the Mycroft Marketplace a README.md file '
                  'is required. Please add the file and retry.')
            results.append(False)
        else:
            results.append(True)

        with open(join(self.skill_dir, 'README.md')) as f:
            readme = f.read()
        if '# About' not in readme and '# Description' not in readme:
            print('README is missing About Section needed by the Marketplace')
            results.append(False)
        else:
            results.append(True)

        if '# Category' not in readme:
            print('README is missing Category section needed by the '
                  'Marketplace')
            results.append(False)
        else:
            results.append(True)
        return all(results)

    def perform(self):
        print('Uploading a new skill to the skill repo...')

        for i in listdir(self.entry.path):
            if i.lower() == 'readme.md' and i != 'README.md':
                shutil.move(join(self.entry.path, i), join(self.entry.path, 'README.md'))

        creator = CreateAction(None, self.entry.name.replace('-skill', ''))
        creator.path = self.entry.path
        creator.initialize_template({'.git', '.gitignore', 'README.md'})
        self.git.add('README.md')
        creator.commit_changes()

        try:
            skill_repo = creator.create_github_repo(
                lambda: input('Repo name:'))
        except GithubRepoExists:
            try:
                print("A repository with that name already exists")
                skill_repo = creator.link_github_repo(
                    lambda: input('Remote repo name:'))
            except UnrelatedGithubHistory:
                print("Repository history does not seem to be related")
                skill_repo = creator.force_push(
                    lambda: input('Confirm repo name:'))
        if skill_repo:
            self.entry.url = skill_repo.html_url
            self.entry.author = self.user.login
        else:
            if not self.entry.url:
                raise NoGitRepository
            skill_repo = self.github.get_repo(skill_repo_name(self.entry.url))

        if not skill_repo.permissions.push:
            print('Warning: You do not have write permissions to the provided skill repo.')
            if ask_yes_no('Create a fork and use that instead? (Y/n)', True):
                skill_repo = self.user.create_fork(skill_repo)
                print('Created fork:', skill_repo.html_url)
                self.git.remote('rename', 'origin', 'upstream')
                self.git.remote('add', 'origin', skill_repo.html_url)

        # verify that the required files exists in origin and contain the
        # required content.
        if not self.check_valid():
            print("Please add the missing information and rerun the command.")
            return

        self.entry.name = input('Enter a unique skill name (ie. npr-news or grocery-list): ')

        readme_file = {i.lower(): i for i in os.listdir(self.entry.path)}['readme.md']
        readme = read_file(self.entry.path, readme_file)

        last_section = None
        sections = {last_section: ''}
        for line in readme.split('\n'):
            line = line.strip()
            if line.startswith('#'):
                last_section = line.strip('# ').lower()
                sections[last_section] = ''
            else:
                sections[last_section] += '\n' + line
        del sections[None]

        if 'about' in sections:
            description = sections['about']
        elif 'description' in sections:
            description = sections['description']

        branch = SkillData(self.entry).add_to_repo()
        self.repo.push_to_fork(branch)

        pull = create_or_edit_pr(
            title='Add {}'.format(self.entry.name), body=body_template.format(
                description=description, skill_name=self.entry.name, skill_url=skill_repo.html_url
            ), user=self.user, branch=branch, skills_repo=self.repo.hub,
            repo_branch=self.branch
        )

        print('Created pull request: ', pull.html_url)
Example 14
        exit(2)

    # Filter out forked project if fork is False
    fork = args.fork

    repos = [
        repo for repo in get_public_repos(args.username, fork)
        if fork or not repo['fork']
    ]

    # TODO: Add failsafe mechanism to print partial emails
    for repo in repos:
        printv('Cloning {}...'.format(repo['name']))

        repo_authors = {}
        Git().clone(repo['ssh_url'] if fix else repo['clone_url'])

        g = Git(repo['repo_path'])

        # Hacky way of getting remote branches
        branches = g.branch('-r').split('\n')

        # Remove references e.g. "origin/HEAD -> origin/master"
        branches = filter(filter_branch, branches)

        # Strip empty spaces
        branches = [branch.strip() for branch in branches]

        if fix:
            chdir(repo['repo_path'])
            printv(getcwd())
Example 15
 def __init__(self, repo_path):
     self.git = Git()
     self.git.update_environment(
         GIT_CONFIG_NOSYSTEM="true", HOME=os.getcwd(), XDG_CONFIG_HOME=os.getcwd()
     )
     self.repo = Repo(os.path.abspath("."), search_parent_directories=True)
Example 16
    '-p',
    '--list-of-pos',
    help=
    'A list of dvc-cc indices that you want to include in the display. You can also use slicing, for example 12:15:2 to use 12, 14.',
    nargs="+",
    type=str)
parser.add_argument(
    '-e',
    '--print-error',
    help=
    'If this parameter is set, print the error message explaining why a file or folder could not be found.',
    action='store_true')
args = parser.parse_args()

repo = DVCRepo()
g = Git()
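# crude parse of `git branch` output: take the name after the '*' marker as the current branch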
starting_branch = g.branch().split('*')[1].split('\n')[0][1:]

# Set the password only once!
remote_name = repo.config['core']['remote']
remote_settings = repo.config['remote'][remote_name]
if 'ask_password' in remote_settings and remote_settings['ask_password']:
    remote_settings['password'] = getpass.getpass('Password for ' +
                                                  remote_settings['url'] +
                                                  ': ')
    remote_settings['ask_password'] = False

path_to_output_clean = args.path_to_output.replace('./', '_').replace(
    '/', '_').replace('\\\\', '_')
outputdir = create_output_dir(repo.root_dir, path_to_output_clean)
print('##################', outputdir)
Example 17
        repo.git.commit("-m", msg)
        print(f"Added Commit: {repo.commit()}")
    except GitCommandError:
        print("Nothing to commit")

    print(f"pushing to remote {branch} branch")
    repo.git.push("origin", branch)


# RUN
# ======================

git_ssh_identity_file = os.path.expanduser('~/.ssh/id_rsa')
git_ssh_cmd = 'ssh -i %s' % git_ssh_identity_file

Git().custom_environment(GIT_SSH_COMMAND=git_ssh_cmd)
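# note: custom_environment() is a context manager; a bare call like the one above has no
# lasting effect -- wrap the git operations in a `with` block or use update_environment instead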

repo = Repo(os.getcwd())
git = repo.git

try:
    git.checkout(CURRICULUM_BRANCH)
except GitCommandError:
    raise Exception(f"A branch called {CURRICULUM_BRANCH} must exist")

commit_message = repo.head.commit.message
notebook_to_markdown()
repo.git.add(".")
repo.git.commit("-m", commit_message)
# should raise if there are local unstaged changes
print(f"pushing to remote {CURRICULUM_BRANCH} branch")
Example 18
        r2id[r].append(id2class)
        # TODO: hello

    print(r2id)
    return r2id


def make_json():
    pass


df = pd.read_csv('tomcat.csv', sep='|')
bug_ids = df['bug_id'].dropna().astype(int).tolist()

git = Git('C:\\Users\\aqd14\\Documents\\GitHub\\tomcat70')

# p = re.compile('commit \w+')

try:
    with open('info.json', mode='w') as f:
        d = dict()
        for id in bug_ids:
            output = git.execute(
                command=
                'git log --all --grep=https://bz.apache.org/bugzilla/show_bug.cgi?id={}'
                .format(id))
            print('id = {}'.format(id))
            if not output:
                print('This might not be a fix for bug id = {}'.format(id))
                continue
Example 19
 def upload():
     """Upload the distribution to pypi, the new tag and the doc to Github"""
     options.update(
         gh_pages_update=Bunch(commit_message='Update doc to %s' % version))
     gh_pages_update()
     Git('.').push('origin', 'master', tag_name)
Example 20
#git checkout filename   roll the specified file back to its state at the most recent commit
#git reset --hard   roll files back to the specified commit
# r.index.reset(commit="e11f478c2e99e69969caf6e190751244d7b4608d",head=True)
# git branch
# r.branches
# get all branches
# print([str(b) for b in r.branches])
#git tag
#print(r.tags)
# current branch
#print(r.active_branch)
#???
# r.index.checkout("dev1")
# git clone
#Repo.clone_from()
# git tag -a
# r.create_tag("v1.3")
# git branch dev4
# r.create_head("dev4")
# git log
# print([i.hexsha for i in r.iter_commits()])
#git push origin master
# r.remote().push("master")
#git pull origin master
# r.remote().pull("master")

r=Git("C:\\Users\\Administrator\\Desktop\\derek")
# r.add(".")
# r.commit("-m commit message")

r.checkout("dev4")
Example 21
def main():
    #make sure the directory is empty
    if not os.listdir(gitdirectory):
        repo = git.Repo.clone_from(giturl, gitdirectory, branch='master')

    else:
        repo = git.Repo(gitdirectory)
        repo2 = git.Repo(localdirectory)
    originalrepo = Git(gitdirectory)
    commits = list(repo.iter_commits("master", max_count=1000000))
    tree = repo.head.commit.tree

    print(originalrepo.branch())

    stay = True
    current_original_commit = 0

    print(
        "quit to quit, current to check the current commit sha, < for one commit back, > for one commit forward, a number to jump to that commit, \"complete\" to go back to a commit and replay up to the newest, and the \"100commit\" feature"
    )
    iterate = 0
    iterate100 = 0
    iteratevalue = 0
    while stay:
        if iterate == 0 and iterate100 == 0:
            userinput = input("input: ")
        if userinput == "quit":
            stay = False
        elif userinput == "current":
            print(repo.head.commit)
            print(originalrepo.committed_date)
            print("commits in the past = " + str(current_original_commit))
            continue
        elif userinput == ">":
            if current_original_commit > 0:
                current_original_commit -= 1
            else:
                print(
                    "you tried to go out of range, this is the newest commit")
                continue
        elif userinput == "<":
            if current_original_commit < len(commits) - 1:
                current_original_commit += 1
            else:
                print(
                    "you tried to go out of range, this is the oldest commit")
                continue
        elif userinput.isdigit():
            if int(userinput) < len(commits) and int(userinput) >= 0:
                current_original_commit = int(userinput)
            else:
                print("you tried to go out of range, max range is: " +
                      str(len(commits)))
                continue
        elif userinput == "complete" or iterate == 1:
            if iterate == 0:
                firstcommitnumber = int(
                    input(
                        "How far back would you like to go in commits? Input: "
                    ))
                if firstcommitnumber > len(commits):
                    print(
                        "sorry, you have gone out of the scope of the project. There are "
                        + str(len(commits)) + " total commits")
                else:
                    start_time = time.time()
                    current_original_commit = firstcommitnumber - 1
                    iterate = 1
            if iterate == 1:
                if current_original_commit > 0:
                    current_original_commit -= 1
                else:
                    time_elapsed = time.time() - start_time
                    print(
                        "You have reached the final newest commit (shown below) in "
                        + str(time_elapsed))
                    iterate = 0
        elif userinput == "100commit" or iterate100 == 1:
            if iterate100 == 0:
                firstcommitnumber = int(
                    input(
                        "How far back would you like to go in commits? Input: "
                    ))
                if firstcommitnumber > len(commits):
                    print(
                        "sorry, you have gone out of the scope of the project. There are "
                        + str(len(commits)) + " total commits")
                else:
                    start_time = time.time()
                    current_original_commit = firstcommitnumber - 1
                    iterate100 = 1
            if iterate100 == 1:
                if current_original_commit > 0:
                    if iteratevalue < 100:
                        current_original_commit -= 1
                        iteratevalue += 1
                    else:
                        confirmation = input(
                            "type \"confirm\" to run the next 50 commits: ")
                        if confirmation == "confirm":
                            iteratevalue = 0
                            current_original_commit -= 1
                else:
                    time_elapsed = time.time() - start_time
                    print(
                        "You have reached the final newest commit (shown below) in "
                        + str(time_elapsed))
                    iterate100 = 0

        else:
            print("sorry, not recognised try again")
            continue
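        # check out the selected historical commit in the cloned repo (detached HEAD)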
        originalrepo.checkout(commits[current_original_commit])

        dcmp = dircmp(localdirectory, gitdirectory)
        #print("files and folders added:")
        add_diff_files(dcmp)
        #print("files and folders removed:")
        delete_diff_files(dcmp)
        #print("files and folders replaced:")
        merge_diff_files(dcmp)
        #print("DIFFERENCES" + str(dcmp.left_only) + str(dcmp.right_only) +str(dcmp.diff_files))
        print("changes complete, starting commit number: " +
              str(current_original_commit) +
              " commit(s) from the newest commit. hash: " +
              str(commits[current_original_commit]))
        #try:
        repo2 = Repo(localdirectory)
        #repo2.git.push(force=True)
        #repo2.index.add('.')
        #repo2.git.add(update=True)
        repo2.git.add("-A")
        repo2.index.commit(str(current_original_commit))
        #repo2.git.commit('-m', 'test commit', author='*****@*****.**')
        origin = repo2.remote(name='origin')
        origin.push()
        print("commit successful, pushing")
Example 22
def main(argv=None):
    """ Execute the application CLI.

    Arguments are taken from sys.argv by default.

    """

    args = _cmdline(argv)
    logger.start(args.logging_level)
    logger.debug("starting execution")

    #  get repo and initialize GitHeat instance
    try:
        g = Git(os.getcwd())
    except (InvalidGitRepositoryError, GitCommandError, GitCommandNotFound):
        print("Are you sure you're in an initialized git directory?")
        return 0
    githeat = Githeat(g, **vars(args))
    githeat.parse_commits()
    githeat.init_daily_contribution_map()
    githeat.compute_daily_contribution_map()
    githeat.normalize_daily_contribution_map()
    matrix = githeat.compute_graph_matrix()

    term = Terminal()
    matrix_width = githeat.get_matrix_width(matrix)
    if matrix_width > term.width:
        print("Your terminal width is smaller than the heatmap. Please consider using "
              "the --width {thin, reg, thick} argument, resizing your terminal, or "
              "merging months by including --month-merge.")
        return 0
    new_width = (term.width - matrix_width) // 2
    csr = Cursor(term.height // 2 - 3, new_width, term)

    screen = {}
    screen_dates = {}
    with term.hidden_cursor(), \
         term.raw(), \
         term.location(), \
         term.fullscreen(), \
         term.keypad():

        # Print header
        print_header_left(term, unicode(os.getcwd()), screen)
        text = u'GitHeat {}'.format(__version__)
        print_header_center(term, text, screen)
        text = u'ESC, ^c to exit'
        print_header_right(term, text, screen)

        # Print footer
        text = u'Please move cursor to navigate through map'
        print_footer_left(term, term.bold(text), screen)

        graph_right_most_x = term.width  # initialized at terminal width
        graph_left_most_x = csr.x
        graph_top_most_y = csr.y
        graph_x, graph_y = csr.x, csr.y

        #  get graph boundaries
        for i in range(7):
            #  for the week column in the matrix
            for week in matrix:
                if githeat.month_merge:
                    #  check if value in that week is just empty spaces and not colorize
                    if week.col[i][1] == githeat.width:
                        continue
                graph_x += len(githeat.width)

            graph_right_most_x = graph_x
            graph_x = graph_left_most_x  # reset x
            graph_y += 1
        graph_bottom_most_y = graph_y - 1

        #  print graph
        graph_x, graph_y = csr.x, csr.y
        print_graph(term, screen, screen_dates, graph_x, graph_y,
                    graph_left_most_x, matrix, githeat)

        # print legend
        block_separation_width = 4
        legend_x = (term.width - len(githeat.colors) * block_separation_width) // 2
        legend_y = graph_bottom_most_y + 5
        if not githeat.hide_legend:
            print_graph_legend(legend_x, legend_y,
                               githeat.width,
                               block_separation_width,
                               githeat.colors,
                               screen,
                               term)

        while True:
            cursor_color = colorize(githeat.width, ansi=15, ansi_bg=15)
            echo_yx(csr, cursor_color)
            inp = term.inkey()

            if inp in QUIT_KEYS:
                # Esc or ^c pressed
                break
            elif inp == chr(99):
                # c pressed, thus change color
                githeat.switch_to_next_color()
                #  changing colors requires regenerating matrix,
                #  because values there are colorized strings, harder to change
                matrix = githeat.compute_graph_matrix()
                #  print changed color graph
                print_graph(term, screen, screen_dates, graph_x, graph_y,
                            graph_left_most_x, matrix, githeat)

                #  print changed color legend
                if not githeat.hide_legend:
                    print_graph_legend(legend_x, legend_y,
                                       githeat.width,
                                       block_separation_width,
                                       githeat.colors,
                                       screen,
                                       term)

                #  print changed color footer
                new_cursor_date_value = screen_dates.get((csr.y, csr.x))
                if new_cursor_date_value:  # only if it needs changing
                    location = nav.home(nav.bottom(csr))
                    update_most_committers_footer(location, githeat,
                                                  new_cursor_date_value, term, screen)
                continue
            elif inp.lower() in ONE_TO_SEVEN_KEYS or inp in Q_TO_QUOTES_KEYS:
                if inp.lower() in ONE_TO_SEVEN_KEYS:
                    #  key from 1 to 7 pressed.
                    githeat.toggle_day(int(inp) - 1)
                else:
                    # key from q to ' pressed
                    githeat.toggle_month(Q_TO_QUOTES_KEYS.index(inp.lower()))

                # re-computing new daily contributions with the specified days/months
                githeat.recompute_daily_contribution_map()
                matrix = githeat.compute_graph_matrix()
                #  print new filtered graph
                print_graph(term, screen, screen_dates, graph_x, graph_y,
                            graph_left_most_x, matrix, githeat)

                continue

            else:
                n_csr = nav.lookup_move(inp.code, csr, term, githeat)

            # only allow moves within the graph boundaries
            if not is_within_boundary(graph_right_most_x, graph_top_most_y,
                                      graph_left_most_x, graph_bottom_most_y,
                                      n_csr):
                continue

            # get value at new cursor block, if it exists
            new_cursor_date_value = screen_dates.get((n_csr.y, n_csr.x))
            if new_cursor_date_value:  # Cursor is on a date block with commits
                location = nav.home(nav.bottom(csr))
                update_most_committers_footer(location, githeat,
                                              new_cursor_date_value, term, screen)
            else:

                horizontal_empty = False

                #  jump through empty values
                while not new_cursor_date_value and is_within_boundary(
                        graph_right_most_x - 1,
                        graph_top_most_y,
                        graph_left_most_x + 1,
                        graph_bottom_most_y,
                        n_csr):

                    x = n_csr.x
                    y = n_csr.y
                    if n_csr.x > csr.x:  # right move
                        x += 1
                    elif n_csr.x < csr.x:  # left move
                        x -= 1
                    else:
                        horizontal_empty = True
                        break  # skip jumping on up or down moves

                    n_csr = Cursor(y, x, term)
                    new_cursor_date_value = screen_dates.get((n_csr.y, n_csr.x))
                    if new_cursor_date_value:
                        location = nav.home(nav.bottom(csr))
                        update_most_committers_footer(location, githeat,
                                                      new_cursor_date_value, term, screen)

                if horizontal_empty or not new_cursor_date_value:
                    continue

            if n_csr != csr:
                # erase old cursor,
                prev_value = screen.get((csr.y, csr.x), u'  ')
                echo_yx(csr, prev_value)
                csr = n_csr

            if inp == chr(13):
                # ENTER pressed on date block
                commits_on_date = githeat.commits_db.get(new_cursor_date_value)

                if commits_on_date:  # if block has contributions
                    #  open commits desc terminal
                    open_commits_terminal(new_cursor_date_value,
                                          commits_on_date,
                                          githeat)
                    # redraw base terminal after exiting commits desc terminal
                    redraw(term=term, screen=screen)
                else:
                    info = u'Please choose a date with contributions \a'
                    text = unicode(new_cursor_date_value) + ' ' + info
                    print_footer_left(term, text, screen)

    logger.debug("successful completion")
    return 0
Example 23
    def create_package_publisher(path_to_nuclide_repo, master_tmpdir,
                                 github_access_token):
        def get_list_of_packages(package_type):
            stdout = fs.cross_platform_check_output(
                ['./scripts/dev/packages', '--package-type', package_type],
                cwd=path_to_nuclide_repo)
            # Split by newlines and then remove the last element, which will be the empty string.
            return stdout.split('\n')[:-1]

        # These are lists of absolute paths to package.json files.
        node_packages = get_list_of_packages('Node')
        atom_packages = get_list_of_packages('Atom')

        # Ensure that nuclide-installer is listed last in atom_packages. We do not want to publish a
        # new version of the installer until we are sure that all of the packages it plans to
        # install have been published.
        for index, package_json_path in enumerate(atom_packages):
            package_name = json_load(package_json_path)['name']
            if package_name == 'nuclide-installer':
                del atom_packages[index]
                atom_packages.append(package_json_path)
                break

        # These are sets of package names.
        nuclide_npm_packages = set()
        nuclide_apm_packages = set()

        publishers = []
        git = Git()
        apm = Apm(git)
        npm = Npm()
        boilerplate_files = {
            'LICENSE': os.path.join(path_to_nuclide_repo, 'LICENSE'),
        }

        # Make sure that everything needed to run the transpile script is installed.
        subprocess.check_call(['npm', 'install'],
                              cwd=os.path.join(path_to_nuclide_repo,
                                               'pkg/nuclide/node-transpiler'))
        transpile_script = os.path.join(
            path_to_nuclide_repo, 'pkg/nuclide/node-transpiler/bin/transpile')
        transpiler = Transpiler.create_transpiler(path_to_nuclide_repo,
                                                  transpile_script)

        def process_packages(packages, is_npm):
            for package_json in packages:
                package_name = json_load(package_json)['name']
                if is_npm:
                    nuclide_npm_packages.add(package_name)
                else:
                    nuclide_apm_packages.add(package_name)

                config = AbstractPublisherConfig(package_name,
                                                 os.path.dirname(package_json),
                                                 nuclide_npm_packages,
                                                 nuclide_apm_packages)
                if is_npm:
                    publisher = NpmPublisher(config, npm, master_tmpdir,
                                             transpiler, boilerplate_files)
                else:
                    publisher = ApmPublisher(config, apm, master_tmpdir,
                                             transpiler, boilerplate_files,
                                             git, github_access_token)
                publishers.append(publisher)

        # Note that the resulting publishers array will be organized such that all
        # Node packages appear in topologically sorted order followed by all Atom packages.
        process_packages(node_packages, is_npm=True)
        process_packages(atom_packages, is_npm=False)
        return PackagePublisher(publishers)
Example 24
                  help='Branch in the Git repository')
parser.add_option('--project', dest='project', help='Name of the project')

(options, args) = parser.parse_args()

repo_path = options.repolocation
project = options.project

branch = 'master'
if options.branch:
    branch = options.branch

print 'Repo path: ' + repo_path + ' and branch: ' + branch

repo = Repo(repo_path)
git = Git(repo_path)
head = repo.heads[0]

commits = list(repo.iter_commits(branch))
commits.reverse()
for commit in commits:
    print 'Commit: ' + commit.hexsha + ' with date: ' + str(
        commit.committed_date)
    git.checkout(commit.hexsha)
    cmd_str = 'java -jar bin/threadfix-endpoint-cli-2.4-SNAPSHOT-jar-with-dependencies.jar ' + pipes.quote(
        repo_path) + ' -json > work/' + pipes.quote(
            project) + '_attacksurface_' + pipes.quote(
                str(commit.committed_date)) + '.json'
    print 'About to generate attack surface with command: ' + cmd_str
    os.system(cmd_str)
Example 25
 def setUpClass(cls):
     super(TestGit, cls).setUpClass()
     cls.git = Git(cls.rorepo.working_dir)
Example 26
def main() -> None:
    aparser = argparse.ArgumentParser(
            description='import account statement PDFs into hledger')
    aparser.add_argument('--force', dest='force', default=False,
                         action='store_true',
                         help='overwrite existing ledgers')
    aparser.add_argument('--dry-run', dest='dry_run',
                         default=False, action='store_true',
                         help='run parsers without writing any output files')
    aparser.add_argument('--regenerate-includes', dest='regenerate_includes',
                         default=False, action='store_true',
                         help='only regenerate include files; don\'t import '
                              'new bank statements')
    aparser.add_argument('--no-merge', dest='merge',
                         default=True, action='store_false',
                         help='don\'t merge import branch after import')

    args = aparser.parse_args()

    xdg = getXDGdirectories('bank-statement-parser')
    config_file = xdg['config'] / 'import.cfg'
    config = ImportConfig.read_from_file(config_file)

    if args.regenerate_includes:
        regenerate_includes(Path.cwd(), config)
        exit(0)

    selection_script = xdg['config'] / 'select_ledger.py'
    select_ledger: Callable[[BankStatementMetadata], str]
    if selection_script.exists():
        with open(selection_script, 'r') as f:
            content = f.read()
            parse_globals: dict[str, Any] = {
                'BankStatementMetadata': BankStatementMetadata,
                }
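            # run the user-supplied script so it can define select_ledger() in parse_globals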
            exec(compile(content, selection_script, 'exec'), parse_globals)
            if 'select_ledger' not in parse_globals:
                print(f'{selection_script} doesn\'t contain select_ledger'
                      ' function.',
                      file=sys.stderr)
                exit(1)
            select_ledger = parse_globals['select_ledger']
    elif len(config.ledgers) == 1:
        ledger_name = next(iter(config.ledgers))
        def select_ledger(meta: BankStatementMetadata) -> str:
            return ledger_name
    else:
        print(f'Error: {config_file} contains more than one ledger,'
              f' but {selection_script} is missing.',
              file=sys.stderr)
        exit(1)
    incoming_statements = get_metadata_of_incoming_statements(
            config.incoming_dir)
    classified = sort_incoming_statements_to_ledger_dirs(
            incoming_statements,
            select_ledger,
            )
    if any(key not in config.ledgers for key in classified.keys()):
        for key, statements in classified.items():
            if key in config.ledgers:
                continue
            mismatched_files = ', '.join(str(s.statement_path)
                                         for s in statements)
            print(f'Error: {mismatched_files} were assigned to unknown ledger'
                  f' configuration {key}. Please check {selection_script}.',
                  file=sys.stderr)
        exit(1)
    for key, statements in classified.items():
        ledger_config = config.ledgers[key]
        print(f'Importing bank statements to {ledger_config.ledger_dir}.')
        # change working directory for git status to work correctly
        os.chdir(ledger_config.ledger_dir)
        git: BaseGit
        if ledger_config.git_dir is not None:
            git = Git(ledger_config.ledger_dir, ledger_config.git_dir)
            import_branch = ledger_config.import_branch
        else:
            git = FakeGit()
            import_branch = git.current_branch()

        try:
            import_incoming_statements(statements,
                                       ledger_config.ledger_dir,
                                       git, import_branch,
                                       args.force, args.dry_run)
        except DirtyWorkingDirectoryException:
            print(f'{ledger_config.ledger_dir} contains uncommitted changes,'
                  ' please commit those before continuing.', file=sys.stderr)
            exit(1)
        # The import_transaction in import_incoming_statements automatically
        # resets the branch to the previously checked-out one after importing
        # to the import_branch.
        if (args.merge
                and isinstance(git, Git)
                and import_branch != git.current_branch()):
            try:
                git.merge(import_branch)
            except GitMergeConflictError as e:
                conflicting_files = [ledger_config.ledger_dir / c.name
                                     for c in e.conflicts]
                not_autogenerated = [p for p in conflicting_files
                                     if p.name != 'journal.hledger']
                if not_autogenerated:
                    raise RuntimeError(
                            'Could not automerge the following files:\n'
                            + '\n'.join(str(p) for p in not_autogenerated))
                write_include_files(ledger_config.ledger_dir, git)
                git.commit(f"Merge branch '{import_branch}'")
Example no. 27
0
def git_daemon_launched(base_path, ip, port):
    from git import Git  # Avoid circular deps.

    gd = None
    try:
        if is_win:
            ## On MINGW-git, daemon exists in .\Git\mingw64\libexec\git-core\,
            #  but if invoked as 'git daemon', it detaches from parent `git` cmd,
            #  and then CANNOT DIE!
            #  So, invoke it as a single command.
            ## Cygwin-git has no daemon.  But it can use MINGW's.
            #
            daemon_cmd = [
                'git-daemon', '--enable=receive-pack',
                '--listen=%s' % ip,
                '--port=%s' % port,
                '--base-path=%s' % base_path, base_path
            ]
            gd = Git().execute(daemon_cmd, as_process=True)
        else:
            gd = Git().daemon(base_path,
                              enable='receive-pack',
                              listen=ip,
                              port=port,
                              base_path=base_path,
                              as_process=True)
        # yes, I know ... fortunately, this is always going to work if sleep time is just large enough
        time.sleep(0.5 * (1 + is_win))
    except Exception as ex:
        msg = textwrap.dedent("""
        Launching git-daemon failed due to: %s
          Probably test will fail subsequently.

          BUT you may start *git-daemon* manually with this command:
                git daemon --enable=receive-pack  --listen=%s --port=%s --base-path=%s  %s
          You may also run the daemon on a different port by passing --port=<port>
          and setting the environment variable GIT_PYTHON_TEST_GIT_DAEMON_PORT to <port>
        """)
        if is_win:
            msg += textwrap.dedent(r"""

            On Windows,
              the `git-daemon.exe` must be in PATH.
              For MINGW, look into .\Git\mingw64\libexec\git-core\, but problems with paths might appear.
              CYGWIN has no daemon, but if one exists, it gets along fine (but also has path problems)."""
                                   )
        log.warning(msg, ex, ip, port, base_path, base_path, exc_info=1)

        yield  # OK, assume daemon started manually.

    else:
        yield  # Yield outside try, to avoid catching
    finally:
        if gd:
            try:
                log.debug("Killing git-daemon...")
                gd.proc.kill()
            except Exception as ex:
                ## Either it has died (and we're here), or it won't die, again here...
                log.debug("Hidden error while Killing git-daemon: %s",
                          ex,
                          exc_info=1)
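git_daemon_launched above is a generator (it yields once after the daemon is started and kills it on the way out), so it is meant to be driven as a context manager; GitPython's test helpers presumably wrap it with contextlib.contextmanager. A minimal usage sketch, with the base path, address and port as placeholder values:

import contextlib

from git import Repo

# Wrap the generator so it can be used in a with-statement.
launched = contextlib.contextmanager(git_daemon_launched)

# Placeholder base path and the default git-daemon port.
with launched('/tmp/served_repos', '127.0.0.1', 9418):
    # While the daemon is (hopefully) up, repositories below the base path can
    # be cloned over the git:// protocol.
    Repo.clone_from('git://127.0.0.1:9418/example_repo', '/tmp/example_clone')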
Example no. 28
0
import requests
from os import path
from git import Repo, Git

# `args` (url, token, workdir) is expected to come from the script's
# argument parser, which is not part of this excerpt.
page = 1
payload = {
    'access_token': args.token,
    'per_page': '100',
    'pagination': 'keyset'
}
while True:
    print('Requesting page ' + str(page))
    payload['page'] = page
    response = requests.get(args.url + '/api/v4/projects', params=payload)
    total_pages = int(response.headers['X-Total-Pages'])
    for repo in response.json():
        if path.exists(args.workdir + repo['name']):
            print(repo['name'] + ' already exists')
            continue
        print('Getting tags of ' + repo['name'])
        tags = requests.get(args.url + '/api/v4/projects/' + str(repo['id']) +
                            '/repository/tags?access_token=' +
                            args.token).json()
        if 'message' in tags:
            print("Couldn't fetch tags of " + repo['name'])
            continue
        for tag in tags:
            print('Cloning ' + repo['name'] + path.sep + tag['name'])
            rp = Repo.clone_from(
                repo['http_url_to_repo'],
                args.workdir + repo['name'] + path.sep + tag['name'])
            Git(rp.working_dir).checkout(tag['name'])
    if page == total_pages:
        break
    page = page + 1
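A note on the clone step above: git clone accepts a tag name via --branch, which GitPython forwards through the branch keyword of Repo.clone_from, so each tag could be cloned in a single call without the separate checkout. A small sketch with placeholder URL, directory and tag name:

from git import Repo

# Hypothetical values; any reachable repository and writable directory work.
repo_url = 'https://gitlab.example.com/group/project.git'
target_dir = '/tmp/project/v1.0.0'

# 'branch' is forwarded to `git clone --branch`, which also accepts tag names;
# depth=1 keeps each per-tag copy shallow.
Repo.clone_from(repo_url, target_dir, branch='v1.0.0', depth=1)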
Example no. 29
0
def main(debug=False):
    argc = len(sys.argv)
    if argc == 1:
        mode = 'help'
        parser_args = sys.argv
    else:
        mode = sys.argv[1]
        parser_args = sys.argv[2:]
    parser = make_argument_parser()
    args = parser.parse_args(args=parser_args)

    lines = []
    print()
    lines.append(u' *******************************************')
    lines.append(u' **     Taichi Programming Language       **')
    lines.append(u' *******************************************')
    if 'TI_DEBUG' in os.environ:
        val = os.environ['TI_DEBUG']
        if val not in ['0', '1']:
            raise ValueError(
                "Environment variable TI_DEBUG can only have value 0 or 1.")
    if debug:
        lines.append(u' *****************Debug Mode****************')
        os.environ['TI_DEBUG'] = '1'
    print(u'\n'.join(lines))
    print()
    import taichi as ti
    if args.arch is not None:
        arch = args.arch
        if args.exclusive:
            arch = '^' + arch
        print(f'Running on Arch={arch}')
        os.environ['TI_WANTED_ARCHS'] = arch

    if mode == 'help':
        print(
            "    Usage: ti run [task name]        |-> Run a specific task\n"
            "           ti test                   |-> Run all the tests\n"
            "           ti benchmark              |-> Run python tests in benchmark mode\n"
            "           ti baseline               |-> Archive current benchmark result as baseline\n"
            "           ti regression             |-> Display benchmark regression test result\n"
            "           ti format                 |-> Reformat modified source files\n"
            "           ti format_all             |-> Reformat all source files\n"
            "           ti build                  |-> Build C++ files\n"
            "           ti video                  |-> Make a video using *.png files in the current folder\n"
            "           ti video_scale            |-> Scale video resolution \n"
            "           ti video_crop             |-> Crop video\n"
            "           ti video_speed            |-> Speed up video\n"
            "           ti gif                    |-> Convert mp4 file to gif\n"
            "           ti doc                    |-> Build documentation\n"
            "           ti release                |-> Make source code release\n"
            "           ti debug [script.py]      |-> Debug script\n")
        return 0

    t = time.time()
    if mode.endswith('.py'):
        import subprocess
        subprocess.call([sys.executable, mode] + sys.argv[1:])
    elif mode == "run":
        if argc <= 2:
            print("Please specify [task name], e.g. test_math")
            return -1
        print(sys.argv)
        name = sys.argv[2]
        task = ti.Task(name)
        task.run(*sys.argv[3:])
    elif mode == "debug":
        ti.core.set_core_trigger_gdb_when_crash(True)
        if argc <= 2:
            print("Please specify [file name], e.g. render.py")
            return -1
        name = sys.argv[2]
        with open(name) as script:
            script = script.read()
        exec(script, {'__name__': '__main__'})
    elif mode == "test":
        if len(args.files):
            if args.cpp:
                return test_cpp(args)
            else:
                return test_python(args)
        elif args.cpp:
            return test_cpp(args)
        else:
            ret = test_python(args)
            if ret != 0:
                return ret
            return test_cpp(args)
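    # Benchmark mode (below): refuse to run unless the compiled taichi core was
    # built from the commit that is currently checked out, so the recorded
    # numbers correspond to the current source tree.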
    elif mode == "benchmark":
        import shutil
        commit_hash = ti.core.get_commit_hash()
        with os.popen('git rev-parse HEAD') as f:
            current_commit_hash = f.read().strip()
        assert commit_hash == current_commit_hash, (
            f"Built commit {commit_hash:.6} differs from current commit "
            f"{current_commit_hash:.6}, refusing to benchmark")
        os.environ['TI_PRINT_BENCHMARK_STAT'] = '1'
        output_dir = get_benchmark_output_dir()
        shutil.rmtree(output_dir, True)
        os.mkdir(output_dir)
        os.environ['TI_BENCHMARK_OUTPUT_DIR'] = output_dir
        if os.environ.get('TI_WANTED_ARCHS') is None:
            # since we only do number-of-statements benchmark
            os.environ['TI_WANTED_ARCHS'] = 'x64'
        test_python(args)
    elif mode == "baseline":
        import shutil
        baseline_dir = get_benchmark_baseline_dir()
        output_dir = get_benchmark_output_dir()
        shutil.rmtree(baseline_dir, True)
        shutil.copytree(output_dir, baseline_dir)
        print('[benchmark] baseline data saved')
    elif mode == "regression":
        baseline_dir = get_benchmark_baseline_dir()
        output_dir = get_benchmark_output_dir()
        display_benchmark_regression(baseline_dir, output_dir, args)
    elif mode == "build":
        ti.core.build()
    elif mode == "format":
        diff = None
        if len(sys.argv) >= 3:
            diff = sys.argv[2]
        ti.core.format(diff=diff)
    elif mode == "format_all":
        ti.core.format(all=True)
    elif mode == "statement":
        exec(sys.argv[2])
    elif mode == "update":
        ti.core.update(True)
        ti.core.build()
    elif mode == "asm":
        fn = sys.argv[2]
        os.system(
            r"sed '/^\s*\.\(L[A-Z]\|[a-z]\)/ d' {0} > clean_{0}".format(fn))
    elif mode == "interpolate":
        interpolate_frames('.')
    elif mode == "doc":
        os.system('cd {}/docs && sphinx-build -b html . build'.format(
            ti.get_repo_directory()))
    elif mode == "video":
        files = sorted(os.listdir('.'))
        files = list(filter(lambda x: x.endswith('.png'), files))
        if len(sys.argv) >= 3:
            frame_rate = int(sys.argv[2])
        else:
            frame_rate = 24
        if len(sys.argv) >= 4:
            trunc = int(sys.argv[3])
            files = files[:trunc]
        ti.info('Making video using {} png files...', len(files))
        ti.info("frame_rate={}", frame_rate)
        output_fn = 'video.mp4'
        make_video(files, output_path=output_fn, frame_rate=frame_rate)
        ti.info('Done! Output video file = {}', output_fn)
    elif mode == "video_scale":
        input_fn = sys.argv[2]
        assert input_fn[-4:] == '.mp4'
        output_fn = input_fn[:-4] + '-scaled.mp4'
        ratiow = float(sys.argv[3])
        if len(sys.argv) >= 5:
            ratioh = float(sys.argv[4])
        else:
            ratioh = ratiow
        scale_video(input_fn, output_fn, ratiow, ratioh)
    elif mode == "video_crop":
        if len(sys.argv) != 7:
            print('Usage: ti video_crop fn x_begin x_end y_begin y_end')
            return -1
        input_fn = sys.argv[2]
        assert input_fn[-4:] == '.mp4'
        output_fn = input_fn[:-4] + '-cropped.mp4'
        x_begin = float(sys.argv[3])
        x_end = float(sys.argv[4])
        y_begin = float(sys.argv[5])
        y_end = float(sys.argv[6])
        crop_video(input_fn, output_fn, x_begin, x_end, y_begin, y_end)
    elif mode == "video_speed":
        if len(sys.argv) != 4:
            print('Usage: ti video_speed fn speed_up_factor')
            return -1
        input_fn = sys.argv[2]
        assert input_fn[-4:] == '.mp4'
        output_fn = input_fn[:-4] + '-sped.mp4'
        speed = float(sys.argv[3])
        accelerate_video(input_fn, output_fn, speed)
    elif mode == "gif":
        input_fn = sys.argv[2]
        assert input_fn[-4:] == '.mp4'
        output_fn = input_fn[:-4] + '.gif'
        ti.info('Converting {} to {}'.format(input_fn, output_fn))
        framerate = 24
        mp4_to_gif(input_fn, output_fn, framerate)
    elif mode == "convert":
        # http://www.commandlinefu.com/commands/view/3584/remove-color-codes-special-characters-with-sed
        # TODO: Windows support
        for fn in sys.argv[2:]:
            print("Converting logging file: {}".format(fn))
            tmp_fn = '/tmp/{}.{:05d}.backup'.format(fn,
                                                    random.randint(0, 10000))
            shutil.move(fn, tmp_fn)
            command = r'sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K]//g"'
            os.system('{} {} > {}'.format(command, tmp_fn, fn))
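    # Release mode (below): zip every file tracked by git and name the archive
    # after the taichi version, the short commit hash and the archive's own
    # md5 checksum.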
    elif mode == "release":
        from git import Git
        import zipfile
        import hashlib
        g = Git(ti.get_repo_directory())
        g.init()
        with zipfile.ZipFile('release.zip', 'w') as zip:
            files = g.ls_files().split('\n')
            os.chdir(ti.get_repo_directory())
            for f in files:
                if not os.path.isdir(f):
                    zip.write(f)
        ver = ti.__version__
        md5 = hashlib.md5()
        with open('release.zip', "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                md5.update(chunk)
        md5 = md5.hexdigest()
        commit = ti.core.get_commit_hash()[:8]
        fn = f'taichi-src-v{ver[0]}-{ver[1]}-{ver[2]}-{commit}-{md5}.zip'
        import shutil
        shutil.move('release.zip', fn)
    else:
        name = sys.argv[1]
        print('Running task [{}]...'.format(name))
        task = ti.Task(name)
        task.run(*sys.argv[2:])
    print()
    print(">>> Running time: {:.2f}s".format(time.time() - t))
    return 0
Example no. 30
0
from flask import Flask, jsonify, request
from git import Git
import os

application = Flask(__name__)

g = Git('/home/pxh8242/supply-chain-visibility')


@application.route('/mobileapi', methods=['GET'])
def build_mobile_api():
    branch = request.args.get('branch', default='master')
    return build_and_deploy("azureMobileApi", branch)


@application.route('/dataadapter', methods=['GET'])
def build_data_adapter():
    branch = request.args.get('branch', default='master')
    return build_and_deploy("azureDataAdapter", branch)


@application.route('/branches', methods=['GET'])
def get_list_of_branches_api():
    return jsonify(get_list_of_branches())


def build_and_deploy(project, branch):
    g.pull()
    if branch not in get_list_of_branches():
        return "Branch " + branch + " is not in the list of branches"
    os.system("bash PullAndDeploy.sh " + project + " " + branch)