import fnmatch
import os
import sys
from typing import Dict, Iterable, List

import git

# MyProgressPrinter is assumed to be defined elsewhere in this resource
# (a git.RemoteProgress subclass used for fetch progress output).


def git_check(data: dict,
              references: List[Dict[str, str]] = None,
              repo_dir: str = None):
    debug = data['source'].get('debug', False)
    version = data['source'].get('version')
    if not version:
        # version is passed as None if there is no previous version, but an
        # empty dictionary plays better with how the symbol is used later
        version = {}
    repo_uri = data['source']['uri']
    if repo_dir is None:
        tmpdir = os.environ.get('TMPDIR', '/tmp')
        repo_dir = '{tmpdir}/codecommit-resource-repo-cache'.format(
            tmpdir=tmpdir)
    # TODO: resolve refs/remotes/origin/HEAD instead of hardcoding master
    branch = data['source'].get('branch', 'master')
    if debug:
        print("Branch is", branch, file=sys.stderr)

    if references is None:
        # use the head of the branch as the commit for it,
        # so repo.commit() can resolve it
        references = [{
            'ref':
            'refs/heads/{branch}'.format(branch=branch),
            'commit':
            'refs/remotes/origin/{branch}'.format(branch=branch)
        }]

    # full paths for included and ignored repository locations.
    # it doesn't matter if they don't exist: the comparison will harmlessly
    # fail
    paths = [
        os.path.abspath('%s/%s' % (repo_dir, path))
        for path in data['source'].get('paths', [])
    ]
    ignored_paths = [
        os.path.abspath('%s/%s' % (repo_dir, path))
        for path in data['source'].get('ignored_paths', [])
    ]
    # TODO implement tag_filter and skip_ci_disabled

    repo = None  # type: git.Repo
    if os.path.exists(repo_dir) and os.path.isdir(repo_dir):
        print("Repository %s already exists in %s" % (repo_uri, repo_dir),
              file=sys.stderr)
        repo = git.Repo(repo_dir)
    else:
        print("Cloning repository %s in %s" % (repo_uri, repo_dir),
              file=sys.stderr)
        repo = git.Repo.init(repo_dir)
        repo.create_remote('origin', repo_uri)

    repo.remotes.origin.fetch(branch, progress=MyProgressPrinter(), tags=True)

    head = None
    for reference in references:
        ref = reference['ref']
        if ref == "refs/heads/{branch}".format(branch=branch) or \
                ref == "refs/tags/{branch}".format(branch=branch):
            head = repo.commit(reference['commit'])
            break
    if head is None:
        print(
            "SQS passed a commit id which seems to not be valid anymore. "
            "Unless a rebase/forced push/repo manipulation happened, this "
            "seems a bug of the resource and should be investigated.",
            file=sys.stderr)
        raise RuntimeError("couldn't find commit %s" % reference['commit'])

    if debug:
        print("Checked out commit for branch is %s",
              head.hexsha,
              file=sys.stderr)

    repo.head.reset(head)
    # initialised to branch's HEAD, and updated by a valid 'ref' later
    last_version = head  # type: git.Commit

    # ref is the previous version's commit-id string, or None
    ref = version.get('ref')
    try:
        last_version = repo.commit(ref)
    except git.BadName:
        # ref version is not/anymore a valid commit-id, this means
        # last_version is HEAD, as the resource specs say
        print("version.ref %s is not valid anymore, using branch HEAD instead "
              "(%s)" % last_version.hexsha,
              file=sys.stderr)

    # Obtain the commit ids between the current head and the last known version
    # (commit)
    c = head  # type: git.Commit
    parents = []  # type: List[git.Commit]

    # true if filename matches any of the glob patterns
    def multiglob(filename: str, patterns: Iterable[str]) -> bool:
        return any(
            fnmatch.fnmatch(filename, pattern) for pattern in patterns)

    while c.parents:

        is_included = True
        is_ignored = False
        if paths:
            is_included = any(
                [multiglob(path.b_path, paths) for path in c.diff()])
        if ignored_paths:
            is_ignored = any(
                [multiglob(path.b_path, ignored_paths) for path in c.diff()])

        if is_included and not is_ignored:
            parents.append(c)

        if c == last_version:
            break

        # in a multi-parent commit, the first one is always the 'current
        # branch' one, and the others are the merged branches'
        c = c.parents[0]

    parents.reverse()

    return [commit.hexsha for commit in parents]
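A minimal invocation sketch follows. All values are hypothetical; note the function above reads 'version' from under 'source', and MyProgressPrinter must be defined elsewhere in the resource:

# Hypothetical Concourse-style check payload; key names mirror what
# git_check reads above.
payload = {
    'source': {
        'uri': 'https://git-codecommit.us-east-1.amazonaws.com/v1/repos/demo',
        'branch': 'master',
        'paths': ['src/'],
        # the last version seen, or absent on the very first check
        'version': {'ref': '0123abc'},
    },
}
new_versions = git_check(payload, repo_dir='/tmp/demo-repo-cache')
print(new_versions)  # commit shas, oldest first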
Example #2
import datetime
import git
import re
import sys
import yaml

if len(sys.argv) < 4:
    print("ERROR! invalid number of parameters")
    print()
    print("Usage:")
    print("  ./scripts/build_spec.py <spec.in> <version> <revision>")
    print()
    sys.exit(1)

repo = git.Repo(".")
neo_revision = repo.head.commit

c = repo.commit(neo_revision)
cd = datetime.datetime.fromtimestamp(c.committed_date)

pkg_version = "%s.%02d.%s" % (str(
    cd.isocalendar()[0])[-2:], cd.isocalendar()[1], sys.argv[2])

with open(sys.argv[1], 'r') as f:
    for line in f.readlines():
        if re.match(".*__NEO_COMMIT_ID__$", line.strip()) is not None:
            print("%s" % (line.rstrip().replace("__NEO_COMMIT_ID__",
                                                "%s" % neo_revision)))
            continue
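To make the version scheme concrete: the first component is the last two digits of the ISO year of the HEAD commit date, the second is its zero-padded ISO week number, and the third is the <version> argument (sys.argv[2]). A quick check of the formula with an assumed commit date:

import datetime

cd = datetime.datetime(2016, 8, 24)  # a Wednesday in ISO week 34
print(cd.isocalendar())              # (2016, 34, 3)
print("%s.%02d.%s" % (str(cd.isocalendar()[0])[-2:],
                      cd.isocalendar()[1], "7"))  # prints 16.34.7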
Example #3
def add_tag(repo_path, version):
    import git
    repo = git.Repo(repo_path)
    repo.index.add(['config.json'])
    repo.index.commit('bumped to version {}'.format(version))
    repo.create_tag('v' + version, repo.head)
Example #4
def get_repo_commit(repo_path):
    repo = git.Repo(repo_path, search_parent_directories=True)
    sha = repo.head.object.hexsha
    short_sha = repo.git.rev_parse(sha, short=4)
    return short_sha
Example #5
def get_pandas_branch(slndir):
    '''
    Get the branch name of the current code repository.
    '''
    repo = git.Repo(slndir)
    return str(repo.active_branch)
Example #6
def process_changes(repopath, revision1, revision2='HEAD', report_all=False, report_ver=False):
    repo = git.Repo(repopath)
    assert not repo.bare
    commit = repo.commit(revision1)
    diff = commit.diff(revision2)

    changes = []
    for d in diff.iter_change_type('M'):
        path = os.path.dirname(d.a_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.a_blob.path)
            if filename == 'latest':
                changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all, report_ver))
            elif filename.startswith('latest.'):
                chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                changes.append(chg)
        elif path.startswith('images/'):
            filename = os.path.basename(d.a_blob.path)
            if filename in img_monitor_files:
                if filename == 'files-in-image.txt':
                    alines = d.a_blob.data_stream.read().splitlines()
                    blines = d.b_blob.data_stream.read().splitlines()
                    filechanges = compare_file_lists(alines,blines)
                    if filechanges:
                        chg = ChangeRecord(path, filename, None, None, True)
                        chg.filechanges = filechanges
                        changes.append(chg)
                elif filename == 'installed-package-names.txt':
                    alines = d.a_blob.data_stream.read().splitlines()
                    blines = d.b_blob.data_stream.read().splitlines()
                    filechanges = compare_lists(alines,blines)
                    if filechanges:
                        chg = ChangeRecord(path, filename, None, None, True)
                        chg.filechanges = filechanges
                        changes.append(chg)
                else:
                    chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                    changes.append(chg)
            elif filename == 'image-info.txt':
                changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all, report_ver))
            elif '/image-files/' in path:
                chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                changes.append(chg)

    # Look for added preinst/postinst/prerm/postrm
    # (without reporting newly added recipes)
    addedpkgs = []
    addedchanges = []
    for d in diff.iter_change_type('A'):
        path = os.path.dirname(d.b_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.b_blob.path)
            if filename == 'latest':
                addedpkgs.append(path)
            elif filename.startswith('latest.'):
                chg = ChangeRecord(path, filename[7:], '', d.b_blob.data_stream.read(), True)
                addedchanges.append(chg)
    for chg in addedchanges:
        found = False
        for pkg in addedpkgs:
            if chg.path.startswith(pkg):
                found = True
                break
        if not found:
            changes.append(chg)

    # Look for cleared preinst/postinst/prerm/postrm
    for d in diff.iter_change_type('D'):
        path = os.path.dirname(d.a_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.a_blob.path)
            if filename != 'latest' and filename.startswith('latest.'):
                chg = ChangeRecord(path, filename[7:], d.a_blob.data_stream.read(), '', True)
                changes.append(chg)

    # Link related changes
    for chg in changes:
        if chg.monitored:
            for chg2 in changes:
                # (Check dirname in the case of fields from recipe info files)
                if chg.path == chg2.path or os.path.dirname(chg.path) == chg2.path:
                    if chg2.fieldname in related_fields.get(chg.fieldname, []):
                        chg.related.append(chg2)
                    elif chg.path == chg2.path and chg.path.startswith('packages/') and chg2.fieldname in ['PE', 'PV', 'PR']:
                        chg.related.append(chg2)

    if report_all:
        return changes
    else:
        return [chg for chg in changes if chg.monitored]
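A minimal usage sketch, assuming a Yocto buildhistory-style repository with the packages/ and images/ trees the function expects; ChangeRecord and the compare_* helpers come from the surrounding module:

# Hypothetical call: report monitored changes between the two most
# recent buildhistory commits.
for change in process_changes('/path/to/buildhistory', 'HEAD~1'):
    print(change.path, change.fieldname)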
Example #7
# reshape to be [samples][pixels][width][height]
X_train = X_train.reshape(X_train.shape[0], 1, 28, 28).astype('float32')
X_test = X_test.reshape(X_test.shape[0], 1, 28, 28).astype('float32')
# normalize inputs from 0-255 to 0-1
X_train = X_train / 255
X_test = X_test / 255
# one hot encode outputs
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]

# build the model
model = NeuralNet(num_classes)
# Compile the model
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
# Save logs in this folder
repo = git.Repo(search_parent_directories=True)
sha = repo.head.object.hexsha
logdir = "logs/" + sha
# Fit the model
model.fit(X_train,
          y_train,
          validation_data=(X_test, y_test),
          epochs=10,
          batch_size=200,
          callbacks=[TensorBoard(log_dir=logdir)])
# Final evaluation of the model
scores = model.evaluate(X_test, y_test, verbose=0)
print("Large CNN Error: %.2f%%" % (100 - scores[1] * 100))
Example #8
    - each commit (<repo>/<branch>/<commit>.html)
"""
import sys
import os
import pathlib

import git
from lys import L, raw


REPO_DIR = sys.argv[1]
OUTPUT_DIR = sys.argv[2]

template = open('template.html').read()

repo = git.Repo(REPO_DIR)

# <repo>/index.html listing branches
pathlib.Path(OUTPUT_DIR).mkdir(parents=True, exist_ok=True)
with open(f'{OUTPUT_DIR}/index.html', 'w') as f:
    html = L.ul / (
        (
            L.li / L.a(href=branch.name) / branch.name
        ) for branch in repo.branches
    )
    f.write(template.replace('{{CONTENT}}', str(html)).replace('template_files/', '../template_files/'))

for branch in repo.branches:

    # <repo>/<branch>/index.html
    pathlib.Path(f'{OUTPUT_DIR}/{branch.name}').mkdir(parents=True, exist_ok=True)
Example #9
    def worker(self):
        print("-" * 20 + "git checkout " + "-" * 20)
        print("branch:" + self.git_branch)
        print("hash:" + str(self.git_hash))

        progress_delegate = git_work_progress(self)

        try:
            repo = git.Repo(self.repo_path)
            if self.GIT_SSH_COMMAND is not None:
                # update_environment applies persistently; a bare call to
                # custom_environment() returns an unused context manager
                repo.git.update_environment(
                    GIT_SSH_COMMAND=self.GIT_SSH_COMMAND)
            print('Now repo is on branch:', repo.active_branch.name)

            if self.git_branch in repo.branches:
                # make sure we are on the right branch
                if repo.active_branch.name != self.git_branch:
                    self.console_output('checkout branch %s...' %
                                        self.git_branch)
                    repo.branches[self.git_branch].checkout()
                # pull
                self.console_output('pull...')
                repo.remotes['origin'].pull(progress=progress_delegate)
            else:
                # if the target branch doesn't exist locally, check it out first
                self.console_output(
                    'branch %s does not exist locally. Updating remote '
                    'branches...' % self.git_branch)
                origin = repo.remotes['origin']
                origin.update()
                self.console_output('checkout branch %s...' % self.git_branch)
                origin.refs[self.git_branch].checkout(b=self.git_branch)

            if self.git_hash is not None:
                # TODO: check whether the commit for this hash already exists
                # locally; if so, skip the pull step
                self.console_output('git checkout %s...' % self.git_hash)
                git_exec = repo.git
                git_exec.checkout(self.git_hash)

            if self.command is not None:
                self.console_output('Exec command:%s' % self.command)
                p_command = subprocess.Popen(self.command,
                                             shell=True,
                                             bufsize=1024000,
                                             cwd=self.repo_path,
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.STDOUT)
                p_returncode = None

                while True:
                    p_output = self.non_block_read(p_command.stdout)
                    if len(p_output) != 0:
                        self.console_output(p_output)

                    p_returncode = p_command.poll()
                    if p_returncode is not None:
                        break

                    time.sleep(0.01)

                if p_returncode != 0:
                    raise Exception("exce command [%s] return code !=0" %
                                    self.command)

            self.finish_ret = 'success'
        except Exception as e:
            print('Exception:', e)
            self.err_msg = str(e)
            self.finish_ret = 'failed'

        print("-" * 20 + "git checkout finish:" + self.finish_ret + "-" * 20)
Example #10
def get_current_version():
    """Get the current version using git describe to check the latest tag."""
    with git.Repo() as repo:
        return repo.git.describe("--tags")
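git describe --tags returns the bare tag name when HEAD is tagged, or a string like 'v1.2.3-4-gabc1234' (tag, commits since, abbreviated sha) otherwise. A hedged sketch of trimming that output back to the tag:

def get_base_tag():
    # strip the '-<count>-g<sha>' suffix describe appends when HEAD is
    # ahead of the latest tag (sketch; assumes that suffix shape)
    described = get_current_version()   # e.g. 'v1.2.3-4-gabc1234'
    return described.rsplit('-', 2)[0]  # 'v1.2.3'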
Example #11
import time
import git
import config

if __name__ == '__main__':
    # LOOP FOR CHECKING IF GIT REPO HAS CHANGED
    print(str(time.ctime()) + ": Checking for updates")

    repo = git.Repo(config.PATH_ABSOLUT)
    current_hash = repo.head.object.hexsha
    print(current_hash)

    try:
        o = repo.remotes.origin
        o.pull()
        pull_hash = repo.head.object.hexsha
    except Exception as e:
        print("warning: unable to check repo, is internet connected?")
        # print(e)
        time.sleep(10)
        pull_hash = current_hash
    if current_hash != pull_hash:
        print("repo updated")
    else:
        print("repo is current")
Example #12
def get_latest_tag(git_folder):
    repo = git.Repo(git_folder, search_parent_directories=True)
    tags = sorted(repo.tags, key=lambda t: t.commit.committed_datetime)
    return tags[-1]
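Usage sketch: the returned TagReference exposes both the tag name and the tagged commit.

tag = get_latest_tag('.')
print(tag.name, tag.commit.hexsha)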
Example #13
def get_head_sha(git_folder):
    repo = git.Repo(git_folder, search_parent_directories=True)
    return repo.head.object.hexsha
Example #14
def setup_git_repo(path: str):
    repo = git.Repo(path)
    repo.git.checkout(BRANCH)
    return repo
Example #15
async def updater(message):
    try:
        repo = git.Repo()
    except git.exc.InvalidGitRepositoryError as e:
        repo = git.Repo.init()
        origin = repo.create_remote(REPO_REMOTE_NAME, OFFICIAL_UPSTREAM_REPO)
        origin.fetch()
        repo.create_head(IFFUCI_ACTIVE_BRANCH_NAME, origin.refs.master)
        repo.heads.master.checkout(True)

    active_branch_name = repo.active_branch.name
    if active_branch_name != IFFUCI_ACTIVE_BRANCH_NAME:
        await message.edit(IS_SELECTED_DIFFERENT_BRANCH.format(
            branch_name=active_branch_name
        ))
        return False

    try:
        repo.create_remote(REPO_REMOTE_NAME, OFFICIAL_UPSTREAM_REPO)
    except Exception as e:
        print(e)
        pass

    temp_upstream_remote = repo.remote(REPO_REMOTE_NAME)
    temp_upstream_remote.fetch(active_branch_name)

    changelog = generate_change_log(
        repo,
        DIFF_MARKER.format(
            remote_name=REPO_REMOTE_NAME,
            branch_name=active_branch_name
        )
    )

    if not changelog:
        await message.edit("`Updating...`")
        await asyncio.sleep(8)
 
    message_one = NEW_BOT_UP_DATE_FOUND.format(
        branch_name=active_branch_name,
        changelog=changelog
    )
    message_two = NEW_UP_DATE_FOUND.format(
        branch_name=active_branch_name
    )

    if len(message_one) > 4095:
        with open("change.log", "w+", encoding="utf8") as out_file:
            out_file.write(str(message_one))
        await tgbot.send_message(
            message.chat_id,
            document="change.log",
            caption=message_two
        )
        os.remove("change.log")
    else:
        await message.edit(message_one)

    temp_upstream_remote.fetch(active_branch_name)
    repo.git.reset("--hard", "FETCH_HEAD")

    if Var.HEROKU_API_KEY is not None:
        import heroku3
        heroku = heroku3.from_key(Var.HEROKU_API_KEY)
        heroku_applications = heroku.apps()
        if len(heroku_applications) >= 1:
            if Var.HEROKU_APP_NAME is not None:
                heroku_app = None
                for i in heroku_applications:
                    if i.name == Var.HEROKU_APP_NAME:
                        heroku_app = i
                if heroku_app is None:
                    await message.edit("Invalid APP Name. Please set the name of your bot in heroku in the var `HEROKU_APP_NAME.`")
                    return
                heroku_git_url = heroku_app.git_url.replace(
                    "https://",
                    "https://*****:*****@"
                )
                if "heroku" in repo.remotes:
                    remote = repo.remote("heroku")
                    remote.set_url(heroku_git_url)
                else:
                    remote = repo.create_remote("heroku", heroku_git_url)
                asyncio.get_event_loop().create_task(deploy_start(tgbot, message, HEROKU_GIT_REF_SPEC, remote))

            else:
                await message.edit("Please create the var `HEROKU_APP_NAME` as the key and the name of your bot in heroku as your value.")
                return
        else:
            await message.edit(NO_HEROKU_APP_CFGD)
    else:
        await message.edit("No heroku api key found in `HEROKU_API_KEY` var")
Example #16
def _get_version_local_git_repo(local_git_repo):
    repo = git.Repo(local_git_repo, search_parent_directories=True)
    return repo.git.rev_parse("HEAD")
Example #17
def main():
    check_updates(git.Repo("."), [line.split() for line in sys.stdin])
Example #18
def paste_edit(request, pk, paste_set, private_key=None):
    requested_commit = request.GET.get('commit')

    # You can technically modify anything in history and update it
    if requested_commit is None:
        commit = paste_set.commit_set.latest('id')
    else:
        commit = get_object_or_404(Commit,
                                   parent_set=paste_set,
                                   commit=requested_commit)

    previous_files = []
    for f in commit.paste_set.all():
        previous_files.append(os.path.basename(f.absolute_path))

    # Populate our initial data
    initial_data = []
    for paste in commit.paste_set.all():
        initial_data.append({
            'filename': paste.filename,
            'paste': paste.paste,
            'language': paste.language,
        })
    initial_set_meta = {
        'private': paste_set.private,
        'expires': paste_set.expires or "never",
        'anyone_can_edit': paste_set.anyone_can_edit
    }

    #TODO: turn this into a template tag and allow template to do conversion
    original_expires_time = paste_set.expires
    expires_time = None
    if original_expires_time:
        if timezone.is_naive(original_expires_time):
            original_expires_time = original_expires_time.replace(
                tzinfo=timezone.utc)
        expires_time = original_expires_time.astimezone(
            timezone.get_current_timezone())

    if request.method != 'POST':
        set_form = None
        if request.user == paste_set.owner:
            set_form_initial = {'description': paste_set.description}
            set_form = SetForm(initial=set_form_initial)
        return render_to_response(
            'paste.html', {
                'forms': PasteSetEdit(initial=initial_data),
                'set_form': set_form,
                'commit_meta_form': CommitMetaForm(),
                'set_meta_form': SetMetaForm(initial=initial_set_meta),
                'expires_time': expires_time,
                'editing': True,
            }, RequestContext(request))

    set_form = None
    set_meta_form = None
    forms = PasteSetEdit(request.POST, initial=initial_data)
    commit_meta_form = CommitMetaForm(request.POST)
    form_list = [forms, commit_meta_form]
    if request.user == paste_set.owner:
        set_form = SetForm(request.POST)
        set_meta_form = SetMetaForm(request.POST)
        form_list += [set_form, set_meta_form]

    if not all(map(lambda x: x.is_valid(), form_list)):
        return render_to_response(
            'paste.html', {
                'forms': forms,
                'set_form': set_form,
                'commit_meta_form': commit_meta_form,
                'set_meta_form': set_meta_form,
                'expires_time': expires_time,
                'editing': True,
            }, RequestContext(request))

    # Update the repo
    repo_dir = paste_set.repo
    repo = git.Repo(repo_dir)
    index = repo.index

    anonymous = commit_meta_form.cleaned_data['anonymous']

    owner = None
    if request.user.is_authenticated() and not anonymous:
        owner = request.user

    # Yes, this is horrible. I know. But there is a bug with Python Git.
    # See: https://github.com/gitpython-developers/GitPython/issues/39
    os.environ['USER'] = "******"
    if owner:
        os.environ['USER'] = owner.username

    if set_form:
        fdata = set_form.cleaned_data
        paste_set.description = fdata['description']

    if set_meta_form:
        fdata = set_meta_form.cleaned_data
        paste_set.private = fdata.get('private')
        paste_set.anyone_can_edit = fdata.get('anyone_can_edit')

    paste_set.save()

    commit = Commit.objects.create(views=0,
                                   parent_set=paste_set,
                                   commit='',
                                   owner=owner)

    # We enumerate over the forms so we can have a way to reference
    # the line numbers in a unique way relevant to the pastes.
    form_files = []
    priority_filename = os.sep.join([repo_dir, 'priority.txt'])
    with codecs.open(priority_filename, 'w', "utf-8-sig") as priority_file:
        for form_index, form in enumerate(forms):
            filename, priority = process_pasted_file(form_index, form,
                                                     repo_dir, index, commit,
                                                     True)
            form_files.append(filename)
            priority_file.write('%s: %s\n' % (filename, priority))

    # Create the commit from the index
    intersected = set(form_files).intersection(previous_files)
    removed_files = list(set(previous_files) - intersected)
    for f in removed_files:
        index.remove([os.sep.join([repo_dir, f])])
    index.add([priority_filename])
    new_commit = index.commit('Modified.')
    commit.commit = new_commit
    commit.diff = _git_diff(new_commit, repo)
    commit.save()

    if not paste_set.private:
        return redirect('paste_view', pk=paste_set.pk)
    else:
        return redirect('paste_view',
                        pk=paste_set.pk,
                        private_key=paste_set.private_key)
Example #19
from threading import Thread
import git
from config.git import Config as Cfg
import ix.const as const

adf_repo = git.Repo(Cfg.ADF_REPO)
bx_repo = git.Repo(Cfg.BX_REPO)
local_repo = git.Repo(Cfg.LOCAL_REPO)
remote_name = "origin/"


def _rebase_repo(repo, src_branch, dst_branch):
  repo.git.checkout(dst_branch)
  repo.git.rebase(remote_name + src_branch)
  repo.remote().push(force=True)


def _merge_repo(repo, src_branch, dst_branch, push_upstream):
  repo.git.checkout(dst_branch)
  repo.git.merge(remote_name + src_branch)
  if push_upstream:
    repo.remote().push()


def rebase_changes_all():
  """Rebase all changes according to current configuration"""
  for dst, src in const.GIT_TRACKING.items():
    rebase_changes(src, dst)


def rebase_changes(src_branch, dst_branch):
Example #20
def get_repo(self) -> git.Repo:
    return git.Repo(self.path)
Example #21
    def check_modules(self, download=None, anytime=None):
        """
        Check if modules need updating and how many commits they are behind.

        If the module isn't downloaded already, it will download regardless of the download argument.

        If download is True, then they will also be downloaded or updated.

        :param download: If true, will download the modules
        :param anytime: If true, check all modules, not just modules that weren't checked recently.
        """
        if download is None:
            download = True
        if anytime is None:
            anytime = True

        modules = yield self._LocalDB.database.db_select("modules_view", where=["status = ?", 1])
        last_check_time = int(time()) - 3600 * 2

        for module in modules:
            if module["id"] not in self.modules:
                self.modules[module["id"]] = {
                    "installed_commit": None,
                    "latest_commit": None,
                    "commits_behind": None,
                    "last_check_at": module["last_check_at"],
                }
            if anytime is False:
                if module["last_check_at"] < last_check_time:
                    continue

        if anytime is False:  # remove any modules that don't need to be checked yet.
            modules = [x for x in modules if not x["last_check_at"] < last_check_time]

        if len(modules) == 0:
            logger.info("No modules need to be downloaded or updated.", count=len(modules))
            return None

        logger.debug("Checking {count} modules for downloads and updates.", count=len(modules))
        frozen_modules = []
        for module in modules:
            machine_label = module["machine_label"].lower()
            module_id = module["id"]

            if module["install_branch"] == "system":
                yield self.touch_database(module, "system", "system")
                continue

            module_path = self.download_path + f"{machine_label}/"
            if os.path.exists(module_path + ".freeze") or module["install_branch"] == "local":
                frozen_modules.append(machine_label)
                continue

            if not os.path.exists(module_path):
                try:
                    yield self.git_clone(self.download_path, machine_label, module["git_link"])
                except git.GitCommandError as e:
                    logger.warn("Unable to clone module '{label}', reason: {e}", label=machine_label, e=e)
                    continue

            repo = git.Repo(module_path)
            self.modules[module_id]["installed_commit"] = repo.head.object.hexsha

            local_branches = self.local_branches(repo)

            try:
                remote_branches = yield self.git_fetch(repo)
            except git.GitCommandError as e:
                logger.warn("Unable to git fetch for module '{label}', reason: {e}",
                            label=machine_label, e=e)
                continue

            # select which module branch to use.  Use api_MODULE_API_VERSION if install_version != "develop"
            if module["install_branch"] in ("dev", "develop", "development"):
                install_branch = "master"
            else:
                if self.current_api in local_branches:
                    install_branch = self.current_api
                else:
                    if self.current_api in remote_branches:
                        install_branch = self.current_api
                    else:
                        install_branch = "master"

            self.modules[module_id]["last_check_at"] = int(time())

            logger.info("DL Module ({label}): Install branch: {branch}", label=machine_label, branch=install_branch)
            try:
                yield self.git_checkout(repo, install_branch)
            except git.GitCommandError as e:
                logger.warn("Unable to checkout branch for module '{label}', reason: {e}", label=machine_label, e=e)
                return

            commits_behind = len(list(repo.iter_commits(f"{install_branch}..origin/{install_branch}")))
            logger.warn("Module '{label}' is behind master: {commits_behind}",
                        label=module["label"], commits_behind=commits_behind)
            self.modules[module["id"]]['commits_behind'] = commits_behind

            repo = git.Repo(module_path)  # some sort of bug after all the above processes.

            if commits_behind > 0 and download is True:
                try:
                    yield self.git_pull(repo, install_branch)
                except git.GitCommandError as e:
                    logger.warn("Unable to pull branch for module '{label}', reason: {e}", label=machine_label, e=e)
                    return

            self.modules[module["id"]]['current_commit'] = repo.head.object.hexsha

            if module["require_approved"] == 0:
                continue
            else:
                installed_hash = yield self.find_approved_commit(module_id, install_branch, repo)
                if installed_hash is False:
                    self._Modules.disabled_modules[module_id] = {"reason": "No approved commit found."}
                    logger.warn("Disabled module '{label}'. Reason: No approved commit found.", label=machine_label)
            yield self.update_database(module, install_branch, repo.head.object.hexsha)

        if len(frozen_modules):
            logger.warn("Skipping download of frozen (.freeze file) modules: {modules}",
                        modules=", ".join(frozen_modules))
Example #22
def commit(self):
    if self.changed:
        repo = git.Repo(os.path.join(".tina", self.local_dir))
        commit_and_push(repo, self.name, self.new_tag)
Example #23
def seekrets(repo_url, search_list=None, search_common=True, verbose=False):
    """Search for a list of strings or secret oriented regex in a repo

    Example output:

    [
        {
            "commit_sha": "b788a889e484d57451944f93e2b65ed425d6bf65",
            "commit_date": "Wed Aug 24 11:11:56 2016",
            "committer_email": "*****@*****.**",
            "committer_username": "******",
            "branch": "slack",
            "repo": "ghost",
            "owner": "nir0s",
            "risks": [
                { "blob_url": "https://github.com/nir0s/ghost/blob/.../ghost.py", "string": "AKI..." },
                ...
            ]
        },
        ...
    ]
    """
    results = []

    meta = giturlparse.parse(repo_url)
    clone = _get_repo_path(repo_url, meta)
    cloned_now = False
    if not os.path.isdir(clone):
        _clone(repo_url, clone)
        cloned_now = True
    repo = git.Repo(clone)
    reduction_list = []
    # Move to _seek_branches()
    for branch in _get_branches(repo):
        if not branch.name == 'origin/master':
            continue
        if not cloned_now:
            _pull(repo, branch)
        branch_name = _get_branch_name(branch)
        _checkout(repo, branch, branch_name)
        commits = _get_commits(repo)
        commits = _reduce_checked(commits, reduction_list)
        # TODO: Move to _seek_commits()
        previous_commit = None
        for commit in commits:
            reduction_list.append(commit)
            if previous_commit:
                diff = previous_commit.diff(commit, create_patch=True)
                record = {
                    'commit_sha': commit.hexsha,
                    'commit_date': commit.committed_datetime.strftime('%Y-%m-%dT%H:%M:%S'),
                    'committer_email': commit.committer.email,
                    'committer_username': commit.committer.name,
                    'commit_msg': commit.message,
                    'branch': branch.name,
                    'repo': meta.name,
                    'org': meta.owner,
                    'risks': [],
                }
                if search_common:
                    populated_record = _search(
                        meta, commit, diff, record)
                    if populated_record.get('risks'):
                        results.append(populated_record)
            previous_commit = commit
    print(json.dumps(results, indent=4))
    return results
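Usage sketch with hypothetical arguments; search_list is presumably a set of extra patterns on top of the built-in secret-oriented regexes:

risky = seekrets('https://github.com/nir0s/ghost',
                 search_list=[r'AKIA[0-9A-Z]{16}'],  # AWS-key-style pattern
                 verbose=True)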
Example #24
def get_git_root():
    git_repo = git.Repo(os.getcwd(), search_parent_directories=True)
    git_root = git_repo.git.rev_parse('--show-toplevel')
    return git_root
Example #25
def get_pandas_hash(slndir):
    '''
    Get the HEAD commit hash of the current code repository.
    '''
    repo = git.Repo(slndir)
    return repo.head.object.hexsha
Example #26
for f in files:
    src = path + f
    dst = moveto + f
    shutil.move(src, dst)
os.rmdir('{{cookiecutter.role_name}}')
shutil.rmtree(os.getcwd() + '/.git')

#create a repo using name/pass or token
#g = Github(os.environ['gituser'], os.environ['gitpass'])
g = Github(os.environ['gittoken'])
u = g.get_user()
u.create_repo('{{cookiecutter.repo_name}}')

# create working directory repo
git.Repo.init(os.getcwd())

# get working directory repo
repo = git.Repo(os.getcwd())
# add files, remote, commit and push
repo.git.add('.')
repo.index.commit("wei commit")
remote = repo.create_remote('origin',
                            url='https://' + os.environ['gituser'] + ':' +
                            os.environ['gitpass'] + '@github.com/' +
                            '{{cookiecutter.github_user}}' + '/' +
                            '{{cookiecutter.repo_name}}' + '.git')
# push the local master branch to the remote master branch
remote.push(refspec='{}:{}'.format('master', 'master'))

# TODO
# do not store the environment variables in the file
Example #27
def main(env=os.environ):
    desc='''\
Git done is a tool that performs commits using a TODO file to get the commit 
message.
Any line in your TODO file marked as done will be used as part of the commit 
message when you execute git done.

Your todo file should have one task per line. Todo tasks should start with
'TODO', while done tasks should start with 'DONE'.

Example TODO file:
DONE Use LinkedList instead of List
DONE Login functionality ## I am a comment
TODO Refactor those ugly classes
TODO Fix bug #3

Type 'git done' whenever you want to commit. If the TODO has got new lines
starting with 'DONE', those tasks will be the commit message. If there is
no task marked with 'DONE', 'git done' behaves just like 'git commit -a'.
'''
    parser = OptionParser(
        usage='%prog [options]',
        description=desc,
        formatter=PlainHelpFormatter(),
        version="%prog "+GITDONEVERSION)
    parser.add_option('-p', '--preview',
        action='store_true', dest='preview', default=False,
        help=u"shows what message would be committed, but does not commit")
    parser.add_option('-c', '--comments',
        action='store_true', dest='comments', default=False,
        help=u"includes comments (\'## ...\') in the commit message")

    # get options from command line
    (options, args) = parser.parse_args()

    # build repo reference
    repo = git.Repo(os.path.abspath(os.curdir))
    rootfolder = repo.working_tree_dir
    try:
        todofilename = repo.config_reader().get_value('gitdone', 'todofile')
    except Exception:
        todofilename = "TODO"

    # quit if todo file does not exist
    if not os.path.exists(rootfolder+'/'+todofilename):
        print("ERROR: file "+ todofilename +" not found")
        sys.exit(1)

    ## discover what files were modified
    files_modified = []
    for line in repo.git.execute(['git','diff','--name-status']).splitlines():
        if line[0] == 'M':
            files_modified.append(line.split('\t')[1])

    ## quit if todo file was not modified
    if todofilename not in files_modified:
        # preview and exit
        if options.preview:
            print('(Nothing from ' + todofilename + ')')
            return
        quit_and_normal_commit()

    ## get all done lines from the diff
    diffoutput = repo.git.execute(['git', 'diff', '-U999999999', '--',
                                   os.path.join(rootfolder, todofilename)])
    log = []
    sprint_has_to_do = False
    sprint_has_new_done = False
    tags = []
    for line in diffoutput.splitlines()[5:]:
        if not options.comments:
            line = ignore_comments(line)
        # Detected TODO
        if re.compile(r'\+{0,1}[ \t]{0,1}\*{0,}[ \t]*TODO.+').match(line):
            sprint_has_to_do = True
        # Detected DONE
        elif re.compile(r'\+[ \t]{0,1}\*{0,}[ \t]*DONE.+').match(line):
            sprint_has_new_done = True
            log.append(line[1:].lstrip().replace('**', '').replace('DONE','').lstrip().expandtabs().rstrip())
        # Detected tag
        elif re.compile(r'\+{0,1}[ \t]*\>\>\>.+').match(line):
            if sprint_has_new_done and not sprint_has_to_do:
                tags.append(line[1:].lstrip().replace('>>>','').strip())
            # Reset sprint
            sprint_has_new_done = False
            sprint_has_to_do = False

    # show done lines
    if len(log) > 0:
        print("Done on this commit:\n")
    for line in log:
        print(colored('    '+line, 'green'))
    if len(log) > 0:
        print('') # newline

    # show tags to apply
    for tag in tags:
        print("This commit is tagged as "+colored(tag, 'yellow'))

    # preview and exit
    if options.preview:
        if not log:
            print('(Nothing from ' + todofilename + ')')
        return

    # do the final commit, if there was a message extracted from the todo file
    log = normalize_log(log)
    if not log:
        quit_and_normal_commit()
    else:
        repo.git.execute(['git','commit','--all','--message',log])
        for tag in tags:
            splitted = tag.split()
            message = ''
            if len(splitted)>=3 and splitted[1]=='-m':
                message = ' '.join(splitted[2:])
                repo.git.execute(['git','tag','-a',splitted[0],'-m',message])
            else:
                repo.git.execute(['git','tag',splitted[0].lstrip().rstrip()])
    return
Example #28
def run_experiment(
        method_call,
        mode='local',
        exp_prefix='default',
        seed=None,
        variant=None,
        exp_id=0,
        prepend_date_to_exp_prefix=True,
        gpu_mode="cpu",
        gpu_id=0,
        snapshot_mode='last',
        snapshot_gap=1,
        base_log_dir=None,
        local_input_dir_to_mount_point_dict=None,  # TODO(vitchyr): test this
        # local settings
        skip_wait=False,
        # ec2 settings
        sync_interval=180,
        region='us-east-1',
        instance_type=None,
        spot_price=None,
        verbose=False,
        num_exps_per_instance=1,
        # sss settings
        time_in_mins=None,
        # ssh settings
        ssh_host=None,
        # gcp
        gcp_kwargs=None,
        python_cmd="python"
):
    """
    Usage:
    ```
    def foo(variant):
        x = variant['x']
        y = variant['y']
        logger.log("sum", x+y)
    variant = {
        'x': 4,
        'y': 3,
    }
    run_experiment(foo, variant, exp_prefix="my-experiment")
    ```
    Results are saved to
    `base_log_dir/<date>-my-experiment/<date>-my-experiment-<unique-id>`
    By default, the base_log_dir is determined by
    `config.LOCAL_LOG_DIR/`
    :param method_call: a function that takes in a dictionary as argument
    :param mode: A string:
     - 'local'
     - 'local_docker'
     - 'ec2'
     - 'here_no_doodad': Run without doodad call
    :param exp_prefix: name of experiment
    :param seed: Seed for this specific trial.
    :param variant: Dictionary
    :param exp_id: One experiment = one variant setting + multiple seeds
    :param prepend_date_to_exp_prefix: If False, do not prepend the date to
    the experiment directory.
    :param gpu_mode:
    :param snapshot_mode: See railrl.core.logging.logger
    :param snapshot_gap: See railrl.core.logging.logger
    :param base_log_dir: Will override the default base log directory.
    :param sync_interval: How often to sync s3 data (in seconds).
    :param local_input_dir_to_mount_point_dict: Dictionary for doodad.
    :param ssh_host: the name of the host you want to ssh onto, should correspond to an entry in
    config.py of the following form:
    SSH_HOSTS=dict(
        ssh_host=dict(
            username='******',
            hostname='hostname/ip address',
        )
    )
    - if ssh_host is set to None, you will use ssh_host specified by
    config.SSH_DEFAULT_HOST
    :return:
    """
    try:
        import doodad
        import doodad.mode
        import doodad.ssh
    except ImportError:
        print("Doodad not set up! Running experiment here.")
        mode = 'here_no_doodad'
    global ec2_okayed
    global gpu_ec2_okayed
    global target_mount
    global first_sss_launch

    """
    Sanitize inputs as needed
    """
    if seed is None:
        seed = random.randint(0, 100000)
    if variant is None:
        variant = {}
    if mode == 'ssh' and base_log_dir is None:
        base_log_dir = config.SSH_LOG_DIR
    if base_log_dir is None:
        if mode == 'sss':
            base_log_dir = config.SSS_LOG_DIR
        else:
            base_log_dir = config.LOCAL_LOG_DIR

    for key, value in ppp.recursive_items(variant):
        # This check isn't really necessary, but it's to prevent myself from
        # forgetting to pass a variant through dot_map_dict_to_nested_dict.
        if "." in key:
            raise Exception(
                "Variants should not have periods in keys. Did you mean to "
                "convert {} into a nested dictionary?".format(key)
            )
    if prepend_date_to_exp_prefix:
        exp_prefix = time.strftime("%m-%d") + "-" + exp_prefix
    variant['seed'] = str(seed)
    variant['exp_id'] = str(exp_id)
    variant['exp_prefix'] = str(exp_prefix)
    variant['instance_type'] = str(instance_type)

    try:
        import git
        doodad_path = osp.abspath(osp.join(
            osp.dirname(doodad.__file__),
            os.pardir
        ))
        dirs = config.CODE_DIRS_TO_MOUNT + [doodad_path]

        git_infos = []
        for directory in dirs:
            # Idk how to query these things, so I'm just doing try-catch
            try:
                repo = git.Repo(directory)
                try:
                    branch_name = repo.active_branch.name
                except TypeError:
                    branch_name = '[DETACHED]'
                git_infos.append(GitInfo(
                    directory=directory,
                    code_diff=repo.git.diff(None),
                    code_diff_staged=repo.git.diff('--staged'),
                    commit_hash=repo.head.commit.hexsha,
                    branch_name=branch_name,
                ))
            except git.exc.InvalidGitRepositoryError:
                pass
    except ImportError:
        git_infos = None
    run_experiment_kwargs = dict(
        exp_prefix=exp_prefix,
        variant=variant,
        exp_id=exp_id,
        seed=seed,
        use_gpu=gpu_mode,
        gpu_id=gpu_id,
        snapshot_mode=snapshot_mode,
        snapshot_gap=snapshot_gap,
        git_infos=git_infos,
        script_name=main.__file__,
    )
    if mode == 'here_no_doodad':
        run_experiment_kwargs['base_log_dir'] = base_log_dir
        return run_experiment_here(
            method_call,
            **run_experiment_kwargs
        )

    """
    Safety Checks
    """

    if mode == 'ec2' or mode == 'gcp':
        if not ec2_okayed and not query_yes_no(
                "{} costs money. Are you sure you want to run?".format(mode)
        ):
            sys.exit(1)
        if not gpu_ec2_okayed and gpu_mode:
            if not query_yes_no(
                    "{} is more expensive with GPUs. Confirm?".format(mode)
            ):
                sys.exit(1)
            gpu_ec2_okayed = True
        ec2_okayed = True

    """
    GPU vs normal configs
    """
    if gpu_mode:
        docker_image = variant["gpu_doodad_docker_image"]
        if instance_type is None:
            instance_type = config.GPU_INSTANCE_TYPE
        # else:
        #     assert instance_type[0] == 'g'
        if spot_price is None:
            spot_price = config.GPU_SPOT_PRICE
    else:
        docker_image = variant["doodad_docker_image"]
        if instance_type is None:
            instance_type = config.INSTANCE_TYPE
        if spot_price is None:
            spot_price = config.SPOT_PRICE
    if mode == 'sss':
        singularity_image = config.SSS_IMAGE
    elif mode in ['local_singularity', 'slurm_singularity']:
        singularity_image = config.SINGULARITY_IMAGE
    else:
        singularity_image = None


    """
    Get the mode
    """
    mode_kwargs = {}
    if gpu_mode and mode == 'ec2':
        image_id = config.REGION_TO_GPU_AWS_IMAGE_ID[region]
        if region == 'us-east-1':
            avail_zone = config.REGION_TO_GPU_AWS_AVAIL_ZONE.get(region, "us-east-1b")
            mode_kwargs['extra_ec2_instance_kwargs'] = dict(
                Placement=dict(
                    AvailabilityZone=avail_zone,
                ),
            )
    else:
        image_id = None
    if hasattr(config, "AWS_S3_PATH"):
        aws_s3_path = config.AWS_S3_PATH
    else:
        aws_s3_path = None

    """
    Create mode
    """
    if mode == 'local':
        dmode = doodad.mode.Local(skip_wait=skip_wait)
    elif mode == 'local_docker':
        dmode = doodad.mode.LocalDocker(
            image=docker_image,
            gpu=gpu_mode,
        )
    elif mode == 'ssh':
        if ssh_host is None:
            ssh_dict = config.SSH_HOSTS[config.SSH_DEFAULT_HOST]
        else:
            ssh_dict = config.SSH_HOSTS[ssh_host]
        credentials = doodad.ssh.credentials.SSHCredentials(
            username=ssh_dict['username'],
            hostname=ssh_dict['hostname'],
            identity_file=config.SSH_PRIVATE_KEY
        )
        dmode = doodad.mode.SSHDocker(
            credentials=credentials,
            image=docker_image,
            gpu=gpu_mode,
        )
    elif mode == 'local_singularity':
        dmode = doodad.mode.LocalSingularity(
            image=singularity_image,
            gpu=gpu_mode,
        )
    elif mode == 'slurm_singularity' or mode == 'sss':
        assert time_in_mins is not None, "Must approximate/set time in minutes"
        if gpu_mode:
            kwargs = config.SLURM_GPU_CONFIG
        else:
            kwargs = config.SLURM_CPU_CONFIG
        if mode == 'slurm_singularity':
            dmode = doodad.mode.SlurmSingularity(
                image=singularity_image,
                gpu=gpu_mode,
                time_in_mins=time_in_mins,
                skip_wait=skip_wait,
                pre_cmd=config.SINGULARITY_PRE_CMDS,
                **kwargs
            )
        else:
            dmode = doodad.mode.ScriptSlurmSingularity(
                image=singularity_image,
                gpu=gpu_mode,
                time_in_mins=time_in_mins,
                skip_wait=skip_wait,
                pre_cmd=config.SSS_PRE_CMDS,
                **kwargs
            )
    elif mode == 'ec2':
        # Do this separately in case someone does not have EC2 configured
        dmode = doodad.mode.EC2AutoconfigDocker(
            image=docker_image,
            image_id=image_id,
            region=region,
            instance_type=instance_type,
            spot_price=spot_price,
            s3_log_prefix=exp_prefix,
            # Ask Vitchyr or Steven for an explanation, but basically we
            # will start just making the sub-directories within railrl rather
            # than relying on doodad to do that.
            s3_log_name="",
            gpu=gpu_mode,
            aws_s3_path=aws_s3_path,
            num_exps=num_exps_per_instance,
            **mode_kwargs
        )
    elif mode == 'gcp':
        image_name = config.GCP_IMAGE_NAME
        if gpu_mode:
            image_name = config.GCP_GPU_IMAGE_NAME

        if gcp_kwargs is None:
            gcp_kwargs = {}
        config_kwargs = {
            **config.GCP_DEFAULT_KWARGS,
            **dict(image_name=image_name),
            **gcp_kwargs
        }
        dmode = doodad.mode.GCPDocker(
            image=docker_image,
            gpu=gpu_mode,
            gcp_bucket_name=config.GCP_BUCKET_NAME,
            gcp_log_prefix=exp_prefix,
            gcp_log_name="",
            **config_kwargs
        )
    else:
        raise NotImplementedError("Mode not supported: {}".format(mode))

    """
    Get the mounts
    """
    mounts = create_mounts(
        base_log_dir=base_log_dir,
        mode=mode,
        sync_interval=sync_interval,
        local_input_dir_to_mount_point_dict=local_input_dir_to_mount_point_dict,
    )

    """
    Get the outputs
    """
    launch_locally = None
    target = config.RUN_DOODAD_EXPERIMENT_SCRIPT_PATH
    if mode == 'ec2':
        # Ignored since I'm setting the snapshot dir directly
        base_log_dir_for_script = None
        run_experiment_kwargs['force_randomize_seed'] = True
        # The snapshot dir needs to be specified for S3 because S3 will
        # automatically create the experiment directory and sub-directory.
        snapshot_dir_for_script = config.OUTPUT_DIR_FOR_DOODAD_TARGET
    elif mode == 'local':
        base_log_dir_for_script = base_log_dir
        # The snapshot dir will be automatically created
        snapshot_dir_for_script = None
    elif mode == 'local_docker':
        base_log_dir_for_script = config.OUTPUT_DIR_FOR_DOODAD_TARGET
        # The snapshot dir will be automatically created
        snapshot_dir_for_script = None
    elif mode == 'ssh':
        base_log_dir_for_script = config.OUTPUT_DIR_FOR_DOODAD_TARGET
        # The snapshot dir will be automatically created
        snapshot_dir_for_script = None
    elif mode in ['local_singularity', 'slurm_singularity', 'sss']:
        base_log_dir_for_script = base_log_dir
        # The snapshot dir will be automatically created
        snapshot_dir_for_script = None
        launch_locally = True
        if mode == 'sss':
            dmode.set_first_time(first_sss_launch)
            first_sss_launch = False
            target = config.SSS_RUN_DOODAD_EXPERIMENT_SCRIPT_PATH
    elif mode == 'here_no_doodad':
        base_log_dir_for_script = base_log_dir
        # The snapshot dir will be automatically created
        snapshot_dir_for_script = None
    elif mode == 'gcp':
        # Ignored since I'm setting the snapshot dir directly
        base_log_dir_for_script = None
        run_experiment_kwargs['force_randomize_seed'] = True
        snapshot_dir_for_script = config.OUTPUT_DIR_FOR_DOODAD_TARGET
    else:
        raise NotImplementedError("Mode not supported: {}".format(mode))
    run_experiment_kwargs['base_log_dir'] = base_log_dir_for_script
    target_mount = doodad.launch_python(
        python_cmd=python_cmd,
        target=target,
        mode=dmode,
        mount_points=mounts,
        args={
            'method_call': method_call,
            'output_dir': snapshot_dir_for_script,
            'run_experiment_kwargs': run_experiment_kwargs,
            'mode': mode,
        },
        use_cloudpickle=True,
        target_mount=target_mount,
        verbose=verbose,
        launch_locally=launch_locally,
    )
Example #29
def validate(repo_path):
    import git
    repo = git.Repo(repo_path)
    if repo.active_branch.name != 'master':
        raise Exception("Current branch is not master in {}".format(
            repo_path))
Example #30
def repo():
    '''Return a git.Repo object for the current directory'''
    return git.Repo('.')
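For instance, grabbing the current HEAD sha through it:

print(repo().head.commit.hexsha)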