def clone(cachedir, url, tag, todir):
    '''
    Fetches tag <tag> from git repository by URL <url> into directory <todir>
    (creating git repository there if needed).

    Uses <cachedir> as a directory to store cache.
    '''
    # Derive a filesystem-safe cache key from the URL (':', '/', '.' -> '-').
    # NOTE(review): cachedir/todir appear to be path.py-style Path objects
    # (makedirs_p / isdir methods are not stdlib pathlib) -- confirm callers.
    cachedir = cachedir / url.replace(':', '-').replace('/', '-').replace('.', '-')
    ref = 'refs/tags/%s' % tag

    todir.makedirs_p(0755)
    # Reuse the local cache as the clone source when it already exists;
    # otherwise start from an empty repository and fetch everything.
    if cachedir.exists() and cachedir.isdir():
        cr = Repo(cachedir)
        r = cr.clone(todir, origin='cache', quiet=True)
    else:
        r = Repo.init(todir)

    origin = r.create_remote('origin', url)
    try:
        # Explicit "+<ref>:<ref>" refspec forces the tag ref to be updated.
        origin.fetch('+%s:%s' % (ref,ref), quiet=True)
    # NOTE(review): presumably GitPython surfaces some fetch failures as
    # AssertionError in the pinned version -- confirm.
    except AssertionError:
        raise UnableToFetchError(url, ref)

    # Point (or create) the local 'build' branch at the fetched tag's commit
    # and check it out, discarding any local modifications.
    if _has_head(r, 'build'):
        r.heads.build.ref = Tag(r, ref).commit
    else:
        r.create_head('build', Tag(r, ref).commit)
    r.heads.build.checkout(force=True)

    # Keep the cache fresh: push the tag back into an existing cache, or
    # seed a brand-new bare cache repository from this clone.
    if cachedir.exists() and cachedir.isdir():
        r.remote('cache').push(ref, force=True)
    else:
        cachedir.makedirs_p(0755)
        cr = r.clone(cachedir, bare=True, quiet=True)
Esempio n. 2
0
    def __enter__(self):
        """ Get a session cookie to use for future requests. """

        # Each deposition lives in its own directory named after its UUID.
        self._entry_dir = os.path.join(querymod.configuration['repo_path'], str(self._uuid))
        if not os.path.exists(self._entry_dir) and not self._initialize:
            raise querymod.RequestError('No deposition with that ID exists!')
        try:
            if self._initialize:
                # Brand-new deposition: create the git repo, take the lock
                # immediately, configure the committer identity, and lay out
                # the data_files directory.
                self._repo = Repo.init(self._entry_dir)
                with open(self._lock_path, "w") as f:
                    f.write(str(os.getpid()))
                self._repo.config_writer().set_value("user", "name", "BMRBDep").release()
                self._repo.config_writer().set_value("user", "email", "*****@*****.**").release()
                os.mkdir(os.path.join(self._entry_dir, 'data_files'))
            else:
                # Existing deposition: wait (bounded, with randomized sleeps)
                # for another process to release the directory lock, then
                # take it ourselves.
                # NOTE(review): the exists-check and the lock-file write are
                # not atomic, so two waiters could both acquire the lock --
                # confirm this race is acceptable.
                counter = 100
                while os.path.exists(self._lock_path):
                    counter -= 1
                    time.sleep(random.random())
                    if counter <= 0:
                        raise querymod.ServerError('Could not acquire entry directory lock.')
                with open(self._lock_path, "w") as f:
                    f.write(str(os.getpid()))
                self._repo = Repo(self._entry_dir)
        except NoSuchPathError:
            raise querymod.RequestError("'%s' is not a valid deposition ID." % self._uuid,
                                        status_code=404)

        return self
Esempio n. 3
0
 def clone(self, url):
     """
     Creates a local cloudlet as a git repo on disk
      from the supplied remote git URL.
     """
     try:
         # Validate the URL first by listing its remote heads from a scratch
         # bare repo; this avoids half-cloning an inaccessible remote.
         temp_repo = tempfile.mkdtemp()
         validate = Repo.init(temp_repo, bare=True)
         validate.git.ls_remote(url, heads=True)
     except Exception as e:
         # NOTE(review): if mkdtemp() itself fails, temp_repo is unbound and
         # the finally-block rmtree raises NameError -- confirm acceptable.
         print colored("Error: ", "red") + "Invalid or inaccessible remote repository URL.\n"
         print e
         exit(1)
     else:
         try:
             # URL looks good: do the real clone and pull in submodules.
             repo = Repo.clone_from(url, self.path)
             repo.submodule_update(init=True)
         except Exception as e:
             print "Cloudlet install failed."
             print e
             exit(1)
         else:
             print "Cloudlet installed: %s" % (self.path)
     finally:
         # Always remove the scratch validation repo.
         rmtree(temp_repo)
    def process_exception(request, exception):
        """Annotate *request* with the developer to "blame" for *exception*.

        Walks the traceback from the innermost frame outwards, finds the
        first file tracked by the configured git repository, git-blames the
        offending line, and stores the author's name (and email for
        committed lines) in request.META['BLAMED_DEVELOPER'].
        """
        # NOTE(review): sys.exc_traceback is Python 2 only.
        exception_traceback = sys.exc_traceback

        stack = traceback.extract_tb(exception_traceback)

        repo_dir = settings.EXCEPTION_BLAME['REPO_DIR']
        repo = Repo(repo_dir)

        # Find the innermost stack frame whose file belongs to this repo.
        first_file_project = None
        for stack_file in reversed(stack):
            file_path = stack_file[0]

            # NOTE(review): passing a dict as a positional argument looks
            # wrong for GitPython -- 'error-unmatch' would normally be the
            # keyword error_unmatch=True. Confirm this behaves as intended.
            if repo.git.ls_files(file_path, {'error-unmatch':True}):
                first_file_project = stack_file
                break

        if first_file_project:
            file_path = first_file_project[0]
            abs_linenumber = first_file_project[1]
            blame = repo.blame(None, file_path)

            # blame returns array with lists [commit, [lines]]
            # Expand blame into one commit per line, then pick the commit
            # owning the faulty (1-based) line number.
            blame_commit = [commit[0]
                            for commit in blame
                                for _ in commit[1]][abs_linenumber-1]


            # git reports uncommitted lines with this placeholder author.
            author = blame_commit.author.name
            if author == 'Not Committed Yet':
                author = author + ', probably your modifications'
            else:
                author = '{} - {}'.format(author, blame_commit.author.email)

            request.META['BLAMED_DEVELOPER'] = author
Esempio n. 5
0
    def update(self):
        """Update the local cloudlet git repo on disk from any origin."""

        print "Updating cloudlet: %s" % (self.path)
        repo = Repo(self.path)
        repo.remotes.origin.pull()
        repo.submodule_update()
Esempio n. 6
0
def create_new_working_branch(branch, base, remote_name):

    repo = Repo(get_project_root())
    remote = repo.remotes[remote_name]

    if repo.is_dirty() == True:
        print "The working tree contains uncommitted changes, commit or stash "
        print "these and try again."
        return 1

    try:
        head = repo.create_head(branch, base)
        print "Summary of actions:"
        print "- A new branch " + branch + " was created, based on " + base + "."
    except OSError:
        print "A branch " + branch + " already exists!"
        return 1

    ret = remote.push(head)
    info = ret[0]
    print ("- A new remote branch " + branch + " was created on " +
           remote_name + ".")

    head.set_tracking_branch(info.remote_ref)
    print ("- Branch " + branch + " tracks remote branch " + branch +
           " from " + remote_name + ".")

    head.checkout()

    print "- You are now on branch " + branch + "."

    return 0
Esempio n. 7
0
def close_working_branch(branch, remote_name):

    repo = Repo(get_project_root())
    remote = repo.remotes[remote_name]

    if repo.is_dirty() == True:
        print "The working tree contains uncommitted changes, commit or stash "
        print "these and try again."
        return 1

    print "Summary of actions:"
    master = repo.heads.master
    master.checkout()

    repo.git.merge(branch, '--no-ff')
    print ("- Branch " + branch + " was merged into master.")

    repo.delete_head(branch, force=True)
    print ("- Branch " + branch + " was deleted.")

    ret = remote.push(":" + branch)
    print ("- Remote branch " + branch + " on " + remote_name + " was deleted.")

    remote.push(master)
    print ("- Merged changes on master were pushed to " + remote_name + ".")

    print "- You are now on branch master."

    return 0
Esempio n. 8
0
 def count_lines(self, project_info):
     """Check out the project's first repository (svn or git) under TMP_DIR,
     run cloc over it, and store the parsed JSON report in
     project_info['cloc']. The temporary checkout is always removed.
     """
     try:
         os.mkdir(TMP_DIR)
         # Only the first listed repository is counted.
         repository = project_info['repositories'][0]
         tmp_path = os.path.join(TMP_DIR, project_info['name'])
         # Crude protocol detection by substring of the repository URL.
         if 'svn' in repository:
             svn_repo = RemoteClient(repository)
             svn_repo.checkout(tmp_path)
         elif 'git' in repository:
             Repo.clone_from(repository, tmp_path)
         
         # svn layouts usually keep the real sources under trunk/.
         project_dir = tmp_path
         trunk_path = os.path.join(tmp_path, 'trunk')
         if os.path.isdir(trunk_path):
             project_dir = trunk_path
         subprocess.call(['./cloc.exe', '--json', '--out=%s' % CLOC_OUT, project_dir])
     
         try:
             with open(CLOC_OUT) as cloc_file:
                 cloc = json.load(cloc_file)
                 project_info['cloc'] = cloc    
         finally:
             # Remove the report file even if parsing failed.
             if os.path.isfile(CLOC_OUT):
                 os.remove(CLOC_OUT)
     
     finally:
         # Always drop the temporary checkout tree.
         if os.path.isdir(TMP_DIR):
             subprocess.call(['rm', '-rf', TMP_DIR])
Esempio n. 9
0
    def rebase(self, issue, branch=None):
        """Rebase the pull request behind *issue* onto its upstream base
        (or onto *branch* when given).

        Clones the PR's head branch into a temporary folder, adds the base
        repository as an 'upstream' remote, rebases onto upstream/<base>,
        and force-pushes the result. Returns a human-readable status string.
        """
        if not issue.pull_request:
            return "Rebase is just supported in PR for now"

        pr = issue.repository.get_pull(issue.number)

        # "<owner>/<repo>@<branch>" spec identifying the PR's head branch.
        branch_name = pr.head.ref
        branched_sdk_id = pr.head.repo.full_name+'@'+branch_name

        upstream_url = 'https://github.com/{}.git'.format(pr.base.repo.full_name)
        upstream_base = pr.base.ref if not branch else branch

        with tempfile.TemporaryDirectory() as temp_dir, \
                manage_git_folder(self.gh_token, Path(temp_dir) / Path("sdk"), branched_sdk_id) as sdk_folder:

            sdk_repo = Repo(str(sdk_folder))
            configure_user(self.gh_token, sdk_repo)

            upstream = sdk_repo.create_remote('upstream', url=upstream_url)
            upstream.fetch()

            # Rebase rewrites history, so the subsequent push must be forced.
            msg = sdk_repo.git.rebase('upstream/{}'.format(upstream_base))
            _LOGGER.debug(msg)
            msg = sdk_repo.git.push(force=True)
            _LOGGER.debug(msg)

            return "Rebase done and pushed to the branch"
Esempio n. 10
0
def git_create_repo(url, repo_path, remote, ref, depth=0):
    """Clone git repo from url at path"""
    # Nothing to do when repo_path already contains a .git directory.
    if not os.path.isdir(os.path.join(repo_path, '.git')):
        if not os.path.isdir(repo_path):
            os.makedirs(repo_path)
        repo_path_output = colored(repo_path, 'cyan')
        try:
            print(' - Clone repo at ' + repo_path_output)
            Repo.init(repo_path)
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
        except:
            cprint(' - Failed to initialize repository', 'red')
            print('')
            # Clean up the half-created directory before exiting.
            shutil.rmtree(repo_path)
            sys.exit(1)
        else:
            repo = _repo(repo_path)
            remote_names = [r.name for r in repo.remotes]
            remote_output = colored(remote, 'yellow')
            # Create the requested remote unless it is already configured.
            if remote not in remote_names:
                try:
                    print(" - Create remote " + remote_output)
                    repo.create_remote(remote, url)
                except:
                    message = colored(" - Failed to create remote ", 'red')
                    print(message + remote_output)
                    print('')
                    shutil.rmtree(repo_path)
                    sys.exit(1)
            # Finally fetch and check out the requested ref (optionally
            # shallow, controlled by depth).
            _checkout_ref(repo_path, ref, remote, depth)
Esempio n. 11
0
 def init(rid):
     """
     Create an empty git repository for *rid* under settings.REPO_DIR and
     return it wrapped in a PasteRepo.
     """
     target = os.path.join(settings.REPO_DIR, str(rid))
     Repo.init(target)
     return PasteRepo(rid)
Esempio n. 12
0
    def __start_experiment(self, parameters):
        """
        Start an experiment by capturing the state of the code
        :param parameters: a dictionary containing the parameters of the experiment
        :type parameters: dict
        :return: the tag representing this experiment
        :rtype: TagReference
        """
        repository = Repo(self.__repository_directory, search_parent_directories=True)
        if len(repository.untracked_files) > 0:
            logging.warning("Untracked files will not be recorded: %s", repository.untracked_files)
        current_commit = repository.head.commit
        started_state_is_dirty = repository.is_dirty()

        # A dirty tree is committed temporarily so the tag can point at the
        # exact code state; the commit is rolled back again below.
        if started_state_is_dirty:
            repository.index.add([p for p in self.__get_files_to_be_added(repository)])
            commit_obj = repository.index.commit("Temporary commit for experiment " + self.__experiment_name)
            sha = commit_obj.hexsha
        else:
            sha = repository.head.object.hexsha

        # Experiment metadata stored in the tag created by __tag_repo.
        data = {"parameters": parameters, "started": time.time(), "description": self.__description,
                "commit_sha": sha}
        tag_object = self.__tag_repo(data, repository)

        # Move HEAD back to the original commit; the temporary commit stays
        # reachable only through the tag just created.
        if started_state_is_dirty:
            repository.head.reset(current_commit, working_tree=False, index=True)

        return tag_object
Esempio n. 13
0
def tarbell_install_template(args):
    """Install a project template."""
    with ensure_settings(args) as settings:
        template_url = args.get(0)

        # Skip installation when the template URL is already registered.
        matches = [template for template in settings.config["project_templates"] if template["url"] == template_url]
        if matches:
            puts("\n{0} already exists. Nothing more to do.\n".format(
                colored.yellow(template_url)
            ))
            sys.exit()

        puts("\nInstalling {0}".format(colored.cyan(template_url))) 
        tempdir = tempfile.mkdtemp()
        puts("\n- Cloning repo to {0}".format(colored.green(tempdir))) 
        Repo.clone_from(template_url, tempdir)
        # Templates ship a _base/base.py module; import it to read its NAME.
        # NOTE(review): `imp` is deprecated in modern Python.
        base_path = os.path.join(tempdir, "_base/")
        filename, pathname, description = imp.find_module('base', [base_path])
        base = imp.load_module('base', filename, pathname, description)
        puts("\n- Found _base/base.py")
        try:
            name = base.NAME
            puts("\n- Name specified in base.py: {0}".format(colored.yellow(name)))
        except AttributeError:
            # No NAME constant: fall back to the last URL path component.
            name = template_url.split("/")[-1]
            puts("\n- No name specified in base.py, using '{0}'".format(colored.yellow(name)))

        # Register the template and persist the updated settings.
        settings.config["project_templates"].append({"name": name, "url": template_url})
        settings.save()

        _delete_dir(tempdir)

        puts("\n+ Added new project template: {0}".format(colored.yellow(name)))
    def get_number_contributors(self):
        """Method for getting number of authors for each repository.
        :return:
            Dictionary: {<repository_name>: <number_of_different_authors>}"""

        base_path = os.path.dirname(os.path.realpath("IPythonProject"))
        projects_root = base_path + "\\NewGitHubProjects\\"
        authors_per_repo = {}

        for project_name in os.listdir(projects_root):
            # Collect each distinct commit author across all repos of this
            # project directory (list keeps the original membership test).
            seen_authors = []
            for repo_name in os.listdir(projects_root + "\\" + project_name):
                repo = Repo(projects_root + "\\" + project_name + "\\" + repo_name,
                            search_parent_directories=True)
                if repo.active_branch.is_valid():
                    for commit in list(repo.iter_commits()):
                        author = commit.author
                        if author not in seen_authors:
                            seen_authors.append(author)

            authors_per_repo.update({project_name: len(seen_authors)})

        return authors_per_repo
Esempio n. 15
0
def main(source, dest):
    """Rename a Git repository and update its remote accordingly."""
    basicConfig(level=DEBUG)
    try:
        repo = Repo(source)
    except OSError as error:
        logger.exception('Error:')
        exit(1)
    else:
        # Normalise the destination name to end in ".git".
        dest = Path(dest)
        try:
            dest = dest.with_suffix('.git')
        except ValueError:
            # with_suffix rejects names it cannot suffix (e.g. empty name).
            logger.exception('Error:')
            exit(1)
        logger.info('Using dest: %s', dest)

        # Rewrite the remote URL: keep its parent, swap the final component.
        remote = repo.remote()
        logger.debug('Old URL: %s', remote.url)
        origin = Path(remote.url)
        logger.debug('Parent: %s', origin.parent)

        new = origin.parent / dest
        logger.info('Using URL: %s', new)

        conf = remote.config_writer
        conf.set('url', str(new))
        conf.release()

        # Finally rename the working directory on disk.
        Path(source).rename(dest)
        exit(0)
Esempio n. 16
0
def git_install_repos(repos, path):
    """Clone every repo in *repos* under *path* and pip-install its
    requirements.txt when present. Repos that fail to clone are collected
    and reported at the end.

    Each entry of *repos* is a dict with 'name' and 'urls' ({'ssh', 'https'})
    keys.
    """
    # Make sure gitpython is installed
    call(['pip', 'install', 'gitpython'])
    errors = []
    for repo in repos:
        print('\n')
        print('Installing ' + repo['name'])
        # Prefer the SSH URL, falling back to HTTPS when SSH is absent.
        # (BUG FIX: the original "x if not None else y" always chose x,
        # because "not None" is a constant True, and the computed value was
        # never used for the clone.)
        loc = repo['urls']['ssh'] if repo['urls']['ssh'] is not None else repo['urls']['https']
        repo_path = path + repo['name']
        try:
            Repo.clone_from(loc, repo_path)
            reqs = repo_path + '/requirements.txt'
            if os.path.isfile(reqs):
                call(['pip', 'install', '-r', reqs])
            elif os.path.exists(reqs):
                print(reqs + ' is a directory')
            else:
                print(reqs + ' does not exist')
        except Exception:
            # Narrowed from a bare except so Ctrl-C still interrupts.
            errors.append(repo)
    print('Errors installing:\n')
    for error in errors:
        print(json.dumps(error, indent=2))
    print('Done installing repos')
Esempio n. 17
0
def clone_repos(students, syspath=os.path.join("..","repos"), url="[email protected]:%s/SoftwareDesign.git"):
  """
  Recursively removes previous copies of the repo (requires user confirmation)
  Clones the repos from the urls to a folder called repos/<username>

    students : list of student objects
    syspath : system path to copy repos to
  """
  if (raw_input("Remove current repositories? (y/n) ")) != "y":
    raise Exception("Failed to confirm. Failed to clone repos")
  
  # if other repos exist, remove them
  if os.path.exists(syspath):
    shutil.rmtree(syspath) # remove existing repos
    print "Successfully removed repos from \"%s\"" % syspath

  for s in students:
    path = syspath + s.user
    
    print "Cloning Repo: %s to %s" % (s.name, path)

    if not os.path.exists(path):
      os.makedirs(path)
    
    Repo.clone_from(url % s.user, path)

  print "Successfully cloned repos"
Esempio n. 18
0
def get_commits(repo_name, db):
    """Walk the master history of *repo_name* and insert per-commit metadata
    (author, dates, message, +/- line counts vs. the next-older commit)
    into the db["git2db"] collection, skipping commits already stored.
    """
    url = get_repo_url(repo_name)
    repo = Repo(url)
    commits = list(repo.iter_commits('master'))
    commits_len = len(commits)
    # Iterate newest-to-oldest over adjacent pairs; the oldest commit has no
    # predecessor to diff against and is therefore never inserted.
    for i in range(0, commits_len-1):
        commit_cur = commits[i]
        commit_prev = commits[i+1]
        # Fallback values used when the diff cannot be computed.
        minus = 1
        plus = 1
        try:
            diff_str = repo.git.diff(commit_cur, commit_prev)
        except Exception:
            print "exception in encode utf8"
            
        else:
            (minus, plus) = get_diff_meta(diff_str)
        commit_data = {"uniq": repo_name + '/' + str(commit_cur),
                       "repo_name": repo_name,
                       "sha": str(commit_cur),
                       "author": commit_cur.author.name,
                       "email": commit_cur.author.email,
                       "authored_date": commit_cur.authored_date,
                       "committed_date": commit_cur.committed_date,
                       "message": commit_cur.message, 
                       "encoding": commit_cur.encoding,
                       "minus": minus, "plus": plus}
        # "uniq" acts as the primary key; skip commits already recorded.
        if db["git2db"].find_one({"uniq": repo_name + '/' + str(commit_cur)}):
            continue
        else:
            db["git2db"].insert(commit_data)
Esempio n. 19
0
    def run(self):
        """Render the ten most recent commits of the documentation source
        repository as a docutils bullet list."""
        env = self.state.document.settings.env
        repo = Repo(env.srcdir)
        bullet_list = nodes.bullet_list()
        for commit in list(repo.iter_commits())[:10]:
            when = datetime.fromtimestamp(commit.authored_date)
            # First message line is the summary; anything after the first
            # newline becomes a caption node.
            message, _, detailed_message = commit.message.partition('\n')

            item = nodes.list_item()
            item += [
                nodes.strong(text=message),
                nodes.inline(text=" by "),
                nodes.emphasis(text=str(commit.author)),
                nodes.inline(text=" at "),
                nodes.emphasis(text=str(when)),
            ]
            if detailed_message:
                item.append(nodes.caption(text=detailed_message.strip()))
            bullet_list.append(item)
        return [bullet_list]
Esempio n. 20
0
    def __get_branch(self, path):
        """Gets required information of a repository

        :param path: Path of .git directory
        :return: Config dictionary
        """
        info = {}
        try:
            repo = Repo(path)
            # Snapshot of the repo state; depth/type are fixed for git repos.
            info.update({'path': str(os.path.dirname(path)),
                         'branch': str(repo.head.ref),
                         'commit': str(repo.head.reference.commit),
                         'is_dirty': repo.is_dirty(),
                         'name': name_from_url(str(repo.remotes[0].url)),
                         'depth': 1,
                         'type': "git"})
            # Map every configured remote name to its URL.
            urls = {}
            remotes = repo.remotes
            for each in remotes:
                urls.update({each.name: each.url})
            info.update({'repo_url': urls})
        except Exception as e:
            # Any failure (bad repo, detached HEAD, no remotes, ...) is
            # logged and reported as {0: <error text>} instead of raising.
            self.logger.error(e)
            return {0: str(e)}
        return info
Esempio n. 21
0
    def __init__(self, config):
        """Set up git-backed wiki storage from *config*.

        Clones the wiki repository (and the optional shadow repository) when
        missing, opens them, and checks out the configured branches.
        """
        from git import Repo,GitDB,GitCmdObjectDB
        self.vcs=config.get('vcs')
        self.wiki = config.get('wiki') 
        self.shadowEnabled=False

        print "Initializing Git..."
        print "    Check for existing Repo at %s" % (self.wiki['root'])
        if not os.path.isdir(self.wiki["root"]):
            print "    Cloning %s" % (self.vcs['repo'])
            Repo.clone_from(self.vcs["repo"],self.wiki["root"])

        # GitCmdObjectDB delegates object access to the git binary.
        self.repo = Repo(self.wiki["root"],odbt=GitCmdObjectDB)

        if self.vcs.get("branch",None) is not None:
            self.checkoutBranch(self.vcs.get("branch"))

        # Optional "shadow" copy of the repository on its own branch.
        if self.vcs.get("shadow_enabled",False):
            if self.vcs.get("shadow_root",None) is not None: 
                if not os.path.isdir(self.vcs.get("shadow_root")):
                    print "Cloning Shadow Repo"
                    Repo.clone_from(self.vcs["repo"],self.vcs.get("shadow_root"))

                self.shadow = Repo(self.vcs.get("shadow_root"))
                self.checkoutBranch(self.vcs.get("shadow_branch"),repo=self.shadow)                 
Esempio n. 22
0
    def get_common_files(self):
        """
        Fetch the raw hpo-annotation-data by cloning/pulling the
        [repository](https://github.com/monarch-initiative/hpo-annotation-data.git)
        These files get added to the files object,
        and iterated over separately.
        :return:

        """

        repo_dir = '/'.join((self.rawdir, 'git'))
        REMOTE_URL = \
            "[email protected]:monarch-initiative/hpo-annotation-data.git"
        HTTPS_URL = \
            "https://github.com/monarch-initiative/hpo-annotation-data.git"

        # TODO if repo doesn't exist, then clone otherwise pull
        # (current behaviour: any existing checkout is deleted and the repo
        # is always re-cloned from scratch).
        if os.path.isdir(repo_dir):
            shutil.rmtree(repo_dir)

        logger.info("Cloning common disease files from %s", REMOTE_URL)
        try:
            Repo.clone_from(REMOTE_URL, repo_dir)
        except GitCommandError:
            # Try with https and if this doesn't work fail
            Repo.clone_from(HTTPS_URL, repo_dir)

        return
Esempio n. 23
0
    def closeEvent(self, event):
        """
            Catching the close event to do some cleanup
        """
        def run_command(command):
            # Run a shell command and block until it finishes.
            handle = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
            handle.wait()

        a_repo = Repo(self._directory)
        # The raw git commands below operate on the current directory.
        os.chdir(self._directory)

        try:
            active_branch_name = a_repo.active_branch.name
            should_be_cleaned = a_repo.active_branch.name == 'gitbuster_rebase'
        except TypeError:
            # presumably a detached HEAD makes active_branch raise TypeError
            # in the pinned GitPython version -- confirm.
            should_be_cleaned = True

        # Drop the temporary 'gitbuster_rebase' branch: discard uncommitted
        # changes, switch to the first other branch, then delete it.
        if should_be_cleaned:
            if a_repo.is_dirty():
                run_command("git reset --hard")

            fallback_branch_name = [branch.name for branch in a_repo.branches
                                    if branch.name != 'gitbuster_rebase'][0]
            run_command("git checkout %s" % fallback_branch_name)
            run_command("git branch -D gitbuster_rebase")
Esempio n. 24
0
def clone_git_repo(repo, checkout_dir):
    """Ensure *repo* is cloned under *checkout_dir* and return the local
    checkout path (named after the repository)."""
    parsed = giturlparse.parse(repo, False)
    target = os.path.join(checkout_dir, parsed.repo)
    # Clone only when no checkout exists yet; otherwise reuse it as-is.
    if not os.path.isdir(target):
        Repo.clone_from(repo, target)
    return target
Esempio n. 25
0
    def cloneGitHub(self):
        """Clone the core's GitHub repository into <output_path>/rtl and
        collect Verilog files named "<repo>.v" into self.source_list.

        On clone failure the user may supply a URL manually (unless auto
        mode is active), otherwise the core is skipped.
        """
        if not os.path.isdir(self.output_path + "rtl"):
            os.mkdir(self.output_path + "rtl")
        # Remove any stale checkout of this repo before cloning.
        for file in os.listdir(self.output_path + "rtl"):
            if file == Repo_clone.repo:
                shutil.rmtree(self.output_path + "rtl/" + Repo_clone.repo)
        try:
            Repo.clone_from("https://github.com/{user}/{repo}".format(user=Repo_clone.user, repo=Repo_clone.repo),
                            self.output_path + "rtl/" + Repo_clone.repo)
        except:
            self.is_manual_url = input("We couldn't find the source files defined in the " + Repo_clone.repo + " core file.\n Would you like to add the URL manually? (y/n)\n")

            # No interaction with the user, if auto mode is on
            if self.is_interactive == "A":
                self.is_manual_url = "no"

            if re.match(r"[yY][eE][sS]", self.is_manual_url) or self.is_manual_url == "y":
                manual_url = input("Please add the URL: ")
                try:
                    # NOTE(review): the target here is "rtl" + repo (no '/'),
                    # unlike the "rtl/" + repo used above -- confirm intended.
                    Repo.clone_from(manual_url, self.output_path + "rtl" + Repo_clone.repo)
                except:
                    print("We couldn't find the source files.\nThe core will be skipped")
            else:
                print("We skipped the " + Repo_clone.repo + " core. Please fix the top gen config and .core files to make this core work")


        # Add files to source list
        # Only files named exactly "<repo>.v" are picked up from the tree.
        for root, dirs, files in os.walk(self.output_path + "rtl/" + Repo_clone.repo, topdown=False, onerror=None, followlinks=False):
            for file in files:
                if file == (self.repo + ".v"):
                    self.source_list.append(os.path.join(root, file))
                    print(file)
Esempio n. 26
0
    def _clone_repos(self, all_repos):
        """Given a list of repositories, make sure they're all cloned.

        Should be called from the subclassed `Catalog` objects, passed a list
        of specific repository names.

        Arguments
        ---------
        all_repos : list of str
            *Absolute* path specification of each target repository.

        """
        for repo in all_repos:
            if not os.path.isdir(repo):
                try:
                    # Repository name is the last path component; clone it
                    # from the astrocatalogs GitHub organisation.
                    repo_name = os.path.split(repo)[-1]
                    self.log.warning(
                        'Cloning "' + repo + '" (only needs to be done ' +
                        'once, may take few minutes per repo).')
                    # Pass depth only when a positive clone depth was
                    # requested (shallow clone); otherwise clone fully.
                    Repo.clone_from("https://github.com/astrocatalogs/" +
                                    repo_name + ".git", repo,
                                    **({'depth': self.args.clone_depth} if
                                       self.args.clone_depth > 0 else {}))
                except:
                    # Log and re-raise: cloning failures must not be hidden.
                    self.log.error("CLONING '{}' INTERRUPTED".format(repo))
                    raise

        return
Esempio n. 27
0
    def run(self):
        """Render recent commits touching the current document as a bullet
        list.

        Options: 'dir' restricts history to the containing directory rather
        than the single source file; 'revisions' caps how many commits are
        shown (default 10).
        """
        env = self.state.document.settings.env
        config = env.config
        repodir = env.srcdir + '/' + config["git_repository_root"]

        doc_path = env.srcdir + '/' + env.docname + config["source_suffix"]

        # ':dir:' given as a flag (value None): use the parent directory.
        if self.options.get('dir', False) == None:
            doc_path = '/'.join(doc_path.split('/')[:-1])

        repo = Repo(repodir)
        commits = repo.iter_commits(paths=doc_path)
        l = nodes.bullet_list()
        revisions_to_display = self.options.get('revisions', 10)

        for commit in list(commits)[:revisions_to_display]:
            date_str = datetime.fromtimestamp(commit.authored_date)
            # First message line is the summary; the remainder (if any)
            # becomes a caption node below the list item.
            if '\n' in commit.message:
                message, detailed_message = commit.message.split('\n', 1)
            else:
                message = commit.message
                detailed_message = None

            item = nodes.list_item()
            item += [
                nodes.strong(text=message),
                nodes.inline(text=" by "),
                nodes.emphasis(text=str(commit.author)),
                nodes.inline(text=" at "),
                nodes.emphasis(text=str(date_str))
            ]
            if detailed_message:
                item.append(nodes.caption(text=detailed_message.strip()))
            l.append(item)
        return [l]
Esempio n. 28
0
    def update_repo(self, project):
        """Clone or pull *project*'s repository, then create Commit rows
        (with per-commit stats and resolved Contributor) for commits not
        yet in the database. Prints progress to stdout.
        """
        from ostracker.models import Commit, Contributor
        proj_dir = project.get_local_repo_dir()

        # checkout or update project
        if not os.path.exists(proj_dir):
            print 'cloning %s' % project
            os.system('git clone -q %s %s' % (project.get_remote_repo_url(),
                                              proj_dir))
        else:
            print 'updating %s' % project
            os.system('cd %s && git pull -q' % proj_dir)

        # process new commits
        repo = Repo(proj_dir)
        added = 0
        for c in repo.iter_commits():
            try:
                # NOTE(review): `c.sha` is the legacy GitPython 0.1 spelling;
                # modern GitPython exposes `hexsha` -- confirm the pinned
                # library version.
                Commit.objects.get(id=c.sha)
            except Commit.DoesNotExist:
                added += 1
                stats = c.stats.total

                cdate = datetime.datetime.fromtimestamp(c.committed_date)

                # Resolve (or create) the contributor by name and email.
                author = Contributor.objects.lookup(c.author.name, c.author.email)

                Commit.objects.create(id=c.sha, project=project, author=author,
                                      message=c.message, time_committed=cdate,
                                      deletions=stats['deletions'],
                                      files=stats['files'],
                                      insertions=stats['insertions'],
                                      lines=stats['lines'])
        print 'added %s commits to %s' % (added, project)
Esempio n. 29
0
class Git(BaseBackend):
    """
    GIT Backend for asset versions.
    """

    def __init__(self):
        """
        Git backend constructor.

        Opens the repository at settings.ASSET_REPO_DIR when that setting
        exists, falling back to settings.BASE_DIR otherwise.
        """
        repo_dir = settings.BASE_DIR
        if hasattr(settings, 'ASSET_REPO_DIR'):
            # BUG FIX: the guard checks ASSET_REPO_DIR but the original read
            # settings.REPO_DIR, which need not exist; use the checked name.
            repo_dir = settings.ASSET_REPO_DIR
        self.repo = Repo(repo_dir)

    def version_long(self):
        """
        Returns the full hash of the latest commit found.

        :return: Latest commit hash (full)
        :rtype: str
        """
        return self.repo.commit().hexsha

    def version_short(self):
        """
        Returns the short hash of the latest commit found.

        :return: Latest commit hash (short)
        :rtype: str
        """
        return self.repo.commit().hexsha[:6]
    def __init__(self, *args, **kwargs):
#        unittest.TestCase.__init__(self, name) 
        import os
        import tempfile
        from git import Repo
      
        # The constructor is called multiple times by the unit testing framework.
        # Hence, we keep track of the first call to avoid multiple temporary directories.      
        if self.__class__.needs_initial:    
            self._repDir = tempfile.mkdtemp(prefix="tmp-BuildingsPy" +  "-testing-")
            print "**************************", self._repDir
            
            # Remember on the class that setup ran and where it cloned to,
            # so later instances can reuse the same directory.
            self.__class__.needs_initial = False
            self.__class__.repDir = self._repDir
            
            # Clone the libraries
            print "Cloning Buildings repository. This may take a while."
            print "Dir is ", self._repDir
            Repo.clone_from("https://github.com/lbl-srg/modelica-buildings", os.path.join(self._repDir, "modelica-buildings"))
            print "Cloning Annex 60 repository. This may take a while."        
            Repo.clone_from("https://github.com/iea-annex60/modelica-annex60", os.path.join(self._repDir, "modelica-annex60"))
            print "Finished cloning."
            
        else:
            # Reuse the directory cloned by the first instantiation.
            self._repDir = self.__class__.repDir

        # Library roots used by the merger under test.
        self._annex60_dir=os.path.join(self._repDir, "modelica-annex60", "Annex60")
        self._dest_dir=os.path.join(self._repDir,  "modelica-buildings", "Buildings")

        # Call constructor of parent class
        super(Test_development_merger_Annex60, self).__init__(*args, **kwargs)
Esempio n. 31
0
from asyncio.subprocess import PIPE as asyncPIPE
from datetime import datetime
from os import remove
from platform import python_version, uname
from shutil import which

import psutil
from git import Repo
from telethon import __version__, version

from userbot import ALIVE_LOGO, ALIVE_NAME, CMD_HELP, USERBOT_VERSION, StartTime, bot
from userbot.events import register

# ================= CONSTANT =================
# Display name; falls back to the machine's host name when ALIVE_NAME is unset.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
# Git repository of the running bot (assumes the cwd is the repo root).
repo = Repo()
# Loaded command-help registry (presumably used to report module info; verify against callers).
modules = CMD_HELP


# ============================================
async def get_readable_time(seconds: int) -> str:
    count = 0
    up_time = ""
    time_list = []
    time_suffix_list = ["s", "m", "h", "days"]

    while count < 4:
        count += 1
        if count < 3:
            remainder, result = divmod(seconds, 60)
        else:
Esempio n. 32
0
def test_init_git(tmp_dir):
    """After `git init`, SCM() should recognise the directory as a Git repo."""
    path = fspath(tmp_dir)
    Repo.init(path)
    scm = SCM(path)
    assert isinstance(scm, Git)
Esempio n. 33
0
    'char_storage.sql',
    'char_style.sql',
    'char_unlocks.sql',
    'char_vars.sql',
    'chars.sql',
    'conquest_system.sql',
    'delivery_box.sql',
    'linkshells.sql',
    'server_variables.sql',
    'unity_system.sql',
]
# SQL dump files selected for import (filled in later).
import_files = []
# Backups created during this run.
backups = []
# Database connection settings (set before connecting; None until then).
database = host = port = login = password = None
# Active DB connection and cursor.
db = cur = None
# Git repository of the checkout; the script runs one level below the repo root.
repo = Repo('../')
# Version markers compared to decide whether an update is needed.
current_version = current_client = release_version = release_client = None
express_enabled = False
auto_backup = auto_update_client = True
# Directory containing the mysql client binary, normalised with a trailing '/'.
mysql_bin = ''
mysql_env = distutils.spawn.find_executable('mysql')
if mysql_env:
    mysql_bin = os.path.dirname(mysql_env).replace('\\','/')
    if mysql_bin[-1] != '/':
        mysql_bin = mysql_bin + '/'
# Executable suffix differs on Windows.
if os.name == 'nt':
    exe = '.exe'
else:
    exe = ''
# Shell redirection appended to commands so stderr is captured in error.log.
log_errors = ' 2>>error.log'
colorama.init(autoreset=True)
Esempio n. 34
0
 def _open_repository(self) -> Repo:
     """Open the repository at ``self.path``; discover its main branch if unset."""
     repository = Repo(str(self.path))
     if self.main_branch is None:
         self._discover_main_branch(repository)
     return repository
0
    def clone(self, source, dest, branch):
        """Clone *source* into *dest* and switch the checkout to *branch*."""
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        cloned = Repo.clone_from(source, dest)
        cloned.git.checkout(branch)
Esempio n. 36
0
import os
import sys
import subprocess
from git import Repo
from dotenv import load_dotenv
load_dotenv()

if __name__ == "__main__":

    repo_path = os.getenv('GIT_REPO_PATH')
    git_email = os.getenv('GIT_EMAIL')
    git_user_name = os.getenv('GIT_USER_NAME')
    nvm_rc = os.getenv('NVMRC_PATH')
    print(repo_path)
    my_repo = Repo(repo_path)
    print(my_repo)

    def print_repo(my_repo):
        """Print the repository's description and currently active branch."""
        print(f"Repo description: {my_repo.description}")
        print(f"Repo active branch is {my_repo.active_branch}")

    def locomotor(branch):
        """
        This function will...
        1. checkout the branch name provided or create a new branch with that name
        2. check .nvmrc for current node version
        """
        if branch in my_repo.branches:
            print(f"{branch} is already a branch")
            my_repo.git.checkout(branch)
        else:
Esempio n. 37
0
async def upstream(ups):
    ".update komutu ile botunun güncel olup olmadığını denetleyebilirsin."
    # Check whether the bot is up to date; ".update now" forces the update.
    # User-facing strings are Turkish and left untouched.
    await ups.edit("`Güncellemeler denetleniyor...`")
    conf = ups.pattern_match.group(1)
    off_repo = UPSTREAM_REPO_URL
    force_update = False

    try:
        txt = "`Güncelleme başarısız oldu!"
        txt += "Bazı sorunlarla karşılaştık.`\n\n**LOG:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # FIX: `repo` is unbound when Repo() raises, so the original
        # `repo.__del__()` call here crashed with NameError.
        await ups.edit(f'{txt}\n`{error} klasörü bulunamadı.`')
        return
    except GitCommandError as error:
        await ups.edit(f'{txt}\n`Git hatası! {error}`')
        return
    except InvalidGitRepositoryError as error:
        if conf != "now":
            await ups.edit(f"`{error} klasörü bir git reposu gibi görünmüyor.\
            \nFakat bu sorunu .update now komutuyla botu zorla güncelleyerek çözebilirsin.`"
                           )
            return
        repo = Repo.init()
        origin = repo.create_remote('upstream', off_repo)
        origin.fetch()
        force_update = True
        # FIX: the original mixed head/ref names here — it created a head
        # named 'master' but then referenced `repo.heads.seden` and
        # `origin.refs.sql`, which do not exist (AttributeError).  Use the
        # freshly created 'master' head consistently, tracking the remote
        # ref it was created from (matches the `ac_br != 'master'` check
        # below and the parallel implementations elsewhere in this file).
        repo.create_head('master', origin.refs.seden)
        repo.heads.master.set_tracking_branch(origin.refs.seden)
        repo.heads.master.checkout(True)

    ac_br = repo.active_branch.name
    if ac_br != 'master':
        # Refuse to update a custom branch: the merge source is ambiguous.
        await ups.edit(
            f'**[Güncelleyici]:**` Galiba Asena botunu modifiye ettin ve kendi branşını kullanıyorsun: ({ac_br}). '
            'Bu durum güncelleyicinin kafasını karıştırıyor,'
            'Güncelleme nereden çekilecek?'
            'Lütfen seden botunu resmi repodan kullan.`')
        repo.__del__()
        return

    try:
        repo.create_remote('upstream', off_repo)
    except BaseException:
        pass

    ups_rem = repo.remote('upstream')
    ups_rem.fetch(ac_br)

    changelog = await gen_chlog(repo, f'HEAD..upstream/{ac_br}')

    if not changelog and not force_update:
        await ups.edit(f'\n`Botun` **tamamen güncel!** `Branch:` **{ac_br}**\n'
                       )
        repo.__del__()
        return

    if conf != "now" and not force_update:
        # Only report the changelog; the user must confirm with ".update now".
        changelog_str = f'**{ac_br} için yeni güncelleme mevcut!\n\nDeğişiklikler:**\n`{changelog}`'
        if len(changelog_str) > 4096:
            # Telegram message limit — ship the changelog as a file instead.
            await ups.edit(
                "`Değişiklik listesi çok büyük, dosya olarak görüntülemelisin.`"
            )
            file = open("degisiklikler.txt", "w+")
            file.write(changelog_str)
            file.close()
            await ups.client.send_file(
                ups.chat_id,
                "degisiklikler.txt",
                reply_to=ups.id,
            )
            remove("degisiklikler.txt")
        else:
            await ups.edit(changelog_str)
        await ups.respond(
            '`Güncellemeyi yapmak için \".update now\" komutunu kullan.`')
        return

    if force_update:
        await ups.edit('`Güncel stabil userbot kodu zorla eşitleniyor...`')
    else:
        await ups.edit('`Bot güncelleştiriliyor...`')
    # Running on a Heroku dyno brings extra complications: push to the app's
    # git remote instead of pulling locally.
    if HEROKU_APIKEY is not None:
        import heroku3
        heroku = heroku3.from_key(HEROKU_APIKEY)
        heroku_app = None
        heroku_applications = heroku.apps()
        if not HEROKU_APPNAME:
            await ups.edit(
                '`[HEROKU MEMEZ] Güncelleyiciyi kullanabilmek için HEROKU_APPNAME değişkenini tanımlamalısın.`'
            )
            repo.__del__()
            return
        for app in heroku_applications:
            if app.name == HEROKU_APPNAME:
                heroku_app = app
                break
        if heroku_app is None:
            await ups.edit(
                f'{txt}\n`Heroku değişkenleri yanlış veya eksik tanımlanmış.`')
            repo.__del__()
            return
        await ups.edit('`[HEROKU MEMEZ]\
                        \nUserBot Heroku dynosuna aktarılıyor, lütfen bekle...`'
                       )
        ups_rem.fetch(ac_br)
        repo.git.reset("--hard", "FETCH_HEAD")
        heroku_git_url = heroku_app.git_url.replace(
            "https://", "https://*****:*****@")
        if "heroku" in repo.remotes:
            remote = repo.remote("heroku")
            remote.set_url(heroku_git_url)
        else:
            remote = repo.create_remote("heroku", heroku_git_url)
        try:
            remote.push(refspec="HEAD:refs/heads/master", force=True)
        except GitCommandError as error:
            await ups.edit(f'{txt}\n`Karşılaşılan hatalar burada:\n{error}`')
            repo.__del__()
            return
        await ups.edit('`Güncelleme başarıyla tamamlandı!\n'
                       'Yeniden başlatılıyor...`')
    else:
        # Classic updater: plain git pull, hard reset on conflict.
        try:
            ups_rem.pull(ac_br)
        except GitCommandError:
            repo.git.reset("--hard", "FETCH_HEAD")
        await update_requirements()
        await ups.edit('`Güncelleme başarıyla tamamlandı!\n'
                       'Yeniden başlatılıyor...`')
        # Replace the current process with a fresh bot instance.
        args = [sys.executable, "main.py"]
        execle(sys.executable, *args, environ)
        return
Esempio n. 38
0
async def upstream(event):
    "For .update command, check if the bot is up to date, update if specified"
    conf = event.pattern_match.group(1).strip()
    event = await edit_or_reply(event,
                                "`Checking for updates, please wait....`")
    off_repo = UPSTREAM_REPO_URL
    force_update = False
    try:
        txt = "`Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # FIX: `repo` is unbound when Repo() raises, so the original
        # `return repo.__del__()` crashed with NameError here.
        await event.edit(f"{txt}\n`directory {error} is not found`")
        return
    except GitCommandError as error:
        await event.edit(f"{txt}\n`Early failure! {error}`")
        return
    except InvalidGitRepositoryError as error:
        # FIX: `conf` is the result of .strip() and therefore never None;
        # the original `if conf is None:` was dead and the warning was
        # unreachable.  Treat an empty argument as "not confirmed".
        if not conf:
            return await event.edit(
                f"`Unfortunately, the directory {error} "
                "does not seem to be a git repository.\n"
                "But we can fix that by force updating the userbot using "
                ".update now.`")
        repo = Repo.init()
        origin = repo.create_remote("upstream", off_repo)
        origin.fetch()
        force_update = True
        repo.create_head("master", origin.refs.master)
        repo.heads.master.set_tracking_branch(origin.refs.master)
        repo.heads.master.checkout(True)
    ac_br = repo.active_branch.name
    if ac_br != UPSTREAM_REPO_BRANCH:
        # Custom branches are refused: the merge source would be ambiguous.
        await event.edit(
            "**[UPDATER]:**\n"
            f"`Looks like you are using your own custom branch ({ac_br}). "
            "in that case, Updater is unable to identify "
            "which branch is to be merged. "
            "please checkout to any official branch`")
        return repo.__del__()
    try:
        repo.create_remote("upstream", off_repo)
    except BaseException:
        pass
    ups_rem = repo.remote("upstream")
    ups_rem.fetch(ac_br)
    changelog = await gen_chlog(repo, f"HEAD..upstream/{ac_br}")
    """ - Special case for deploy - """
    if conf == "deploy":
        await event.edit("`Deploying userbot, please wait....`")
        await deploy(event, repo, ups_rem, ac_br, txt)
        return
    if changelog == "" and not force_update:
        await event.edit("\n`CATUSERBOT is`  **up-to-date**  `with`  "
                         f"**{UPSTREAM_REPO_BRANCH}**\n")
        return repo.__del__()
    if conf == "" and force_update is False:
        # No explicit argument: just show the changelog and ask to confirm.
        await print_changelogs(event, ac_br, changelog)
        await event.delete()
        return await event.respond(
            'do "[`.update now`] or [`.update deploy`]" to update.Check `.info updater` for details'
        )

    if force_update:
        await event.edit(
            "`Force-Syncing to latest stable userbot code, please wait...`")
    if conf == "now":
        await event.edit("`Updating userbot, please wait....`")
        await update(event, repo, ups_rem, ac_br)
    return
def download_commit_summaries(
        repo_owner_name, repo_name, time_budget,
        include_merge_commit=False, max_commits=100000,
        delete_repo_after_finish=True):
    """Clone a GitHub repository and dump per-commit summaries to JSON.

    Writes ``authors.json``, ``files.json`` and ``commits.json`` under
    ``commit_data/<owner>_<name>``, covering at most *max_commits* commits
    of the ``master`` branch within the last *time_budget* months.

    :param repo_owner_name: GitHub owner/organisation name.
    :param repo_name: repository name.
    :param time_budget: history window in months.  NOTE(review): the code
        special-cases ``time_budget == 1`` as "no time limit", which looks
        like a sentinel — confirm the intended value.
    :param include_merge_commit: also record commits with >1 parent.
    :param max_commits: hard cap on the number of commits examined.
    :param delete_repo_after_finish: remove the temporary clone when done.
    """
    if not os.path.exists('tmp_repo'):
        os.mkdir('tmp_repo')
    if not os.path.exists('commit_data'):
        os.mkdir('commit_data')
    debug(
        'Generating Summary for \"' + repo_owner_name + '/' + repo_name + '\"',
        ' in last ', time_budget, 'months'
    )
    if not include_merge_commit:
        debug('Ignoring the merge commits!')
    else:
        debug('Including the merge commits!')
    repository_path = 'tmp_repo/' + repo_name
    if not os.path.exists(repository_path):
        debug('Cloning Repository from', 'https://github.com/' + repo_owner_name + '/' + repo_name + '.git',
              'into', repository_path)
        repo = Repo.clone_from(
            'https://github.com/' + repo_owner_name + '/' + repo_name + '.git', repository_path)
    else:
        repo = Repo(repository_path)
    author_str_to_id = {}
    file_str_to_id = {}
    all_authors = []
    all_files = []
    all_commit_summaries = []
    time_margin = (datetime.datetime.now() - datetime.timedelta(time_budget * 365 / 12)) \
        if time_budget != 1 else None
    if time_margin is not None:
        timestamp_margin = time_margin.timestamp()
    commits = list(repo.iter_commits('master'))
    debug('Total commits', len(commits))
    # Progress step: 1% of the processed commits.
    # FIX: for repos with fewer than 100 commits the original
    # `int(.../100)` evaluated to 0 and `i % mx` raised ZeroDivisionError;
    # clamp to at least 1.  Also hoisted out of the loop (it is constant).
    mx = max(1, min(max_commits, len(commits)) // 100)
    for i, commit in enumerate(commits):
        if i % mx == 0:
            debug("Completed", i / mx, "%")
        if i == max_commits:
            break
        author_time = commit.authored_datetime
        if time_margin is not None and author_time.timestamp() < timestamp_margin:
            break
        author = commit.author
        commit_time = commit.committed_datetime
        # Differing author/commit timestamps indicate an amend/rebase.
        post_commit_change = author_time != commit_time
        file_ids = []
        if not include_merge_commit and len(commit.parents) > 1:
            continue
        for parent in commit.parents:
            diffs = commit.diff(parent)
            for diff in diffs:
                fp = diff.b_path.strip()
                if fp not in file_str_to_id:
                    # Assign the next sequential id to a newly seen path.
                    file_str_to_id[fp] = len(file_str_to_id)
                    new_file = {
                        'id': file_str_to_id[fp],
                        'file_path': fp
                    }
                    all_files.append(new_file)
                file_ids.append(file_str_to_id[fp])
        # Authors are deduplicated on name+email.
        author_str = author.name + '-' + author.email
        if author_str not in author_str_to_id:
            author_str_to_id[author_str] = len(author_str_to_id)
            author_dict = {
                'id': author_str_to_id[author_str],
                'name': author.name,
                'email': author.email
            }
            all_authors.append(author_dict)
        commit_summary = {
            'id': commit.hexsha,
            'author_id': author_str_to_id[author_str],
            'timestamp': author_time.timestamp(),
            'time': str(author_time),
            'files': file_ids,
            'post_commit_change': post_commit_change,
            'is_merge_commit': len(commit.parents) > 1
        }
        all_commit_summaries.append(commit_summary)

    save_dir = 'commit_data/' + repo_owner_name + '_' + repo_name
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)
    # FIX: use context managers so the files are closed even if json.dump
    # raises (the original left handles open on error).
    with open(os.path.join(save_dir, 'authors.json'), 'w') as author_file:
        json.dump(all_authors, author_file)

    with open(os.path.join(save_dir, 'files.json'), 'w') as file_path_file:
        json.dump(all_files, file_path_file)

    with open(os.path.join(save_dir, 'commits.json'), 'w') as commits_file:
        json.dump(all_commit_summaries, commits_file)
    if delete_repo_after_finish:
        shutil.rmtree(repository_path)
    debug('Total authors: %d\tTotal Unique files: %d\tTotal commits: %d' \
          % (len(all_authors), len(all_files), len(all_commit_summaries)))
Esempio n. 40
0
async def upstream(event):
    """Force-redeploy the userbot from the official catuserbot repository.

    Deletes the local ``.git`` directory so the repository is re-created
    from upstream, then hands off to ``deploy()``.
    """
    event = await edit_or_reply(event,
                                "`Pulling the good cat repo wait a sec ....`")
    off_repo = "https://github.com/sandy1709/catuserbot"
    catcmd = f"rm -rf .git"
    # Removing .git deliberately triggers the InvalidGitRepositoryError
    # branch below, which re-initialises the repo from upstream.
    try:
        await runcmd(catcmd)
    except BaseException:
        pass
    try:
        txt = "`Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # FIX: `repo` is unbound when Repo() raises, so the original
        # `return repo.__del__()` crashed with NameError here.
        await event.edit(f"{txt}\n`directory {error} is not found`")
        return
    except GitCommandError as error:
        await event.edit(f"{txt}\n`Early failure! {error}`")
        return
    except InvalidGitRepositoryError:
        repo = Repo.init()
        origin = repo.create_remote("upstream", off_repo)
        origin.fetch()
        repo.create_head("master", origin.refs.master)
        repo.heads.master.set_tracking_branch(origin.refs.master)
        repo.heads.master.checkout(True)
    try:
        repo.create_remote("upstream", off_repo)
    except BaseException:
        pass
    ac_br = repo.active_branch.name
    ups_rem = repo.remote("upstream")
    ups_rem.fetch(ac_br)
    await event.edit("`Deploying userbot, please wait....`")
    await deploy(event, repo, ups_rem, ac_br, txt)
Esempio n. 41
0
def _get_updates(repo: Repo, branch: str) -> str:
    """Fetch *branch* from the upstream remote and return a formatted changelog."""
    repo.remote(Config.UPSTREAM_REMOTE).fetch(branch)
    base_url = Config.UPSTREAM_REPO.rstrip('/')
    entries = []
    for c in repo.iter_commits(f'HEAD..{Config.UPSTREAM_REMOTE}/{branch}'):
        entries.append(
            f"🔨 **#{c.count()}** : [{c.summary}]({base_url}/commit/{c}) 👷 __{c.author}__\n\n")
    return ''.join(entries)
Esempio n. 42
0
            onto_commit = next(commit.iter_parents()).hexsha
            from_commit = commit.hexsha
            git_cli.rebase('--onto', onto_commit, from_commit)
            break

    try:
        git_cli.merge(source, squash=True)

        if not repo.is_dirty():
            sys.exit(f'Cannot merge {source} into {target}. Branches are equal and they have never been merged before.')

        git_cli.commit('-m', f'Merged:{source}:{repo.heads[source].commit}')

    except GitCommandError as exc:

        git_cli.reset(hard=True)
        sys.exit(f'Command failed: {" ".join(exc.command)}\nReason:{exc.stderr or exc.stdout}')


if __name__ == "__main__":
    # Command-line entry point: merge a source branch into a target branch.
    arg_parser = ArgumentParser(prog="Merger", description="Merge branches")
    arg_parser.add_argument('--git-path', dest='git_path', default='.', help="Path to git repo")
    arg_parser.add_argument('source', help="Source branch")
    arg_parser.add_argument('target', help="Target branch")

    args = arg_parser.parse_args()

    # Open the repository at the given path and delegate to merge().
    repo = Repo(args.git_path)

    merge(repo, args.source, args.target)
Esempio n. 43
0
def check_and_update_repo(path_to_data):
    repo = Repo(path_to_data)
    for remote in repo.remotes:
        remote.pull()
    print "Pulled to the latest code"
    return
Esempio n. 44
0
def git_init(path, name):
    """Initialise a git repository at *path*.

    ``name`` is accepted for interface compatibility but is not used.
    """
    # FIX: the original guard `if InvalidGitRepositoryError:` tested the
    # exception *class* itself, which is always truthy, so it never guarded
    # anything.  Repo.init() is safe on both new and existing directories,
    # so initialise unconditionally.  (`os.path.join(path)` with a single
    # argument was also a no-op and has been dropped.)
    Repo.init(path)
Esempio n. 45
0
async def upstream(ups):
    "For .update command, check if the bot is up to date, update if specified"
    await ups.edit("`Checking for updates, please wait....`")
    conf = ups.pattern_match.group(1)
    off_repo = UPSTREAM_REPO_URL
    force_update = False

    try:
        txt = "`Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # FIX: `repo` is unbound when Repo() raises, so the original
        # `repo.__del__()` call here crashed with NameError.
        await ups.edit(f'{txt}\n`directory {error} is not found`')
        return
    except GitCommandError as error:
        await ups.edit(f'{txt}\n`Early failure! {error}`')
        return
    except InvalidGitRepositoryError as error:
        if conf != "now":
            await ups.edit(
                f"`Unfortunately, the directory {error} does not seem to be a git repository.\
            \nBut we can fix that by force updating the userbot using .update now.`"
            )
            return
        repo = Repo.init()
        origin = repo.create_remote('upstream', off_repo)
        origin.fetch()
        force_update = True
        # FIX: the original used attribute access on the hyphenated branch
        # name (`origin.refs.sql - extended`), which Python parses as a
        # subtraction with an undefined name `extended` and raises
        # NameError.  Hyphenated refs must be looked up by item access.
        repo.create_head('sql-extended', origin.refs['sql-extended'])
        repo.heads['sql-extended'].set_tracking_branch(origin.refs['sql-extended'])
        repo.heads['sql-extended'].checkout(True)

    ac_br = repo.active_branch.name
    if ac_br != 'sql-extended':
        # Custom branches are refused: the merge source would be ambiguous.
        await ups.edit(
            f'**[UPDATER]:**` Looks like you are using your own custom branch ({ac_br}). '
            'in that case, Updater is unable to identify '
            'which branch is to be merged. '
            'please checkout to any official branch`')
        repo.__del__()
        return

    try:
        repo.create_remote('upstream', off_repo)
    except BaseException:
        pass

    ups_rem = repo.remote('upstream')
    ups_rem.fetch(ac_br)

    changelog = await gen_chlog(repo, f'HEAD..upstream/{ac_br}')

    if not changelog and not force_update:
        await ups.edit(
            f'\n`Your BOT is`  **up-to-date**  `with`  **{ac_br}**\n')
        repo.__del__()
        return

    if conf != "now" and not force_update:
        # Only report the changelog; the user must confirm with ".update now".
        changelog_str = f'**New UPDATE available for [{ac_br}]:\n\nCHANGELOG:**\n`{changelog}`'
        if len(changelog_str) > 4096:
            # Telegram message limit — ship the changelog as a file instead.
            await ups.edit("`Changelog is too big, view the file to see it.`")
            file = open("output.txt", "w+")
            file.write(changelog_str)
            file.close()
            await ups.client.send_file(
                ups.chat_id,
                "output.txt",
                reply_to=ups.id,
            )
            remove("output.txt")
        else:
            await ups.edit(changelog_str)
        await ups.respond('`do \".update now\" to update`')
        return

    if force_update:
        await ups.edit(
            '`Force-Syncing to latest stable userbot code, please wait...`')
    else:
        await ups.edit('`Updating userbutt, please wait....`')
    # We're in a Heroku Dyno, handle it's memez.
    if HEROKU_APIKEY is not None:
        import heroku3
        heroku = heroku3.from_key(HEROKU_APIKEY)
        heroku_app = None
        heroku_applications = heroku.apps()
        if not HEROKU_APPNAME:
            await ups.edit(
                '`[HEROKU MEMEZ] Please set up the HEROKU_APPNAME variable to be able to update userbot.`'
            )
            repo.__del__()
            return
        for app in heroku_applications:
            if app.name == HEROKU_APPNAME:
                heroku_app = app
                break
        if heroku_app is None:
            await ups.edit(
                f'{txt}\n`Invalid Heroku credentials for updating userbot dyno.`'
            )
            repo.__del__()
            return
        await ups.edit('`[HEROKU MEMEZ]\
                        \nUserbot dyno build in progress, please wait for it to complete.`'
                       )
        ups_rem.fetch(ac_br)
        repo.git.reset("--hard", "FETCH_HEAD")
        heroku_git_url = heroku_app.git_url.replace(
            "https://", "https://*****:*****@")
        if "heroku" in repo.remotes:
            remote = repo.remote("heroku")
            remote.set_url(heroku_git_url)
        else:
            remote = repo.create_remote("heroku", heroku_git_url)
        try:
            remote.push(refspec="HEAD:refs/heads/sql-extended", force=True)
        except GitCommandError as error:
            await ups.edit(f'{txt}\n`Here is the error log:\n{error}`')
            repo.__del__()
            return
        await ups.edit('`Successfully Updated!\n'
                       'Restarting, please wait...`')
    else:
        # Classic Updater, pretty straightforward.
        try:
            ups_rem.pull(ac_br)
        except GitCommandError:
            repo.git.reset("--hard", "FETCH_HEAD")
        reqs_upgrade = await update_requirements()
        await ups.edit('`Successfully Updated!\n'
                       'Bot is restarting... Wait for a second!`')
        if BOTLOG:
            await ups.client.send_message(
                BOTLOG_CHATID,
                f"`Your UserButt Successfully Updated`",
            )
        # Spin a new instance of bot
        args = [sys.executable, "-m", "userbot"]
        execle(sys.executable, *args, environ)
        return
Esempio n. 46
0
async def _pull_from_repo(repo: Repo, branch: str) -> None:
    """Check out *branch*, hard-reset it, then pull it from the upstream remote."""
    repo.git.checkout(branch, force=True)
    repo.git.reset('--hard', branch)
    upstream_remote = repo.remote(Config.UPSTREAM_REMOTE)
    upstream_remote.pull(branch, force=True)
    await asyncio.sleep(1)
Esempio n. 47
0
class RepoUtil:
    """Helper around a cloned git repository.

    Schedules benchmark/test runs for recent commits and mirrors commit
    metadata into MongoDB collections.
    """

    def __init__(self,
                 dir: Path,
                 url: str,
                 project_config: ProjectConfig = None):
        """Open the repo cloned from *url* under *dir*, cloning it if absent."""
        self.url = url
        # Repository name derived from the URL basename (without ".git").
        self.name = str(os.path.basename(url).split('.')[0])
        self.dir = dir / self.name
        self.project_config = project_config

        if self.dir.exists():
            logger.info(f"Reading repo from {self.dir}")
            self.repo = Repo(str(self.dir))
        else:
            logger.info(f"Cloning repo from {self.url}")
            self.dir.parent.mkdir(parents=True, exist_ok=True)
            self.repo = Repo.clone_from(
                self.url,
                str(self.dir),
            )

    def _get_commits(self,
                     branches: List[Union[str, Head]] = None,
                     min_age=default_min_age,
                     max_per_branch=10):
        """Yield BranchCommit pairs: up to *max_per_branch* commits per branch
        (active branches by default)."""
        if not branches:
            branches = get_active_branches(self.repo, min_age)

        for branch in branches:
            for commit in iter_revision(self.repo,
                                        branch.head,
                                        limit=max_per_branch):
                yield BranchCommit(branch=branch, commit=commit)

    def get_commits(self, branches: List[Union[str, Head]] = None, min_age=default_min_age, max_per_branch=10) -> \
            Iterable[BranchCommit]:
        """Like _get_commits, but with duplicate branch/commit pairs removed."""
        for branch_commit in distinct(
                self._get_commits(branches, min_age, max_per_branch)):
            yield branch_commit

    def schedule_run(self, commit: str, branch: str, job_name: str = "test"):
        """Generator: insert schedule documents for *job_name* at *commit*
        and yield the number of repetitions requested per document."""
        self.project_config.git.main_repo.set_fake_head(commit=commit,
                                                        branch=branch)

        context = dict(**self.project_config.context)

        init_job = self.project_config.jobs[0]

        # Skip commits whose initial job already exhausted its retries.
        if init_job.db_broken_count() >= init_job.retries:
            logger.warning(
                f"Skipping job '{init_job}' since it already has {init_job.db_broken_count()} broken builds"
            )
            yield 0
        else:
            test_job = self.project_config.get(job_name)
            for schedule_document, schedule_index in self._process_job(
                    test_job, context):
                # Subtract runs that are already scheduled for this index.
                scheduled_already = DBStats.get_schedule_repetitions(
                    schedule_index)
                schedule_document.details.repetitions -= scheduled_already
                if schedule_document.details.repetitions > 0:
                    Mongo().col_scheduler.insert(schedule_document)
                    logger.debug(
                        f"Inserted {schedule_document.details.repetitions} requests for the job:\n{test_job.pretty_index}"
                    )
                    yield schedule_document.details.repetitions
                else:
                    logger.debug(
                        f"Already scheduled {scheduled_already} runs, which is more than enough:\n{test_job.pretty_index}"
                    )

    def schedule_runs(self,
                      branches=None,
                      job_name: str = "test",
                      max_per_branch=10):
        """Schedule runs of *job_name* for recent commits on *branches*."""
        for branch_commit in self.get_commits(branches,
                                              max_per_branch=max_per_branch):
            runs = self.schedule_run(
                str(branch_commit.commit),
                branch_commit.branch.head.remote_head,
                job_name,
            )
            total = sum(runs)
            logger.info(
                f"Scheduled {total} requests for the commit {branch_commit.commit}"
            )

    @classmethod
    def _process_job(cls, job: ProjectConfigJob, context: Dict):
        """Expand *job* into concrete variations and yield
        (schedule_document, schedule_index) pairs."""
        for sub_job, extra_context, variation in job.expand(context):
            rest = dict(job=sub_job)
            new_context = {**extra_context, **variation, **rest}
            schedule_index = TimerIndex(**job_util.get_index(job, new_context))
            # NOTE(review): run counts are fetched but currently unused.
            ok_count, broken_count = DBStats.get_run_count(schedule_index)

            # TODO specify repetitions
            schedule_details = ColScheduleDetails(priority=0, repetitions=7)
            schedule_document = ColSchedule(
                index=schedule_index,
                details=schedule_details,
                status=ColScheduleStatus.NotProcessed,
                worker=None)
            yield schedule_document, schedule_index

    def to_latest(self):
        """Pull the default remote to bring the clone up to date."""
        logger.info("pulling latest changes")
        self.repo.remote().pull()

    def _update_edges(self, documents: List[ColRepoInfo]):
        """Fill in each document's children list from the parent links."""
        children = defaultdict(list)
        for d in documents:
            for p in d.parents:
                children[p].append(d.commit)

        for d in documents:
            d.children = children[d.commit]

        return documents

    def extract_info(self, per_branch, max_age, single_branch=None):
        """Collect commit metadata per branch and upsert it into MongoDB."""
        logger.info("obtaining commit details")
        branches = single_branch if single_branch else get_active_branches(
            self.repo, max_age)

        # Map each commit to the list of short branch names it appears on.
        info: Dict[Commit, List[str]] = defaultdict(list)
        documents = list()

        for branch in branches:
            branch_head = None if isinstance(branch, str) else branch.head
            branch_name = branch if not branch_head else str(branch.head)
            # Normalise to "origin/<name>" and keep the short name too.
            branch_full = f"origin/{branch_name}" if not branch_name.startswith(
                "origin/") else branch_name
            branch_short = branch_full[7:]

            for commit in iter_revision(self.repo,
                                        branch_head or branch_full,
                                        limit=per_branch,
                                        first_parent=False):
                info[commit].append(branch_short)

        for commit, branches in info.items():
            doc = ColRepoInfo()
            doc.author = commit.author.name
            doc.email = commit.author.email
            doc.commit = commit.hexsha
            doc.branches = branches
            # A single owning branch only when the commit is unambiguous.
            doc.branch = None if len(branches) > 1 else branches[0]
            doc.authored_datetime = commit.authored_datetime
            doc.committed_datetime = commit.committed_datetime
            doc.message = commit.message
            doc.distance = -1
            doc.parents = [c.hexsha for c in commit.parents]
            documents.append(doc)

        logger.info("comparing changes in db")
        # to_be_updated = [doc.commit for doc in documents]
        # rexisting_cmts = [x.commit for x in Mongo().col_repo_info.find({}, {"commit": 1})]
        results = Mongo().col_repo_info.find(
            {"commit": in_list([doc.commit for doc in documents])}, ["commit"],
            raw=True)

        logger.info("traversing parents")
        documents = self._update_edges(documents)

        # Insert only the commits not yet present in the collection.
        existing = [r.commit for r in results]
        filtered = [d for d in documents if d.commit not in existing]
        logger.info(
            f"inspected total of {len(documents)} commits, {len(filtered)} new ones"
        )

        if filtered:
            Mongo().col_repo_info.insert_many(filtered)
        else:
            logger.info(f"no new commits to add...")

        logger.info("updating commit parents and children")
        changes = list(Mongo().col_repo_info.batch_update(
            documents,
            lambda x: dict(commit=x.commit),
            lambda x: dict(parents=x.parents, children=x.children),
        ))
        logger.info(f"updated {len(changes)} parents and children")
Esempio n. 48
0
# Command-line interface of the DATESTAMP / ChangeLog update script.
parser = argparse.ArgumentParser(description='Update DATESTAMP and generate '
                                 'ChangeLog entries')
parser.add_argument('-g', '--git-path', default='.',
                    help='Path to git repository')
parser.add_argument('-p', '--push', action='store_true',
                    help='Push updated active branches')
parser.add_argument('-d', '--dry-mode',
                    help='Generate patch for ChangeLog entries and do it'
                         ' even if DATESTAMP is unchanged; folder argument'
                         ' is expected')
parser.add_argument('-c', '--current', action='store_true',
                    help='Modify current branch (--push argument is ignored)')
args = parser.parse_args()

# Repository and its 'origin' remote, used by the update routines below.
repo = Repo(args.git_path)
origin = repo.remotes['origin']


def update_current_branch(ref_name):
    """Walk backwards from HEAD, counting commits until the most recent
    'Daily bump.' commit (by the bumping bot's email) is found.

    NOTE(review): ``ref_name`` is not used in the portion of this function
    visible here — the function appears truncated in this excerpt; confirm
    against the full source.
    """
    commit = repo.head.commit
    # Number of commits traversed so far, counting HEAD itself.
    commit_count = 1
    while commit:
        if (commit.author.email == '*****@*****.**'
                and commit.message.strip() == 'Daily bump.'):
            break
        # We support merge commits but only with 2 parents
        assert len(commit.parents) <= 2
        # Follow the last parent (for a merge commit this is the merged-in
        # branch side).
        commit = commit.parents[-1]
        commit_count += 1
Esempio n. 49
0
from git import Repo

# Usage: python3 misc/make_changelog.py 0.5.9

import sys

# Target release version, taken from the first CLI argument.
ver = sys.argv[1]

# Scan the most recent 200 commits on 'master' for changelog candidates.
g = Repo('.')
commits = list(g.iter_commits('master', max_count=200))
# Index markers delimiting the commit range of interest (-1 = not found yet).
begin, end = -1, 0


def format(c):
    """Render one commit as a changelog line: summary plus bolded author."""
    return '%s (by **%s**)' % (c.summary, c.author)


print('Notable changes:')

# Changes grouped by component tag -> list of formatted lines.
notable_changes = {}
# Flat list of every formatted change line, regardless of tag.
all_changes = []

# Maps commit-prefix tags to human-readable section titles.
# NOTE(review): this mapping appears truncated in this excerpt.
details = {
    'cpu': 'CPU backends',
    'cuda': 'CUDA backend',
    'doc': 'Documentation',
    'infra': 'Infrastructure',
    'cli': 'Command line interface',
    'ir': 'Intermediate representation',
    'lang': 'Language and syntax',
    'metal': 'Metal backend',
Esempio n. 50
0
 def __init__(self, project_dir, old_version, module_name):
     """Record project metadata and open the git repository at *project_dir*."""
     self.module_name = module_name
     self.old_version = old_version
     self.project_dir = project_dir
     # Handle to the project's git repository.
     self.repo = Repo(project_dir)
Esempio n. 51
0
def init_repository(path):
    """Open and return a ``Repo`` handle for the git repository at *path*."""
    repository = Repo(path)
    return repository
Esempio n. 52
0
async def upstream(ups):
    """Handle the ``.update`` command.

    Checks the upstream repository for new commits and reports a changelog;
    when invoked as ``.update now`` it updates the bot, either by pushing to
    the configured Heroku dyno or by pulling in place and re-executing.

    Fixes applied: the ``NoSuchPathError``/``GitCommandError`` handlers used
    to call ``repo.__del__()`` although ``repo`` is unbound when ``Repo()``
    raises (NameError); and ``"now" not in conf`` raised TypeError when the
    command carried no argument (``conf`` is None).
    """
    pagal = await eor(ups, "`Checking for updates, please wait....`")
    conf = ups.pattern_match.group(1)
    off_repo = UPSTREAM_REPO_URL
    try:
        txt = "`Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # ``repo`` was never bound here, so no cleanup call is possible.
        await eod(pagal, f"{txt}\n`directory {error} is not found`", time=10)
        return
    except GitCommandError as error:
        # Same: ``repo`` is unbound when Repo() itself failed.
        await eod(pagal, f"{txt}\n`Early failure! {error}`", time=10)
        return
    except InvalidGitRepositoryError as error:
        if conf != "now":
            await eod(
                pagal,
                f"**Unfortunately, the directory {error} does not seem to be a git repository.Or Maybe it just needs a sync verification with {GIT_REPO_NAME} But we can fix that by force updating the userbot using** `.update now.`",
                time=30,
            )
            return
        # Not a git repo yet: bootstrap one in place and track upstream/main.
        repo = Repo.init()
        origin = repo.create_remote("upstream", off_repo)
        origin.fetch()
        repo.create_head("main", origin.refs.main)
        repo.heads.main.set_tracking_branch(origin.refs.main)
        repo.heads.main.checkout(True)
    ac_br = repo.active_branch.name
    if ac_br != "main":
        await eod(
            pagal,
            f"**[UPDATER]:**` You are on ({ac_br})\n Please change to main branch.`",
        )
        repo.__del__()
        return
    try:
        repo.create_remote("upstream", off_repo)
    except BaseException:
        # The remote may already exist; that is fine.
        pass
    ups_rem = repo.remote("upstream")
    ups_rem.fetch(ac_br)
    changelog = await gen_chlog(repo, f"HEAD..upstream/{ac_br}")
    # Report-only mode: no argument, or an argument other than "now".
    if not conf or "now" not in conf:
        if changelog:
            changelog_str = f"**New UPDATE available for [[{ac_br}]]({UPSTREAM_REPO_URL}/tree/{ac_br}):\n\nCHANGELOG**\n\n{changelog}"
            if len(changelog_str) > 4096:
                # Telegram message limit exceeded: ship the changelog as a file.
                await eor(pagal,
                          "`Changelog is too big, view the file to see it.`")
                with open("output.txt", "w+") as file:
                    file.write(changelog_str)
                await ups.client.send_file(
                    ups.chat_id,
                    "output.txt",
                    caption=f"Do `{hndlr}update now` to update.",
                    reply_to=ups.id,
                )
                remove("output.txt")
            else:
                return await eod(
                    pagal,
                    f"{changelog_str}\n\nDo `{hndlr}update now` to update.")
        else:
            await eod(
                pagal,
                f"\n`Your BOT is`  **up-to-date**  `with`  **[[{ac_br}]]({UPSTREAM_REPO_URL}/tree/{ac_br})**\n",
                time=10,
            )
            repo.__del__()
            return
    if Var.HEROKU_API is not None:
        import heroku3

        heroku = heroku3.from_key(Var.HEROKU_API)
        heroku_app = None
        heroku_applications = heroku.apps()
        if not Var.HEROKU_APP_NAME:
            await eod(
                pagal,
                "`Please set up the HEROKU_APP_NAME variable to be able to update userbot.`",
                time=10,
            )
            repo.__del__()
            return
        for app in heroku_applications:
            if app.name == Var.HEROKU_APP_NAME:
                heroku_app = app
                break
        if heroku_app is None:
            await eod(
                pagal,
                f"{txt}\n`Invalid Heroku credentials for updating userbot dyno.`",
                time=10,
            )
            repo.__del__()
            return
        await eor(
            pagal,
            "`Userbot dyno build in progress, please wait for it to complete.`"
        )
        # Hard-sync the working tree to upstream, then push it to Heroku's
        # git remote to trigger a dyno rebuild.
        ups_rem.fetch(ac_br)
        repo.git.reset("--hard", "FETCH_HEAD")
        heroku_git_url = heroku_app.git_url.replace(
            "https://", "https://*****:*****@")
        if "heroku" in repo.remotes:
            remote = repo.remote("heroku")
            remote.set_url(heroku_git_url)
        else:
            remote = repo.create_remote("heroku", heroku_git_url)
        try:
            remote.push(refspec=f"HEAD:refs/heads/{ac_br}", force=True)
        except GitCommandError as error:
            await eod(pagal,
                      f"{txt}\n`Here is the error log:\n{error}`",
                      time=10)
            repo.__del__()
            return
        await eod(pagal,
                  "`Successfully Updated!\nRestarting, please wait...`",
                  time=60)
    else:
        # Classic Updater, pretty straightforward.
        try:
            ups_rem.pull(ac_br)
        except GitCommandError:
            # Local changes conflict with the pull; discard them.
            repo.git.reset("--hard", "FETCH_HEAD")
        await updateme_requirements()
        await eod(
            pagal,
            "`Successfully Updated!\nBot is restarting... Wait for a second!`",
        )
        # Spin a new instance of bot
        args = [sys.executable, "./resources/startup/deploy.sh"]
        execle(sys.executable, *args, environ)
        return
Esempio n. 53
0
    def _build_asset(self, asset):
        """
        Build app assets.

        Args:
            self: class instance
            asset: what to build

        Returns:
            On success: True.
            On failure: False.

        """
        # make sure the build path exists
        build_path = self._path / 'build'
        build_path.mkdir(exist_ok=True)

        build_repo_path = None
        if not asset.get('folder'):
            Log.a().warning(
                'repo folder must be set when specifying a build asset'
            )
            return False

        # clone build repo
        build_repo_path = build_path / asset['folder']

        if asset.get('repo'):
            # if repo is set, clone and build it
            # NOTE(review): http.sslVerify=false disables TLS certificate
            # verification for the clone — confirm this is intentional.
            try:
                if asset.get('tag'):
                    Repo.clone_from(
                        asset['repo'], str(build_repo_path),
                        branch=asset['tag'], config='http.sslVerify=false'
                    )
                else:
                    Repo.clone_from(
                        asset['repo'], str(build_repo_path),
                        config='http.sslVerify=false'
                    )
            except GitError as err:
                Log.an().error(
                    'cannot clone git repo for build: %s [%s]',
                    asset['repo'], str(err)
                )
                return False

        # if repo is not set, packaged build scripts are included with the
        # workflow in the build_repo_path

        # build
        # ShellWrapper.invoke returns captured stdout on success, False on
        # failure — hence the explicit "is False" checks below.
        cmd = 'make -C "{}"'.format(str(build_repo_path))
        Log.some().info('build command: %s', cmd)
        cmd_result = ShellWrapper.invoke(cmd)
        if cmd_result is False:
            Log.a().warning('cannot build app: %s', cmd)
            return False

        Log.some().info('make stdout: %s', cmd_result)

        # move built assets
        # make sure asset folder exists
        if not asset.get('dst'):
            Log.a().warning('asset dst required for app %s', self._app['name'])
            return False

        if not asset.get('src'):
            Log.a().warning('asset src required for app %s', self._app['name'])
            return False

        # create asset destination
        asset_path = self._path / asset['dst']
        asset_path.mkdir(exist_ok=True)

        # set src path
        src_path = self._path / asset['src']

        if 'zip' in asset:
            # create a tar.gz of src
            cmd = 'tar -czf "{}" --directory="{}" .'.format(
                str(asset_path / '{}.tar.gz'.format(asset['zip'])),
                str(src_path)
            )
            Log.some().info('zipping: %s', cmd)
            cmd_result = ShellWrapper.invoke(cmd)
            if cmd_result is False:
                Log.a().warning('cannot zip asset src: %s', cmd)
                return False

            Log.some().info('tar stdout: %s', cmd_result)

        else:
            # move without creating tar.gz
            cmd = 'mv "{}" "{}"'.format(str(src_path), str(asset_path))
            Log.some().info('moving: %s', cmd)
            cmd_result = ShellWrapper.invoke(cmd)
            if cmd_result is False:
                Log.a().warning('cannot move asset src: %s', cmd)
                return False

            Log.some().info('mv stdout: %s', cmd_result)

        return True
Esempio n. 54
0
def create_repository(path):
    """Initialise a brand-new git repository at *path* and return its handle."""
    new_repo = Repo.init(path)
    return new_repo
Esempio n. 55
0
def submit(name_of_the_problem):
    """Stage *name_of_the_problem* in the repository at the current directory."""
    local_repo = Repo('.')
    paths_to_stage = [name_of_the_problem]
    local_repo.index.add(paths_to_stage, force=False)
Esempio n. 56
0
            "%d %B %Y").date()
        row_cal = [
            event_date, data_cal[4].replace("\n",
                                            '').strip().split('(',
                                                              1)[0].strip(),
            data_cal[4].replace("\n",
                                '').split('(', 1)[1].split(')')[0], added_date,
            data_cal[6].replace("\n", '').replace("\r", '').strip().replace(
                ",",
                '').replace('"', ''), data_cal[9].replace("\n", '').replace(
                    "\r", '').strip().replace(",", '').replace('"',
                                                               ''), sysdate
        ]
        file_cal.write(','.join(str(e) for e in row_cal) + '\n')
        # print(','.join(str(e) for e in row_cal) + '\n')
    i += 1

print('Completed')
# Flush and close the CSV written by the scrape loop above.
file_cal.close()
print('Github Push')
# github push
# NOTE(review): empty repo_dir presumably resolves to the current working
# directory — confirm the intended repository location.
repo_dir = ''
repo = Repo(repo_dir)
file_list = ['coin_calendar.csv']
commit_message = 'Updated Calendar Data File'
# Stage the regenerated CSV, commit, and push to origin.
repo.index.add(file_list)
repo.index.commit(commit_message)
origin = repo.remote('origin')
origin.push()
print('Github Push Finished')
Esempio n. 57
0
async def upstream(event):
    """For .update command, check if the bot is up to date, update if specified.

    Fix applied: the ``NoSuchPathError``/``GitCommandError`` handlers used to
    ``return repo.__del__()``, but ``repo`` is unbound when ``Repo()`` raises,
    so that line crashed with NameError instead of reporting the error.
    """
    await event.edit("`Checking for updates, please wait....`")
    conf = event.pattern_match.group(1)
    off_repo = UPSTREAM_REPO_URL
    force_update = False
    try:
        txt = "`Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # ``repo`` was never bound; no cleanup is possible or needed.
        await event.edit(f'{txt}\n`directory {error} is not found`')
        return
    except GitCommandError as error:
        # Same unbound-``repo`` situation as above.
        await event.edit(f'{txt}\n`Early failure! {error}`')
        return
    except InvalidGitRepositoryError as error:
        if conf is None:
            return await event.edit(
                f"`Unfortunately, the directory {error} does not seem to be a git repository."
                "\nBut we can fix that by force updating the userbot using .update now.`"
            )
        # Not a git repo yet: bootstrap one and track upstream/master.
        repo = Repo.init()
        origin = repo.create_remote('upstream', off_repo)
        origin.fetch()
        force_update = True
        repo.create_head('master', origin.refs.master)
        repo.heads.master.set_tracking_branch(origin.refs.master)
        repo.heads.master.checkout(True)

    ac_br = repo.active_branch.name
    if ac_br != UPSTREAM_REPO_BRANCH:
        await event.edit(
            '**[UPDATER]:**\n'
            f'`Looks like you are using your own custom branch ({ac_br}). '
            'in that case, Updater is unable to identify '
            'which branch is to be merged. '
            'please checkout to any official branch`')
        return repo.__del__()
    try:
        repo.create_remote('upstream', off_repo)
    except BaseException:
        # The remote may already exist; that is fine.
        pass

    ups_rem = repo.remote('upstream')
    ups_rem.fetch(ac_br)

    changelog = await gen_chlog(repo, f'HEAD..upstream/{ac_br}')

    if changelog == '' and force_update is False:
        await event.edit(
            f'\n`Your USERBOT is`  **up-to-date**  `with`  **{UPSTREAM_REPO_BRANCH}**\n'
        )
        return repo.__del__()

    # Report-only mode: show the changelog (as a file if over the 4096-char
    # Telegram message limit).
    if conf is None and force_update is False:
        changelog_str = f'**New UPDATE available for [{ac_br}]:\n\nCHANGELOG:**\n`{changelog}`'
        if len(changelog_str) > 4096:
            await event.edit("`Changelog is too big, view the file to see it.`"
                             )
            with open("output.txt", "w+") as file:
                file.write(changelog_str)
            await event.client.send_file(
                event.chat_id,
                "output.txt",
                reply_to=event.id,
            )
            remove("output.txt")
        else:
            await event.edit(changelog_str)
        return await event.respond(
            'do `.update now` or `.update deploy` to update')

    if force_update:
        await event.edit(
            '`Force-Syncing to latest stable userbot code, please wait...`')
    else:
        await event.edit('`Updating aone-Kangbot, please wait....`')
    # Dispatch to the requested update strategy.
    if conf == "now":
        await update(event, repo, ups_rem, ac_br)
    elif conf == "deploy":
        await deploy(event, repo, ups_rem, ac_br, txt)
    return
Esempio n. 58
0
except ImportError:
    print(
        "Error: missing dependencies! Please run this command to install them:"
    )
    print("pip install path.py requests python-dateutil GitPython==0.3.2.RC1")
    sys.exit(1)

# Colorized output is optional: fall back to plain text without pygments.
try:
    from pygments.console import colorize
except ImportError:
    colorize = lambda color, text: text

# Matches JIRA ticket keys such as "ABC-123".
JIRA_RE = re.compile(r"\b[A-Z]{2,}-\d+\b")
# Matches pull-request refs of the form "remotes/edx/pr/<number>".
PR_BRANCH_RE = re.compile(r"remotes/edx/pr/(\d+)")
# Repository root is the directory containing this script.
PROJECT_ROOT = path(__file__).abspath().dirname()
repo = Repo(PROJECT_ROOT)
git = repo.git


class memoized(object):
    """
    Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).

    https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
    """
    def __init__(self, func):
        # The wrapped callable.
        self.func = func
        # Maps call arguments to previously computed results.
        self.cache = {}
Esempio n. 59
0
async def upstream(ups):
    """For .update command, check if the bot is up to date, update if specified.

    Fix applied: the ``NoSuchPathError``/``GitCommandError`` handlers used to
    call ``repo.__del__()``, but ``repo`` is unbound when ``Repo()`` raises,
    so that call crashed with NameError instead of reporting the error.
    """
    conf = ups.pattern_match.group(1)
    await ups.edit("Checking for updates, please wait....")
    off_repo = UPSTREAM_REPO_URL
    force_update = False
    try:
        txt = "Oops.. Updater cannot continue due to "
        txt += "some problems occured`\n\n**LOGTRACE:**\n"
        repo = Repo()
    except NoSuchPathError as error:
        # ``repo`` was never bound; no cleanup is possible or needed.
        await ups.edit(f'{txt}\ndirectory {error} is not found')
        return
    except GitCommandError as error:
        # Same unbound-``repo`` situation as above.
        await ups.edit(f'{txt}\nEarly failure! {error}')
        return
    except InvalidGitRepositoryError as error:
        if conf != "now":
            await ups.edit(
                f"**Hey ßoss!!!**😁😁\n__To get the Latest update of__ \n©MARSHMELLO\n\n do |`.update now`| 😎😎 "
            )
            return
        # Not a git repo yet: bootstrap one and track upstream/master.
        repo = Repo.init()
        origin = repo.create_remote('upstream', off_repo)
        origin.fetch()
        force_update = True
        repo.create_head('master', origin.refs.master)
        repo.heads.master.set_tracking_branch(origin.refs.master)
        repo.heads.master.checkout(True)
    ac_br = repo.active_branch.name
    if ac_br != 'master':
        await ups.edit(
            f'**[UPDATER]:**` Looks like you are using your own custom branch ({ac_br}). '
            'in that case, Updater is unable to identify '
            'which branch is to be merged. '
            'please checkout to any official branch`')
        repo.__del__()
        return
    try:
        repo.create_remote('upstream', off_repo)
    except BaseException:
        # The remote may already exist; that is fine.
        pass
    ups_rem = repo.remote('upstream')
    ups_rem.fetch(ac_br)
    changelog = await gen_chlog(repo, f'HEAD..upstream/{ac_br}')
    if not changelog and not force_update:
        await ups.edit(
            f'\n**{ac_br} please redeploy me I have some internal problems i guess**\n'
        )
        repo.__del__()
        return
    # Report-only mode: show the changelog (as a file if over the 4096-char
    # Telegram message limit).
    if conf != "now" and not force_update:
        changelog_str = f'**New UPDATE available for [{ac_br}]:\n\nCHANGELOG:**\n`{changelog}`'
        if len(changelog_str) > 4096:
            await ups.edit("`Changelog is too big, view the file to see it.`")
            with open("output.txt", "w+") as file:
                file.write(changelog_str)
            await ups.client.send_file(
                ups.chat_id,
                "output.txt",
                reply_to=ups.id,
            )
            remove("output.txt")
        else:
            await ups.edit(changelog_str)
        await ups.respond("do `.update now` to update")
        return
    if force_update:
        await ups.edit(
            'Force-Syncing to latest stable userbot code, please wait master...😅😅'
        )
    else:
        await ups.edit('`Updating userbot, please wait....you arey best boss🤗😇'
                       )
    if HEROKU_API_KEY is not None:
        import heroku3
        heroku = heroku3.from_key(HEROKU_API_KEY)
        heroku_app = None
        heroku_applications = heroku.apps()
        if not HEROKU_APP_NAME:
            await ups.edit(
                'Please set up the `HEROKU_APP_NAME` variable to be able to update userbot.'
            )
            repo.__del__()
            return
        for app in heroku_applications:
            if app.name == HEROKU_APP_NAME:
                heroku_app = app
                break
        if heroku_app is None:
            await ups.edit(
                f'{txt}\n`Invalid Heroku credentials for updating userbot dyno.`'
            )
            repo.__del__()
            return
        # Hard-sync the working tree to upstream, then push to Heroku's git
        # remote to trigger a dyno rebuild.
        ups_rem.fetch(ac_br)
        repo.git.reset("--hard", "FETCH_HEAD")
        heroku_git_url = heroku_app.git_url.replace(
            "https://", "https://*****:*****@")
        if "heroku" in repo.remotes:
            remote = repo.remote("heroku")
            remote.set_url(heroku_git_url)
        else:
            remote = repo.create_remote("heroku", heroku_git_url)
            await ups.edit("`⬛⬛⬛⬛ \n⬛✳️✳️⬛ \n⬛✳️✳️⬛ \n⬛⬛⬛⬛`")
            await asyncio.sleep(1)
            await ups.edit("`⬛⬛⬛⬛ \n⬛🔴🔴⬛ \n⬛🔴🔴⬛ \n⬛⬛⬛⬛`")
            await asyncio.sleep(1)
            await ups.edit("`⬛⬛⬛⬛ \n⬛🌕🌕⬛ \n⬛🌕🌕⬛ \n⬛⬛⬛⬛`")
            await asyncio.sleep(1)
            await ups.edit("`⬛⬛⬛⬛ \n⬛🔵🔵⬛ \n⬛🔵🔵⬛ \n⬛⬛⬛⬛`")
            await asyncio.sleep(1)
            await ups.edit("`⬛⬛⬛⬛ \n⬛❇️❇️⬛ \n⬛❇️❇️⬛ \n⬛⬛⬛⬛`")
            await asyncio.sleep(1)
        await ups.edit(
            "`⚜️Updating MARSHMELLO⚜️\n\nYou are the 👑KING👑 Boss!!\n\nPlease wait 5min😁😁\nThen try .mello to check` 😎😎\n\n**Powered by :-**\n©MARSHMELLO "
        )
        remote.push(refspec="HEAD:refs/heads/master", force=True)
    else:
        # Classic updater: pull in place and restart the process.
        try:
            ups_rem.pull(ac_br)
        except GitCommandError:
            # Local changes conflict with the pull; discard them.
            repo.git.reset("--hard", "FETCH_HEAD")
        await update_requirements()
        await ups.edit('`Successfully Updated!\n'
                       'Bot is restarting... Wait for a second!`')
        # Spin a new instance of bot
        args = [sys.executable, "-m", "userbot"]
        execle(sys.executable, *args, environ)
        return
Esempio n. 60
0
def local_run(name_of_the_problem, type_of_run, lang):
    """Stage and commit *name_of_the_problem* in the repository rooted at $PWD."""
    working_repo = Repo(os.getenv('PWD'))
    index = working_repo.index
    index.add([name_of_the_problem], force=False)
    index.commit(commit_msg(name_of_the_problem, type_of_run, lang))