Beispiel #1
0
 def save_favorite(self, directory, name, title=None, page=None):
     """Persist a favorite's title or page to disk and commit the change.

     Args:
         directory: repository root containing the 'favorites' tree.
         name: name of the favorite node to update.
         title: new title text, when updating the title.
         page: page object (must expose serialize()/get_type()/description()),
             when updating the page.

     Returns:
         True on a clean commit, False when git reports errors; implicitly
         returns None when neither title nor page is given (preserved
         behavior of the original).
     """
     self.logger.info(__name__, 'save_favorite', directory, name)

     if title is not None:
         git.cd(directory + '/favorites/' + name)
         # 'w+' truncates any existing title file before writing.
         with open('title', 'w+') as favorite:
             favorite.write(title)
         git.add()
         output, errors = git.commit("updated " + name + " with new title " + title)
         if errors != "":
             self.logger.error(__name__, 'save_favorite', directory, title, errors)
             return False
         return True
     elif page is not None:
         git.cd(directory + '/favorites/' + name)
         # Context managers guarantee the handles are closed even if a
         # write raises (originals leaked on error).
         with open('page', 'w+') as favorite:
             favorite.write(page.serialize())
         with open('type', 'w+') as favorite:
             favorite.write(page.get_type())
         git.add()
         output, errors = git.commit("updated " + name + " with new page " + page.description())
         if errors != "":
             self.logger.error(__name__, 'save_favorite', directory, page, errors)
             return False
         return True
Beispiel #2
0
    def apply(self, reverse=False, stop_on_error=True, commit=False):
        """Apply every patch in self.patches, optionally as one commit.

        Returns a (all_patches_applied, failed_patches) tuple.
        """
        failed_patches = []

        for patch in self.patches:
            # Committing each patch individually is far too slow in the
            # Chromium repo (~10 minutes on a fast dev machine), so when
            # `commit` is set we only stage changes in the index here and
            # create a single commit at the end.
            ok = patch.apply(reverse=reverse, index=commit, commit=False)
            if not ok:
                failed_patches.append(patch)
                if stop_on_error:
                    break

        all_patches_applied = not failed_patches

        if commit:
            if all_patches_applied:
                author = 'Electron Build Process <*****@*****.**>'
                message = 'Apply Electron patches'
                git.commit(self.repo_path, author=author, message=message)
            else:
                # Undo the partially staged state.
                git.reset(self.repo_path)

        return (all_patches_applied, failed_patches)
Beispiel #3
0
  def apply(self, reverse=False, stop_on_error=True, commit=False):
    """Apply all patches; optionally stage and commit them in one commit.

    Returns (all_patches_applied, failed_patches).
    """
    failures = []

    for patch in self.patches:
      # Per-patch commits are prohibitively slow in the Chromium repo
      # (roughly 10 minutes), so we only add changes to the index here
      # and commit everything in one go afterwards.
      if not patch.apply(reverse=reverse, index=commit, commit=False):
        failures.append(patch)
        if stop_on_error:
          break

    succeeded = len(failures) == 0

    if commit and not succeeded:
      # Roll back whatever got staged before the failure.
      git.reset(self.repo_path)

    if commit and succeeded:
      git.commit(self.repo_path,
                 author='Electron Build Process <*****@*****.**>',
                 message='Apply Electron patches')

    return (succeeded, failures)
Beispiel #4
0
def merge(source_branch, dest_branch):
    """Squash-merge *source_branch* into *dest_branch*."""
    if not git.check_branch_exists(source_branch):
        return fatal('Source branch does not exist.')
    if not git.check_branch_exists(dest_branch):
        return fatal('Destination branch does not exist.')
    if not git.is_clean_workdir():
        return fatal('Current working copy should be clean in order to '
                     'perform merge.')

    git.checkout(dest_branch)

    # A previous squash-merge of the same branch is replaced, not stacked.
    previous = _get_previous_merge_commit(source_branch, dest_branch)
    if previous:
        print('Branch {source} has already been merged into {dest}. '
              'Deleting the previous merge commit.'
              ''.format(source=source_branch, dest=dest_branch))
        _delete_commit(previous)

    result = git.merge_squash(source_branch)
    if result.status_code != 0:
        return fatal('Automatic merge failed.\n'
                     'Run `git status` to see the conflicts.\n'
                     'Run `git reset --merge` to abort merge.')

    # FIXME currently assumes that it is run from the repo root.
    git.commit('--file=.git/SQUASH_MSG')
    print('Merged {source} into {dest}.'
          ''.format(source=source_branch, dest=dest_branch))
Beispiel #5
0
def run():
    """Exercise the Git wrapper: status, add-all, commit, rebase-stash.

    BUG FIX: the original assigned the repository object to a local named
    `git`, which shadows the `git` module inside this function and raises
    UnboundLocalError on the very first line (`git.Git` reads the local
    before it is bound). The instance now uses a distinct name.
    """
    repo = git.Git('./test_folder', 'https://github.com/mascanio/test-priv.git',
                   'mascanio', env['token'])
    status = repo.status()

    print(status)
    print(status.is_clean())

    repo.add_all()

    repo.commit('master')

    status = repo.status()
    print(status)
    print(status.is_clean())

    print(repo.get_current_branch())

    # print(repo.push(repo.get_current_branch()))
    # print(repo.pull('dev', rebase=True))
    repo.rebase_stash('master')

    status = repo.status()
    print(status)
    print(status.is_clean())
Beispiel #6
0
Datei: pb.py Projekt: jrdavid/pb
 def import_project(self, directory):
     """Import an existing directory as a new project.

     Copies *directory* into the project's work dir, creates a bare-ish
     public repo (git.init(True) -- presumably "bare"; verify against the
     git helper), initializes a work repo if needed, and wires up the
     'public' remote. Exits the process when the project name is taken.
     """
     project_name = os.path.basename( directory )
     if project_name in self.projects:
         print("A project named '{0}' already exists.".format(project_name))
         sys.exit(1)
     # Create project entry in database
     new_pb_project = Project(project_name, self, None)
     # Copy files
     shutil.copytree( directory, new_pb_project.work_dir)
     original_dir = os.getcwd()
     print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
     os.makedirs( new_pb_project.public_dir )
     # NOTE(review): original_dir is captured twice; no chdir happens in
     # between, so the second capture is redundant but harmless.
     original_dir = os.getcwd()
     os.chdir(new_pb_project.public_dir)
     git.init(True)
     os.chdir(new_pb_project.work_dir)
     # Initialize git repo if it doesn't exist
     if not os.path.isdir(".git"):
         # Create first import with initial content
         git.init(False)
         git.add(["."])
         git.commit("Import from {0}.".format(directory))
     # Create public repository; keep any pre-existing 'public' remote
     # under a backup name instead of clobbering it.
     if 'public' in git.remote_list():
         print("Renaming old public repo to public.orig")
         git.remote_rename('public', 'public.orig')
     print("Adding 'public' remote")
     git.remote_add("public", new_pb_project.public_dir)
     os.chdir( original_dir )
     # Open by default
     self.projects[project_name] = new_pb_project
     new_pb_project.state = "open"
     # why save? -> to update the state to 'open'
     new_pb_project.save()
Beispiel #7
0
    def _commit(self, button):
        """Button handler: commit using the form's subject and handles fields."""
        timestamp = self._get_time_formatted()

        subject = self._builder.get_object('SubjectEntry').get_text()
        handles = self._builder.get_object('HandlesEntry').get_text()

        git.commit('/home/kuba/Work/Study/University/10_semester/vcs/python-test',
                   subject, timestamp, handles)
Beispiel #8
0
 def post(self, filename):
     """Store the uploaded request body under DATA_DIR and commit it."""
     target = DATA_DIR + filename
     print(filename + ' upload')
     print(target)
     git.repo(DATA_DIR)
     with open(target, 'wb') as f:
         f.write(self.request.body)
     git.add(target)
     git.commit(filename + ' added')
Beispiel #9
0
 def add_favorite(self, directory, name, title, existing=''):
     """Create a new favorite node under directory/favorites and commit it.

     Args:
         directory: repository root.
         name: favorite node name.
         title: title text written to the node's 'title' file.
         existing: optional parent path prefix inside 'favorites'.
     """
     self.logger.info(__name__, 'add_favorite', directory, title)
     git.cd(directory)
     git.makedir(directory + '/favorites/' + existing + name)
     # Context manager closes the handle even if the write fails
     # (the original leaked the handle on error).
     with open(directory + '/favorites/' + existing + name + '/title', 'w+') as favorite:
         favorite.write(title)
     git.add()
     git.commit('added new favorite ' + name + ' with title ' + title)
Beispiel #10
0
def commit(message):
    """Finish a transaction on the repository.

    Delegates to the 'commit' method of each underlying repository
    implementation (currently git only).

    @param message
        a commit message
    """
    git.commit(message)
Beispiel #11
0
Datei: vfs.py Projekt: Siosm/qsos
def commit(message):
    """Close out the current repository transaction.

    Simply forwards *message* to the git backend's 'commit'; other
    repository implementations would be invoked here as well.

    @param message
        a commit message
    """
    git.commit(message)
Beispiel #12
0
 def sync(self, directory):
     """Commit any outstanding changes in *directory*.

     Returns True when changes were found and committed, False otherwise.
     """
     self.logger.info(__name__, 'sync', directory)
     git.cd(directory)
     # Guard clause: nothing to do when the tree is clean.
     if not git.changesExist():
         self.logger.info(__name__, 'sync', 'no changes detected')
         return False
     self.logger.warn(__name__, 'sync', 'changes detected, syncing')
     # Stage deletions/modifications first, then any new files.
     git.add(' -u')
     git.add()
     git.commit('committing repository state before loading favorites')
     return True
Beispiel #13
0
def put_in_repo(data):
    """Put the login's submitted assignment into their repository.

    Creates the repo from a template on first submission (no issue number
    yet); otherwise verifies the existing repo and rejects duplicate
    timestamps. Returns (path_to_repo, logins). May recurse once after
    resetting a stale issue number. Raises SubmissionException on an
    uncommitted repo or a duplicate timestamp.
    """
    path_to_subm, timestamp = get_subm(data)
    logins = get_logins(data.login)
    path_to_repo = find_path(logins, data)
    data.git_assign = utils.dirty_assign(data.git_assign)
    issue_num = model.get_issue_number(logins, data.assign)
    if not issue_num:
        # First submission: seed the repo from the hw/proj template tree.
        path_to_template = config.TEMPLATE_DIR
        if "hw" in data.assign:
            path_to_template += "hw/"
        else:
            path_to_template += "proj/"
        if data.git_assign not in config.ASSIGN_TO_NAME_MAP:
            path_to_template += data.git_assign + "/"
        else:
            path_to_template += config.ASSIGN_TO_NAME_MAP[data.git_assign] + "/"
        copy_important_files(data, path_to_template, path_to_repo, template=True)
        git_init(path_to_repo)
        git.add(None, path=path_to_repo)
        git.commit("Initial commit", path=path_to_repo)
    else: #we want to check that we didnt mess up, and there is actually something here
        original_path = os.getcwd()
        os.chdir(path_to_repo)
        out, err = utils.run("git status")
        if "fatal: Not a git repository" in err:
            # Issue number exists but the repo is gone: clear the stale
            # number and retry the whole routine once from the top.
            print("Issue number present, but no files in repository. Resetting issue number...")
            model.remove_issue_number(logins, data.git_assign, issue_num)
            os.chdir(original_path)
            return put_in_repo(data)
        else: #we have a partner who submitted (I think)
            if not os.path.exists('commits'):
                raise SubmissionException("Found a git repository that hasn't been committed to yet. Ignoring...")
            with open('commits', 'r') as f:
                out = f.read().strip()
            # The 'commits' log stores one '<recorded-at> : <timestamp>'
            # line per upload; only the last line is checked here.
            last_line = out[out.rfind("\n"):]
            if last_line.find(":") != -1:
                com_time = last_line[last_line.find(":") + 1:].strip()
                if timestamp in com_time:
                    raise SubmissionException("This timestamp ({}) has already been uploaded. Exiting...".format(timestamp))
        os.chdir(original_path)
    copy_important_files(data, path_to_subm, path_to_repo)
    with open(path_to_repo + 'commits', 'a') as f:
        f.write('{} : {}\n'.format(utils.get_timestamp_str(), timestamp))
    git.add(None, path=path_to_repo)

    # Fix ownership/permissions so staff tooling can read the files.
    files = glob.glob(path_to_repo + "*")
    for f in files:
        utils.chmod_own_grp(f)
        utils.chown_staff_master(f)
    return path_to_repo, logins
Beispiel #14
0
 def partial(self, identifier, commit_message):
     """Create a partial-changes commit and record it in the task log.

     Args:
         identifier: string that uniquely identifies the task.
         commit_message: partial changes commit message.
     Returns:
         No data is returned.
     """
     changed = git._changed_files()
     git.add_files(changed)
     git.status("")
     git.commit("{} : {}".format(identifier, commit_message))
     self.partials_exist = True
     self.log.add_entry("Partial added: OK",
                        "Added partial commit: {} : {}".format(
                             identifier, commit_message),
                        time.strftime("%Y-%m-%d %H:%M:%S"))
Beispiel #15
0
    def put(self):
        """Handle the PUT action sent from JS: pull, or pull+add+commit+push.

        Expects a JSON body with 'filename', 'msg', 'add_all' and 'pull'
        keys. Writes a JSON status payload back to the client; on
        ErrorPrintToJupyter, reports the error and restores the cwd.
        """

        # get current directory (to return later)
        # might not be needed
        cwd = os.getcwd()

        try:
            # make connection to git remote server
            git = git_cnxn()

            # obtain filename and msg for commit
            data = json.loads(self.request.body.decode('utf-8'))
            filename = urllib.parse.unquote(data['filename'])
            msg = data['msg']
            add_all = data['add_all']
            pull = data['pull']
            if pull:
                # Pull-only request: no local changes are committed.
                git.pull()
                self.write({
                    'status':
                    200,
                    'statusText':
                    ('Success!  '
                     'Pulled from {} everything up to date!'.format(git.url))
                })
            else:
                # Pull first so the subsequent push is not rejected.
                git.pull()
                git.add(filename, add_all)
                git.commit(msg)
                git.push()
                #git_pr()
                # close connection
                self.write({
                    'status':
                    200,
                    'statusText':
                    ('Success!  '
                     'Changes to {} captured on branch {} at {}'.format(
                         filename, git.branch_nm, git.url))
                })

            # return to directory (only reached on success; the except
            # branch restores cwd via error_and_return).
            os.chdir(cwd)
        except ErrorPrintToJupyter as e:
            self.error_and_return(cwd, str(e).replace('\n', '</br> '))
def add_mt_disclaimers(repository, file_info):
    """Append the machine-translation disclaimer to partially-approved ja files.

    For each eligible English source file, finds the matching 'ja'
    translation (tolerating .md/.markdown extension swaps), strips any
    previously inserted disclaimer line, re-appends `disclaimer_text`
    (module global), stages the file, and commits once at the end.
    Files whose Crowdin translations are fully approved are skipped.
    """
    now = datetime.now()

    os.chdir(repository.github.git_root)

    root_folder = repository.github.single_folder \
        if repository.github.single_folder is not None else repository.github.project_folder

    for file in get_eligible_files(repository, git.ls_files(root_folder), 'en'):
        crowdin_file = get_crowdin_file(repository, file)

        # Map 'en/...' or '.../en/...' paths to their 'ja' counterpart.
        target_file = 'ja/' + file[3:] if file[0:3] == 'en/' else file.replace('/en/', '/ja/')

        if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
            # Translation may use the other markdown extension; try the swap.
            if target_file[-9:] == '.markdown':
                target_file = target_file[:-9] + '.md'
            elif target_file[-3:] == '.md':
                target_file = target_file[:-3] + '.markdown'

            if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
                continue

        # Skip files that are fully approved (translated == approved).
        if crowdin_file not in file_info or file_info[crowdin_file]['translated'] == file_info[crowdin_file]['approved']:
            continue

        new_lines = []

        # Drop any previously inserted disclaimer before re-appending it.
        with open(target_file, 'r') as f:
            new_lines = [
                line for line in f.readlines()
                    if line.find('<p class="alert alert-info"><span class="wysiwyg-color-blue120">') == -1
            ]

        content = '%s\n%s' % (''.join(new_lines), disclaimer_text)

        with open(target_file, 'w') as f:
            f.write(content)

        git.add(target_file)

    git.commit('-m', 'Added machine translation disclaimer %s' % now.strftime("%Y-%m-%d %H:%M:%S"))

    # initial_dir is a module-level global captured before any chdir.
    os.chdir(initial_dir)
Beispiel #17
0
  def txn(branch):
    """Initialize hostery tracking in the repo and push it on *branch*.

    Un-tracks everything except the hostery file and .gitignore, writes an
    empty JSON hostery file, then commits and pushes.
    """
    # git ignore everything except hostery file and gitignore.
    command('git rm --cached . -r', verbose=True)
    # Context managers close the handles even if a write raises
    # (the originals leaked on error).
    with open('.gitignore', 'w') as git_ignore:
      git_ignore.write('*\n!.gitignore\n!%s'%HOSTERY_FILE)
    git.add('.gitignore')

    # make hostery file with an empty JSON object.
    with open(HOSTERY_FILE, 'w') as hostery_file:
      hostery_file.write(json.dumps({}))

    # add, commit, push.
    git.add(HOSTERY_FILE)
    command('git status', multiline=True, verbose=True)
    git.commit('hostery init')
    git.push(branch)
Beispiel #18
0
def update_hook(ref_name,old_obj,new_obj):
    # Server-side update hook (Python 2 code: print statements): accept the
    # pushed commit only when its message carries a sign-off marker.
    # Returns 0 to accept the ref update, 1 to refuse it.
    msg = git.commit(new_obj).message()
    # Match either sign-off string at the start of any line (re.M) of the
    # commit message; both markers are escaped so they match literally.
    regex = r'^' + re.escape(sign_off_str) + r'|^' + re.escape(no_verify_sign_off_str)
    if re.search( regex, msg, re.M ):
        print "git-dragon(remote): accepting commit " + new_obj
        return 0
    else:
        print "git-dragon(remote): refusing non-signed-off commit " + new_obj
        print msg
        print "---------"
        return 1
Beispiel #19
0
def save_dag(dag: DAG,
             tasks: list,
             commit_message=None,
             create_pr=False) -> bool:
    """Render *dag* plus *tasks* into the DAG file and optionally commit it.

    Tasks are topologically ordered by their 'upstream' lists, rendered via
    DagGenerator into a '<path>.new' file, validated by loading it into a
    DagBag, and only then moved over the live DAG file.

    Returns:
        True when a git commit was made, False otherwise (including when
        the generated file failed to validate).
        (Annotation fixed: the original claimed -> None but returned bools.)
    """
    def sort_task(a, b):
        # Comparator: a task sorts after any task listed in its 'upstream'.
        if b['task_id'] in a['upstream']:
            return 1
        elif a['task_id'] in b['upstream']:
            return -1
        return 0

    # Write to a sibling '.new' file so a bad render never clobbers the
    # live DAG file.
    dag_file = dag.full_filepath + '.new'
    with open(dag_file, 'w') as fh:
        dg = DagGenerator(dag)
        for task in sorted(tasks, key=functools.cmp_to_key(sort_task)):
            dg.add_task(task)
        dag_file_string = f'{dg.dag_string}\n{dg.tasks_string}\n{dg.dependencies_string}'
        fh.write(dg.import_string)
        fh.write(dag_file_string)

    # Validate the generated file before replacing the live one.
    dag_bag = DagBag()
    temp_dag = dag_bag.process_file(dag_file)
    if not temp_dag:
        # os.unlink(dag_file)
        return False
    os.rename(dag_file, dag.full_filepath)

    if commit_message is not None:
        os.chdir(os.path.dirname(__file__) + '/kirby')
        date = datetime.now()
        # Branch name uses %S (seconds); the original's lowercase %s is a
        # non-portable glibc extension (epoch seconds) — presumably a typo.
        git.checkout(f'code-changes-{date:%Y%m%d-%H%M%S}', new=True)
        print(git.status())
        # print(git.diff())
        git.commit(commit_message, add_all=True)
        # git.push()
        # if create_pr:
        #     git.create_pr('Changes from airflow: ' + commit_message)
        # git.checkout('master')
        return True
    return False
Beispiel #20
0
 def _commit(self,
             commit,
             msg,
             env,
             output_work_dir,
             tree_id=None,
             parents=None,
             add_id=True):
     """Create a commit in the output work dir mirroring *commit*.

     With tree_id (parents required), builds the commit via commit-tree
     and hard-resets the work dir onto it; without tree_id (parents must
     be None), commits staged/tracked changes directly with 'git commit
     -a -q'. When add_id is set, the filtered message gains a trailer
     line linking back to the source commit's tree id.
     """
     msg = self.filter_message(msg)
     if add_id:
         # Trailer ties the rewritten commit back to the original tree.
         msg += '%s: %s' % (self._commit_id_prefix, commit.tree_id)
     if tree_id:
         # commit-tree path: parents must be supplied explicitly.
         assert parents is not None
         new_commit = git.commit_tree(tree_id,
                                      parents,
                                      msg,
                                      tree=output_work_dir,
                                      env=env)
         git.reset(opts=['--hard', new_commit], tree=output_work_dir)
     else:
         # Plain commit path: HEAD supplies the parent implicitly.
         assert parents is None
         git.commit(msg, env=env, opts=['-a', '-q'], tree=output_work_dir)
Beispiel #21
0
  def apply(self, reverse=False, commit=False):
    """Apply this patch to the repo; optionally commit it afterwards.

    Returns False when the patch fails to apply; otherwise True, or the
    commit result when *commit* is set.
    """
    # Stage the change in the index only when a commit will follow.
    if not git.apply(self.repo_path, self.file_path,
                     directory=self.paths_prefix, index=commit,
                     reverse=reverse):
      return False

    if not commit:
      return True

    # Patch applied and staged; record it as its own commit.
    return git.commit(self.repo_path, author=self.author,
                      message=self.__get_commit_message(reverse))
Beispiel #22
0
  def apply(self, reverse=False, commit=False, index=False):
    """Apply this patch, optionally staging it and/or committing it."""
    # The change goes into the index when explicitly requested, or when a
    # commit will be created from it later.
    stage = index or commit
    applied = git.apply(self.repo_path, self.file_path,
                        directory=self.paths_prefix, index=stage,
                        reverse=reverse)
    if not applied:
      return False
    if commit:
      msg = self.__get_commit_message(reverse)
      return git.commit(self.repo_path, author=self.author, message=msg)
    return True
Beispiel #23
0
  def write_txn(branch):
    """Merge *data* into the hostery file, commit, and optionally sync.

    When *sync* is set, pulls *branch* first and pushes after committing.
    Returns the merged hostery dict.
    """
    if sync:
      git.pull(branch)

    # Read-modify-write of the hostery JSON. Context managers close the
    # handles even if parsing or writing raises (originals leaked then).
    with open(HOSTERY_FILE, 'r') as hostery_file:
      all_data = json.loads(hostery_file.read())
    all_data.update(data)

    with open(HOSTERY_FILE, 'w') as hostery_file:
      hostery_file.write(json.dumps(all_data, indent=2))

    git.add(HOSTERY_FILE)

    # command('git status', verbose=True)

    git.commit('hostery mark')

    if sync:
      git.push(branch)

    return all_data
def generate_html_files(repository, file_info):
    """Convert eligible ja markdown translations to HTML and commit them.

    For each eligible English source file, finds the 'ja' counterpart
    (tolerating .md/.markdown extension swaps), converts it to HTML with
    pandoc, stages the result, and commits once at the end.

    Returns:
        dict mapping generated html path -> title of the English source.
    """
    now = datetime.now()

    os.chdir(repository.github.git_root)

    root_folder = repository.github.single_folder \
        if repository.github.single_folder is not None else repository.github.project_folder

    titles = {}

    for file in get_eligible_files(repository, git.ls_files(root_folder), 'en'):
        crowdin_file = get_crowdin_file(repository, file)

        # Map 'en/...' or '.../en/...' paths to their 'ja' counterpart.
        target_file = 'ja/' + file[3:] if file[0:3] == 'en/' else file.replace('/en/', '/ja/')

        if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
            # Translation may use the other markdown extension; try the swap.
            if target_file[-9:] == '.markdown':
                target_file = target_file[:-9] + '.md'
            elif target_file[-3:] == '.md':
                target_file = target_file[:-3] + '.markdown'

            if not os.path.isfile('%s/%s' % (repository.github.git_root, target_file)):
                continue

        html_file = target_file[:target_file.rfind('.')] + '.html'
        titles[html_file] = get_title(file)

        _pandoc(target_file, html_file, '--from=gfm', '--to=html')

        git.add(html_file)

    git.commit('-m', 'Updated pandoc conversion %s' % now.strftime("%Y-%m-%d %H:%M:%S"))

    # initial_dir is a module-level global captured before any chdir.
    os.chdir(initial_dir)

    return titles
Beispiel #25
0
 def delete_node(self, directory, fullName):
     """Remove favorite *fullName* from the repository and commit the deletion."""
     self.logger.warn(__name__, 'delete_node', directory, fullName, ' (destructive)')
     git.cd(directory + '/favorites')
     git.rm(fullName)
     # Stage the removal (' -u' stages tracked-file changes) before committing.
     git.add(' -u')
     git.commit('deleted ' + fullName)
Beispiel #26
0
def setup_server4(hostname=None,
                  domain=None,
                  pc="1",
                  forge_modules=[
                      "puppetlabs/stdlib", "puppetlabs/concat",
                      "puppetlabs/firewall", "puppetlabs/apt"
                  ]):
    """Setup Puppet 4 server.

    Installs puppetserver, copies local config/environments to the host,
    adds Forge modules, puts the config tree under git, links the testing
    environment to production, and starts the service. Python 2 code
    (print statements); presumably runs under Fabric-style helpers
    (sudo/exists/util.put_and_chown) -- verify against the surrounding
    project. NOTE(review): the mutable list default for forge_modules is
    shared across calls but is never mutated here, so it is safe as-is.
    """
    import package, util, git, service

    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir + "/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir + "/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir + "/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir + "/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir + "/.gitignore"
    modules_dir = basedir + "/code/environments/production/modules"

    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)

    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()

    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()

    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)

    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)

    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)

    # Git setup: separate repos for the config base dir and the production
    # modules dir (tracked as submodules of the latter).
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir " + modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")

    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir + "/environments/production/hieradata",
                 remote_codedir + "/environments/testing/hieradata")
    util.symlink(remote_codedir + "/environments/production/manifests",
                 remote_codedir + "/environments/testing/manifests")

    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
		# save files and states
		json.dump(pack_statuses, open(out_dir+"mx140"+"-"+time.asctime()+".json",'w'))
		json.dump (pack_json, open('pack.json','w'))
		nf = open('../mx140/public/js/data.js','w')
		print >>nf, "var data = "+json.dumps(pack_json)+";"
		nf.close()
		cPickle.dump (buffers, open('buffers.cpk','w'))
		cPickle.dump (most_recent_ids, open('mrids.cpk','w'))
		cPickle.dump (status_buff, open('status.buff','w'))
		print ("[saved]: pack.json, buffers.cpk, mrids.cpk, status.buff")
		
		# commit to github (requieres sudo git config --global credential.helper store + login para guardar credentials)
		os.chdir('../mx140')
		git.add('--all')
		git.commit('[*-*]/~ :: Monitor Mx140 V1.0 - '+time.asctime())
 		git.create_simple_git_command('push')()
		os.chdir('../collector')
		print "\n\n[git]: ok :: " , time.asctime() 
		# then sleepover...
		
		#wait, publish something
		#current_list, current_keyword, current_asoc_ws must be ready
		try:
			current_status = u"El grupo de " + \
						rlis[current_list].upper() + \
						u" discute sobre "+ \
						current_keyword.upper() +\
						u" junto a [ " +\
						current_asoc_ws[1] + " ] y [ " +\
						current_asoc_ws[3] +\
Beispiel #28
0
        found_changelog_label = True
        break

if found_changelog_label:
    print(NOTICE + "Processing release notes." + ENDC)
    release_notes = ""
    for rnf in release_notes_files:
        release_notes += open(rnf, 'r').read().rstrip() + '\n\n'
    with open('.release-notes/next-release.md', 'a+') as next_release_notes:
        next_release_notes.write(release_notes)

    print(INFO + "Adding git changes." + ENDC)
    for rnf in release_notes_files:
        git.rm(rnf)
    git.add('.release-notes/next-release.md')
    git.commit('-m', "Updates release notes for PR #" + str(pr_id))
else:
    print(NOTICE + "Found release notes but no changelog label." + ENDC)
    for rnf in release_notes_files:
        git.rm(rnf)
    git.commit(
        '-m',
        "Removes release notes from changelog labelless PR #" + str(pr_id))

push_failures = 0
while True:
    try:
        print(INFO + "Pushing changes." + ENDC)
        git.push()
        break
    except GitCommandError:
    remote.fetch()

new_branch = f'bionic-version-{latest_tag}'

git.reset('--hard', 'upstream/master')
git.checkout('upstream/master')
try:
    git.branch('-D', new_branch)
except:
    pass
git.checkout('-b', new_branch, 'upstream/master')

with open(circlefile, 'r') as reader:
    content = reader.read()
    content_new = re.sub(
        'FROM ubuntu:.*',
        r'FROM ubuntu:' + str(latest_tag),
        content,
        flags=re.M
    )

with open(circlefile, "w") as writer:
    writer.write(content_new)
    writer.close()

changedFiles = [item.a_path for item in web_repo.index.diff(None)]
if changedFiles:
    web_repo.index.add(changedFiles)
    git.commit('-m', f'Updating bionic version to {latest_tag}')
    git.push('-v', 'origin', new_branch)
Beispiel #30
0
def setup_server4(hostname=None, domain=None, pc="1", forge_modules=["puppetlabs/stdlib", "puppetlabs/concat", "puppetlabs/firewall", "puppetlabs/apt"]):
    """Setup Puppet 4 server.

    Installs puppetserver, pushes local config/environments to the host,
    pulls Forge modules, puts the config tree under git, links the testing
    environment to production, and starts the service. Python 2 code
    (print statements); presumably executed via Fabric-style helpers
    (sudo/exists/util.put_and_chown) -- verify in the surrounding project.
    NOTE(review): the mutable list default for forge_modules is shared
    across calls but never mutated here, so it is safe as written.
    """
    import package, util, git, service

    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir+"/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir+"/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir+"/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir+"/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir+"/.gitignore"
    modules_dir = basedir+"/code/environments/production/modules"

    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)

    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()

    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()

    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)

    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)

    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)

    # Git setup: separate repos for the config base dir and the production
    # modules dir (tracked as submodules of the latter).
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir "+modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")

    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir+"/environments/production/hieradata", remote_codedir+"/environments/testing/hieradata")
    util.symlink(remote_codedir+"/environments/production/manifests", remote_codedir+"/environments/testing/manifests")

    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
Beispiel #31
0
Datei: pb.py Projekt: jrdavid/pb
    def create(self, project_name, language):
        """Create a bare project.

        Seeds the work dir from language templates when available,
        rendering '.tmpl' files through Cheetah Template, then sets up a
        public git repo and a work repo with a 'public' remote. Exits the
        process when the project name is already taken.
        """
        if project_name in self.projects:
            print("A project named '{0}' already exists.".format(project_name))
            sys.exit(1)
        new_pb_project = Project(project_name, self, language)
        print("Creating project '{0}' ({1}).".format(project_name, language))
        new_pb_project.save()
        # Create bare project
        os.makedirs(new_pb_project.work_dir)
        original_dir = os.getcwd()
        os.chdir(new_pb_project.work_dir)
        if language in self.languages:
            print("I have templates for a '{0}' project.".format(language))
            for root, dirs, files in os.walk( new_pb_project.templates_dir ):
                # print root, dirs, files
                rel_dir = root[len(new_pb_project.templates_dir)+1:]
                for d in dirs:
                    o = os.path.join(root, d)
                    w = os.path.join(new_pb_project.work_dir, rel_dir, d)
                    os.makedirs(w)
                    print( "{0} -> {1}".format(o, w) )
                for f in files:
                    dest_file, ext = os.path.splitext(f)
                    o = os.path.join(root, f)
                    if ext == ".tmpl":
                        w = os.path.join(new_pb_project.work_dir, rel_dir, dest_file)
                        print( "{0} -> {1}".format(o, w) )
                        t = Template( file=o )
                        t.project = new_pb_project.name
                        # BUG FIX: previously wrote to 'dest_file' relative
                        # to the cwd (work_dir root), dropping rel_dir and
                        # never closing the handle; write to the computed
                        # destination 'w' (which is what gets printed).
                        with open(w, 'w') as of:
                            of.write(str(t))
                    else:
                        w = os.path.join(new_pb_project.work_dir, rel_dir, f)
                        print( "{0} -> {1}".format(o, w) )
                        shutil.copy(o, w)
        else:
            print("No templates available.")

        print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
        os.makedirs( new_pb_project.public_dir )
        # NOTE(review): re-capturing original_dir here makes it point at the
        # work dir, so the function ends in work_dir rather than the
        # caller's original directory — preserved to avoid changing the
        # cwd contract callers may rely on; confirm intent.
        original_dir = os.getcwd()
        os.chdir(new_pb_project.public_dir)
        git.init(True)
        os.chdir(original_dir)
        print("Initializing git repository.")
        git.init(False)
        print("Adding 'public' remote")
        git.remote_add("public", new_pb_project.public_dir)
        if language in self.languages:
            # Commit the templates
            git.add(["."])
            git.commit("Original import")
            # git.push("public")
        os.chdir(original_dir)

        # Open by default
        self.projects[project_name] = new_pb_project
        new_pb_project.state = "open"
        # why save? -> to update the state to 'open'
        new_pb_project.save()
def sync_articles(repository,
                  domain,
                  language,
                  articles,
                  article_paths,
                  refresh_articles=None,
                  refresh_paths=None):
    """Synchronize translated articles between the translation service and git.

    Downloads translations via update_repository(), then walks every tracked
    article: files that are flagged as human-translated or that are not yet
    fully translated are deleted and restored from git; everything else gets
    a disclaimer injected and is staged.  A single commit records the result.

    @param repository - repository descriptor; repository.github.git_root is
        used as the git working directory
    @param domain - zendesk domain (not referenced in this body; presumably
        kept for a uniform signature with sibling functions -- TODO confirm)
    @param language - target language code used to derive translated paths
    @param articles - dict mapping article id to article metadata
    @param article_paths - dict mapping article id to its English file path
    @param refresh_articles - optional dict of articles to re-translate; when
        given, refresh_paths must be given as well
    @param refresh_paths - optional dict of file paths for refresh_articles
    @return file_info dict (per-file translation stats keyed by crowdin path)
    """
    if refresh_articles is not None:
        logging.info('Updating translations for %d articles' %
                     len(refresh_paths))

        # Refresh mode: push sources too (sync_sources=True).
        new_files, all_files, file_info = update_repository(
            repository, list(refresh_paths.values()), sync_sources=True)
    else:
        logging.info('Downloading latest translations for %d articles' %
                     len(article_paths))

        new_files, all_files, file_info = update_repository(
            repository, list(article_paths.values()), sync_sources=False)

    old_dir = os.getcwd()

    os.chdir(repository.github.git_root)

    for article_id, file in sorted(article_paths.items()):
        article = articles[article_id]
        # Map the English path ('en/...' prefix or '/en/' segment) to the
        # corresponding translated path.
        target_file = language + '/' + file[3:] if file[
            0:3] == 'en/' else file.replace('/en/', '/%s/' % language)

        if not os.path.isfile(target_file):
            continue

        # An article labeled with the language but without 'mt' is maintained
        # by human translators: discard the downloaded file and restore the
        # committed version.
        if language in article['label_names'] and 'mt' not in article[
                'label_names']:
            print(target_file, 'not machine translated')
            os.remove(target_file, )
            git.checkout(target_file)
            continue

        crowdin_file = get_crowdin_file(repository, file)

        if crowdin_file not in file_info:
            continue

        file_metadata = file_info[crowdin_file]

        # Incomplete translations are likewise reverted to the committed copy.
        if file_metadata['phrases'] != file_metadata['translated']:
            print(target_file, 'not fully translated')
            os.remove(target_file)
            git.checkout(target_file)
            continue

        new_title, old_content, new_content = add_disclaimer_zendesk(
            article, target_file, language)

        # Only rewrite the file when the disclaimer actually changed it.
        if old_content != new_content:
            with open(target_file, 'w') as f:
                f.write('<h1>%s</h1>\n' % new_title)
                f.write(new_content)

        git.add(target_file)

    # One commit for the whole batch; the message distinguishes refresh runs.
    if refresh_articles is not None:
        git.commit('-m', 'Translated new articles: %s' % datetime.now())
    else:
        git.commit('-m', 'Translated existing articles: %s' % datetime.now())

    os.chdir(old_dir)

    return file_info
Beispiel #33
0
    new_branch = f'ccxt-{latest_ccxt_version}'

    full = f'origin/{new_branch}'
    if full in remote_branches:
        print(f'"{full}" already exists... bye')
        exit(0)

    try:
        git.branch('-D', new_branch)
    except:
        #         # @todo: find a better way to check for a branch
        # git.rev_parse('--verify', new_branch)
        pass

    git.checkout('-b', new_branch, 'upstream/master')

    web_repo.index.add(['Pipfile.lock'])
    git.commit('-m', f'Updating ccxt version')
    git.push('-v', 'origin', new_branch)

    base = "master"
    head = f'ahonnecke:{new_branch}'
    print(f'Opening PR to merge "{head}" into "{base}"')
    web_repo = org.get_repo('web')
    web_repo.create_pull(
        title=f'Update ccxt to version {latest_ccxt_version}',
        body="Scripted update for the CCXT library",
        base=base,
        head=head
    )
Beispiel #34
0
    if args.dryrun:
        print('Exiting... with local changes still in place')
        exit()

    print(f'Building a new docker image from {base_dockerfile} in {build_dir}')
    os.chdir(build_dir)
    call(["docker", "build", '-t', tag, '-f', base_dockerfile, '.'])

    print(f'Finished building, tagging {tag} as {remote_tag}')
    call(["docker", "tag", tag, remote_tag])

    print(f'Tagged {remote_tag}, pushing to docker hub')
    call(["docker", 'push', remote_tag])

    print(f'Comitting changes and pushing to {tag}')
    git.commit('-m', f'Committing scripted changes')
    git.push('-v', 'origin', tag)

    head = f'{GITUSERNAME}:{tag}'
    print(f'Opening PR to merge "{head}" into "{SRCBRANCH}"')

    try:
        g = Github(GITHUBTOKEN)
        org = g.get_organization(ORGNAME)
        repo_instance = org.get_repo(REPONAME)

        repo_instance.create_pull(
            title=f'Point Dockerfile.test at new base image {remote_tag}',
            body=f'Automatically created PR that points the tester dockerfile at the new base image that was created from update_base_image.py',
            base=SRCBRANCH,
            head=head
    def _config(self, remote, conf, groups):
        """
        Builds the groups file and project.config file for a project.

        Checks out the project's refs/meta/config branch into a throwaway
        temporary repository, compares the existing project.config against
        self.config by md5, and commits and pushes a new configuration
        (plus a regenerated groups file) only when they differ.  The
        temporary repository is always removed, even on failure.

        @param remote - gerrit.Remote object
        @param conf - Dict containing git config information
        @param groups - List of groups

        """
        # No configuration file for this project means nothing to do.
        if not self.config:
            return

        msg = "Project %s: Configuring." % self.name
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        # A fresh UUID subdirectory guarantees a unique, empty workspace.
        uuid_dir = str(uuid4())
        repo_dir = os.path.join(repo_dir, uuid_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        logger.debug(
            "Project %s: Creating directory %s" % (self.name, repo_dir)
        )
        os.makedirs(repo_dir)

        # Save the current working directory
        old_cwd = os.getcwd()

        origin = 'origin'

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            # Git init empty directory
            git.init()

            # Add remote origin
            ssh_url = 'ssh://%s@%s:%s/%s' % (
                remote.username,
                remote.host,
                remote.port,
                self.name
            )

            git.add_remote(origin, ssh_url)

            # Fetch refs/meta/config for project
            refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
            git.fetch(origin, refspec)

            # Checkout refs/meta/config
            git.checkout_branch('meta/config')

            # Get md5 of existing config
            _file = os.path.join(repo_dir, 'project.config')
            contents = ''
            # A missing project.config simply hashes as the empty string.
            try:
                with open(_file, 'r') as f:
                    contents = f.read()
            except IOError:
                pass
            existing_md5 = hashlib.md5(contents).hexdigest()

            # Get md5 of new config
            with open(self.config, 'r') as f:
                contents = f.read()
            new_md5 = hashlib.md5(contents).hexdigest()

            msg = "Project %s: Md5 comparision\n%s\n%s"
            msg = msg % (self.name, existing_md5, new_md5)
            logger.debug(msg)
            print msg

            # Only alter if checksums do not match
            if existing_md5 != new_md5:

                logger.debug(
                    "Project %s: config md5's are different." % self.name
                )

                # Update project.config file
                # NOTE: 'contents' still holds the data read from
                # self.config above, so this writes the new configuration.
                _file = os.path.join(repo_dir, 'project.config')
                with open(_file, 'w') as f:
                    f.write(contents)

                # Update groups file
                group_contents = groups_file_contents(groups)
                _file = os.path.join(repo_dir, 'groups')
                with open(_file, 'w') as f:
                    f.write(group_contents)

                # Git config user.email
                git.set_config('user.email', conf['git-config']['email'])

                # Git config user.name
                git.set_config('user.name', conf['git-config']['name'])

                # Add groups and project.config
                git.add(['groups', 'project.config'])

                # Git commit
                git.commit(message='Setting up %s' % self.name)

                # Git push directly to the gerrit meta/config ref.
                git.push(origin, refspecs='meta/config:refs/meta/config')
                logger.info("Project %s: pushed configuration." % self.name)

            else:
                msg = "Project %s: config unchanged." % self.name
                logger.info(msg)
                print msg

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
Beispiel #36
0
	def commit(self, flag, message=''):
		"""Run ``git commit`` with *flag* and *message*.

		Thin wrapper over the module-level ``git`` helper; the third
		argument routes command output to ``self.stdout`` -- presumably
		a stream-like sink, confirm against the git helper's signature.
		"""
		git.commit(flag, message, self.stdout)
def _move_renamed_articles(old_paths, new_paths):
    """Move every article whose path changed and stage both paths in git.

    For each article id present in both maps with differing paths (and an
    existing source file), the file is moved to its new location and both
    the removed old path and the added new path are staged.

    @param old_paths - dict mapping article id to its previously committed path
    @param new_paths - dict mapping article id to its current expected path
    """
    for article_id, new_article_path in sorted(new_paths.items()):
        if article_id not in old_paths:
            continue

        old_article_path = old_paths[article_id]

        if old_article_path == new_article_path:
            continue

        # Skip entries whose old file is already gone.
        if not os.path.exists(old_article_path):
            continue

        os.makedirs(os.path.dirname(new_article_path), exist_ok=True)
        os.rename(old_article_path, new_article_path)

        # Stage the deletion of the old path and the addition of the new one.
        git.add(old_article_path)
        git.add(new_article_path)


def check_renamed_articles(repository, language, articles, section_paths):
    """Relocate articles whose section (and thus path) changed, in git.

    Compares the paths currently committed (via ``git ls-files``) against the
    paths derived from current article metadata, for both the English sources
    and the translations, moves any renamed files, and commits the renames.

    @param repository - repository descriptor; repository.github.git_root is
        used as the git working directory
    @param language - translation language code (directory prefix)
    @param articles - dict mapping article id to article metadata
    @param section_paths - mapping used by get_article_path to build paths
    @return dict mapping article id to its current English article path
    """
    old_dir = os.getcwd()
    os.chdir(repository.github.git_root)

    # Committed state: article id -> path, for sources and translations.
    old_article_paths = {
        get_article_id(file): file
        for file in git.ls_files('en/').split('\n')
    }

    old_translated_paths = {
        get_article_id(file): file
        for file in git.ls_files(language + '/').split('\n')
    }

    # Desired state derived from current metadata.
    new_article_paths = {
        str(article['id']): get_article_path(article, 'en', section_paths)
        for article in articles.values()
        if is_tracked_article(article, section_paths)
    }

    new_translated_paths = {
        str(article['id']): get_article_path(article, language, section_paths)
        for article in articles.values()
        if is_tracked_article(article, section_paths)
    }

    # English sources and translations are renamed by the same procedure.
    _move_renamed_articles(old_article_paths, new_article_paths)
    _move_renamed_articles(old_translated_paths, new_translated_paths)

    git.commit('-m', 'Renamed articles: %s' % datetime.now())

    os.chdir(old_dir)

    return new_article_paths
def download_zendesk_articles(repository, domain, language):
    """Download zendesk articles into the git repository and commit them.

    Authenticates against zendesk, reconciles renamed articles, then in three
    passes determines which articles are out of date, refreshes their
    translation metadata, and rewrites the stale article files before staging
    and committing them.

    @param repository - repository descriptor; repository.github.git_root is
        used as the git working directory
    @param domain - zendesk domain to authenticate against
    @param language - translation language code
    @return (articles, article_paths, refresh_articles, refresh_paths)
    """
    user = init_zendesk(domain)
    logging.info('Authenticated as %s' % user['email'])
    assert (user['verified'])

    # Download current article information and rearrange articles that may have
    # moved categories.

    articles, new_tracked_articles, categories, sections, section_paths = get_zendesk_articles(
        repository, domain, language)
    article_paths = check_renamed_articles(repository, language, articles,
                                           section_paths)

    # Warn about paths whose article metadata is missing; nothing else is
    # done with them here.
    for article_id, article_path in sorted(article_paths.items()):
        if article_id not in articles:
            print('Missing data for %s with path %s' %
                  (article_id, article_path))
            continue

    # Save articles and translations that currently exist on zendesk

    save_article_metadata(domain, repository, language, articles,
                          article_paths, categories, sections)

    old_dir = os.getcwd()
    os.chdir(repository.github.git_root)

    # First pass: check if anything appears to be out of date

    candidate_article_ids = [
        article_id
        for article_id, article in sorted(articles.items()) if requires_update(
            repository, domain, article, language, article_paths[article_id])
    ]

    # Second pass: anything that looks outdated, make sure its translation metadata is up-to-date

    for article_id in candidate_article_ids:
        update_translated_at(domain, article_id, language,
                             articles[article_id], articles, section_paths)

    # 'initial_dir' is a module-level global -- presumably the directory the
    # script started in; verify where it is assigned.
    with open('%s/articles_%s.json' % (initial_dir, domain), 'w') as f:
        json.dump(articles, f)

    # Third pass: assume metadata is complete, check what is out of date

    refresh_articles = {
        article_id: articles[article_id]
        for article_id in candidate_article_ids
        if requires_update(repository, domain, articles[article_id], language,
                           article_paths[article_id])
    }

    # Cache the articles on disk so we can work on them without having to go back to the API
    # NOTE(review): 'articles' does not appear to change between this dump and
    # the one above, so this second dump looks redundant -- confirm.

    with open('%s/articles_%s.json' % (initial_dir, domain), 'w') as f:
        json.dump(articles, f)

    refresh_paths = {
        article_id: article_paths[article_id]
        for article_id in refresh_articles.keys()
    }

    # Rewrite each stale article as a simple HTML file and stage it.
    for article_id, article in refresh_articles.items():
        article_path = refresh_paths[article_id]

        os.makedirs(os.path.dirname(article_path), exist_ok=True)

        with open(article_path, 'w', encoding='utf-8') as f:
            f.write('<h1>%s</h1>\n' % article['title'])
            f.write(remove_nbsp(article['body']))

        git.add(article_path)

    git.commit('-m', 'Recently added articles: %s' % datetime.now())

    os.chdir(old_dir)

    return articles, article_paths, refresh_articles, refresh_paths
Beispiel #39
0
		except:
			git.checkout(b=monit_node)
			
		#git.checkout("HEAD", b=monit_node)

		filename = monit_node + "/" + f['applicationsolution_name'] + "_" + \
				f['url'].replace("://", "_").replace("/","_") + ".conf"
		filepath = telegraf_dir + "/" + filename
		print(filename)
		with open(filepath, 'w') as fi:
			fi.write("[[inputs.url_monitor]]\n" + \
					"\tapp = " + '"' +  f['applicationsolution_name'] + '"\n' + \
					"\taddress = " + '"' + f['url'] + '"\n' + \
					"\tresponse_timeout = \"" + response_timeout + '"\n' + \
					"\tmethod = " + '"' + f['method'] + '"\n' + \
					"\trequire_str = " + "'" + f['require_str'] + "'\n" + \
					"\trequire_code = " + "'" + f['require_code'] + "'\n" + \
					"\tfailed_count = " + f['failed_count'] + '\n' + \
					"\tfailed_timeout = " + f['timeout'] + '\n' + \
					"\tfollow_redirects = " + follow_redirects + '\n' + \
					"\tbody = " + "'" + f['body'] + "'\n" + \
					"\tinsecure_skip_verify = " + insecure_skip_verify + '\n' + \
					"\t[inputs.url_monitor.headers]\n\t\t" + h_str + '\n')

		git.add(filename)
		try:
			git.commit(m=filename)
		except:
			print(git.status())
		git.push("origin", monit_node)
Beispiel #40
0
    def _config(self, remote, conf, groups):
        """
        Builds the groups file and project.config file for a project.

        Checks out the project's refs/meta/config branch into a throwaway
        temporary repository, compares the existing project.config against
        self.config by md5, and commits and pushes a new configuration
        (plus a regenerated groups file) only when they differ.  The
        temporary repository is always removed, even on failure.

        @param remote - gerrit.Remote object
        @param conf - Dict containing git config information
        @param groups - List of groups

        """
        # No configuration file for this project means nothing to do.
        if not self.config:
            return

        msg = "Project %s: Configuring." % self.name
        logger.info(msg)
        print msg

        repo_dir = '~/tmp'
        repo_dir = os.path.expanduser(repo_dir)
        repo_dir = os.path.abspath(repo_dir)

        # A fresh UUID subdirectory guarantees a unique, empty workspace.
        uuid_dir = str(uuid4())
        repo_dir = os.path.join(repo_dir, uuid_dir)

        # Make Empty directory - We want this to stop and fail on OSError
        logger.debug("Project %s: Creating directory %s" %
                     (self.name, repo_dir))
        os.makedirs(repo_dir)

        # Save the current working directory
        old_cwd = os.getcwd()

        origin = 'origin'

        try:
            # Change cwd to that repo
            os.chdir(repo_dir)

            # Git init empty directory
            git.init()

            # Add remote origin
            ssh_url = 'ssh://%s@%s:%s/%s' % (remote.username, remote.host,
                                             remote.port, self.name)

            git.add_remote(origin, ssh_url)

            # Fetch refs/meta/config for project
            refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
            git.fetch(origin, refspec)

            # Checkout refs/meta/config
            git.checkout_branch('meta/config')

            # Get md5 of existing config
            _file = os.path.join(repo_dir, 'project.config')
            contents = ''
            # A missing project.config simply hashes as the empty string.
            try:
                with open(_file, 'r') as f:
                    contents = f.read()
            except IOError:
                pass
            existing_md5 = hashlib.md5(contents).hexdigest()

            # Get md5 of new config
            with open(self.config, 'r') as f:
                contents = f.read()
            new_md5 = hashlib.md5(contents).hexdigest()

            msg = "Project %s: Md5 comparision\n%s\n%s"
            msg = msg % (self.name, existing_md5, new_md5)
            logger.debug(msg)
            print msg

            # Only alter if checksums do not match
            if existing_md5 != new_md5:

                logger.debug("Project %s: config md5's are different." %
                             self.name)

                # Update project.config file
                # NOTE: 'contents' still holds the data read from
                # self.config above, so this writes the new configuration.
                _file = os.path.join(repo_dir, 'project.config')
                with open(_file, 'w') as f:
                    f.write(contents)

                # Update groups file
                group_contents = groups_file_contents(groups)
                _file = os.path.join(repo_dir, 'groups')
                with open(_file, 'w') as f:
                    f.write(group_contents)

                # Git config user.email
                git.set_config('user.email', conf['git-config']['email'])

                # Git config user.name
                git.set_config('user.name', conf['git-config']['name'])

                # Add groups and project.config
                git.add(['groups', 'project.config'])

                # Git commit
                git.commit(message='Setting up %s' % self.name)

                # Git push directly to the gerrit meta/config ref.
                git.push(origin, refspecs='meta/config:refs/meta/config')
                logger.info("Project %s: pushed configuration." % self.name)

            else:
                msg = "Project %s: config unchanged." % self.name
                logger.info(msg)
                print msg

        finally:
            # Change to old current working directory
            os.chdir(old_cwd)

            # Attempt to clean up created directory
            shutil.rmtree(repo_dir)
Beispiel #41
0
def commit(repo):
    """Stage the entire working tree of *repo* and record the first commit."""
    # Work through the repository's git command wrapper.
    vcs = repo.git
    vcs.add(".")
    vcs.commit('-m', 'first commit')
Beispiel #42
0
    pass

# base = "master"
# head = f'ahonnecke:{new_branch}'
# print(f'Opening PR to merge "{head}" into "{base}"')
# web_repo = org.get_repo('web')
# web_repo.create_pull(
#     title=f'Warm up cache',
#     body="Scripted update to warm up the CI cache",
#     base=base,
#     head=head
# )

# Ask how many warm-up commits to push; each iteration appends a throwaway
# line to the cache-warmer file so there is always a change to commit.
num = input("How many times brah?: ")

for x in range(int(num)):
    print(f'Pushing {x}...')
    # The 'with' block closes the file; the previous explicit close() inside
    # it was redundant.
    with open(cache_warmer_filename, "a") as cache_warmer_file:
        cache_warmer_file.write(f'# {x} units warmer\n')

    # Stage through the GitPython index, then commit/push via the command
    # wrapper, as elsewhere in this script.
    web_repo.index.add([cache_warmer_filename])
    git.commit('-m', 'warming cache')
    git.push('-v', 'origin', new_branch)

    # Pause between pushes so CI picks up each build separately.
    print(f'Sleeping {delay} seconds after iteration {x}...')
    sleep(delay)
Beispiel #43
0
        flags=re.M
    )

    content_newer = re.sub(
        'ADD http://chromedriver.storage.googleapis.com/.*/chromedriver_linux64.zip ',
        r'ADD http://chromedriver.storage.googleapis.com/' + str(chrome_version) + '/chromedriver_linux64.zip ',
        content_new,
        flags=re.M
    )

# Write the updated CI config back; the 'with' block handles closing (the
# previous explicit close() inside it was redundant).
with open(circlefile, "w") as writer:
    writer.write(content_newer)

# If the rewrite changed anything, commit, push, and open a PR.
changedFiles = [item.a_path for item in web_repo.index.diff(None)]
if changedFiles:
    web_repo.index.add(changedFiles)
    git.commit('-m', f'Updating chrome version to {chrome_version}')
    git.push('-v', 'origin', new_branch)

    base = "master"
    head = f'ahonnecke:{new_branch}'
    print(f'Opening PR to merge "{head}" into "{base}"')
    # Re-fetch the repo handle from the org before opening the pull request.
    web_repo = org.get_repo('web')
    web_repo.create_pull(
        title=f'Update chrome-driver to version {chrome_version}',
        body="Scripted update for the chrome driver version",
        base=base,
        head=head
    )
Beispiel #44
0
    data = json.load(f)

# Record when this session was saved ("保存时间戳" = save timestamp,
# "保存时间" = save time string).
data["保存时间戳"] = ticks
data["保存时间"] = localtime
# Session length ("本次运行时长") = save timestamp minus the startup
# timestamp ("启动时间戳"); accumulate it into the total ("总游戏时长").
data["本次运行时长"] = data["保存时间戳"] - data["启动时间戳"]
data["总游戏时长"] = data["总游戏时长"] + data["本次运行时长"]


def secondsToGameStr(sec):
    """Format a duration in seconds as 'HH时MM分SS秒' (hours/minutes/seconds)."""
    hours, remainder = divmod(sec, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "%02d时%02d分%02d秒" % (hours, minutes, seconds)


# Human-readable play-time strings for this session and the running total.
data["本次游戏时间"] = secondsToGameStr(data["本次运行时长"])
data["总游戏时间"] = secondsToGameStr(data["总游戏时长"])

# Write the JSON data.  '\\data.json' was previously written as '\data.json',
# an invalid escape sequence (same bytes, but a SyntaxWarning on modern
# Python); the path also hard-codes a Windows separator -- os.path.join
# would be the portable alternative.
with open(curFileList[0] + '\\data.json', 'w', encoding='utf8') as f:
    json.dump(data, f)

# Commit message: play-time summary followed by every collected log entry
# (mddata items are presumably strings -- confirm against where it is built).
logStr = "本次游戏时间 " + data["本次游戏时间"] + " 总游戏时间 " + data["总游戏时间"]

for log in mddata:
    logStr = logStr + log

# Stage everything and push the save-data update.
git = repo.git
git.add('.')
git.commit('-m', logStr)
git.push()
# Open the README and substitute the new version into every example.
print(INFO + "Updating versions in " + readme_file + " to " + version + ENDC)
with open(readme_file, "r+") as readme:
    text = readme.read()
    for find, replace in subs:
        text = re.sub(find, replace, text)
    readme.seek(0)
    readme.write(text)
    # Drop any leftover tail bytes in case the substitutions shortened the
    # text; without truncate() the old content past the new end survives.
    readme.truncate()

print(INFO + "Adding git changes." + ENDC)
git.add(readme_file)
if not git.status("-s"):
    print(INFO + "No changes. Exiting." + ENDC)
    sys.exit(0)
git.commit('-m',
           f'Update {readme_file} examples to reflect new version {version}')

# Push, retrying up to five times with a rebase-pull between attempts.
# Previously the loop printed "Giving up" but kept spinning forever; now it
# exits with a failure status.
push_failures = 0
while True:
    try:
        print(INFO + "Pushing updated README." + ENDC)
        git.push()
        break
    except GitCommandError:
        push_failures += 1
        if push_failures <= 5:
            print(NOTICE + "Failed to push. Going to pull and try again." +
                  ENDC)
            git.pull(rebase=True)
        else:
            print(ERROR + "Failed to push again. Giving up." + ENDC)
            sys.exit(1)
Beispiel #46
0
    for elemento in listFolders:
        # elemento = elemento[2:len(elemento) - 2]
        downloadRecursive(ruta + elemento + "\\")

    return


def prepareLog():
    """Write the error summary to the log file.

    Prints the error count, then appends it and every collected error entry
    to the module-level log file handle.  (Python 2 code.)
    """
    # Reads the module-level counters/handles populated during the backup run.
    global numErrors, log, listErrors
    print 'Errors detected = ', str(numErrors)
    log.write('Errors detected = ' + str(numErrors))
    for el in listErrors:
        log.write(str(el))


# Main
# Entry point of the FTP backup (Python 2): connect, mirror the configured
# remote route recursively, write the error log, then commit the result.
print 'Run backup FTP', config.get("host")
connect()
downloadRecursive(config.get("route"))
prepareLog()

# Create commit & push
# Commit message summarizes the run: unix timestamp plus error/file/folder
# counters accumulated by downloadRecursive.
msgcommit = ", ".join([
    str(int(time.time())),
    str(numErrors) + " Errors",
    str(numFiles) + " Files",
    str(numFolders) + " Folders"
])
print "[GIT]\tPrepare commit", msgcommit
# Note: this git.commit takes (directory, message) -- a project-local helper,
# not the GitPython wrapper used elsewhere in this file.
git.commit(config.get("folderdest"), msgcommit)