def fork(self, new_body):
    """Fork this object: clone its git repository into a fresh temp
    directory and persist a new ``Fork`` row that points back at us.

    :param new_body: body content for the forked record
    :return: the newly created ``Fork`` instance
    """
    # Fresh working directory for the forked repository.
    clone_dir = mkdtemp(prefix=settings.GIT_ROOT_PATH)
    # Clone from an absolute path so the result does not depend on cwd.
    Git(clone_dir).clone(os.path.abspath(self.git_path), clone_dir)
    # Record the fork, linking back to this instance as its parent.
    return Fork.objects.create(
        body=new_body,
        parent=self,
        git_path=clone_dir,
    )
def get():
    """Deploy a git repository: validate its URL, clone it under
    ``repos/``, and launch it with uwsgi on the requested port.

    Query args (both required):
        repo: URL of the git repository to deploy.
        port: HTTP port uwsgi should listen on.

    :return: JSON payload — status 200 on success, 401 for a bad URL.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('repo', location='args', required=True)
    parser.add_argument('port', location='args', required=True)
    data = parser.parse_args()
    repo = data['repo']
    port = data['port']

    # Reject URLs that are malformed, unreachable, or do not answer 200.
    try:
        response = requests.get(repo)
        if response.status_code != 200:
            return jsonify({
                'status': 401,
                'error': '{} is not a valid URL'.format(repo)
            })
    except requests.RequestException:
        # Narrowed from a bare `except:` — covers connection errors,
        # timeouts and malformed URLs without hiding unrelated bugs.
        return jsonify({
            'status': 401,
            'error': '{} is not a valid URL'.format(repo)
        })

    gitcmd = Git('.')
    gitcmd.init()
    # Derive "name" from ".../name.git" (or ".../name").
    name = repo.split('/')
    repo_name = name[-1]
    repo_name = repo_name.replace('.git', '')
    gitcmd.clone(repo, 'repos/' + repo_name)

    # Launch the app under uwsgi.  BUGFIX: the original mixed %s and {}
    # placeholders, so .format(port, repo_name) filled the path slot with
    # the port, left "%s" verbatim and dropped repo_name; it also passed
    # one big string to Popen without shell=True, which treats the whole
    # string as the executable name.  An argument list fixes both and
    # avoids shell injection via the request parameters.
    subprocess.Popen([
        '.flaskapp/bin/uwsgi',
        '--http', ':{}'.format(port),
        '--file', 'repos/{}/wsgi.py'.format(repo_name),
        '--callable', 'app',
    ])
    return jsonify({
        'status': 200,
        'message': 'Deployed to Raspberry Pi! {} is now running!'.format(repo_name)
    })
def get_all_remote_branch(url):
    """Clone *url* into a scratch directory and return its remote branch
    names with the leading ``origin/`` prefix stripped (HEAD excluded)."""
    workdir = tempfile.mkdtemp(prefix="repo_br")
    git = Git(workdir)
    git.clone(url, workdir)
    branches = []
    for line in git.branch('--remote').splitlines():
        # Skip the symbolic "origin/HEAD -> origin/..." entry.
        if 'HEAD' in line:
            continue
        branches.append(line.strip()[len('origin/'):])
    return branches
def __init__(self, manager):
    """Bootstrap the puppet install backend under the agent user's home:
    register entry-point item handlers, (re)create the bare central repo
    and work directories, seed an initial "master" branch, and make sure
    an SSH key pair exists for pushing.

    :param manager: passed through to the base InstallMethod constructor
    """
    super(PuppetInstallMethod, self).__init__(manager)
    self.log = logging.getLogger(__name__)

    # Use effective agent user's home directory
    self.__path = pwd.getpwuid(os.getuid()).pw_dir

    # Load install items: every "puppet.items" entry point contributes a
    # handler module described by its module-level _name/_description/...
    for entry in pkg_resources.iter_entry_points("puppet.items"):
        module = entry.load()
        self.log.info("repository handler %s included" % module.__name__)
        self._supportedItems.update({
            module.__name__ : {
                'name': module._name,
                'description': module._description,
                'container': module._container,
                'options': module._options,
                'module': module,
            },
        })

    # Get a backend instance for that path
    self.__repo_path = os.path.join(self.__path, "repo")
    self.__work_path = os.path.join(self.__path, "work")

    # Purge path if wanted (config value is compared as the string "True")
    db_purge = self.env.config.get('repository.db_purge',)
    if db_purge == "True":
        if os.path.exists(self.__repo_path):
            shutil.rmtree(self.__repo_path)
        if os.path.exists(self.__work_path):
            shutil.rmtree(self.__work_path)

    # Create path if it doesn't exist
    if not os.path.exists(self.__path):
        os.makedirs(self.__path, 0750)
    if not os.path.exists(self.__repo_path):
        os.makedirs(self.__repo_path, 0750)
    if not os.path.exists(self.__work_path):
        os.makedirs(self.__work_path, 0750)

    # Initialize git repository if not present ("config" is taken as the
    # marker file of an existing bare repository)
    if not os.path.exists(os.path.join(self.__repo_path, "config")):
        repo = Repo.create(self.__repo_path)
        assert repo.bare == True
        os.chmod(self.__repo_path, 0750)

        # Create master branch: clone the bare repo into work/master,
        # seed it with README / puppet.conf / manifests, then push back.
        tmp_path = os.path.join(self.__work_path, "master")
        cmd = Git(self.__work_path)
        cmd.clone(self.__repo_path, "master")

        with open(os.path.join(tmp_path, "README"), "w") as f:
            f.write("This is an automatically managed GOsa puppet repository. \nPlease do not modify.")

        logdir = self.env.config.get("puppet.report-dir", "/var/log/puppet")
        with open(os.path.join(tmp_path, "puppet.conf"), "w") as f:
            f.write("""[main]
logdir=%s
vardir=/var/lib/puppet
ssldir=/var/lib/puppet/ssl
rundir=/var/run/puppet
factpath=$vardir/lib/facter
templatedir=$confdir/templates
report=true
reports=store_gosa
reportdir=$logdir
""" % logdir)

        # Add manifests and write initial site.pp
        os.mkdir(os.path.join(tmp_path, "manifests"))
        with open(os.path.join(tmp_path, "manifests", "site.pp"), "w") as f:
            f.write('\nimport "nodes.pp"\n')

        # Add manifests and write initial nodes.pp
        with open(os.path.join(tmp_path, "manifests", "nodes.pp"), "w") as f:
            f.write('# Automatically managed by GOsa\n')

        cmd = Git(tmp_path)
        cmd.add("README")
        cmd.add("puppet.conf")
        cmd.add("manifests")
        cmd.commit(m="Initially created puppet master branch")
        cmd.push("origin", "master")
        # The working copy was only needed to seed the bare repository.
        shutil.rmtree(tmp_path)

    # Create SSH directory?  A key pair is generated only when the .ssh
    # directory did not exist yet.
    self.ssh_path = os.path.join(self.__path, '.ssh')
    if not os.path.exists(self.ssh_path):
        os.makedirs(self.ssh_path)
        host = self.env.id
        user = pwd.getpwuid(os.getuid()).pw_name
        self.gen_ssh_key(os.path.join(self.ssh_path, 'id_dsa'),
            "%s@%s" % (user, host))
def createRelease(self, name, parent=None):
    """Create release branch *name* in the central puppet repository,
    optionally branched off *parent*.

    :param name: release name; '/' separators are stored as '@'
    :param parent: optional parent release (string or object with .name)
    :return: True on success
    """
    super(PuppetInstallMethod, self).createRelease(name, parent)

    with puppet_lock:
        # Move to concrete directory name
        orig_name = name
        name = name.replace("/", "@")

        # Clone repository — from the parent branch when one is given.
        cmd = Git(self.__work_path)
        if parent:
            if isinstance(parent, StringTypes):
                parent = parent.replace("/", "@")
            else:
                parent = parent.name.replace("/", "@")
            self.log.debug("cloning new git branch '%s' from '%s'"
                % (name, parent))
            cmd.clone(self.__repo_path, name, b=parent)
        else:
            self.log.debug("creating new git branch '%s'" % name)
            cmd.clone(self.__repo_path, name)

        # Switch branch, add information
        cmd = Git(os.path.join(self.__work_path, name))
        host = self.env.id
        cmd.config("--global", "user.name", "GOsa management agent on %s" % host)
        self.log.debug("switching to newly created branch")
        cmd.checkout(b=name)

        # Remove refs if there's no parent: detach onto an orphan branch
        # and empty the working tree so the release starts from scratch.
        current_dir = os.path.join(self.__work_path, name)
        if not parent:
            self.log.debug("no parent set - removing refs")
            cmd.symbolic_ref("HEAD", "refs/heads/newbranch")
            os.remove(os.path.join(current_dir, ".git", "index"))
            files = os.listdir(current_dir)

            # Remove all but .git
            for f in files:
                if f == ".git":
                    continue
                # BUGFIX: build the full path before testing it.  The
                # original called os.path.isdir(f) on the bare entry
                # name, which resolves against the process cwd, so
                # directories were misclassified and os.unlink() could
                # be attempted on them.
                full_path = os.path.join(current_dir, f)
                if os.path.isdir(full_path):
                    shutil.rmtree(full_path)
                else:
                    os.unlink(full_path)

        # Create release info file
        self.log.debug("writing release info file in %s" % current_dir)
        with open(os.path.join(current_dir, "release.info"), "w") as f:
            now = datetime.now()
            f.write("Release: %s\n" % orig_name)
            f.write("Date: %s\n" % now.strftime("%Y-%m-%d %H:%M:%S"))

        self.log.debug("comitting new release")
        cmd.add("release.info")
        cmd.commit(m="Created release information")

        # Push to origin
        self.log.debug("pushing change to central repository")
        cmd.push("origin", name)

    return True
class CaseRepo(object):
    """A git-backed repository of test cases/keywords.

    Clones (or reuses) a working copy, scans selected folders on selected
    branches with ``FolderScanner``, merges the per-branch results into a
    single file/case/keyword map annotated with the branches each item
    appears on, and can render the result to an HTML report.
    """

    def __init__(
        self,
        root=None,
        folders=None,
        name=None,
        url=None,
    ):
        """Set up the working copy and scan bookkeeping.

        :param root: existing checkout path; when None a temp dir is made
                     and *url* must be given so it can be cloned
        :param folders: folders (relative to root) to scan; default ['.']
        :param name: repo display name; derived from *root* when None
        :param url: clone URL (required for a fresh temp checkout)
        """
        # Branch scanned when none is specified.
        self.DEFAULT_BRANCH = 'master'
        if root is None:
            self.root = tempfile.mkdtemp(prefix="bugle_" + name + "_")
            self.is_root_tmp = True
        else:
            self.root = root
            self.is_root_tmp = False
        if name is None:
            self.name = os.path.basename(root)
        else:
            self.name = name
        self.url = url
        # Make sure folder is a git repo
        if self.is_root_tmp and self.url is None:
            raise Exception("New folder with no repo URL")
        self.git = Git(self.root)
        self.git_clone()
        self.repo = Repo(self.root)
        if folders is None:
            self.folders = ['.']
        else:
            self.folders = folders
        # Merged scan results and counters, filled by scan_branches().
        self.files = {}
        self.branches = {}
        self.total_count = {}
        self.branch_count = {}
        print "creating repo %s under %s, folders %s" % (name, self.root, self.folders)

    @proxy_decorate
    def git_clone(self):
        """Clone ``self.url`` into ``self.root`` (output is printed)."""
        cmd = " ".join((self.url, self.root))
        print "git clone %s" % cmd
        print self.git.clone(self.url, self.root)

    def clean_up(self, forced=False):
        """Remove the working copy when it is temporary or *forced*."""
        if self.is_root_tmp or forced:
            shutil.rmtree(self.root, True)

    def get_all_remote_branch(self):
        """Return remote branch names (minus 'origin/'), skipping HEAD
        and branches containing '/' or 'PVT'."""
        rlist = []
        for b in self.git.branch('--remote').splitlines():
            b = b.strip()[len('origin/'):]
            #FIXME: add ignore branch key words.
            if 'HEAD' in b or '/' in b or 'PVT' in b:
                print "ignore branch %s" % b
                continue
            rlist.append(b)
        print "all remote branches: %s" % beauty_dump(rlist)
        return rlist

    @property
    def branches_names(self):
        # NOTE(review): in Python 2 dict.keys() returns a NEW list each
        # call, so mutating the returned value has no lasting effect.
        return self.branches.keys()

    @proxy_decorate
    def scan_branch(self, branch=None, folders=None):
        """
        Scan specific branch and return the file list which has case/kw included.
        :param branch: branch name, default to master
        :param folders: folders to scan; defaults to self.folders
        :return: dict(branch=..., files=...), each file is a dict of cases and kws.
            The list item is defined as below:
            files[fullpath] = dict(name=res['name'], cases=res['cases'], kws=res['kws'])
        """
        if branch is None:
            branch = self.DEFAULT_BRANCH
        # checkout to target branch
        print self.git.checkout(branch)
        print self.git.pull()
        print self.repo.active_branch
        files = {}
        # scan folder list
        if folders is None:
            folders = self.folders
        for folder in sorted(folders):
            print "scan folder %s for repo %s branch %s" % (folder, self.name, branch)
            if folder == '.':
                fs = FolderScanner(self.root)
            else:
                fs = FolderScanner(os.path.join(self.root, folder))
            files.update(fs.scan())
            #print beauty_dump(fs.count())
        self.branch_count[branch] = self.count(files)
        print "branch %s, count=%s" % (branch, beauty_dump(self.branch_count[branch]))
        return dict(branch=branch, files=files)

    def scan_branches(self, branches=None):
        """
        The main entry to search branches with interested folders.
        :param branches: The dict param { 'branch name in str': [folder list] };
            when None, all remote branches are scanned with default folders.
        :return: final files merged up.
        """
        if branches is None:
            branches = {}
            for br in self.get_all_remote_branch():
                print "automatically add branch %s" % br
                branches[br] = None
        self.branches = branches
        self.files = {}
        bs_list = []
        for br, folders in branches.items():
            # NOTE(review): this appends to the throwaway list returned
            # by the branches_names property — effectively a no-op.
            self.branches_names.append(br)
            bs = self.scan_branch(br, folders=folders)
            bs_list.append(bs)
        self.files = self.merge_up(bs_list)
        # Process total count and figure out unique data for branches:
        # an item is "unique" to a branch when it appears on exactly one.
        self.total_count = self.count()
        self.total_count['unique'] = {}
        for b in self.branches_names:
            self.total_count['unique'][b] = {'cases': 0, 'kws': 0}
        for k, v in self.files.items():
            for c in v['cases']:
                if len(c['branches']) == 1:
                    self.total_count['unique'][c['branches'][0]]['cases'] += 1
            # NOTE(review): the inner loop variable k shadows the outer
            # file-path k; harmless here because k is not used afterwards.
            for k in v['kws']:
                if len(k['branches']) == 1:
                    self.total_count['unique'][k['branches'][0]]['kws'] += 1
        return self.files

    def merge_up(self, bs_list):
        """
        Merge up branch scan results per Case and Keyword Name.
        :param bs_list: branch scan result list
        :return: merged dict keyed by file path, or None for None input.
        """
        if bs_list is None:
            return None
        files = {}
        for bs in bs_list:
            print "merging up branch %s with %d files" % (bs['branch'], len(bs['files']))
            #print beauty_dump(bs)
            for k, v in bs['files'].items():
                if k in files:
                    # merge file
                    files[k] = self._merge_file(files[k], bs['files'][k], bs['branch'])
                    pass
                else:
                    # new add: wrap every case/kw with its branch list.
                    files[k] = dict(name=v['name'], branches=[bs['branch']])
                    cases = []
                    for tc in v['cases']:
                        cases.append(dict(case=tc, branches=[bs['branch']]))
                    kws = []
                    for kw in v['kws']:
                        kws.append(dict(kw=kw, branches=[bs['branch']]))
                    files[k]['cases'] = cases
                    files[k]['kws'] = kws
                    # from now on, each case has a list to record branches it shows up
                    files[k]['branches'] = [bs['branch']]
            # from now on, files[k] added a list with key 'branch' to record
            # how many branches it exist in.
            #print "Taken branch %s,\n%s" %(bs['branch'], beauty_dump(files))
        return files

    @staticmethod
    def _merge_file(orig, new, branch):
        """Merge the raw scan entry *new* (from *branch*) into the
        already-wrapped entry *orig*, extending branch lists in place."""
        assert branch is not None
        assert orig is not None
        assert new is not None
        # Add file branch info.
        if branch not in orig['branches']:
            orig['branches'].append(branch)
        for tc2 in new['cases']:
            merged = False
            for tc1 in orig['cases']:
                if tc1['case'] == tc2 and branch not in tc1['branches']:
                    tc1['branches'].append(branch)
                    merged = True
                    break
            if not merged:
                orig['cases'].append(dict(case=tc2, branches=[branch]))
        # TODO: common method shall be added.
        for kw2 in new['kws']:
            merged = False
            for kw1 in orig['kws']:
                if kw1['kw'] == kw2 and branch not in kw1['branches']:
                    kw1['branches'].append(branch)
                    merged = True
                    break
            if not merged:
                orig['kws'].append(dict(kw=kw2, branches=[branch]))
        return orig

    def count(self, files=None):
        """Return {'cases': n, 'kws': n} totals over *files*
        (defaults to the merged self.files)."""
        if files is None:
            files = self.files
        counts = {'cases': 0, 'kws': 0}
        counts['cases'] = sum([len(v['cases']) for k, v in files.items()])
        counts['kws'] = sum([len(v['kws']) for k, v in files.items()])
        return counts

    def dump_html(self, temp='files_table.html', output=None):
        """Render the scan results through Jinja2 template *temp* into
        *output* (defaults to ./<name>_case_report.html)."""
        if output is None:
            output = './' + self.name + '_case_report.html'
        env = Environment(
            loader=PackageLoader('bugle.bugle_site', 'templates'))
        print "loading template: %s" % temp
        template = env.get_template(temp)
        print "rendering to: %s" % output
        with open(output, 'wt') as f:
            f.write(template.render(cr=self))
        print "report generated at %s" % output