def __init__(self, name=None, commit=None, create=False):
    """
    Construct a blueprint in the new format in a
    backwards-compatible manner.
    """
    self.name = name
    self._commit = commit

    if create:
        # Fresh blueprint: populate it by introspecting this server
        # with every backend, then the services scanner.
        super(Blueprint, self).__init__()
        import backend
        for funcname in backend.__all__:
            getattr(backend, funcname)(self)
        import services
        services.services(self)
    elif name is not None:
        # Named blueprint: load blueprint.json out of the local Git
        # repository, resolving the branch tip when no commit given.
        git.init()
        if self._commit is None:
            self._commit = git.rev_parse('refs/heads/{0}'.format(name))
        if self._commit is None:
            raise NotFoundError(name)
        tree = git.tree(self._commit)
        blob = git.blob(tree, 'blueprint.json')
        payload = git.content(blob)
        super(Blueprint, self).__init__(**json.loads(payload))
    else:
        # Empty blueprint object to be filled in later.
        super(Blueprint, self).__init__()
def init(self):
    """
    Create a fresh PKI tree at self.pkipath.

    Raises PkiExistsException when a PKI already lives there; no side
    effects occur in that case.
    """
    # Guard clause: refuse to clobber an existing PKI.
    if os.path.isdir(self.pkipath):
        raise PkiExistsException("pki %s already exists" % self.pkipath)
    self.__calleasyrsa(['init-pki'])
    os.mkdir('%s/issued' % self.pkipath)
    git.init(self.pkipath)
def import_project(self, directory):
    """
    Import an existing source tree as a new project.

    Copies *directory* into the project's work dir, creates the bare
    public repository, initializes git in the work dir if needed and
    wires the 'public' remote up to the public repository.

    Exits the process if a project with the same name already exists.
    """
    project_name = os.path.basename(directory)
    if project_name in self.projects:
        print("A project named '{0}' already exists.".format(project_name))
        sys.exit(1)

    # Create project entry in database
    new_pb_project = Project(project_name, self, None)

    # Copy files
    shutil.copytree(directory, new_pb_project.work_dir)

    # Remember where we started so we can restore it at the end.
    # (FIX: the original captured os.getcwd() twice in a row; the
    # second, identical assignment was a dead store and is removed.)
    original_dir = os.getcwd()

    print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
    os.makedirs(new_pb_project.public_dir)
    os.chdir(new_pb_project.public_dir)
    git.init(True)  # bare public repository

    os.chdir(new_pb_project.work_dir)
    # Initialize git repo if it doesn't exist
    if not os.path.isdir(".git"):
        # Create first import with initial content
        git.init(False)
        git.add(["."])
        git.commit("Import from {0}.".format(directory))

    # Create public repository remote; keep any pre-existing 'public'
    # remote around under a different name rather than clobbering it.
    if 'public' in git.remote_list():
        print("Renaming old public repo to public.orig")
        git.remote_rename('public', 'public.orig')
    print("Adding 'public' remote")
    git.remote_add("public", new_pb_project.public_dir)

    os.chdir(original_dir)

    # Open by default
    self.projects[project_name] = new_pb_project
    new_pb_project.state = "open"
    # why save? -> to update the state to 'open'
    new_pb_project.save()
def init():
    """
    Begin a transaction on the repository.

    Delegates to the 'init' entry point of each repository
    implementation; currently that is only the git backend.
    """
    git.init()
def __init__(self, name=None, commit=None, create=False):
    """
    Construct a blueprint in the new format in a
    backwards-compatible manner.
    """
    self.name = name
    self._commit = commit

    if create:
        # New blueprint: run every registered backend against this
        # server to populate it.
        super(Blueprint, self).__init__()
        for funcname in backend.__all__:
            getattr(backend, funcname)(self)
    elif name is not None:
        # Existing blueprint: read blueprint.json out of the local
        # Git repository at the named branch (or explicit commit).
        git.init()
        if self._commit is None:
            self._commit = git.rev_parse('refs/heads/%s' % (name))
        if self._commit is None:
            raise KeyError(name)
        tree = git.tree(self._commit)
        payload = git.content(git.blob(tree, 'blueprint.json'))
        super(Blueprint, self).__init__(**json.loads(payload))
    else:
        # Empty blueprint object to be filled in later.
        super(Blueprint, self).__init__()
def checkout(cls, name, commit=None):
    """
    Load blueprint *name* from the local Git repository.

    When *commit* is not given, the tip of the branch named after the
    blueprint is used.  Raises NotFoundError if no commit can be
    resolved.
    """
    git.init()
    if commit is None:
        commit = git.rev_parse('refs/heads/{0}'.format(name))
    if commit is None:
        raise NotFoundError(name)
    blob = git.blob(git.tree(commit), 'blueprint.json')
    return cls(name, commit, **json.loads(git.content(blob)))
def import_project(self, options):
    """
    Import this project's existing working directory.

    Ensures the work tree is a git repository with a 'public' remote,
    creating the bare public repository when it is missing, then marks
    the project open.  Exits the process if the working directory does
    not exist.

    @param options - parsed options; options.work is the work root
    """
    if not os.path.isdir(self.work_path):
        print("No directory named '{0}' exist in '{1}'.".format(
            self.name, options.work))
        sys.exit(1)
    os.chdir(self.work_path)
    # BUG FIX: '.git' is a *directory* in a normal repository (it is a
    # plain file only for worktrees/submodules), so the original
    # os.path.isfile() check always failed and re-ran 'git init' on
    # repositories that already existed.  Use os.path.exists to cover
    # both layouts.
    if not os.path.exists(os.path.join(self.work_path, ".git")):
        git.init(False)
    remotes = git.remote_list()
    if "public" not in remotes:
        git.remote_add("public", self.public_path)
    if not os.path.isdir(self.public_path):
        os.makedirs(self.public_path)
        os.chdir(self.public_path)
        git.init(True)  # bare public repository
    self.state = "open"
def commit(self, message=''):
    """
    Create a new revision of this blueprint in the local Git repository.
    Include the blueprint JSON and any source archives referenced by
    the JSON.

    `message` becomes the commit message.  Sets self._commit to the
    new commit and points refs/heads/<name> at it.
    """
    git.init()
    refname = 'refs/heads/{0}'.format(self.name)
    # Tip of the blueprint's branch, or None for the first commit.
    parent = git.rev_parse(refname)

    # Start with an empty index every time.  Specifically, clear out
    # source tarballs from the parent commit.
    if parent is not None:
        for mode, type, sha, pathname in git.ls_tree(git.tree(parent)):
            git.git('update-index', '--force-remove', pathname)

    # Add `blueprint.json` to the index.
    f = open('blueprint.json', 'w')
    f.write(self.dumps())
    f.close()
    git.git('update-index', '--add', os.path.abspath('blueprint.json'))

    # Add source tarballs to the index.
    for filename in self.sources.itervalues():
        git.git('update-index', '--add', os.path.abspath(filename))

    # Add `/etc/blueprintignore` and `~/.blueprintignore` to the index.
    # Since adding extra syntax to this file, it no longer makes sense
    # to store it as `.gitignore`.  Missing ignore files are silently
    # skipped — this is deliberately best-effort.
    f = open('blueprintignore', 'w')
    for pathname in ('/etc/blueprintignore',
                     os.path.expanduser('~/.blueprintignore')):
        try:
            f.write(open(pathname).read())
        except IOError:
            pass
    f.close()
    git.git('update-index', '--add', os.path.abspath('blueprintignore'))

    # Write the index to Git's object store.
    tree = git.write_tree()

    # Write the commit and update the tip of the branch.
    self._commit = git.commit_tree(tree, message, parent)
    git.git('update-ref', refname, self._commit)
def send_upstream(self, downstream, upstream):
    """
    Sends the change indicated by the comment upstream.

    Clones into a unique temp directory, downloads the change from the
    downstream gerrit with git-review, pushes it to the upstream
    gerrit, and comments on the downstream review with the result.

    @param downstream - gerrit.Remote downstream object
    @param upstream - gerrit.Remote upstream object
    """
    # Check if upstream project before doing anything.
    if not self.is_upstream_project():
        return
    ssh = downstream.SSH()
    # Check to see if comment indicates a change is upstream ready
    if not self.is_upstream_indicated():
        logger.debug("Change %s: Upstream not indicated" % self.change_id)
        return

    # Grab all of the approvals
    approvals = self.get_approvals(downstream.SSH())

    # Check to see if comment has necessary approvals.
    if not self.is_upstream_approved(approvals):
        msg = ("Could not send to upstream: One or more labels"
               " not approved.")
        logger.debug("Change %s: %s" % (self.change_id, msg))
        ssh.exec_once('gerrit review -m %s %s'
                      % (pipes.quote(msg), self.revision))
        return

    # Do some git stuffs to push upstream
    logger.debug("Change %s: Sending to upstream" % self.change_id)
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    # Add uuid, want a unique directory here
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)
    # Make Empty directory - We want this to stop and fail on OSError
    if not os.path.isdir(repo_dir):
        os.makedirs(repo_dir)
    logger.debug("Change %s: Created directory %s"
                 % (self.change_id, repo_dir))
    # Save the current working directory
    old_cwd = os.getcwd()
    try:
        # Change to newly created directory.
        os.chdir(repo_dir)

        # Init the cwd
        git.init()

        # Add the remotes for upstream and downstream
        remote_url = "ssh://%s@%s:%s/%s"
        git.add_remote(
            'downstream',
            remote_url % (downstream.username,
                          downstream.host,
                          downstream.port,
                          self.project))

        # Figure out what user we will pose as
        # This every upstream user sharing the same key is kinda shady.
        # Default back to the configured user if username doesnt exist.
        # should fail in this case
        username = self.change_owner_username
        name = self.change_owner_name
        email = self.change_owner_email
        if not username:
            logger.debug("Change %s: Unable to use author credentials."
                         " Defaulting to configured credentials."
                         % self.change_id)
            username = upstream.username
            name = self._conf['git-config']['name']
            email = self._conf['git-config']['email']
        git.add_remote(
            'upstream',
            remote_url % (username,
                          upstream.host,
                          upstream.port,
                          self.project))

        logger.debug('Change %s: Sending upstream as '
                     'username %s, email %s, name %s'
                     % (self.change_id, username, email, name))
        try:
            env = get_review_env()
            # Set committer info
            git.set_config('user.email', email)
            git.set_config('user.name', name)

            # Download specific change to local
            args = [
                'git-review', '-r', 'downstream',
                '-d', '%s,%s' % (self.change_id, self.patchset_id)
            ]
            logger.debug('Change %s: running: %s'
                         % (self.change_id, ' '.join(args)))
            out = subprocess.check_output(args,
                                          stderr=subprocess.STDOUT,
                                          env=env)
            logger.debug("Change %s: %s" % (self.change_id, out))

            # Send downloaded change to upstream
            args = [
                'git-review', '-y', '-r', 'upstream',
                self.branch, '-t', self.topic
            ]
            logger.debug('Change %s: running: %s'
                         % (self.change_id, ' '.join(args)))
            out = subprocess.check_output(args,
                                          stderr=subprocess.STDOUT,
                                          env=env)
            logger.debug("Change %s: %s" % (self.change_id, out))

            upstream_url = self.get_upstream_url(upstream)
            msg = 'Sent to upstream: %s' % (upstream_url)
            # Send comment to downstream gerrit with link to change in
            # upstream gerrit
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
        except subprocess.CalledProcessError as e:
            msg = "Could not send to upstream:\n%s" % e.output
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            logger.error("Change %s: Unable to send to upstream"
                         % self.change_id)
            # BUG FIX: log e.output here.  The original logged 'out',
            # which is unbound (NameError) when the *first*
            # check_output call raises.
            logger.error("Change %s: %s" % (self.change_id, e.output))
        except Exception:
            msg = 'Could not send to upstream: Error running git-review'
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            logger.exception("Change %s: Unable to send to upstream"
                             % self.change_id)
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)
        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)
def send_upstream(self, downstream, upstream):
    """
    Sends the change indicated by the comment upstream.

    Honors a "forced" push (Release approval only bypasses the other
    labels), clones into a unique temp directory, downloads the change
    from the downstream gerrit with git-review, pushes it upstream, and
    comments on the downstream review with the result.

    @param downstream - gerrit.Remote downstream object
    @param upstream - gerrit.Remote upstream object
    """
    # Check if upstream project before doing anything.
    if not self.is_upstream_project():
        return
    ssh = downstream.SSH()
    # Check to see if comment indicates a change is upstream ready
    if not self.is_upstream_indicated():
        logger.debug("Change %s: Upstream not indicated" % self.change_id)
        return

    # Grab all of the approvals
    approvals = self.get_approvals(downstream.SSH())

    # Check to see if comment has necessary approvals.
    if self.is_forced():
        # Forced push: only the Release label is required; warn loudly
        # on the review about the bypass.
        if not self.is_release_approved(approvals):
            msg = "Could not send to upstream: Release not approved."
            logger.debug("Change %s: %s" % (self.change_id, msg))
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            return
        msg = ("***WARNING***\n%s (%s) has requested a forced upstream "
               "push. Bypassing all votes except for Release...\n\n"
               "Current votes are:\n\n%s"
               % (self._data['author']['name'],
                  self._data['author']['email'],
                  self.stringify_approvals(approvals)))
        logger.debug("Change %s: %s" % (self.change_id, msg))
        ssh.exec_once('gerrit review -m %s %s'
                      % (pipes.quote(msg), self.revision))
    else:
        if not self.is_upstream_approved(approvals):
            msg = ("Could not send to upstream: One or more labels"
                   " not approved.")
            logger.debug("Change %s: %s" % (self.change_id, msg))
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            return

    # Do some git stuffs to push upstream
    logger.debug("Change %s: Sending to upstream" % self.change_id)
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    # Add uuid, want a unique directory here
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)
    # Make Empty directory - We want this to stop and fail on OSError
    if not os.path.isdir(repo_dir):
        os.makedirs(repo_dir)
    logger.debug(
        "Change %s: Created directory %s"
        % (self.change_id, repo_dir)
    )
    # Save the current working directory
    old_cwd = os.getcwd()
    try:
        # Change to newly created directory.
        os.chdir(repo_dir)

        # Init the cwd
        git.init()

        # Add the remotes for upstream and downstream
        remote_url = "ssh://%s@%s:%s/%s"
        git.add_remote('downstream',
                       remote_url % (downstream.username,
                                     downstream.host,
                                     downstream.port,
                                     self.project))

        # Figure out what user we will pose as
        # This every upstream user sharing the same key is kinda shady.
        # Default back to the configured user if username doesnt exist.
        # should fail in this case
        username = self.patchset_uploader_username
        # NOTE(review): 'patchSet_uploader_name' uses different casing
        # than the other two uploader attributes — confirm against the
        # class definition; it looks like a typo for
        # 'patchset_uploader_name'.
        name = self.patchSet_uploader_name
        email = self.patchset_uploader_email
        if not username:
            logger.debug("Change %s: Unable to use author credentials."
                         " Defaulting to configured credentials."
                         % self.change_id)
            username = upstream.username
            name = self._conf['git-config']['name']
            email = self._conf['git-config']['email']
        git.add_remote('upstream',
                       remote_url % (username,
                                     upstream.host,
                                     upstream.port,
                                     self.project))

        logger.debug('Change %s: Sending upstream as '
                     'username %s, email %s, name %s'
                     % (self.change_id, username, email, name))
        try:
            env = get_review_env()
            # Set committer info
            git.set_config('user.email', email)
            git.set_config('user.name', name)

            # Download specific change to local
            args = ['git-review', '-r', 'downstream',
                    '-d', '%s,%s' % (self.change_id, self.patchset_id)]
            logger.debug('Change %s: running: %s'
                         % (self.change_id, ' '.join(args)))
            out = subprocess.check_output(args,
                                          stderr=subprocess.STDOUT,
                                          env=env)
            logger.debug("Change %s: %s" % (self.change_id, out))

            # Send downloaded change to upstream
            args = ['git-review', '-R', '-y', '-r', 'upstream',
                    self.branch, '-t', self.topic]
            logger.debug('Change %s: running: %s'
                         % (self.change_id, ' '.join(args)))
            out = subprocess.check_output(args,
                                          stderr=subprocess.STDOUT,
                                          env=env)
            logger.debug("Change %s: %s" % (self.change_id, out))

            upstream_url = self.get_upstream_url(upstream)
            msg = 'Sent to upstream: %s' % (upstream_url)
            # Send comment to downstream gerrit with link to change in
            # upstream gerrit
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
        except subprocess.CalledProcessError as e:
            msg = "Could not send to upstream:\n%s" % e.output
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            logger.error("Change %s: Unable to send to upstream"
                         % self.change_id)
            # BUG FIX: log e.output here.  The original logged 'out',
            # which is unbound (NameError) when the *first*
            # check_output call raises.
            logger.error("Change %s: %s" % (self.change_id, e.output))
        except Exception:
            msg = 'Could not send to upstream: Error running git-review'
            ssh.exec_once('gerrit review -m %s %s'
                          % (pipes.quote(msg), self.revision))
            logger.exception("Change %s: Unable to send to upstream"
                             % self.change_id)
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)
        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)
def setup_server4(hostname=None, domain=None, pc="1", forge_modules=[
        "puppetlabs/stdlib", "puppetlabs/concat", "puppetlabs/firewall",
        "puppetlabs/apt"]):
    """Setup Puppet 4 server

    Installs puppetserver, pushes local config/environment files to the
    remote host, installs Forge modules and puts /etc/puppetlabs under
    git, then starts the server and runs the agent once.

    hostname/domain default to autodetection from the remote host.
    pc selects the Puppet Collection release package version.
    """
    # NOTE(review): mutable default argument (list).  It is never
    # mutated here, but converting to a tuple/None default would be
    # safer — confirm no caller relies on the list identity.
    import package, util, git, service

    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir + "/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir + "/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir + "/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir + "/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir + "/.gitignore"
    modules_dir = basedir + "/code/environments/production/modules"

    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)

    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()

    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()

    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)

    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)

    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)

    # Git setup
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir " + modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")

    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir + "/environments/production/hieradata",
                 remote_codedir + "/environments/testing/hieradata")
    util.symlink(remote_codedir + "/environments/production/manifests",
                 remote_codedir + "/environments/testing/manifests")

    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
def _config(self, remote, conf, groups):
    """
    Builds the groups file and project.config file for a project.

    Clones the project's refs/meta/config into a throwaway directory,
    compares the existing project.config against self.config by md5,
    and commits + pushes the new config and groups file only when they
    differ.  The throwaway directory is always removed.

    @param remote - gerrit.Remote object
    @param conf - Dict containing git config information
    @param groups - List of groups
    """
    # Nothing to do for projects without a config file.
    if not self.config:
        return
    msg = "Project %s: Configuring." % self.name
    logger.info(msg)
    print msg

    # Build a unique scratch path under ~/tmp.
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)

    # Make Empty directory - We want this to stop and fail on OSError
    logger.debug(
        "Project %s: Creating directory %s" % (self.name, repo_dir)
    )
    os.makedirs(repo_dir)

    # Save the current working directory
    old_cwd = os.getcwd()
    origin = 'origin'
    try:
        # Change cwd to that repo
        os.chdir(repo_dir)

        # Git init empty directory
        git.init()

        # Add remote origin
        ssh_url = 'ssh://%s@%s:%s/%s' % (
            remote.username,
            remote.host,
            remote.port,
            self.name
        )
        git.add_remote(origin, ssh_url)

        # Fetch refs/meta/config for project
        refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
        git.fetch(origin, refspec)

        # Checkout refs/meta/config
        git.checkout_branch('meta/config')

        # Get md5 of existing config
        _file = os.path.join(repo_dir, 'project.config')
        contents = ''
        try:
            with open(_file, 'r') as f:
                contents = f.read()
        except IOError:
            # A missing project.config is treated as empty.
            pass
        existing_md5 = hashlib.md5(contents).hexdigest()

        # Get md5 of new config
        with open(self.config, 'r') as f:
            contents = f.read()
        new_md5 = hashlib.md5(contents).hexdigest()

        msg = "Project %s: Md5 comparision\n%s\n%s"
        msg = msg % (self.name, existing_md5, new_md5)
        logger.debug(msg)
        print msg

        # Only alter if checksums do not match
        if existing_md5 != new_md5:
            logger.debug(
                "Project %s: config md5's are different."
                % self.name
            )
            # Update project.config file ('contents' now holds the new
            # config read above).
            _file = os.path.join(repo_dir, 'project.config')
            with open(_file, 'w') as f:
                f.write(contents)
            # Update groups file
            group_contents = groups_file_contents(groups)
            _file = os.path.join(repo_dir, 'groups')
            with open(_file, 'w') as f:
                f.write(group_contents)
            # Git config user.email
            git.set_config('user.email', conf['git-config']['email'])
            # Git config user.name
            git.set_config('user.name', conf['git-config']['name'])
            # Add groups and project.config
            git.add(['groups', 'project.config'])
            # Git commit
            git.commit(message='Setting up %s' % self.name)
            # Git push
            git.push(origin, refspecs='meta/config:refs/meta/config')
            logger.info("Project %s: pushed configuration." % self.name)
        else:
            msg = "Project %s: config unchanged." % self.name
            logger.info(msg)
            print msg
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)
        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)
from flask_cors import CORS

import db
import git_crawling as gc
import git as g
import secret_key_finder as skf
from models import User
from config import APP_CONFIG
from error import CustomError

app = Flask(__name__)
CORS(app)
app.secret_key = APP_CONFIG['secret_key']

# start - bcrypt setup for password hashing
app.config['SECRET_KEY'] = APP_CONFIG['bcrypt_secret_key']
app.config['BCRYPT_LEVEL'] = APP_CONFIG['bcrypt_level']
bcrypt = Bcrypt(app)
# end - bcrypt setup


@app.route('/')
def hello_route():
    return 'hello, route!'


db.init()
g.init()
skf.start(600)

# BUG FIX: app.run() blocks.  It was previously called *before* the
# route registration and the bcrypt configuration, so neither took
# effect while the server ran.  Start the server last, and only when
# this module is executed directly.
if __name__ == '__main__':
    app.run(host='localhost')
def create(self, project_name, language):
    """
    Create a bare project.

    Lays out the work directory (rendering language templates when
    available), creates the bare public repository, initializes git in
    the work directory with a 'public' remote, and registers the
    project as open.  Exits the process if the project already exists.
    """
    if project_name in self.projects:
        print("A project named '{0}' already exists.".format(project_name))
        sys.exit(1)
    new_pb_project = Project(project_name, self, language)
    print("Creating project '{0}' ({1}).".format(project_name, language))
    new_pb_project.save()

    # Create bare project
    os.makedirs(new_pb_project.work_dir)
    # Remember the caller's directory so it can be restored at the end.
    original_dir = os.getcwd()
    os.chdir(new_pb_project.work_dir)
    if language in self.languages:
        print("I have templates for a '{0}' project.".format(language))
        for root, dirs, files in os.walk(new_pb_project.templates_dir):
            # Path of this template dir relative to the templates root.
            rel_dir = root[len(new_pb_project.templates_dir) + 1:]
            for d in dirs:
                o = os.path.join(root, d)
                w = os.path.join(new_pb_project.work_dir, rel_dir, d)
                os.makedirs(w)
                print("{0} -> {1}".format(o, w))
            for f in files:
                dest_file, ext = os.path.splitext(f)
                o = os.path.join(root, f)
                if ext == ".tmpl":
                    w = os.path.join(new_pb_project.work_dir,
                                     rel_dir, dest_file)
                    print("{0} -> {1}".format(o, w))
                    t = Template(file=o)
                    t.project = new_pb_project.name
                    # BUG FIX: write the rendered template to its
                    # computed destination 'w' (it was written to
                    # 'dest_file' in the cwd, dropping rel_dir), and
                    # close the handle deterministically.
                    with open(w, 'w') as of:
                        of.write(str(t))
                else:
                    w = os.path.join(new_pb_project.work_dir, rel_dir, f)
                    print("{0} -> {1}".format(o, w))
                    shutil.copy(o, w)
    else:
        print("No templates available.")

    print("Creating public git repo '{0}'".format(new_pb_project.public_dir))
    os.makedirs(new_pb_project.public_dir)
    os.chdir(new_pb_project.public_dir)
    git.init(True)  # bare public repository
    os.chdir(new_pb_project.work_dir)

    print("Initializing git repository.")
    git.init(False)
    print("Adding 'public' remote")
    git.remote_add("public", new_pb_project.public_dir)
    if language in self.languages:
        # Commit the templates
        git.add(["."])
        git.commit("Original import")
        # git.push("public")
    # BUG FIX: restore the directory the caller started in.  The
    # original re-captured os.getcwd() after chdir'ing into the work
    # dir, so this final chdir left the process in the work dir.
    os.chdir(original_dir)

    # Open by default
    self.projects[project_name] = new_pb_project
    new_pb_project.state = "open"
    # why save? -> to update the state to 'open'
    new_pb_project.save()
def setup_server4(hostname=None, domain=None, pc="1",
                  forge_modules=["puppetlabs/stdlib", "puppetlabs/concat",
                                 "puppetlabs/firewall", "puppetlabs/apt"]):
    """Setup Puppet 4 server

    Installs puppetserver, pushes local config/environment files to the
    remote host, installs Forge modules and puts /etc/puppetlabs under
    git, then starts the server and runs the agent once.

    hostname/domain default to autodetection from the remote host.
    pc selects the Puppet Collection release package version.
    """
    # NOTE(review): mutable default argument (list).  It is never
    # mutated here, but converting to a tuple/None default would be
    # safer — confirm no caller relies on the list identity.
    import package, util, git, service

    # Local files to copy over
    basedir = "/etc/puppetlabs"
    local_master_conf = "files/puppet-master.conf"
    remote_master_conf = basedir+"/puppet/puppet.conf"
    local_hiera_yaml = "files/hiera.yaml"
    remote_hiera_yaml = basedir+"/code/hiera.yaml"
    local_fileserver_conf = "files/fileserver.conf"
    remote_fileserver_conf = basedir+"/puppet/fileserver.conf"
    local_environments = "files/environments"
    remote_codedir = basedir+"/code"
    local_gitignore = "files/gitignore"
    remote_gitignore = basedir+"/.gitignore"
    modules_dir = basedir+"/code/environments/production/modules"

    # Verify that all the local files are in place
    try:
        open(local_master_conf)
        open(local_hiera_yaml)
    except IOError:
        print "ERROR: some local config files were missing!"
        sys.exit(1)

    # Autodetect hostname and domain from env.host, if they're not overridden
    # with method parameters
    if not hostname:
        hostname = util.get_hostname()
    if not domain:
        domain = util.get_domain()

    # Ensure that clock is correct before doing anything else, like creating SSL
    # certificates.
    util.set_clock()

    # Start the install
    install_puppetlabs_release_package(pc)
    package.install("puppetserver")
    util.put_and_chown(local_master_conf, remote_master_conf)
    util.put_and_chown(local_hiera_yaml, remote_hiera_yaml)
    util.put_and_chown(local_fileserver_conf, remote_fileserver_conf)
    util.put_and_chown(local_gitignore, remote_gitignore)
    util.add_to_path("/opt/puppetlabs/bin")
    util.set_hostname(hostname + "." + domain)
    # "facter fqdn" return a silly name on EC2 without this
    util.add_host_entry("127.0.1.1", hostname, domain)

    # Copy over template environments
    util.put_and_chown(local_environments, remote_codedir)

    # Add modules from Puppet Forge. These should in my experience be limited to
    # those which provide new types and providers. In particular puppetlabs'
    # modules which control some daemon (puppetdb, postgresql, mysql) are
    # extremely complex, very prone to breakage and nasty to debug.
    for module in forge_modules:
        add_forge_module(module)

    # Git setup
    git.install()
    git.init(basedir)
    if not exists(modules_dir):
        sudo("mkdir "+modules_dir)
    git.init(modules_dir)
    git.add_submodules(basedir=modules_dir)
    git.add_all(basedir)
    git.commit(basedir, "Initial commit")

    # Link hieradata and manifests from production to testing. This keeps the
    # testing environment identical to the production environment. The modules
    # directory in testing is separate and may (or may not) contain modules that
    # override or complement those in production.
    util.symlink(remote_codedir+"/environments/production/hieradata",
                 remote_codedir+"/environments/testing/hieradata")
    util.symlink(remote_codedir+"/environments/production/manifests",
                 remote_codedir+"/environments/testing/manifests")

    # Start puppetserver to generate the CA and server certificates/keys
    service.start("puppetserver")
    run_agent(noop="False")
def git_init(path):
    """Initialize a git repository at *path* and seed its .gitignore."""
    git.init(path=path)
    os.chdir(path)
    # Ignore generated MY.* artifacts and the commits file.
    with open('.gitignore', 'w') as fh:
        fh.writelines(["MY.*\n", "commits"])
def _config(self, remote, conf, groups):
    """
    Builds the groups file and project.config file for a project.

    Clones the project's refs/meta/config into a throwaway directory,
    compares the existing project.config against self.config by md5,
    and commits + pushes the new config and groups file only when they
    differ.  The throwaway directory is always removed.

    @param remote - gerrit.Remote object
    @param conf - Dict containing git config information
    @param groups - List of groups
    """
    # Nothing to do for projects without a config file.
    if not self.config:
        return
    msg = "Project %s: Configuring." % self.name
    logger.info(msg)
    print msg

    # Build a unique scratch path under ~/tmp.
    repo_dir = '~/tmp'
    repo_dir = os.path.expanduser(repo_dir)
    repo_dir = os.path.abspath(repo_dir)
    uuid_dir = str(uuid4())
    repo_dir = os.path.join(repo_dir, uuid_dir)

    # Make Empty directory - We want this to stop and fail on OSError
    logger.debug("Project %s: Creating directory %s"
                 % (self.name, repo_dir))
    os.makedirs(repo_dir)

    # Save the current working directory
    old_cwd = os.getcwd()
    origin = 'origin'
    try:
        # Change cwd to that repo
        os.chdir(repo_dir)

        # Git init empty directory
        git.init()

        # Add remote origin
        ssh_url = 'ssh://%s@%s:%s/%s' % (remote.username,
                                         remote.host,
                                         remote.port,
                                         self.name)
        git.add_remote(origin, ssh_url)

        # Fetch refs/meta/config for project
        refspec = 'refs/meta/config:refs/remotes/origin/meta/config'
        git.fetch(origin, refspec)

        # Checkout refs/meta/config
        git.checkout_branch('meta/config')

        # Get md5 of existing config
        _file = os.path.join(repo_dir, 'project.config')
        contents = ''
        try:
            with open(_file, 'r') as f:
                contents = f.read()
        except IOError:
            # A missing project.config is treated as empty.
            pass
        existing_md5 = hashlib.md5(contents).hexdigest()

        # Get md5 of new config
        with open(self.config, 'r') as f:
            contents = f.read()
        new_md5 = hashlib.md5(contents).hexdigest()

        msg = "Project %s: Md5 comparision\n%s\n%s"
        msg = msg % (self.name, existing_md5, new_md5)
        logger.debug(msg)
        print msg

        # Only alter if checksums do not match
        if existing_md5 != new_md5:
            logger.debug("Project %s: config md5's are different."
                         % self.name)
            # Update project.config file ('contents' now holds the new
            # config read above).
            _file = os.path.join(repo_dir, 'project.config')
            with open(_file, 'w') as f:
                f.write(contents)
            # Update groups file
            group_contents = groups_file_contents(groups)
            _file = os.path.join(repo_dir, 'groups')
            with open(_file, 'w') as f:
                f.write(group_contents)
            # Git config user.email
            git.set_config('user.email', conf['git-config']['email'])
            # Git config user.name
            git.set_config('user.name', conf['git-config']['name'])
            # Add groups and project.config
            git.add(['groups', 'project.config'])
            # Git commit
            git.commit(message='Setting up %s' % self.name)
            # Git push
            git.push(origin, refspecs='meta/config:refs/meta/config')
            logger.info("Project %s: pushed configuration." % self.name)
        else:
            msg = "Project %s: config unchanged." % self.name
            logger.info(msg)
            print msg
    finally:
        # Change to old current working directory
        os.chdir(old_cwd)
        # Attempt to clean up created directory
        shutil.rmtree(repo_dir)