Example #1
0
	def retire(self, logfile):
		# Write the log to backup location, and flush memory
		absin = os.path.join(self.path, logfile)
		absout = os.path.join(self.gzpath, logfile + '.gz')
		login = open(absin, 'rb')

		limit = 5
		mvfiles = []
		for x in range(0, limit):
			path = absout if x == 0 else '.'.join((absout, str(x)))

			if not os.path.exists(path):
				break

			mvfiles.append((x, path, '.'.join((absout, str(x + 1)))))

		# Iterate newest-to-oldest so a move never overwrites a
		# backup that has not been shifted yet
		for i, path, nextpath in reversed(mvfiles):
			if i == 0:
				os.remove(path)
			else:
				shutil.move(path, nextpath)

		if not os.path.exists(os.path.dirname(absout)):
			os.makedirs(os.path.dirname(absout))
		try:
			logout = gzip.open(absout, 'wb')
			logout.writelines(login)
			logout.close()
			login.close()
		except Exception as e:
			logging.error(e)
			logging.error('Couldn\'t backup the file %s, whoops' % absin)
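For comparison, a minimal standalone sketch of the same rotate-then-compress idea (the helper name rotate_and_gzip and the default limit are illustrative, not part of the example above):

import gzip
import os
import shutil

def rotate_and_gzip(src, dst, limit=5):
    # Shift existing backups up one slot, highest index first,
    # so that no move overwrites a file that still has to move.
    for i in range(limit - 1, 0, -1):
        older = '%s.gz.%d' % (dst, i)
        if os.path.exists(older):
            shutil.move(older, '%s.gz.%d' % (dst, i + 1))
    if os.path.exists(dst + '.gz'):
        shutil.move(dst + '.gz', dst + '.gz.1')
    # Compress the current log into the freed base slot.
    with open(src, 'rb') as fin, gzip.open(dst + '.gz', 'wb') as fout:
        shutil.copyfileobj(fin, fout)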
Example #2
0
 def delete_vault(self, vaultName):
     if self.BASE_DIR + "/Vaults" in vaultName:
         while True:
             sure = input("Delete %s Vault (Y|N): " % (vaultName))
             if sure == "Y" or "y" or "yes" or "YES":
                 rm(vaultName)
             elif sure == "N" or "n" or "no" or "NO":
                 break 
Example #3
0
def captureAllRestartAndLogFiles(directory, mail=None):
    """
    Saves all *.log.* and *.restart.* files of a directory to a zip.

    The folder structure will be preserved.
    The zip will be mailed if a mail is set.

    Parameters
    ----------
    directory : str
        Directory to recursively find log and restart files from.
    mail : [None | str]
        If set to a string, the zip will be mailed if mutt mailing is
        supported.
    """

    if directory == ".":
        logRestartDir = "logAndRestartFilesFromRoot"
    else:
        logRestartDir = "logAndRestartFilesFrom" + directory

    # Find all the log files
    logFiles = list(pathlib.Path(directory).glob("**/*.log.*"))

    # Find all the restart files
    restartFiles = list(pathlib.Path(directory).glob("**/*.restart.*"))

    # Combine
    allFiles = (*logFiles, *restartFiles)

    for f in allFiles:
        # Add logRestartDir to directory without its root
        dst = pathlib.PurePath(logRestartDir, f.relative_to(*f.parts[:2]))
        # Skip folders marked with BAK
        if "BAK" not in str(dst):
            # Make the paths
            pathlib.Path(*dst.parts[:-1]).mkdir(parents=True, exist_ok=True)
            shutil.copy2(str(f), str(dst))

    # Compress and remove the folder
    shutil.make_archive(logRestartDir, "zip", logRestartDir)
    shutil.rmtree(logRestartDir)

    if mail:
        # Sends mail through the terminal
        theZip = logRestartDir + ".zip"
        cmd = ('echo "See attachment" | mutt -a "{}" '
               '-s "Log and restart files" -- {}').format(theZip, mail)

        # The command contains a shell pipe and quoted arguments,
        # so it must run through a shell rather than cmd.split()
        process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
        output, error = process.communicate()
        print("{} sent to {}".format(theZip, mail))

        # Clean-up
        os.remove(theZip)
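A possible invocation, assuming mutt is installed (the directory and address below are placeholders):

# Zips *.log.* and *.restart.* files under ./runs into
# logAndRestartFilesFromruns.zip and mails it.
captureAllRestartAndLogFiles("runs", mail="user@example.com")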
Example #4
0
def slope_fltr(dem_fn, slopelim=(0.1, 40)):
    #Note, Noh and Howat set minimum slope of 20 deg for coregistration purposes
    #perc = (0.01, 99.99)
    #slopelim = malib.calcperc(dem_slope, perc)
    #dem_slope = np.gradient(dem)
    dem_slope = geolib.gdaldem_slope(dem_fn)
    dem = iolib.fn_getma(dem_fn)
    out = np.ma.array(dem, mask=np.ma.masked_outside(dem_slope, *slopelim).mask, keep_mask=True, fill_value=dem.fill_value)
    os.remove(os.path.splitext(dem_fn)[0]+'_slope.tif')
    return out
Example #5
0
 def backup_xorg_conf(self):
     '''Backs up xorg.conf and xorg.conf.d, overwriting any pre-existing backups'''
     if os.path.exists(self.xorg_conf_path):
         if os.path.exists(self.xorg_conf_backup_path):
             os.remove(self.xorg_conf_backup_path)
         shutil.copy(self.xorg_conf_path, self.xorg_conf_backup_path)
     if os.path.exists(self.xorg_conf_d_path):
         if os.path.exists(self.xorg_conf_d_backup_path):
             shutil.rmtree(self.xorg_conf_d_backup_path)
         shutil.copytree(self.xorg_conf_d_path, self.xorg_conf_d_backup_path)
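The example leans on the standard library's file/directory split: shutil has no rm, so files go through os.remove and shutil.copy, while directory trees need shutil.rmtree and shutil.copytree. A minimal sketch of the same overwrite-backup pattern for an arbitrary path (the helper name backup_path is illustrative):

import os
import shutil

def backup_path(src, dst):
    # Overwrite any previous backup, dispatching on path type:
    # os.remove/shutil.copy for files, rmtree/copytree for trees.
    if os.path.isdir(src):
        if os.path.exists(dst):
            shutil.rmtree(dst)
        shutil.copytree(src, dst)
    elif os.path.exists(src):
        if os.path.exists(dst):
            os.remove(dst)
        shutil.copy(src, dst)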
Example #7
0
def Rm(container_id, **kwargs):
    try:
        for image_id in os.listdir('/var/myOwnDocker/ps/{0}'.format(container_id)):
            if image_id.endswith('.log'):
                os.remove(os.path.join('/var/myOwnDocker/ps/{0}'.format(container_id), image_id))
            else:
                subprocess.run(['btrfs', 'subvolume', 'delete', '/var/myOwnDocker/ps/{0}/{1}'.format(container_id, image_id)], check=True)
        cg = cgroups.Cgroup(container_id)
        cg.delete()
        print("rm :: Successfully deleted container with id {0}".format(container_id))
    except Exception as e:
        print("rm :: Failed to delete container with id {0}".format(container_id))
        traceback.print_exc()
    shutil.rmtree('/var/myOwnDocker/ps/{0}'.format(container_id), ignore_errors=True)
Example #8
0
def main():
    for root, dirs, _ in os.walk('.'):
        if "in" in dirs:
            with zipfile.ZipFile(root + "/testcases.zip", "w") as zf:
                i = 1
                for f in os.listdir(root + '/in'):
                    zf.write(root + '/in/' + f, '%d.in' % i)
                    i += 1
                i = 1
                for f in os.listdir(root + '/out'):
                    zf.write(root + '/out/' + f, '%d.out' % i)
                    i += 1
            rm(root + "/in")
            rm(root + "/out")
Example #9
0
def slope_fltr_chm(chm_array, hi_sun_dem_fn, slopelim=(0.1, 30)):
    """Apply a filter to a chm array based on a slope mask calc'd from the associated hi-sun-elev (ground) DSM
    """
    #dem_slope = np.gradient(dem)
    dem_slope = geolib.gdaldem_slope(hi_sun_dem_fn)
    dem = iolib.fn_getma(hi_sun_dem_fn)
    ##out = np.ma.array(chm_array, mask=np.ma.masked_outside(dem_slope, *slopelim).mask, keep_mask=True, fill_value=-9999)
    ##https://stackoverflow.com/questions/35435015/extending-numpy-mask
    # np.broadcast_arrays expands data and mask to a common shape;
    # np.ma.masked_array takes (data, mask) positionally
    out = np.ma.masked_array(*np.broadcast_arrays(
        chm_array,
        np.ma.masked_outside(dem_slope, *slopelim).mask),
                             keep_mask=True,
                             fill_value=-9999)
    os.remove(os.path.splitext(hi_sun_dem_fn)[0] + '_slope.tif')
    return out
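A self-contained illustration of the broadcast-mask trick used above, with made-up shapes and limits:

import numpy as np

data = np.arange(6.0).reshape(2, 3)      # (2, 3) array to filter
slope = np.array([0.05, 15.0, 45.0])     # (3,) slope per column

# Mask where slope falls outside (0.1, 30), broadcast the mask to
# data's shape, and attach it as a masked array.
mask = np.ma.masked_outside(slope, 0.1, 30.0).mask
out = np.ma.masked_array(*np.broadcast_arrays(data, mask))
print(out)   # columns 0 and 2 are masked in both rows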
Example #10
0
def render_tiles(id):
	#make sure that temp dir is empty
	if os.path.exists("temp"):
		shutil.rmtree("temp")
	os.mkdir("temp")
	tempdir = "temp/" + id
	os.mkdir(tempdir)

	#copy textures and styles.inc
	if os.path.exists("textures"):
		shutil.rmtree("textures")
	shutil.copytree(DIR_OSM2POV + "/textures", "textures")

	if os.path.exists("styles.inc"):
		shutil.rm("styles.inc")
	
	shutil.copy(DIR_OSM2POV + "/osm2pov-styles.inc", "osm2pov-styles.inc")

	
	#compute tile numbers to render
	mintile_x, maxtile_x, mintile_y, maxtile_y = getMinMaxTiles(id)
	numberoftiles = (maxtile_x - mintile_x + 1) * (maxtile_y - mintile_y + 1)
	tilecount = 0
	
	#convert the tiles to .pov
	osmfile = id + ".osm"
	for x in range(mintile_x, maxtile_x + 1):
		for y in range(mintile_y, maxtile_y + 1):
			tilecount += 1
			povfile = id + "-" + str(x) + "_" + str(y) + ".pov"
			status_progress(id, "Converting", 2, tilecount, numberoftiles)
			execute_cmd("Converting " + povfile, COMMAND_OSM2POV + " " + osmfile + " " + povfile + " " + str(x) + " " + str(y))
	status_end_phase(id, 2)

	tilecount = 0
	#render the tiles
	for x in range(mintile_x, maxtile_x + 1):
		for y in range(mintile_y, maxtile_y + 1):
			tilecount += 1
			povfile = id + "-" + str(x) + "_" + str(y) + ".pov"
			pngfile = id + "-" + str(x) + "_" + str(y) + ".png"
			status_progress(id, "Rendering", 3, tilecount, numberoftiles)
			execute_cmd("Rendering " + pngfile, COMMAND_POVRAY + " +W2048 +H2048 +B100 -D +A " + povfile)
			os.remove(povfile)
			if not os.path.exists(tempdir + "/" + str(x)):
				os.mkdir(tempdir + "/" + str(x))
			execute_cmd("Moving output file of city '" + id + "': " + pngfile, "mv " + pngfile + " " + tempdir+"/"+str(x)+"/"+str(y)+".png")
	status_end_phase(id, 3)
Example #11
0
 def add_single_cr(self, name, source, overwrite=False):
     with open(source, 'rb') as f:
         parsed = yaml.safe_load(f.read())
     version = parsed.get('version', '1.0.0')
     target_dir = os.path.join(self.fpath, name, version)
     target_path = os.path.join(target_dir, "{}.yaml".format(name))
     if not os.path.exists(target_dir):
         os.makedirs(target_dir)
     try:
         shutil.copy(source, target_path)
     except OSError as e:
         if e.errno != errno.EEXIST:
             raise
         if not overwrite:
             raise
         shutil.rmtree(target_dir)
         shutil.copy(source, target_path)
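A compact Python 3 sketch of the same install-with-optional-overwrite flow (the helper name install_file is illustrative):

import errno
import os
import shutil

def install_file(source, target_dir, name, overwrite=False):
    # Create the target directory, then copy, replacing on request.
    os.makedirs(target_dir, exist_ok=True)
    target_path = os.path.join(target_dir, name)
    if os.path.exists(target_path) and not overwrite:
        raise FileExistsError(errno.EEXIST, os.strerror(errno.EEXIST),
                              target_path)
    shutil.copy(source, target_path)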
Example #13
0
def render_tiles(id):
	#make sure that temp dir is empty
	if os.path.exists("temp"):
		shutil.rmtree("temp")
	os.mkdir("temp")
	tempdir = "temp/" + id
	os.mkdir(tempdir)

	#copy textures and styles.inc
	if os.path.exists("textures"):
		shutil.rmtree("textures")
	shutil.copytree(DIR_OSM2POV + "/textures", "textures")

	if os.path.exists("styles.inc"):
		shutil.rm("styles.inc")
	
	shutil.copy(DIR_OSM2POV + "/osm2pov-styles.inc", "osm2pov-styles.inc")

	
	#compute tile numbers to render
	root = cities.getroot()
	area = root.xpath("city[@id='" + id + "']/area")[0]
	mintile_x, mintile_y = deg2tile(float(area.get("top")),float(area.get("left")),12)
	maxtile_x, maxtile_y = deg2tile(float(area.get("bottom")),float(area.get("right")),12)
	mintile_y = int(math.floor((mintile_y/2)))
	maxtile_y = int(math.ceil((maxtile_y/2)))
	
	numberoftiles = (maxtile_x - mintile_x + 1) * (maxtile_y - mintile_y + 1)
	tilecount = 0
	
	#render the tiles
	osmfile = id + ".osm"
	for x in range(mintile_x, maxtile_x + 1):
		for y in range(mintile_y, maxtile_y + 1):
			tilecount += 1
			povfile = id + "-" + str(x) + "_" + str(y) + ".pov"
			pngfile = id + "-" + str(x) + "_" + str(y) + ".png"
			tileinfo = "tile " + str(tilecount) + "/" + str(numberoftiles)
			update_city_state(id, "WORKING", "Rendering " + tileinfo + "...")
			execute_cmd("Generating pov file for city '" + id + "', " + tileinfo, COMMAND_OSM2POV + " " + osmfile + " " + povfile + " " + str(x) + " " + str(y))
			execute_cmd("Rendering city '" + id + "', " + tileinfo, COMMAND_POVRAY + " +W2048 +H2048 +B100 -D +A " + povfile)
			os.remove(povfile)
			if not os.path.exists(tempdir + "/" + str(x)):
				os.mkdir(tempdir + "/" + str(x))
			execute_cmd("Moving output file of city '" + id + "'" + tileinfo, "mv " + pngfile + " " + tempdir+"/"+str(x)+"/"+str(y)+".png")
Example #14
0
 def open(self, filename):
     from app.parameters.ExperimentParams import RDDFileInfo
     #Constructs and returns a RDDFileInfo instance.
     #TODO
     #Note: I don't need to clean up the directory. Just untarring it is enough.
     zipping = False
     if zipping:
         shutil.move(filename, filename + ".zip")
         zipper = zipfile.ZipFile(filename + ".zip", 'a',
                                  zipfile.ZIP_DEFLATED)
         zipper.extractall(filename)
         os.remove(filename + '.zip')
     directory = filename
     # ploader = ParametersFileReader()
     # ploader.open(directory+'/params.param')
     # params = ploader.load()
     # ploader.close()
     num_parts = 0
     with open(directory + '/num_parts') as partFile:
         num_parts = int(partFile.read())
     return RDDFileInfo(directory, num_parts)
Example #15
0
def store(path, PKGBUILD):
    '''
    This function takes a path and a PKGBUILD string and moves it to its proper place in
    the storage subsystem.
    '''
    # ensure the mountpoint is still mounted
    if call(['mountpoint', '-q', config['mountpoint']]) != 0:  # exits 0 when mounted
        call(['sshfs', '-oPort=' + config['port'], config['host'], config['mountpoint']])

    # delete what's already there
    if exists(config['mountpoint'] + '/' + config['basedir'] + '/' + path.split('/')[-1]):
        rm(config['mountpoint'] + '/' + config['basedir'] + '/' + path.split('/')[-1])

    # move the new stuff over
    cp(path, config['mountpoint'] + '/' + config['basedir'] + '/' + path.split('/')[-1])

    # put the PKGBUILD in the new directory
    with open(config['mountpoint'] + '/' +  config['basedir'] + '/' + path.split('/')[-1] + '/' + 'PKGBUILD', 'w') as f:
        f.write(PKGBUILD)

    # process the PKGBUILD for name and version info
    procPkgInfo(PKGBUILD)
Example #16
0
    def deploy(self):
        """
        Build documentation in directory `root/target` and pushes it to `repo`.

        :Inputs:

            `root`: root directory where the `conf.py` file is.

            `repo`: remote repository where generated HTML content should be pushed to.

            `target`: directory relative to `root`, where generated HTML content should be
                      written to. This directory **must** be added to the repository's `.gitignore` file.
                      (default: `"site"`)

            `doc_branch`: branch where the generated documentation is pushed.
                      (default: `"gh-pages"`)

            `latest`: branch that "tracks" the latest generated documentation.
                      (default: `"develop"`)

            `local_upstream`: remote repository to fetch from.
                        (default: `None`)

            `make`: list of commands to be used to convert the markdown files to HTML.
                    (default: ['make', 'html'])
        """
        sha = log_and_execute(["git", "rev-parse", "HEAD"]).strip()
        current_branch = environ['GIT_BRANCH']
        logging.debug('current branch: %s' % current_branch)

        host_user, host_repo = get_github_username_repo(self.repo)
        logging.debug('host username: %s, host repo: %s', host_user, host_repo)

        self.upstream = "git@%s:%s/%s.git" % (HOST_URL[self.host],
                                              host_user, host_repo)

        logging.debug('upstream: %s' % self.upstream)
        if self.is_pull_request():
            print_with_color("Skipping documentation deployment", 'magenta')
            return

        if self.local_upstream is not None:
            # Pull the documentation branch to avoid conflicts
            log_and_execute(["git", "checkout", self.doc_branch])
            log_and_execute(["git", "branch"])
            log_and_execute(["git", "pull", "origin", self.doc_branch])
            log_and_execute(["git", "checkout", "-f", sha])
            log_and_execute(["git", "branch"])


        enc_key_file = abspath(joinpath(self.root, "docs", ".documenter.enc"))
        has_ssh_key = isfile(enc_key_file)

        with open(enc_key_file, "r") as enc_keyfile:
            enc_key = enc_keyfile.read()

        self.key_file, _ = splitext(enc_key_file)
        with open(self.key_file, "wb") as keyfile:
            keyfile.write(b64decode(enc_key))

        # Give READ/WRITE permissions
        chmod(self.key_file, stat.S_IREAD | stat.S_IWRITE)

        # self.create_ssh_config()

        tmp_dir = tempfile.mkdtemp()
        logging.debug("temporary directory is: %s" %tmp_dir)

        docs = joinpath(self.root, "docs")
        cd(docs)
        if not exists(self.target):
            mkdir(self.target)
        log_and_execute(self.make)

        # Versioned docs directories.
        latest_dir = joinpath(self.dirname, self.latest_dir)
        stable_dir = joinpath(self.dirname, self.stable_dir)
        target_dir = joinpath(docs, self.target)
        unstable_dir = joinpath(self.dirname, self.unstable_dir)

        # Setup git.
        cd(tmp_dir)
        log_and_execute(["git", "init"])
        log_and_execute(["git", "config", "user.name", "'autodocs'"])
        log_and_execute(["git", "config", "user.email", "'autodocs'"])

        # Fetch from remote and checkout the branch.
        if self.local_upstream is not None:
            log_and_execute(["git", "remote", "add", "local_upstream", self.local_upstream])

        log_and_execute(["git", "remote", "add", "upstream", self.upstream])
        log_and_execute(["git", "fetch", "upstream"])
        try:
            log_and_execute(["git", "checkout", "-b", self.doc_branch, "upstream/" + self.doc_branch])
        except RuntimeError:
            try:
                log_and_execute(["git", "checkout",
                                 "--orphan", self.doc_branch])
                log_and_execute(["git", "rm", "--cached", "-r", "."])
            except Exception:
                raise RuntimeError("could not checkout remote branch.")

        # Copy docs to `latest`, or `stable`, `<release>`, and `<version>`
        # directories.
        destination_dir = None
        if current_branch == 'origin/' + self.latest:
            destination_dir = latest_dir
        elif current_branch == 'origin/' + self.stable:
            destination_dir = stable_dir
        else:
            destination_dir = unstable_dir

        if destination_dir is not None:
            if exists(destination_dir):
                rm(destination_dir)
            logging.debug("Copying HTML folder to %s", destination_dir)
            mv(joinpath(target_dir, "html"), destination_dir)

            if 'GIT_TAG_NAME' in environ.keys():
                logging.debug("This commit (%s) was tagged. A copy of the doc will be stored at %s.",
                              sha, environ['GIT_TAG_NAME'])
                cp(destination_dir, environ['GIT_TAG_NAME'])


        # Create a .nojekyll file so that Github pages behaves correctly with folders starting
        # with an underscore.
        touch('.nojekyll')

        with open('index.html', 'w') as f:
            f.write('<meta http-equiv="refresh" content="0; url=http://%s.github.io/%s/stable"/>' %
                    (host_user, host_repo))

        # Add, commit, and push the docs to the remote.
        log_and_execute(["git", "add", "-A", "."])
        log_and_execute(["git", "commit", "-m", "'build based on %s'" % sha])
        log_and_execute(["git", "push", "-q", "upstream", "HEAD:%s" % self.doc_branch])

        # Clean up temporary directories
        rm(target_dir)
        rm(tmp_dir)
Example #17
0
 def tearDown(self):
     if os.path.exists(testRmDir): shutil.rmtree(testRmDir)
     if os.path.exists(testIonexDir): shutil.rmtree(testIonexDir)
     if os.path.exists(fitsTestFile): os.remove(fitsTestFile)
     return None
Example #18
0
#!/usr/bin/env python

from shutil import rmtree as rm
from os.path import exists

# del build dir and bin dir
if exists('build'): rm('build')
if exists('bin'): rm('bin')
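An equivalent that skips the exists() checks, at the cost of also silencing other removal errors:

from shutil import rmtree

# ignore_errors=True turns a missing directory into a no-op
for d in ('build', 'bin'):
    rmtree(d, ignore_errors=True)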
Example #19
0
#!/usr/bin/env python

from shutil import rmtree as rm
from os.path import exists

# del build dir and bin dir
if exists('build'): rm('build')
if exists('bin'):
    rm('bin')
Example #20
0
#!/usr/bin/env python

from shutil import rmtree as rm
from os.path import exists

# del build dir and bin dir
for d in 'build bin python/build'.split():
	if exists(d): rm(d)
Example #21
0
def main(args):
	url_file_path = args[0]
	out_path = args[1]
	urls = load_urls(url_file_path)
	for url in urls:
		domain = urlparse(url).netloc.replace(".", "_")
		domain_folder = out_path + "/" + domain + "/"
		record_folder = domain_folder + "record/"
		record_data_folder = record_folder + "data/"
		replay_out_folder = domain_folder + "replay--" + REPLAY_CONFIG + "/"
		if (os.path.isdir(replay_out_folder)):
			print("Folder " + replay_out_folder + " is already there! Skipping the replay")
		else:
			os.mkdir(replay_out_folder)
			for i in range(1, NUM_TRIALS + 1):
				output_folder = replay_out_folder + str(i) + "/"
				traffic_folder = output_folder + "traffic/"
				traffic_fpath = traffic_folder + domain
				packet_rule_fpath = output_folder + "packet_rule"
				log_folder = replay_out_folder + "/../"
				os.mkdir(output_folder)
				os.mkdir(traffic_folder)

				if (os.path.exists(log_folder + REPLAYSERVER_LOG_NAME)):
					os.remove(log_folder + REPLAYSERVER_LOG_NAME)

				# Generate delay rule file
				gen_rule = ["python", GEN_PACKET_RULE_SCRIPT, url, record_folder, REPLAY_CONFIG, packet_rule_fpath]
				subprocess.call(gen_rule)
				har_file = output_folder + domain + ".har"
				stdout_file_path = output_folder + "mahimahi-stdout"
				stdout_file = open(stdout_file_path, "w")
				
				# Call mahimahi and script to launch chrome
				mahimahi_replay = ["mm-webreplay", record_data_folder]
				mahimahi_replay_delay = ["mm-webreplay-delay", record_data_folder]
				mahimahi_network_delay = ["mm-adv-delay", DEFAULT_DELAY, packet_rule_fpath, traffic_fpath]
				web_page_load = ["nodejs", WEB_PAGE_LOAD_SCRIPT, url, har_file, BROWSER_WAIT_MARK]
				
				commands = []
				if (REPLAY_CONFIG == "without-delay"):
					commands = mahimahi_replay + web_page_load
				elif (REPLAY_CONFIG == "network-delay"):
					commands = mahimahi_replay + mahimahi_network_delay + web_page_load
				elif (REPLAY_CONFIG == "base"):
					commands = mahimahi_replay_delay + mahimahi_network_delay + web_page_load
				elif (REPLAY_CONFIG == "base-network-0.33"):
					commands = mahimahi_replay_delay + mahimahi_network_delay + web_page_load
				elif (REPLAY_CONFIG == "base-server-0.5"):
					commands = mahimahi_replay_delay + mahimahi_network_delay + web_page_load
				else:
					print("REPLAY_CONFIG " + REPLAY_CONFIG + " not found, default to base")
					commands = mahimahi_replay_delay + mahimahi_network_delay + web_page_load

				subprocess.call(commands, stdout=stdout_file)
				stdout_file.close()
				
				# Move replayserver log file
				if (os.path.exists(log_folder + REPLAYSERVER_LOG_NAME)):
					shutil.move(log_folder + REPLAYSERVER_LOG_NAME, output_folder + REPLAYSERVER_LOG_NAME)

				# Check whether recording success by finding the har file
				if (os.path.exists(har_file)):
					print("Trial " + str(i) + ": " + url + " replay success")
				else:
					print("Trial " + str(i) + ": " + url + " replay FAILED")
Example #22
0
    def create_pywikibot_config(self, user, bot_user, bot_password):
        """Create pywikibot configuration files

        It writes pywikibot configuration files in Daty directories

        Args:
            user (str): your wikimedia user;
            bot_user (str): the name of your bot;
            bot_password (str): the password of your bot;
        """

        # Paths
        config_file = join(self.exec_path, 'resources', 'user-config.py')
        password_file = join(self.exec_path, 'resources', 'user-password.py')
        config_save_file = join(self.dirs['config'], 'pywikibot',
                                'user-config.py')
        password_save_file = join(self.dirs['config'], 'pywikibot',
                                  'user-password.py')

        # Open files
        with open(config_file) as f:
            config = f.read()

        with open(password_file) as f:
            password_config = f.read()

        # Change config
        config = sub(u'your_user', user, config)
        password_config = sub(u'your_user', user, password_config)
        password_config = sub(u'your_bot_username', bot_user, password_config)
        password_config = sub(u'your_bot_password', bot_password,
                              password_config)

        # Write files
        try:
            with open(config_save_file, 'w') as f:
                f.write(config)
        except PermissionError as e:
            print(e)
            for dir_type, path in self.dirs.items():
                try:
                    print(path)
                    chmod_recursively(path, mode=0o777)
                    rm(path)
                except Exception as e:
                    print(e)
                    print(e.__traceback__)
            self.set_dirs()
            self.create_pywikibot_config(user, bot_user, bot_password)

        with open(password_save_file, 'w') as f:
            f.write(password_config)

        # Save internal config to disk
        self.data['credentials'] = {}
        self.data['credentials']['user'] = user
        self.data['credentials']['bot user'] = bot_user
        self.data['credentials']['bot password'] = bot_password
        save(self.data, join(self.dirs['config'], 'config.pkl'))
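A possible call, assuming config is an instance of the surrounding class; the credentials are placeholders:

# Writes user-config.py / user-password.py under the Daty config dirs
config.create_pywikibot_config("ExampleUser", "ExampleBot", "bot-password")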
Example #23
0
            return True
        except CalledProcessError as e:
            if exception_handler:
                exception_handler(e, *exception_handler_args)
            return True
        i += 1
        print(i)


clone = "git clone https://github.com/flatpak/flatpak-builder-tools update-dependencies"


def clone_exception_handler(exception):
    if exception.output == b(already_exists_error):
        print("repository already exists, pulling changes...")
        pull = "git -C ./update-dependencies pull"
        sh(pull)


sh(clone, exception_handler=clone_exception_handler)

for x in ls():
    if is_python_package(x):
        y = get_package_name(x)
        print("Updating {}".format(y))
        update = "./update-dependencies/pip/flatpak-pip-generator {}".format(y)
        sh(update)

print("Removing temporary directory...")
rm('update-dependencies')
Example #24
0
def DeleteDirectory(dir):
    # shutil has no rm(); rmtree removes a directory tree
    shutil.rmtree(dir)
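If a single remove-anything helper is wanted, it has to dispatch on the path type itself; a minimal sketch (the name rm mirrors the alias used elsewhere on this page and is not a standard-library function):

import os
import shutil

def rm(path):
    # Directories (but not symlinks to them) need rmtree;
    # files and symlinks go through os.remove.
    if os.path.isdir(path) and not os.path.islink(path):
        shutil.rmtree(path)
    else:
        os.remove(path)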
Example #25
0
import os
from shutil import rmtree as rm
from tqdm import tqdm

dirs = [x for x in os.listdir() if not os.path.isfile(x)]
for i in tqdm(range(len(dirs))):
    x = dirs[i]
    os.chdir(x)
    bad_dirs = [y for y in os.listdir() if not os.path.isfile(y)]
    if len(bad_dirs) > 0:
        for j in tqdm(range(len(bad_dirs))):
            y = bad_dirs[j]
            rm(y)
            cwd = os.getcwd()
            print('Deleted folder ' + os.path.join(cwd, y))
    os.chdir('..')
Example #26
0
def lambda_handler(event, context):
    keybucket = event['context']['key-bucket']
    outputbucket = event['context']['output-bucket']
    pubkey = event['context']['public-key']
    ### Source IP ranges to allow requests from; if the IP is in one of these, the request will not be checked for an API key
    ipranges = []
    for i in event['context']['allowed-ips'].split(','):
        ipranges.append(ip_network(u'%s' % i))
    ### APIKeys, it is recommended to use a different API key for each repo that uses this function
    apikeys = event['context']['api-secrets'].split(',')
    ip = ip_address(event['context']['source-ip'])
    secure = False
    for net in ipranges:
        if ip in net:
            secure = True
    if 'X-Gitlab-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Gitlab-Token'] in apikeys:
            secure = True
    if 'X-Git-Token' in event['params']['header'].keys():
        if event['params']['header']['X-Git-Token'] in apikeys:
            secure = True
    if 'X-Hub-Signature' in event['params']['header'].keys():
        for k in apikeys:
            if hmac.new(
                    str(k), str(event['context']['raw-body']),
                    hashlib.sha1).hexdigest() == str(
                        event['params']['header']['X-Hub-Signature'].replace(
                            'sha1=', '')):
                secure = True
    if not secure:
        logger.error('Source IP %s is not allowed' %
                     event['context']['source-ip'])
        raise Exception('Source IP %s is not allowed' %
                        event['context']['source-ip'])
    try:
        repo_name = event['body-json']['project']['path_with_namespace']
    except:
        repo_name = event['body-json']['repository']['full_name']
    try:
        remote_url = event['body-json']['project']['git_ssh_url']
    except:
        try:
            remote_url = 'git@' + event['body-json']['repository'][
                'links']['html']['href'].replace('https://', '').replace(
                    '/', ':', 1) + '.git'
        except:
            remote_url = event['body-json']['repository']['ssh_url']
    repo_path = '/tmp/%s' % repo_name
    creds = RemoteCallbacks(credentials=get_keys(keybucket, pubkey), )
    try:
        repository_path = discover_repository(repo_path)
        repo = Repository(repository_path)
        logger.info('found existing repo, using that...')
    except:
        logger.info('creating new repo for %s in %s' % (remote_url, repo_path))
        repo = create_repo(repo_path, remote_url, creds)
    pull_repo(repo, remote_url, creds)
    zipfile = zip_repo(repo_path, repo_name)
    push_s3(zipfile, repo_name, outputbucket)
    if cleanup:
        logger.info('Cleanup Lambda container...')
        shutil.rmtree(repo_path)
        os.remove(zipfile)
        os.remove('/tmp/id_rsa')
        os.remove('/tmp/id_rsa.pub')
    return 'Successfully updated %s' % repo_name