def fetch_data(links):
    # clean everything out
    delete_folder(temp_dir)
    delete_file(output_file_name)

    # download the links
    for i,link in enumerate(links):
        # extension = os.path.splitext(link)[1]
        extension = 'jpg'
        urllib.request.urlretrieve(link, '%s/%s.%s' % (temp_dir, i, extension))

    # zip the images
    shutil.make_archive(output_file_name, 'zip', temp_dir)

    # make watson requests
    r = requests.post(url, files={'images_file.zip': open(output_file_name + '.zip', 'rb')}, auth=(BLUEMIX_USERNAME, BLUEMIX_PASSWORD))
    

    # massage the data back
    if (r.status_code == 200):
        data = json.loads(r.text)

        for i, link in enumerate(links):
            j = get_index(data['images'], i)
            data['images'][j]['image'] = link

        return data
    # the request failed
    abort(403)
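The get_index helper above is not shown in this example. A plausible sketch, assuming Watson echoes each uploaded file back with an 'image' field holding its filename ('0.jpg', '1.jpg', ...); the field name and the fallback are guesses, not the original helper:

def get_index(images, i):
    # Hypothetical: locate the response entry whose reported filename
    # matches download index i, falling back to positional order.
    for j, img in enumerate(images):
        if img.get('image', '').startswith('%s.' % i):
            return j
    return i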
Example #2
def action_callback_destroy(args, config):
    """
    1. Disable the site
    2. Reload Apache
    3. Make an archive of the site
    4. Remove VHost
    5. Remove directories
    """
    # 1. Disable the site
    __disable_site(args, config)

    # 2. Reload Apache
    __reload_apache(args, config)

    # 3. Make an archive of the site
    backup_folder = os.path.join(args.config, 'backup')
    if not os.path.exists(backup_folder):
        os.mkdir(backup_folder, 0o755)
        __fix_permission(backup_folder, 0o755, login_uid, login_gid)

    archive_name = os.path.join(backup_folder, '-'.join([args.domain, time.strftime('%Y%m%dT%H%M%S'), str(uuid.uuid1())]))
    shutil.make_archive(archive_name, 'gztar', siteroot)
    __fix_permission(archive_name, 0o644, login_uid, login_gid)

    # 4. Remove hosts entry
    __remove_hosts_entry(args, config)

    # 5. Remove directories
    __clean_filesystem(args, config)
def main():
# Make a copy or duplicate of an existing file
	if path.exists("textfile.txt"):
		# Get the path of the file in the current directory
		src = path.realpath("textfile.txt")
		# Separate the path part from the filename
		head, tail = path.split(src)
		print "path: " + head
		print "file: " + tail

		# Now, make a backup file
		dst = src + ".bak"
		# Then use the shell to make a copy of the file
		shutil.copy(src,dst)
		# If you want to copy over perms, modification times, and other data
		shutil.copystat(src,dst)

		# Rename the original file
		os.rename("textfile.txt", "newfile.txt")

		# Put things into a ZIP archive
		root_dir,tail = path.split(src)
		shutil.make_archive("archive", "zip", root_dir)

		# More control over ZIP files
		with ZipFile("testzip.zip", "w") as newzip:
			newzip.write("newfile.txt")
			newzip.write("textfile.txt.bak")
Example #4
 def zip_dict(dst_fname):
     dir_fpath, basename = os.path.split(dst_fname)
     root_dir, dir_fname = os.path.split(dir_fpath)
     
     # using zip does not work here: it breaks Colordict, i.e. it starts blinking endlessly
     #fmt = "zip"
     fmt = "gztar"
     
     # if the dictionary data sits directly in the archive rather than inside a folder, the CSS/images are not found
     save_without_folder = False # True # 
     if save_without_folder:
         fname = shutil.make_archive(o_p.join(revo_dicts_fpath, dir_fname), fmt, dir_fpath)
     else:
         fname = shutil.make_archive(o_p.join(revo_dicts_fpath, dir_fname), fmt, root_dir, base_dir=dir_fname)
     
     ifo_fname = os.path.splitext(dst_fname)[0] + ".ifo"
     with open(ifo_fname) as ifo_f:
         properties = {}
         for line in ifo_f:
             lst = line.split("=")
             if len(lst) >= 2:
                 key, value = lst[0].strip(), lst[1].strip()
                 if key and value:
                     properties[key] = value
     
     words_cnt = int(properties.get("wordcount"))
     synwordcount = properties.get("synwordcount")
     if synwordcount:
         words_cnt +=  int(synwordcount)
     fname = os.path.basename(fname)
     # the two trailing spaces are for a Markdown line break
     print("http://new.bombono.org/download/revo/%(fname)s\t%(words_cnt)s  " % locals())
def fetch_sync_gateway_logs(prefix, is_perf_run=False):

    
    print("\n")

    print("Pulling logs")
    # fetch logs from sync_gateway instances
    status = run_ansible_playbook("fetch-sync-gateway-logs.yml", stop_on_fail=False)
    if status != 0:
        log.error("Error pulling logs")

    # zip logs and timestamp
    if os.path.isdir("/tmp/sg_logs"):

        date_time = time.strftime("%Y-%m-%d-%H-%M-%S")

        if is_perf_run:
            name = "/tmp/{}-sglogs".format(prefix)
        else:
            name = "/tmp/{}-{}-sglogs".format(prefix, date_time)

        shutil.make_archive(name, "zip", "/tmp/sg_logs")

        shutil.rmtree("/tmp/sg_logs")
        print("sync_gateway logs copied here {}\n".format(name))

        zip_file_path = "{}.zip".format(name)
        if is_perf_run:
            # Move perf logs to performance_results
            shutil.copy(zip_file_path, "performance_results/{}/".format(prefix))

        print("\n")
        
        return zip_file_path
Example #6
def config(output='/opt/canopsis/var/backups'):
	logger = logging.getLogger()
	logger.debug('Config Backup start')
	logger.debug('Output: %s' % output)
	
	logger.debug('Create output dir if not exists')
	if not os.path.exists(output):
		os.makedirs(output)	

	logger.debug('Create temp dir')
	archive_name = 'backup_config'
	tmp_dir = mkdtemp(prefix='/opt/canopsis/tmp/')

	logger.debug('Create file with installed packages')
	export_output = Popen('pkgmgr export', shell=True, stdout=PIPE)
	export_output.wait()
	f = open('/opt/canopsis/etc/.packages', 'w')
	f.writelines(export_output.stdout.read())
	f.close()

	logger.debug('Copy config files into tmp folder')
	shutil.copytree('/opt/canopsis/etc', '%s/%s' % (tmp_dir, archive_name))
	
	logger.debug('Create archive into %s' % output)
	logger.debug('Archive name: %s' % archive_name)
	shutil.make_archive('%s/%s' % (output, archive_name),
						'zip',
						tmp_dir)

	logger.debug('Remove temp dir')
	shutil.rmtree(tmp_dir)

	logger.debug('Config Backup finished')
Example #7
def mongo(host='localhost', output='/opt/canopsis/var/backups'):
	logger = logging.getLogger()
	logger.debug('Mongo Backup start')
	logger.debug('Host  : %s' % host)
	logger.debug('Output: %s' % output)

	logger.debug('Create temp dir')
	archive_name = 'backup_mongodb'
	tmp_dir = mkdtemp(prefix='/opt/canopsis/tmp/')
	os.makedirs('%s/%s' % (tmp_dir, archive_name))

	logger.debug('Create output dir if not exists')
	if not os.path.exists(output):
		os.makedirs(output)	


	logger.debug('Launch mongodump')
	mongodump_cmd = '/opt/canopsis/bin/mongodump --host %s --out %s/%s' % (host, tmp_dir, archive_name)
	logger.debug('Command: %s' % mongodump_cmd)
	dump_output = Popen(mongodump_cmd, shell=True)
	dump_output.wait()

	logger.debug('Create archive into %s' % output)

	shutil.make_archive('%s/%s' % (output, archive_name),
						'zip',
						tmp_dir)

	logger.debug('Remove temp dir')
	shutil.rmtree(tmp_dir)
	
	logger.debug('Mongo Backup finished')
Example #8
    def create_zip_files(self, dirs):
        """Create zip files."""
        logmsg.header('Creating zip files...', self.logger)

        # Create output directory to hold our zips
        output_dir = 'Zips'
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)

        num_zips = 0

        # Create a progress bar
        pbar = ProgressBar(term_width=80, maxval=len(dirs)).start()

        for d in dirs:
            self.logger.debug('Zipping: "%s"' % d)
            parent_dir = os.path.join(os.path.dirname(d).split(os.path.sep)[-1], '')
            if parent_dir == self.input_dir:
                parent_dir = ''
            output_file = os.path.join(output_dir, parent_dir, os.path.basename(d))
            shutil.make_archive(output_file, format="zip", root_dir=d)
            num_zips += 1

            # Update progress bar
            pbar.update(num_zips)

        # Ensure progress bar is finished
        pbar.finish()
        time_elapsed = "(Time Elapsed: {0})".format(secs_to_mins(pbar.seconds_elapsed))

        logmsg.success('Created {0} zip files {1}'.format(num_zips,
                                                          time_elapsed), self.logger)
Example #9
def build_windows():
    """Build windows executables/setups."""
    utils.print_title("Updating 3rdparty content")
    update_3rdparty.run(ace=False, pdfjs=True, fancy_dmg=False)

    utils.print_title("Building Windows binaries")
    parts = str(sys.version_info.major), str(sys.version_info.minor)
    ver = ''.join(parts)
    dot_ver = '.'.join(parts)

    # Get python path from registry if possible
    try:
        reg64_key = winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE,
                                     r'SOFTWARE\Python\PythonCore'
                                     r'\{}\InstallPath'.format(dot_ver))
        python_x64 = winreg.QueryValueEx(reg64_key, 'ExecutablePath')[0]
    except FileNotFoundError:
        python_x64 = r'C:\Python{}\python.exe'.format(ver)

    out_pyinstaller = os.path.join('dist', 'qutebrowser')
    out_64 = os.path.join('dist',
                          'qutebrowser-{}-x64'.format(qutebrowser.__version__))

    artifacts = []

    from scripts.dev import gen_versioninfo
    utils.print_title("Updating VersionInfo file")
    gen_versioninfo.main()

    utils.print_title("Running pyinstaller 64bit")
    _maybe_remove(out_64)
    call_tox('pyinstaller', '-r', python=python_x64)
    shutil.move(out_pyinstaller, out_64)

    utils.print_title("Running 64bit smoke test")
    smoke_test(os.path.join(out_64, 'qutebrowser.exe'))

    utils.print_title("Building installers")
    subprocess.run(['makensis.exe',
                    '/DX64',
                    '/DVERSION={}'.format(qutebrowser.__version__),
                    'misc/qutebrowser.nsi'], check=True)

    name_64 = 'qutebrowser-{}-amd64.exe'.format(qutebrowser.__version__)

    artifacts += [
        (os.path.join('dist', name_64),
         'application/vnd.microsoft.portable-executable',
         'Windows 64bit installer'),
    ]

    utils.print_title("Zipping 64bit standalone...")
    name = 'qutebrowser-{}-windows-standalone-amd64'.format(
        qutebrowser.__version__)
    shutil.make_archive(name, 'zip', 'dist', os.path.basename(out_64))
    artifacts.append(('{}.zip'.format(name),
                      'application/zip',
                      'Windows 64bit standalone'))

    return artifacts
Example #10
 def make_zip(self):
     """Try to make a ZIP file."""
     try:
         # iterate over lists of folders to backup
         for folder_to_backup in self.origins:
             percentage = int(self.origins.index(folder_to_backup) /
                              len(self.origins) * 100)
             self.setLabelText(self.template.format(
                 folder_to_backup[:99], self.destination.lower()[:99],
                 self._date, datetime.now().isoformat()[:-7],
                 self.seconds_time_to_human_str(time.time() - self._time),
                 len(self.origins) - self.origins.index(folder_to_backup),
                 percentage))
             self.setValue(percentage)
             QApplication.processEvents()  # Forces the UI to Update
             log.info("Folder to backup: {}.".format(folder_to_backup))
             shutil.make_archive(folder_to_backup, "zip",
                                 folder_to_backup, logger=log)
             self.move_zip(folder_to_backup + ".zip")
     except Exception as reason:
         log.warning(reason)
     else:
         log.info("Copia de Seguridad Backup Termino bien.")
     finally:
         log.info("Finished BackUp from {} to {}.".format(
             self.origins, self.destination))
Example #11
def Create_addon_from_github(URL,local_repo_folder):
    archive_suffix="/archive/master.zip"
    print(URL)
    addonname=URL.strip('/').split('/')[-1]
    if not os.path.exists(local_repo_folder+os.sep+addonname):
        print("Making folder for addon in repo: ",addonname)
        os.makedirs(local_repo_folder+os.sep+addonname)                   
    download_file(URL+archive_suffix,local_repo_folder+os.sep+addonname+os.sep+"master.zip")
    try: 
        xml_frm_file,ziptype=zipfilehandler(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
    except Exception as e:
        print("cannot create a zip from githuburl ",URL)
        return 
    root = ET.fromstring(xml_frm_file) 
    for element in root.iter("addon"):
        addon_name=element.attrib['id'] 
        addon_version=element.attrib['version']
    try:
        currntzip=zipfile.ZipFile(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
        currntzip.extractall(local_repo_folder+os.sep+addonname+os.sep)
        currntzip.close()
        shutil.move(local_repo_folder+os.sep+addonname+os.sep+addon_name+"-master",local_repo_folder+os.sep+addonname+os.sep+addon_name)
        os.remove(local_repo_folder+os.sep+addonname+os.sep+"master.zip")
        shutil.make_archive(local_repo_folder+os.sep+addonname+os.sep+addon_name+"-"+addon_version,'zip',local_repo_folder+os.sep+addonname,addon_name)
        shutil.rmtree(local_repo_folder+os.sep+addonname+os.sep+addon_name)
    except Exception as e:
        print("could not save fil ",addonname)
Example #12
def generate():
	output_dir = "/tmp/" + str(random.randint(1, sys.maxint))
	output_file_name = "/tmp/" + str(random.randint(1, sys.maxint))
	try:
		data = {
				"app_name" : request.form["app_name"],
				"language" : request.form["language"],
				"company_name" : request.form["company_name"],
				"app_package_name_prefix" : request.form["app_package_name_prefix"],
				"app_class_prefix": request.form["app_class_prefix"],
				"application_id" : request.form["application_id"],
				"compile_sdk_version" : int(request.form["compile_sdk_version"]),
				"build_tools_version" : request.form["build_tools_version"],
				"min_sdk_version" : int(request.form["min_sdk_version"]),
				"target_sdk_version" : int(request.form["target_sdk_version"]),
				"crashlytics_api_key" : request.form["crashlytics_api_key"],
				"output_dir" : output_dir
		}

		grow.grow(data)

		shutil.make_archive(output_file_name, 'zip', output_dir)

		return send_file(output_file_name + ".zip", 
			mimetype="application/zip",
			as_attachment=True,
			attachment_filename=request.form["app_class_prefix"] + ".zip")
	finally:
		if os.path.exists(output_dir):
			shutil.rmtree(output_dir)
		if os.path.exists(output_file_name):
			os.remove(output_file_name)
Example #13
def zip_dicom(directory):
    """
    Function that zips a directory.

    :param directory: path to the directory to zip
     :type directory: str

    :return: archive -> path to the created zip file
     :rtype: str

    """

    archive = directory + '.zip'

    if not os.listdir(directory):
        sys.exit(
            "The directory " + directory + " is empty and will not be zipped.")
    else:
        shutil.make_archive(directory, 'zip', directory)

    if os.path.exists(archive):
        shutil.rmtree(directory)
        return archive
    else:
        sys.exit(archive + " could not be created.")
Example #14
    def make_archive(self, path, root_dir=None, use_git=False, prefix=None):
        """Creates a tar.gz archive.

        Args:
            path (str): Path to the archive to create without extension.
            root_dir (str): Root directory from which the archive should be
                created.
        """
        if prefix:
            prefix += '/'
        if use_git:
            if root_dir:
                raise ConfigurationError("archiving with root dir with git not implemented")
            cmd = ['git', 'archive', '-o', path + '.tar.gz']
            cmd.append('--prefix=' + prefix)
            cmd.extend(['-9', 'HEAD'])
            self.run_cmd(cmd)
        else:
            # TODO: Check that root_dir is a subdirectory of the workspace
            # (this all does not work if it is the workspace itself).
            if not os.path.isabs(root_dir):
                root_dir = os.path.join(self._cwd.cwd, root_dir)
            org_dir = root_dir
            root_dir, base_dir = os.path.split(root_dir)
            # TODO: Instead of renaming the directory twice, we could use
            # tarfile directly to create the archive.
            if prefix:
                base_dir = prefix
                shutil.move(org_dir, os.path.join(root_dir, prefix))
            if not base_dir:
                base_dir = '.'
            shutil.make_archive(path, 'gztar', root_dir, base_dir)
            if prefix:
                shutil.move(os.path.join(root_dir, prefix), org_dir)
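The TODO above could be addressed with the tarfile module, which can store a directory under an arbitrary prefix via arcname instead of renaming it on disk. A minimal sketch under that assumption (the helper name is illustrative, not part of the original class):

import os
import tarfile

def make_prefixed_tarball(path, root_dir, base_dir, prefix=None):
    # Store root_dir/base_dir in the archive under prefix/ (or base_dir/)
    # without touching anything on disk.
    with tarfile.open(path + '.tar.gz', 'w:gz') as tar:
        tar.add(os.path.join(root_dir, base_dir), arcname=prefix or base_dir)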
Example #15
 def ziptex(self): 
     localskeletond = os.path.join(WD,'skeleton')
     try:
         shutil.rmtree(localskeletond)
     except OSError:
         pass
     shutil.copytree(lspskeletond, localskeletond)
     os.chdir(localskeletond)
     localcommands = codecs.open('localcommands.tex','a', encoding='utf-8')
     localpackages = codecs.open('localpackages.tex','a', encoding='utf-8')
     localcounters = codecs.open('localcounters.tex','a', encoding='utf-8') 
     content =   codecs.open('chapters/filename.tex','w', encoding='utf-8') 
     contentorig =   codecs.open('chapters/filenameorig.tex','w', encoding='utf-8')  
     localcommands.write(self.commands)
     localcommands.write(self.environments)
     localcommands.close()
     localpackages.write(self.packages)
     localpackages.close()
     localcounters.write(self.counters)
     localcounters.close()
     content.write(self.modtext)
     content.close()
     contentorig.write(self.text)
     contentorig.close()
     os.chdir(WD)
     self.zipfn = str(uuid.uuid4())
     shutil.make_archive(self.zipfn, 'zip', localskeletond)
     shutil.move(self.zipfn+'.zip',wwwdir) 
Example #16
    def display_data(self, trans, data, preview=False, filename=None,
                     to_ext=None, size=None, offset=None, **kwd):
        """Documented as an old display method, but still gets called via tests etc
        This allows us to format the data shown in the central pane via the "eye" icon.
        """
        if not preview:
            trans.response.set_content_type(data.get_mime())
            trans.log_event("Display dataset id: %s" % str(data.id))

            # the target directory name
            neo4j_dir_name = '/dataset_{}_files/neo4jdb'.format(data.dataset.id)
            dir_name = str(os.path.dirname(trans.app.object_store.get_filename(data.dataset))) + neo4j_dir_name

            # generate unique filename for this dataset
            fname = ''.join(c if c in FILENAME_VALID_CHARS else '_' for c in data.name)[0:150]

            # zip the target directory (dir_name) using the fname
            shutil.make_archive(fname, 'zip', dir_name)
            download_zip = fname + '.zip'

            # setup headers for the download
            trans.response.headers['Content-Length'] = int(os.stat(download_zip).st_size)
            # force octet-stream so Safari doesn't append mime extensions to filename
            trans.response.set_content_type("application/octet-stream")
            trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy%s-[%s].%s"' % \
                                                            (data.hid, download_zip, "zip")
            return open(download_zip)
Example #17
def config():
	logger = logging.getLogger()
	logger.debug('Config Backup start:')

	archive_name = 'backup_config'
	tmp_dir = '%s/%s' % (backup_path, archive_name)

	if os.path.exists(tmp_dir):
		logger.debug(' + Remove old temp dir')
		shutil.rmtree(tmp_dir)

	logger.debug(' + List all packages')
	lines = []
	for package in ubik_api.db.get_installed():
		lines.append(package.name)
		lines.append('\n')
	lines = lines[:-1]
	f = open('%s/etc/.packages' % home_path, 'w')
	f.writelines(lines)
	f.close()

	logger.debug(' + Copy config files')
	shutil.copytree('%s/etc' % home_path, '%s/' % tmp_dir)
	
	logger.debug(' + Make archive')
	shutil.make_archive('%s/%s' % (backup_path, archive_name), 'zip', tmp_dir)	

	logger.debug(' + Remove temp dir')
	shutil.rmtree(tmp_dir)

	logger.debug('Config Backup finished')
Example #18
    def run(self):
        update_version_py()
        # unless we update this, the sdist command will keep using the old version
        version = get_version()
        update_ansible_version(version)
        dest_dir = "ansible/playbooks/files"
        extra_files = ["ansible/irma-ansible/irma-ansible.py"]
        for subproj in ["common", "frontend", "brain", "probe"]:
            subproj_archive = "{}-{}".format(subproj, version)
            subproj_archive_path = os.path.join(dest_dir, subproj_archive)
            print("Packaging {}.zip".format(subproj_archive))
            extra_files.append(subproj_archive_path + ".zip")
            shutil.make_archive(subproj_archive_path, "zip", root_dir=subproj)

        webui_archive = "core-webui-{}".format(version)
        webui_archive_path = os.path.join(dest_dir, webui_archive)
        print("Packaging {}.zip".format(webui_archive))
        # webui dist is generated in
        # root_folder/frontend/web/dist
        if os.path.exists("frontend/web/dist"):
            extra_files.append(webui_archive_path + ".zip")
            shutil.make_archive(webui_archive_path, "zip",
                                base_dir="frontend/web/dist")
        self._create_manifest_git()
        self._append_manifest(extra_files)
        self.distribution.metadata.version = version
        _sdist.run(self)
        return
Example #19
def s_gen(file_path, num_samples):

    # If the file path is None, use the current directory
    if file_path is None:
        file_path = os.getcwd()

    output('Looking for .aif files in: {}'.format(file_path))
    files = get_files(file_path)

    if len(files) == 0:
        output_err('Could not find any files to generate samples from!')
        output_err('Try running the program from a directory that contains .aif files.')
        output_err('Exiting ...')
        sys.exit(1)

    output('Generating samples from: {}'.format(files))
    output("Creating 'samples' directory")
    sample_folder = 'samples'
    os.mkdir(sample_folder)

    for f in files:
        generate_samples(sample_folder, f, num_samples)

    output('Compressing samples into {}.zip'.format(sample_folder))

    # Zip the sample directory
    shutil.make_archive(sample_folder, 'zip', sample_folder)

    output('Deleting temporary sample directory')

    # Delete sample folder
    shutil.rmtree(sample_folder)
Example #20
	def ZipTests(self):
		self.__maintain_folders()
		folder = 'Test Set - '+self.folder_name
		os.chdir(self.folder_name)

		if(os.path.isdir(folder)):
			shutil.rmtree(folder,ignore_errors=True)
		if(os.path.isfile(folder+'.zip')):
			os.remove(folder+'.zip')
		os.mkdir(folder) 

		folder_escaped = pipes.quote(folder)

		if(self.ip_count==0):
			shutil.rmtree(folder,ignore_errors=True)
			os.chdir('..')
			raise Exception('Empty input and output folders')
		elif(self.ip_count!=0 and not os.path.isfile('output/output'+str(self.ip_count-1)+'.txt')):
			shutil.rmtree(folder,ignore_errors=True)
			os.chdir('..')
			raise Exception('Outputs missing for some input files')
		else:
			x1 = os.system('cp -a input '+folder_escaped)
			x2 = os.system('cp -a output '+folder_escaped)
			if(x1 or x2):
				shutil.rmtree(folder,ignore_errors=True)
				os.chdir('..')
				raise Exception('Cannot finish operation: Input/Output folder missing')
			else:
				shutil.make_archive(folder,'zip',folder)
				shutil.rmtree(folder,ignore_errors=True)
				os.chdir('..')	
Example #21
def stash_db_and_deliveries(home=False):
    """
    Dump the database and deliveries into the
    ``db_and_deliveries_stash/``-directory.

    :param home:
        Use ``home=yes`` to stash to ``~/.devilry_db_and_deliveries_stash``
        instead of ``<this dir>/db_and_deliveries_stash/``
    """
    stashdir = _get_stashdir(home)
    if exists(stashdir):
        rmtree(stashdir)
    mkdir(stashdir)

    # DB
    dbdumpfile = join(stashdir, "dbdump.sql")
    backup_db(dbdumpfile)
    _gzip_file(dbdumpfile)

    # Delivery files
    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("files.zip")
    make_archive(join(stashdir, "files"), "zip", logger=log, base_dir="deliverystorehier")
Example #22
def build_release():
    print "build_release"

    # files in release
    FILES = ["bg.js", "load.js", "manifest.json", "css.ini",
            "maintab.html", "maintab.js", "maintab.css",
            "options.html", "options.js", "options.css",
            "config.js", "gear19.png", "gear128.png",
            "ext/beautify-css.min.js",
            "ext/beautify-html.min.js",
            "ext/beautify.min.js",
            "ext/googlecode.css",
            "ext/highlight.pack.js",
            "ext/zepto.min.js",
            ]

    # temp dir
    dir = tempfile.mkdtemp(suffix="")
    print "created", dir

    # copy files
    os.mkdir(dir+"/ext")
    for x in FILES:
        shutil.copyfile(x, dir+"/"+x)

    # make zip
    print "compressing files"
    for i, x in enumerate(os.listdir(dir)):
        print "  ", i+1, x

    shutil.make_archive("./release", "zip", dir)
    print "release.zip created"

    print "deleting", dir
    shutil.rmtree(dir)
Example #23
def zip_directory(manga, chapter_num, url):
    directory = application.config['UPLOAD_FOLDER'] + url + "/"
    filename = make_zip_filename(manga, chapter_num)
    # To make sure there is no recursive compression
    shutil.make_archive(directory + "../" + filename, "zip", directory)
    shutil.move(directory + "../" + filename + ".zip",
            directory + filename + ".zip")
Example #24
    def test_make_tarball(self):
        # creating something to tar
        root_dir, base_dir = self._create_files('')

        tmpdir2 = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(tmpdir2)
        # working with relative paths
        work_dir = os.path.dirname(tmpdir2)
        rel_base_name = os.path.join(os.path.basename(tmpdir2), 'archive')

        with support.change_cwd(work_dir):
            base_name = os.path.abspath(rel_base_name)
            tarball = make_archive(rel_base_name, 'gztar', root_dir, '.')

        # check if the compressed tarball was created
        self.assertEqual(tarball, base_name + '.tar.gz')
        self.assertTrue(os.path.isfile(tarball))
        self.assertTrue(tarfile.is_tarfile(tarball))
        with tarfile.open(tarball, 'r:gz') as tf:
            self.assertEqual(sorted(tf.getnames()),
                             ['.', './file1', './file2',
                              './sub', './sub/file3', './sub2'])

        # trying an uncompressed one
        with support.change_cwd(work_dir):
            tarball = make_archive(rel_base_name, 'tar', root_dir, '.')
        self.assertEqual(tarball, base_name + '.tar')
        self.assertTrue(os.path.isfile(tarball))
        self.assertTrue(tarfile.is_tarfile(tarball))
        with tarfile.open(tarball, 'r') as tf:
            self.assertEqual(sorted(tf.getnames()),
                             ['.', './file1', './file2',
                              './sub', './sub/file3', './sub2'])
Example #25
    def test_tarfile_vs_tar(self):
        root_dir, base_dir = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        tarball = make_archive(base_name, 'gztar', root_dir, base_dir)

        # check if the compressed tarball was created
        self.assertEqual(tarball, base_name + '.tar.gz')
        self.assertTrue(os.path.isfile(tarball))

        # now create another tarball using `tar`
        tarball2 = os.path.join(root_dir, 'archive2.tar')
        tar_cmd = ['tar', '-cf', 'archive2.tar', base_dir]
        subprocess.check_call(tar_cmd, cwd=root_dir)

        self.assertTrue(os.path.isfile(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))

        # trying an uncompressed one
        tarball = make_archive(base_name, 'tar', root_dir, base_dir)
        self.assertEqual(tarball, base_name + '.tar')
        self.assertTrue(os.path.isfile(tarball))

        # now for a dry_run
        tarball = make_archive(base_name, 'tar', root_dir, base_dir,
                               dry_run=True)
        self.assertEqual(tarball, base_name + '.tar')
        self.assertTrue(os.path.isfile(tarball))
Example #26
def main():
  parse_command_line()
  version = get_version()

  dir_name = APP_NAME + '-' + version
  if not stable_build:
    dir_name += '-canary'
  if debug_build:
    dir_name += '-dbg'
  print(dir_name)
  out_dir = os.path.join(BUILD_DIR, dir_name)
  archive_path = out_dir + '.zip'
  delete(out_dir, archive_path)
  copy_files(SOURCE_DIR, out_dir, FILES_TO_COPY)

  background_js_files = process_manifest(out_dir, version)
  compile_js(os.path.join(out_dir, 'js', 'background.js'),
             background_js_files,
             BACKGROUND_COMPILATION_LEVEL,
             BACKGROUND_EXTERNS)
  js_files = process_index(out_dir)
  compile_js(os.path.join(out_dir, TARGET_JS),
             js_files,
             COMPILATION_LEVEL,
             JS_EXTERNS)

  print('Archiving', archive_path)
  shutil.make_archive(out_dir, 'zip',
                      root_dir=os.path.abspath(BUILD_DIR),
                      base_dir=dir_name,
                      verbose=True)
Example #27
 def domove(self, current):
     #directory
     if self.dir_list[current][2] == True:
         #clobber 
         if self.dir_list[current][3] == 'yes':
             shutil.make_archive(save_root + current + ".zip", "zip", self.dir_list[current][0] )
         else:
             #noclobber
             #get date and time
             date = datetime.datetime.now()
             date = date.isoformat().split('.')[0:-1][0].replace(':',"")
             shutil.make_archive(save_root + current + date + ".zip", "zip", self.dir_list[current][0] )
     else:
     #single file
         if self.dir_list[current][3] == 'yes':
             #clobber
             zf = zipfile.ZipFile(save_root + current + '.zip', 'w')
             zf.write(self.dir_list[current][0])
             zf.close()
         else:
             #append to zipfile 
             date = datetime.datetime.now()
             date = date.isoformat().split('.')[0:-1][0].replace(':',"")
             zf = zipfile.ZipFile(save_root + current + date+ '.zip', 'w')
             
             zf.write(self.dir_list[current][0])
             zf.close()
     return
Example #28
    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's not gid/uid support
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir, root_dir, base_name = self._create_files()
        base_name = os.path.join(self.mkdtemp(), 'archive')
        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner=owner, group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner='kjhkjhkjg', group='oihohoh')
        self.assertTrue(os.path.exists(res))
Example #29
    def _backup_linear_log(self, fs_now, current_work_dir, backup_format, backup_work_dir, backup_history):

        delete_previous_backups = False

        # Aren't we going to exceed the limit?
        max_backups = min(backup_history, MAX_BACKUPS)
        backup_contents = []

        for item in os.listdir(backup_work_dir):
            item = os.path.join(backup_work_dir, item)

            # We tally files only and assume each file must be one of ours so we can safely drop it if need be
            if os.path.isfile(item):
                backup_contents.append(item)

        len_backups = len(backup_contents)

        # It's the first backup or we're past the limit so we need a fresh prefix
        if not len_backups or len_backups >= max_backups:
            next_prefix = _first_prefix
        else:
            next_prefix = str(len_backups).zfill(len(_first_prefix))

        # Also, we need to delete previous backups if we're starting anew
        if len_backups >= max_backups:
            delete_previous_backups = True

        backup_name = '{}-{}'.format(next_prefix, fs_now)
        backup_path = os.path.join(backup_work_dir, backup_name)
        shutil.make_archive(backup_path, backup_format, current_work_dir, verbose=True, logger=self.logger)

        if delete_previous_backups:
            self._delete(backup_contents)
Example #30
def main():
# make a duplicate of an existing file
  if path.exists("textfile.txt"):
    # get the path to the file in the current directory
    src = path.realpath("textfile.txt")
    # separate the path part from the filename
    head, tail = path.split(src)
    print("path: " + head)
    print("file: " + tail)
    
    # let's make a backup copy by appending "bak" to the name
    dst = src + ".bak"
    # now use the shell to make a copy of the file
    shutil.copy(src,dst)
    
    # copy over the permissions, modification times, and other info
    shutil.copystat(src, dst)  
 
    # rename the original file
    os.rename("textfile.txt", "newfile.txt")
    
    # now put things into a ZIP archive
    root_dir,tail = path.split(src)
    shutil.make_archive("archive", "zip", root_dir)

    # more fine-grained control over ZIP files
    with ZipFile("testzip.zip","w") as newzip:
      newzip.write("newfile.txt")
      newzip.write("textfile.txt.bak")     
Example #31
def runner(dbase, run_directory, orphan_directory=None, hold_directory=None):
    """
    High-throughput calculation runner.
    
    Parameters
    ----------
    dbase : iprPy.Database
        The database to interact with.
    run_directory : str
        The path to the directory where the calculation instances to run are
        located.
    orphan_directory : str, optional
        The path for the orphan directory where incomplete calculations are
        moved.  If None (default) then will use 'orphan' at the same level as
        the run_directory.
    hold_directory : str, optional
        The path for the hold directory where tar archives that failed to be
        uploaded are moved to.  If None (default) then will use 'hold' at the
        same level as the run_directory.
    """
    # Get path to Python executable running this script
    py_exe = sys.executable
    if py_exe is None:
        py_exe = 'python'

    # Get absolute path to run_directory
    run_directory = os.path.abspath(run_directory)

    # Get original working directory
    original_dir = os.getcwd()

    # Define runner log file
    d = datetime.datetime.now()
    pid = os.getpid()

    runner_log_dir = Settings().runner_log_directory
    if not os.path.isdir(runner_log_dir):
        os.makedirs(runner_log_dir)
    log_file = os.path.join(
        runner_log_dir, '%04i-%02i-%02i-%02i-%02i-%06i-%i.log' %
        (d.year, d.month, d.day, d.minute, d.second, d.microsecond, pid))

    # Set default orphan_directory
    if orphan_directory is None:
        orphan_directory = os.path.join(os.path.dirname(run_directory),
                                        'orphan')

    # Set default hold_directory
    if hold_directory is None:
        hold_directory = os.path.join(os.path.dirname(run_directory), 'hold')

    # Start runner log file
    with open(log_file, 'a') as log:

        # Change to the run directory
        os.chdir(run_directory)

        # Initialize bidfailcount counter
        bidfailcount = 0

        # Announce the runner's pid
        print(f'Runner started with pid {pid}', flush=True)

        # flist is the running list of calculations
        flist = os.listdir(run_directory)
        while len(flist) > 0:

            # Pick a random calculation from the list
            index = random.randint(0, len(flist) - 1)
            sim = flist[index]

            # Submit a bid and check if it succeeded
            if bid(sim):

                # Reset bidfailcount
                bidfailcount = 0

                # Move to simulation directory
                os.chdir(sim)
                log.write('%s\n' % sim)

                # Check that the calculation has calc_*.py, calc_*.in and
                # record in the database
                try:
                    record = dbase.get_record(name=sim)
                    calc_py = get_file('calc_*.py')
                    calc_in = get_file('calc_*.in')

                # Pass ConnectionErrors forward killing runner
                except requests.ConnectionError as e:
                    raise requests.ConnectionError(e)

                # If not complete, zip and move to the orphan directory
                except:
                    log.write(
                        'Incomplete simulation: moved to orphan directory\n\n')
                    os.chdir(run_directory)
                    if not os.path.isdir(orphan_directory):
                        os.makedirs(orphan_directory)
                    shutil.make_archive(os.path.join(orphan_directory, sim),
                                        'gztar',
                                        root_dir=run_directory,
                                        base_dir=sim)
                    removecalc(os.path.join(run_directory, sim))
                    flist = os.listdir(run_directory)
                    continue

                # Check if any files in the calculation folder are incomplete
                # records
                error_flag = False
                ready_flag = True

                for fname in glob.iglob('*'):
                    parent_sim, ext = os.path.splitext(os.path.basename(fname))
                    if ext in ('.json', '.xml'):
                        parent = DM(fname)
                        try:
                            status = parent.find('status')

                            # Check parent record in database to see if it has completed
                            if status == 'not calculated':
                                parent_record = dbase.get_record(
                                    name=parent_sim)
                                try:
                                    status = parent_record.content.find(
                                        'status')

                                    # Mark flag if still incomplete
                                    if status == 'not calculated':
                                        ready_flag = False
                                        break

                                    # Skip if parent calculation failed
                                    elif status == 'error':
                                        with open(os.path.basename(fname),
                                                  'w') as f:
                                            parent_record.content.json(
                                                fp=f, indent=4)
                                        error_flag = True
                                        error_message = 'parent calculation issued an error'
                                        break

                                    # Ignore if unknown status
                                    else:
                                        raise ValueError('unknown status')

                                # Copy parent record to calculation folder if it is now complete
                                except:
                                    with open(os.path.basename(fname),
                                              'w') as f:
                                        parent_record.content.json(fp=f,
                                                                   indent=4)
                                    log.write(
                                        'parent %s copied to sim folder\n' %
                                        parent_sim)

                            # skip if parent calculation failed
                            elif status == 'error':
                                error_flag = True
                                error_message = 'parent calculation issued an error'
                                break
                        except:
                            continue

                # Handle calculations that have unfinished parents
                if not ready_flag:
                    bid_files = glob.glob('*.bid')
                    os.chdir(run_directory)
                    for bid_file in bid_files:
                        os.remove(os.path.join(sim, bid_file))
                    flist = [parent_sim]
                    log.write('parent %s not ready\n\n' % parent_sim)
                    continue

                # Run the calculation
                try:
                    assert not error_flag, error_message
                    run = subprocess.Popen([py_exe, calc_py, calc_in, sim],
                                           stderr=subprocess.PIPE)
                    error_message = run.stderr.read()
                    try:
                        error_message = error_message.decode('UTF-8')
                    except:
                        pass

                    # Load results.json
                    try:
                        model = DM('results.json')

                    # Throw errors if no results.json
                    except:
                        error_flag = True
                    assert not error_flag, error_message
                    log.write('sim calculated successfully\n')

                # Catch any errors and build results.json
                except:
                    model = record.content
                    keys = list(model.keys())
                    record_type = keys[0]
                    model[record_type]['status'] = 'error'
                    model[record_type]['error'] = str(sys.exc_info()[1])
                    with open('results.json', 'w') as f:
                        model.json(fp=f, indent=4)
                    log.write('error: %s\n' % model[record_type]['error'])

                # Update record
                tries = 0
                while tries < 10:
                    try:
                        dbase.update_record(content=model, name=sim)
                        break
                    except:
                        tries += 1
                if tries == 10:
                    os.chdir(run_directory)
                    log.write('failed to update record\n')
                else:
                    # Archive calculation and add to database or hold_directory
                    try:
                        dbase.add_tar(root_dir=run_directory, name=sim)
                    except:
                        log.write('failed to upload archive\n')
                        if not os.path.isdir(hold_directory):
                            os.makedirs(hold_directory)
                        shutil.move(sim + '.tar.gz', hold_directory)
                    os.chdir(run_directory)
                    removecalc(os.path.join(run_directory, sim))
                log.write('\n')

            # Else if bid(sim) failed
            else:
                bidfailcount += 1

                # Stop unproductive worker after 10 consecutive bid fails
                if bidfailcount > 10:
                    print("Didn't find an open simulation", flush=True)
                    break

                # Pause for 10 seconds before trying again
                time.sleep(10)

            # Regenerate flist and flush log file
            flist = os.listdir(run_directory)
            log.flush()
            os.fsync(log.fileno())
        print('No simulations left to run', flush=True)
        os.chdir(original_dir)
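The bid function used by the runner is not shown. Judging from the '*.bid' cleanup above, it implements a simple file-based lock on a calculation folder; a hypothetical sketch, not the original implementation:

import os
import time

def bid(sim):
    # Drop a <pid>.bid marker into the calculation folder, wait briefly,
    # then win only if our pid is the lowest bid present.
    pid = os.getpid()
    try:
        with open(os.path.join(sim, '%i.bid' % pid), 'w') as f:
            f.write('bid for pid: %i' % pid)
        time.sleep(1)
        bids = [int(os.path.splitext(b)[0])
                for b in os.listdir(sim) if b.endswith('.bid')]
        return min(bids) == pid
    except OSError:
        return False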
def upload_site(site_dir, s3, to_bucket, to_key):
    with tempfile.NamedTemporaryFile() as tmp_file:
        site_zip_file = shutil.make_archive(tmp_file.name, 'zip', site_dir)
        s3.upload_file(site_zip_file, to_bucket, to_key)
Example #33
def zip_folders(folders):
    print('Zipping...')
    for folder in folders:
        shutil.make_archive('{}'.format(folder), 'zip', '{}'.format(folder))
        shutil.rmtree('{}'.format(folder))
Example #34
"""

import shutil, tempfile

# Config
ignore_patterns = (
    "__pycache__",
    "*.pyc",
    ".git",
    ".gitignore",
    ".vscode",
    "startup.blend1",
    "TODO.md",
    "dev",
    "docs",
    "verification",
)
output_filename = "blenderfds"

# Make
with tempfile.TemporaryDirectory() as tmpdirname:
    shutil.copytree(
        "..",
        tmpdirname + "/blenderfds",
        symlinks=False,
        ignore=shutil.ignore_patterns(*ignore_patterns),
    )
    shutil.make_archive(output_filename, "zip", tmpdirname)

print(f"Done: {output_filename}.zip")
        answer_dict["task1"][language][word] = task_1_answer
        answer_dict["task2"][language][word] = task_2_answer

##############################################
# Evaluate the answer and save it ############
##############################################

pp = pprint.PrettyPrinter(indent=4)
pp.pprint(answer_dict)

for task in answer_dict:
    for language in answer_dict[task]:

        task_path = f"{paths['answer_path_main']}{task}/"
        os.makedirs(task_path, exist_ok=True)

        with open(f"{task_path}{language}.txt", 'w', encoding='utf-8') as f_out:
            for word in answer_dict[task][language]:
                answer = int(answer_dict[task][language][word]) if task == "task1" else \
                    float(answer_dict[task][language][word])
                if language == "english":
                    for t_word in english_target_words:
                        if word == t_word[:-3]:
                            word = t_word
                f_out.write('\t'.join((word, str(answer) + '\n')))

shutil.make_archive(paths['out_zip_path_main'], 'zip', paths['in_zip_path_main'])

print("done")
                    if match2:
                        path = rename_to + match2.group(
                            1) + "/" + match2.group(2) + "/"
                    if match and match2:
                        new_filename = new_line + ".pdf"
                        os.makedirs(os.path.dirname(path), exist_ok=True)
                        shutil.copy(fullpath, path + new_filename)


def two_d(addLeading):
    addLeading = str(addLeading)
    if len(addLeading) < 2:
        addLeading = "0" + addLeading
    return addLeading


if __name__ == '__main__':
    extract_text()
    currentMonth = datetime.now().month - 1
    currentYear = datetime.now().year
    #currentMonth = two_d(currentMonth)
    zip_dir = str(currentYear) + "/" + str(currentMonth)
    print(zip_dir)
    zip_dir = rename_to + zip_dir
    zip_file = str(currentMonth) + ".zip"
    cwd = os.getcwd()
    print(cwd)
    shutil.make_archive(str(currentMonth), 'zip', zip_dir)
    shutil.move(os.path.join(cwd, zip_file), os.path.join(rename_to, zip_file))
def perform():
    if request.method == 'POST':
        try:
            temdir = tempfile.gettempdir()
            template_file = request.files['template']
            template_file_name = secure_filename(template_file.filename)
            template_file.save(os.path.join(temdir, template_file_name))

            font_size = request.form['fontsize']

            name_file = request.files['csv']
            name_file_name = secure_filename(name_file.filename)
            name_file.save(os.path.join(temdir, name_file_name))

            # font color conversion to rgb format
            color_code_hex = request.form['fontcolor']
            value = color_code_hex.lstrip('#')
            lv = len(value)
            color_code_rgb = tuple(
                int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3))

            # conversion from rgb to bgr for cv2 processing
            color_code_bgr = color_code_rgb[::-1]

            font = cv2.FONT_HERSHEY_COMPLEX
            fontScale = int(font_size)
            color = color_code_bgr
            thickness = 5

            # form namesList on file type of names file
            fileFormat = name_file_name.split('.')[-1]  # get the file extension
            if fileFormat == 'txt':
                names = open(f'/tmp/{name_file_name}')  # names uploaded
                namesList = [name[:-1] for name in names]
            elif fileFormat == 'csv':
                import pandas as pd
                df = pd.read_csv(f'/tmp/{name_file_name}')
                namesList = df[
                    'Name']  # set the default value for name column in csv file
            else:
                raise Exception(
                    f'Names file format not supported: {fileFormat}')

            for name in namesList:
                text = name.upper()
                img = cv2.imread(f'/tmp/{template_file_name}')

                cert_len = img.shape[1]
                cert_mid = cert_len // 2
                txtsize = cv2.getTextSize(text, font, fontScale, thickness)
                txt_len = txtsize[0][0]
                if (txt_len % 2 == 0):
                    mid = txt_len // 2
                else:
                    mid = (txt_len // 2) + 1

                org = (cert_mid - mid, 560)

                img1 = cv2.putText(img, text, org, font, fontScale, color,
                                   thickness, cv2.LINE_AA)
                path = r"/tmp/"  #path to save the certificates
                cv2.imwrite(os.path.join(path, text + ".png"), img1)
            # compress everything into a zip for download once all certificates are written
            shutil.make_archive('./static/certificates', 'zip', '/tmp')
            return render_template(
                'home.html', c='certificates.zip',
                con='block')  # returning cerificates in zip to download
        except Exception as e:
            print(e)
    return render_template('home.html', certificates='', con='none')
def execution():
    execution_instance = {}

    if 'config_uuid' in request.form:
        config_uuid = request.form['config_uuid']
        config_entry = persistence['configuration'][config_uuid]
        execution_instance['config'] = config_entry

        # Create UUID for execution
        execution_uuid = str(uuid.uuid4())
        execution_instance['uuid'] = execution_uuid

        # Execution folder will be below configuration folder
        execution_path = os.path.join(storage_path, config_uuid,
                                      execution_uuid)

        # '-n' cli mode (mandatory)
        # '-e' generate report dashboard
        # '-o' output folder for report dashboard
        # '-j' run log file name
        # '-l' test sample results filename
        jmeter_cli_call = [
            jmeter_executable, '-n', '-e', '-o ' +
            os.path.join(execution_path, test_results_dashboard_folder_name),
            '-j ' + os.path.join(execution_path, test_run_log_filename),
            '-l ' + os.path.join(execution_path, test_sample_results_filename)
        ]

        # Possible extensions
        # * -r -R remote (server mode)
        # * -H -P Proxy
        # * many more ( jmeter -? )
        # * Parameter string for -Dpropkey=propvalue

        if 'host' in config_entry:
            jmeter_target_host = config_entry['host']
            current_app.logger.info(f'Setting host to {jmeter_target_host}')
            jmeter_cli_call.append('-JHOST=' + jmeter_target_host)

        if 'port' in config_entry:
            jmeter_target_port = config_entry['port']
            current_app.logger.info(f'Setting port to {jmeter_target_port}')
            jmeter_cli_call.append('-JPORT=' + jmeter_target_port)

        if 'test_plan_path' in config_entry:
            os.mkdir(execution_path)
            jmeter_cli_call.append(
                '-t ' +
                os.path.join(storage_path, config_entry['test_plan_path']))

            if 'properties_path' in config_entry:
                jmeter_cli_call.append(
                    '-p ' + os.path.join(config_entry['properties_path']))

        else:
            return "Configuration does not contain a test plan.", status.HTTP_404_NOT_FOUND

        execution_instance['cli_call'] = jmeter_cli_call

        current_app.logger.info(f'JMeter CLI call: {str(jmeter_cli_call)}')

        execution_start = datetime.datetime.now()
        os.system(' '.join(jmeter_cli_call))
        execution_end = datetime.datetime.now()

        execution_instance['execution_start'] = execution_start
        execution_instance['execution_end'] = execution_end

        with tempfile.NamedTemporaryFile() as tf:
            tmp_zip_file = shutil.make_archive(tf.name, 'zip', execution_path)
            shutil.copy2(tmp_zip_file,
                         os.path.join(execution_path, result_zip_file_name))

        persistence['execution'][execution_uuid] = execution_instance

        return jsonify(execution_instance), status.HTTP_201_CREATED

    else:
        return "No configuration with that ID found.", jsonify(
            persistence), status.HTTP_404_NOT_FOUND
Example #39
    sys.stdout.write("Au revoir ! \n")
    sys.exit(0)


sys.stdout.write("saisissez q pour quitter ou Ctr c \n")

while True:
    sys.stdout.write("Chemin du fichier à sauvegarder: \n")
    src = input()
    sys.stdout.write("Répertoire où stocker le fichier: \n")
    dst = input()
    signal.signal(signal.SIGINT, leave)
    if src == 'q' or dst == 'q':
        leave()
    isHere = False
    shutil.make_archive(src, 'gztar', src)

    if not os.path.exists(dst):
        os.makedirs(dst)  # make sure the destination directory exists
    try:
        if isHere == False:
            sys.stdout.write("Vérification du fichier data \n")
            for file in os.listdir(dst):
                if file == "folder_test.tar.gz":
                    isHere = True
                    sys.stdout.write("Comparing the two backups\n")
                    with gzip.open(os.path.join(dst, file)) as old_f:
                        old_save = old_f.read()
                    with gzip.open(src + ".tar.gz") as new_f:
                        new_save = new_f.read()
                    if new_save != old_save:
Example #40
def copy_issues(issues_directory, upload_directory, tag_name):
    """Copies issue files (only top 5 smallest files from each folder) into the upload_directory

    Args:
        issues_directory (string): Issues directory
        upload_directory (string): Upload directory
        tag_name (string): Tag name for zip file

    """
    # Create upload_directory
    if not os.path.isdir(upload_directory):
        os.makedirs(upload_directory)

    # Create temp directory to copy all issues to upload. We don't want to create a sub-folder
    # inside issues_directory because that will also get included twice.
    with TempDir() as prep_directory:

        def sorter_by_size(pair):
            """ Sorts the pair (file_name, file_size) tuple in ascending order of file_size

            Args:
                pair ([(string, int)]): List of tuple of file_name, file_size
            """
            pair.sort(key=lambda x: x[1], reverse=False)
            return pair

        summary_of_summary = []
        for file_path, dirs, files in walk(issues_directory, topdown=True):
            filename_with_size = []
            # Credit: https://stackoverflow.com/a/19859907
            dirs[:] = [d for d in dirs]
            for name in files:
                if not name.lower().endswith(".g.cs"):
                    continue

                curr_file_path = path.join(file_path, name)
                size = getsize(curr_file_path)
                filename_with_size.append((curr_file_path, size))

            if len(filename_with_size) == 0:
                continue
            summary_file = path.join(file_path, "summary.txt")
            summary_of_summary.append("**** " + file_path)
            with open(summary_file, 'r') as sf:
                summary_of_summary.append(sf.read())
            filename_with_size.append(
                (summary_file, 0))  # always copy summary.txt

            # Copy at most 5 files from each bucket (plus summary.txt)
            sorted_files = [
                f[0] for f in sorter_by_size(filename_with_size)[:6]
            ]
            print('### Copying below files from {0} to {1}:'.format(
                issues_directory, prep_directory))
            print('')
            print(os.linesep.join(sorted_files))
            for src_file in sorted_files:
                dst_file = src_file.replace(issues_directory, prep_directory)
                dst_directory = path.dirname(dst_file)
                if not os.path.exists(dst_directory):
                    os.makedirs(dst_directory)
                try:
                    shutil.copy2(src_file, dst_file)
                except PermissionError as pe_error:
                    print('Ignoring PermissionError: {0}'.format(pe_error))

        issues_summary_file_name = "issues-summary-{}.txt".format(tag_name)
        print("Creating {} in {}".format(issues_summary_file_name,
                                         upload_directory))
        issues_summary_file = os.path.join(upload_directory,
                                           issues_summary_file_name)
        with open(issues_summary_file, 'w') as sf:
            sf.write(os.linesep.join(summary_of_summary))

        # Also copy the issues-summary inside zip folder
        dst_issue_summary_file = os.path.join(prep_directory,
                                              issues_summary_file_name)
        try:
            shutil.copy2(issues_summary_file, dst_issue_summary_file)
        except PermissionError as pe_error:
            print('Ignoring PermissionError: {0}'.format(pe_error))

        # Zip compress the files we will upload
        zip_path = os.path.join(prep_directory, "AllIssues-" + tag_name)
        print("Creating archive: " + zip_path)
        shutil.make_archive(zip_path, 'zip', prep_directory)

        zip_path += ".zip"
        dst_zip_path = os.path.join(upload_directory,
                                    "AllIssues-" + tag_name + ".zip")
        print("Copying {} to {}".format(zip_path, dst_zip_path))
        try:
            shutil.copy2(zip_path, dst_zip_path)
        except PermissionError as pe_error:
            print('Ignoring PermissionError: {0}'.format(pe_error))

        src_antigen_log = os.path.join(issues_directory,
                                       get_antigen_filename(tag_name))
        dst_antigen_log = os.path.join(upload_directory,
                                       get_antigen_filename(tag_name))
        print("Copying {} to {}".format(src_antigen_log, dst_antigen_log))
        try:
            shutil.copy2(src_antigen_log, dst_antigen_log)
        except PermissionError as pe_error:
            print('Ignoring PermissionError: {0}'.format(pe_error))
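
# A hypothetical invocation (the directory paths and tag below are
# illustrative only, not from the original script):
#
#     copy_issues('./issues', './upload', 'run-001')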
Example #41
0
import errno
import os
import shutil


def copy_folder(src, dst):
    try:
        shutil.copytree(src, dst)
    except OSError as exc:
        if exc.errno == errno.ENOTDIR:
            shutil.copy(src, dst)
        else:
            raise


tempPath = '../release/ARCMF.VR'
if not os.path.exists(tempPath):
    os.makedirs(tempPath)

missionPath = "{}/ARCMF.VR".format(tempPath)

copy_folder("../ARCMF.VR", missionPath)

shutil.make_archive('../release/ARCMF', 'zip', tempPath)
shutil.rmtree(tempPath)
Example #42
0
# Copy configuration
# (SRCDIR, DISTDIR and version are assumed to be defined earlier in the script)
from os import listdir
from shutil import copytree, make_archive
from jinja2 import Environment, FileSystemLoader

configurations = [
    n for n in listdir(f'{SRCDIR}/configurations') if not n.startswith('_')
]

for configuration in configurations:
    configuration_name = f'infra-quickstart-{configuration}'
    configuration_src = f'{SRCDIR}/configurations/{configuration}'
    configuration_dist = f'{DISTDIR}/{configuration_name}'
    archive_name = f'{configuration_dist}-{version}'
    manifests_src = f'{SRCDIR}/manifests'
    manifests_dist = f'{configuration_dist}/manifests'
    cicd_src = f'{SRCDIR}/ci-cd'
    cicd_dist = f'{configuration_dist}/ci-cd'

    copytree(configuration_src, configuration_dist)
    copytree(manifests_src, manifests_dist)
    copytree(cicd_src, cicd_dist)

    # Replace templated variable with version in clusters.tf
    jinja = Environment(loader=FileSystemLoader(configuration_dist))
    template = jinja.get_template('clusters.tf')
    data = template.render(version=version)

    with open(f'{configuration_dist}/clusters.tf', 'w') as f:
        f.write(data)
        # always include newline at end of file
        f.write('\n')

    make_archive(archive_name, 'zip', f'{DISTDIR}', configuration_name)
Example #43
0
            shutil.copyfile(currentPath, tempPath)

print(">>> All merges completed. Generating necessary files. <<<")

## Simple stuff: just get all the pack names and put them in pack.mcmeta
Packs = getRelevantPacks("..\\..\\pack.mcmeta")

## Strip the path so only the pack name is left
Packs = [
    directory.replace("\\data\\minecraft\\..\\..\\pack.mcmeta", "")
    for directory in Packs
]

Directory = "temp\\centrosome\\"
ensure_dir(Directory)

with open(Directory + "pack.mcmeta", "w+") as file:
    Json = {"pack": {"pack_format": 5, "description": Packs}}
    Json.update(
        {"description": "Merged Packs using Cocoon's Datapack Merger."})
    file.write(json.dumps(Json, sort_keys=False, indent=4))

shutil.make_archive("centrosome", "zip", Directory)
print(">>>>> Datapacks Merged & Packed <<<<<")

# Cleanup is disabled; uncomment to actually delete the temp folders.
#shutil.rmtree('temp', ignore_errors=True)

print(">>>>>> Temp Folder Cleanup Skipped <<<<<<")
input("Press Enter to Close Terminal.")
exit(0)
Example #44
0
    temp_directory = common.generate_temp_path()
    os.makedirs(temp_directory)
    kosmos_version = get_kosmos_version(args)

    auto_build = False
    if hasattr(args, 'auto'):
        auto_build = args.auto

    version_messages = init_version_messages(args, kosmos_version)

    build_messages = modules.build(temp_directory, kosmos_version,
                                   args.command, auto_build)

    common.delete_path(args.output)

    if build_messages is not None:
        version_messages += build_messages

        if args.command == common.Command.SDSetup:
            shutil.move(temp_directory, args.output)
        else:
            shutil.make_archive(
                os.path.splitext(args.output)[0], 'zip', temp_directory)

        common.delete_path(os.path.join(os.getcwd(), 'tmp'))

        for message in version_messages:
            print(message)
    else:
        common.delete_path(os.path.join(os.getcwd(), 'tmp'))
Example #45
0
             cur_file = open(str(costume['baseLayerID'])+os.path.splitext(costume['baseLayerMD5'])[1],"wb")
             cur_file.write(cur_file_data)
             cur_file.close()
     if "sounds" in json_obj.keys():
         sound_id = 0
         for sound in json_obj['sounds']:
             cur_file_data = requests.get(getMD5(sound['md5'])).content
             cur_file = open(str(sound_id)+".wav","wb")
             cur_file.write(cur_file_data)
             cur_file.close()
             sound_id+=1
     with open("sprite.json","w") as temp:
         json.dump(json_obj,temp,indent=4)
     if not nozipsprites:
         os.chdir(backpack_folder)
         make_archive("Sprite - " + item['name'],'zip',"Sprite - " + item['name'])
         os.rename("Sprite - " + item['name'] + '.zip',"Sprite - " + item['name'] + '.sprite2')
         rmtree("Sprite - " + item['name'])
         #os.system("zip -r 'Sprite - " + item['name'] + ".zip'
 elif item['type'] == "image":
     if verbose:
         print("Costume - Name:",item['name'])
     if not overwrite:
         if user_input:
             os.chdir(user_mkdir("Costume - " + item['name']))
         elif normal:
             os.chdir(safe_mkdir("Costume - " + item['name']))
     else:
         os.chdir(just_mkdir("Costume - " + item['name']))
         
     with open(item['name']+os.path.splitext(item['md5'])[1],"wb") as temp:
Example #46
0
                                            known_lines.append(char_line)
                                            append_to_file(temp_file,line)
                                   if time.strptime(line_date['Date'],'%Y-%m-%d %H:%M:%S')>time.strptime(range_stop,'%Y-%m-%d %H:%M:%S'):
                                        break
                                if line_date['Error']!=None and start_found==True:
                                    log_file_to_save.write(line)
                                    if char_line not in known_lines:
                                        known_lines.append(char_line)
                                        append_to_file(temp_file, line)
                    # Save statistics and close file
                    log_stat_info['NumberOfLines']=len(known_lines)
                    statistics_list.append(log_stat_info)
                    log_file_to_save.close()


    not_relevant_lines=['### '+item['Log']+' ###' for item in statistics_list if item['NumberOfLines']==0]
    statistics_list=[item for item in statistics_list if item['NumberOfLines']!=0]
    print_in_color('Statistics - number of lines per log', 'yellow')
    print_list(statistics_list)
    print_in_color('Skipped logs (LogTool was not able to detect timestamps in log content) are:', 'yellow')
    print_list(skipped_logs)
    append_to_file(result_file,'### Statistics: Number of lines per file:')
    write_list_to_file(result_file,statistics_list,False)
    for line in open(temp_file, 'r').readlines():
        if line.strip() not in not_relevant_lines:
            append_to_file(result_file,line)
    os.remove(temp_file)
    exec_command_line_command('gzip '+result_file)
    shutil.make_archive(result_dir, 'zip', result_dir)
    print('Execution time:'+str(time.time()-start_time))
    print('SUCCESS!!!')
Example #47
0
    targetName = 'pros.exe'
else:
    targetName = 'pros'

setup(name='pros-cli',
      version=open('version').read().strip(),
      packages=modules,
      url='https://github.com/purduesigbots/pros-cli',
      license='MPL-2.0',
      author='Purdue ACM Sigbots',
      author_email='*****@*****.**',
      description='Command Line Interface for managing PROS projects',
      options={"build_exe": build_exe_options},
      install_requires=install_reqs,
      executables=[Executable('proscli/main.py', targetName=targetName)])

if sys.argv[1] == 'build_exe':
    import py_compile
    import distutils.util
    build_dir = './build/exe.{}-{}.{}'.format(distutils.util.get_platform(),
                                              sys.version_info[0],
                                              sys.version_info[1])
    py_compile.compile('./prosconductor/providers/githubreleases.py',
                       cfile='{}/githubreleases.pyc'.format(build_dir))
    import shutil
    import platform
    shutil.make_archive(
        'pros_cli-{}-win-{}'.format(
            open('version').read().strip(),
            platform.architecture()[0]), 'zip', build_dir, '.')
Example #48
0
def main(cluster, runtime, mail, username, password):
    for script in ['test-functional.py', 'test-restapi-get.py']:
        if not os.path.isfile(script):
            print '{} is not in current work directory {}'.format(script, os.getcwd())
            return
        
    CPU_COMMAND = "cores=$((`grep -c ^processor /proc/cpuinfo`-1)) && for i in `seq 1 $cores`; do while : ; do : ; done & done && sleep {} && for i in `seq 1 $cores`; do kill %$i; done".format(30)
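    # (The command above spins up busy loops on all but one CPU core, lets them
    # run for 30 seconds, then kills the loops.)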

    # 3 test batches currently
    commands = [
        [
            'test-functional.py {} --category diag-pingpong-tournament --timeout 2000 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
        ],
        [
            'test-functional.py {} --category diag-pingpong-parallel --timeout 2000 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
        ],
        [
            'test-functional.py {} --category clusrun --command "echo -n test" --result "test" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
        ],
        [
            'test-functional.py {} --category clusrun --command "ping localhost" --cancel 10 --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "ping localhost" --cancel 30 --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "ping localhost" --cancel 60 --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "sleep 10" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "sleep 30" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "sleep 60" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "echo -n test" --result "test" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "whoami" --result "root\\n" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category clusrun --command "hostname" --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category diag-pingpong-tournament --timeout 2000 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category diag-pingpong-parallel --timeout 2000 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category diag-pingpong-tournament --cancel 10 --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-functional.py {} --category diag-pingpong-parallel --cancel 30 --timeout 200 --continuous {} --username {} --password {}'.format(cluster, runtime, username, password),
            'test-restapi-get.py {} --continuous {}'.format(cluster, runtime, username, password)
        ],
    ]

    startTime = formatdate(localtime=True)

    # adapt to different OS
    runOnWindows = ''
    runOnLinux = 'python '
    if os.name == 'nt':
        prefix = runOnWindows
    else:
        prefix = runOnLinux
    commands = [[prefix + command for command in batch] for batch in commands]

    # create log directory
    logDir = '{}/test_logs/{}'.format(os.getcwd(), time.time())
    os.makedirs(logDir)
    logs = [['batch{}-thread{}.log'.format(j, i) for i in range(len(commands[j]))] for j in range(len(commands))]

    # start and wait test threads per batch
    for i in range(len(commands)):
        threads = [subprocess.Popen(commands[i][j], shell = True, stdout = open('{}/{}'.format(logDir, logs[i][j]), 'w'), stderr = subprocess.STDOUT) for j in range(len(commands[i]))]
        wait = [thread.wait() for thread in threads]

    # get the results from logs
    results = {}
    for log in os.listdir(logDir):
        with open('{}/{}'.format(logDir, log), 'r') as f:
            result = f.readlines()[-1]
        results[log] = result

    endTime = formatdate(localtime=True)
    
    mailBody = '<h4>Time:</h4>' + '<b>{}</b> &nbsp;&nbsp; - &nbsp;&nbsp; <b>{}</b>'.format(startTime, endTime) \
             + '<h4><br/>Results:</h4>' + '<br/><br/>'.join(['<br/>'.join(['{}: {}'.format(log, results[log]) for log in batch]) for batch in logs]) \
             + '<h4><br/>Details:</h4>' + '<br/><br/>'.join(['<br/>'.join(["<b>Log file</b>: {}".format(logs[i][j]), \
                                                                           "<b>Command</b>: {}".format(commands[i][j]), \
                                                                           "<b>Result</b>: {}".format(results[logs[i][j]])]) for i in range(len(commands)) for j in range(len(commands[i]))])
    if mail:
        # send notification mail
        shutil.make_archive(logDir, 'zip', logDir)
        with open(logDir+'.zip', 'rb') as f:
            attachment = MIMEApplication(f.read())
        attachment['Content-Disposition'] = 'attachment; filename="logs.zip"'
        sender = mail['Sender']
        to = mail['To']
        cc = mail['Cc']
        receivers = to + cc
        message = MIMEMultipart()
        message['From'] = Header(socket.gethostname(), 'utf-8')
        message['To'] = COMMASPACE.join(to)
        message['Cc'] = COMMASPACE.join(cc)
        message['Subject'] = 'Continuous functional test result for cluster {}'.format(cluster)
        message.attach(MIMEText(mailBody, 'html'))
        message.attach(attachment)
        smtp = smtplib.SMTP(mail['SmtpServer'])
        smtp.starttls()
        smtp.ehlo()
        smtp.login(mail['UserName'], mail['Password'])
        smtp.sendmail(sender, receivers, message.as_string())
    else:
        with open(logDir+'.html', 'w') as f:
            f.write(mailBody)
Example #49
0
 def _create_zip(self, src, dest, name):
     """Zips the Go package in src named 'name' into dest."""
     shutil.make_archive(os.path.join(dest, name), 'zip', root_dir=src)
def update_pf(Data, pc, tc):

    pf, projects = model.retrieve_pf()

    pf[4] = "   <title>" + Data[0] + "</title>"
    pf[32] = "      <a class=\"navbar-brand js-scroll\" href=\"#page-top\">" + Data[
        0] + "</a>"
    pf[68] = "          <h1 class=\"intro-title mb-4\">" + Data[0] + "</h1>"
    pf[69] = "          <p class=\"intro-subtitle\"><span class=\"text-slider-items\">" + Data[
        1] + "," + Data[2] + "," + Data[3] + "," + Data[
            4] + "</span><strong class=\"text-slider\"></strong></p>"
    pf[91] = "                      <p><span class=\"title-s\">Name: </span> <span>" + Data[
        0] + "</span></p>"
    pf[92] = "                          <p><span class=\"title-s\">Profile: </span> <span>" + Data[
        1] + "</span></p>"
    pf[93] = "                          <p><span class=\"title-s\">Email: </span> <span>" + Data[
        5] + "</span></p>"
    pf[94] = "                          <p><span class=\"title-s\">Phone: </span> <span>" + Data[
        6] + "</span></p>"
    pf[100] = "                  <span>" + Data[
        7] + "</span> <span class=\"pull-right\">" + Data[8] + "%</span>"
    pf[105] = "                  <span>" + Data[
        9] + "</span> <span class=\"pull-right\">" + Data[10] + "%</span>"
    pf[110] = "                  <span>" + Data[
        11] + "</span> <span class=\"pull-right\">" + Data[12] + "%</span>"
    pf[115] = "                  <span>" + Data[
        13] + "</span> <span class=\"pull-right\">" + Data[14] + "%</span>"
    pf[130] = Data[15]
    pf[163] = "              <h2 class=\"s-title\">" + Data[16] + "</h2>"
    pf[160] = "              <span class=\"ico-circle\"><i class=" + Data[
        17] + "></i></span>"
    pf[165] = Data[18]
    pf[176] = "              <h2 class=\"s-title\">" + Data[19] + "</h2>"
    pf[178] = "              <span class=\"ico-circle\"><i class=" + Data[
        20] + "></i></span>"
    pf[173] = Data[21]
    pf[189] = "              <h2 class=\"s-title\">" + Data[22] + "</h2>"
    pf[186] = "              <span class=\"ico-circle\"><i class=" + Data[
        23] + "></i></span>"
    pf[191] = Data[24]
    pf[202] = "              <h2 class=\"s-title\">" + Data[25] + "</h2>"
    pf[199] = "              <span class=\"ico-circle\"><i class=" + Data[
        26] + "></i></span>"
    pf[204] = Data[27]
    pf[215] = "              <h2 class=\"s-title\">" + Data[28] + "</h2>"
    pf[212] = "              <span class=\"ico-circle\"><i class=" + Data[
        29] + "></i></span>"
    pf[217] = Data[30]
    pf[228] = "              <h2 class=\"s-title\">" + Data[31] + "</h2>"
    pf[225] = "              <span class=\"ico-circle\"><i class=" + Data[
        32] + "></i></span>"
    pf[230] = Data[33]
    pf[249] = "              <p class=\"counter\">" + Data[34] + "</p>"
    pf[260] = "              <p class=\"counter\">" + Data[35] + "</p>"
    pf[271] = "              <p class=\"counter\">" + Data[36] + "</p>"
    pf[521] = "                      <li><a href=" + Data[
        -4] + "><span class=\"ico-circle\"><i class=\"ion-social-linkedin\"></i></span></a></li>"
    pf[522] = "                      <li><a href=" + Data[
        -3] + "><span class=\"ico-circle\"><i class=\"ion-social-github\"></i></span></a></li>"
    pf[523] = "                      <li><a href=" + Data[
        -2] + "><span class=\"ico-circle\"><i class=\"ion-android-globe\"></i></span></a></li>"
    pf[524] = "                      <li><a href=" + Data[
        -1] + "><span class=\"ico-circle\"><i class=\"ion-social-instagram\"></i></span></a></li>"

    current = 37
    updated_indx = 305
    for i in range(int(pc)):
        if (i < 6):
            pf[updated_indx] = "                    <h2 class=\"w-title\">" + Data[
                current] + "</h2>"
            pf[updated_indx +
               2] = "                      <a href=" "><span class=\"w-ctegory\">" + Data[
                   current + 1] + "</span> / <span class=\"w-date\">" + Data[
                       current + 2] + "</span></a>"
            if (i < 5):
                updated_indx += 21
            elif (i == 5):
                updated_indx += 11
        else:
            pf[updated_indx] += "\n<div class=\"col-md-4\"><div class=\"work-box\"><a href=\"img/work-5.jpg\" data-lightbox=\"gallery-mf\"><div class=\"work-img\"><img src=\"img/work-5.jpg\" alt=\"\" class=\"img-fluid\"></div><div class=\"work-content\"><div class=\"row\"><div class=\"col-sm-8\"><h2 class=\"w-title\">" + Data[
                current] + "</h2><div class=\"w-more\"><span class=\"w-ctegory\">" + Data[
                    current + 1] + "</span> / <span class=\"w-date\">" + Data[
                        current +
                        2] + "</span></div></div><div class=\"col-sm-4\"></div></div></div></a></div></div>\n"
        current += 3

    testi_indx = 439
    for i in range(int(tc)):
        if (i == 0):
            pf[testi_indx] = "                <span class=\"author\">" + Data[
                current] + "</span>"
            pf[testi_indx + 4] = Data[current + 1]
            testi_indx += 12
        else:
            pf[testi_indx] += "\n<div class=\"container\"><div class=\"row\"><div class=\"col-md-12\"><div id=\"testimonial-mf\" class=\"owl-carousel owl-theme\"><div class=\"testimonial-box\"><div class=\"author-test\"><img src=\"img/testimonial-2.jpg\" alt=\"\" class=\"rounded-circle b-shadow-a\"><span class=\"author\">" + Data[
                current] + "</span></div><div class=\"content-test\"><p class=\"description lead\">" + Data[
                    current +
                    1] + "</p><span class=\"comit\"><i class=\"fa fa-quote-right\"></i></span></div></div></div></div></div></div>\n"

        current += 2
    pf = pf[:updated_indx + 10] + pf[422:]

    s = " "

    pf = s.join(pf)

    copy_tree("layouts/template1", "cache")

    os.remove("cache/index.html")
    with open("cache/index.html", "w") as f:
        f.write(pf)

    temp = gen.gen()
    shutil.make_archive(temp, 'zip', "cache/")
    return temp
# The Python Standard Library also supports creating TAR and ZIP archives using the high-level methods
# in the shutil module. The archiving utilities in shutil allow you to create, read, and extract ZIP and TAR archives.
# These utilities rely on the lower level tarfile and zipfile modules.
#
# Working With Archives Using shutil.make_archive()
# shutil.make_archive() takes at least two arguments: the name of the archive and an archive format.
# By default, it compresses all the files in the current directory into the archive format specified
# in the format argument. You can pass in an optional root_dir argument to compress files in
# a different directory. .make_archive() supports the zip, tar, bztar, and gztar
# archive formats (plus xztar when the lzma module is available).
#
# This is how to create a TAR archive using shutil:

import shutil

# shutil.make_archive(base_name, format, root_dir)
shutil.make_archive('data/backup', 'tar', 'data/')

# This copies everything in data/ and creates an archive called backup.tar in the filesystem and returns its name.
# To extract the archive, call .unpack_archive():

shutil.unpack_archive('data/backup.tar', 'extract_dir/')

# Calling .unpack_archive() and passing in an archive name and destination directory extracts the contents
# of backup.tar into extract_dir/. ZIP archives can be created and extracted in the same way.
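
# As a minimal sketch (assuming the same data/ directory as above, plus a
# data/sub/ subdirectory for the base_dir example), the zip round trip looks
# like this; .unpack_archive() infers the archive format from the extension:

shutil.make_archive('data/backup', 'zip', 'data/')
shutil.unpack_archive('data/backup.zip', 'extract_zip_dir/')

# .make_archive() also accepts an optional base_dir: root_dir becomes the
# working directory, and only base_dir (relative to root_dir) goes into the
# archive, so entries keep base_dir as their top-level prefix.
shutil.make_archive('data/backup_sub', 'zip', root_dir='data/', base_dir='sub')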

Example #52
0
def create_output_workflow(workflow_name):
    make_archive(workflow_name, 'zip', OUTPUT_PATH)
    base = os.path.splitext(f'{workflow_name}.zip')[0]
    os.rename(f'{workflow_name}.zip', base + '.knwf')
    rmtree(INPUT_PATH)
    rmtree(OUTPUT_PATH)
Example #53
0
import requests
import zipfile
import io
import pathlib
from io import BytesIO
import base64
import shutil
import os

# Endpoint of API defined
url = 'http://thehandler.hopto.org:4000/trainForMe'
save_path = './recorded_gestures'
#zipping
print('Zipping recorded data as data.zip')
output_filename = 'data'
shutil.make_archive(output_filename, 'zip', save_path)
print('Data zipping complete')
"""
# encoding the zip file into base64 format
with open("data.zip", "rb") as f: #take a look
    bytes = f.read()
    encoded = base64.b64encode(bytes)


# defining request body of the API call

payload = {
	"ZipFile": encoded.decode('ascii')
}

headers = {
Example #54
0
def main(argv=None):
    parser = get_parser()
    arguments = parser.parse_args(argv)
    verbose = arguments.v
    set_global_loglevel(verbose=verbose)

    # See if there's a configuration file and import those options
    if arguments.config is not None:
        print('configuring')
        with open(arguments.config, 'r') as conf:
            _, ext = os.path.splitext(arguments.config)
            if ext == '.json':
                config = json.load(conf)
            elif ext in ('.yml', '.yaml'):
                config = yaml.load(conf, Loader=yaml.Loader)

        # Warn people if they're overriding their config file
        if argv is not None and len(argv) > 2:
            warnings.warn(UserWarning('Using the `-config|-c` flag with additional arguments is discouraged'))

        # Check for unsupported arguments
        orig_keys = set(vars(arguments).keys())
        config_keys = set(config.keys())
        if orig_keys != config_keys:
            for k in config_keys.difference(orig_keys):
                del config[k]  # Remove the unknown key
                warnings.warn(UserWarning(
                    'Unknown key "{}" found in your configuration file, ignoring.'.format(k)))

        # Update the default to match the config
        parser.set_defaults(**config)

        # Reparse the arguments
        arguments = parser.parse_args(argv)

    # Set up email notifications if desired
    do_email = arguments.email_to is not None
    if do_email:
        email_to = arguments.email_to
        if arguments.email_from is not None:
            email_from = arguments.email_from
        else:
            email_from = arguments.email_to

        smtp_host, smtp_port = arguments.email_host.split(":")
        smtp_port = int(smtp_port)
        email_pass = getpass('Please input your email password:\n')

        def send_notification(subject, message):
            send_email(email_to, email_from,
                       subject=subject,
                       message=message,
                       passwd=email_pass,
                       smtp_host=smtp_host,
                       smtp_port=smtp_port)

        while True:
            send_test = input('Would you like to send a test email to validate your settings? [Y/n]:\n')
            if send_test.lower() in ['', 'y', 'n']:
                break
            else:
                print('Please input y or n')

        if send_test.lower() in ['', 'y']:
            send_notification('sct_run_batch: test notification', 'Looks good')

    # Set up output directories and create them if they don't already exist
    path_output = os.path.abspath(os.path.expanduser(arguments.path_output))
    path_results = os.path.join(path_output, 'results')
    path_data_processed = os.path.join(path_output, 'data_processed')
    path_log = os.path.join(path_output, 'log')
    path_qc = os.path.join(path_output, 'qc')
    path_segmanual = os.path.abspath(os.path.expanduser(arguments.path_segmanual))
    script = os.path.abspath(os.path.expanduser(arguments.script))
    path_data = os.path.abspath(os.path.expanduser(arguments.path_data))

    for pth in [path_output, path_results, path_data_processed, path_log, path_qc]:
        os.makedirs(pth, exist_ok=True)

    # Check that the script can be found
    if not os.path.exists(script):
        raise FileNotFoundError('Couldn\'t find the script at {}'.format(script))

    # Setup overall log
    batch_log = open(os.path.join(path_log, arguments.batch_log), 'w')

    # Duplicate init_sct message to batch_log
    print('\n--\nSpinal Cord Toolbox ({})\n'.format(__version__), file=batch_log, flush=True)

    # Tee IO to batch_log and std(out/err)
    orig_stdout = sys.stdout
    orig_stderr = sys.stderr

    sys.stdout = Tee(batch_log, orig_stdout)
    sys.stderr = Tee(batch_log, orig_stderr)

    def reset_streams():
        sys.stdout = orig_stdout
        sys.stderr = orig_stderr

    # Display OS
    print("INFO SYSTEM")
    print("-----------")
    platform_running = sys.platform
    if platform_running.find('darwin') != -1:
        os_running = 'osx'
    elif platform_running.find('linux') != -1:
        os_running = 'linux'
    else:
        os_running = platform_running
    print('OS: ' + os_running + ' (' + platform.platform() + ')')

    # Display number of CPU cores
    print('CPU cores: Available: {} | Threads used by ITK Programs: {}'.format(multiprocessing.cpu_count(), arguments.itk_threads))

    # Display RAM available
    print("RAM: Total {} MB | Available {} MB | Used {} MB".format(
        int(psutil.virtual_memory().total / 1024 / 1024),
        int(psutil.virtual_memory().available / 1024 / 1024),
        int(psutil.virtual_memory().used / 1024 / 1024),
    ))

    # Log the current arguments (in yaml because it's cleaner)
    print('\nINPUT ARGUMENTS')
    print("---------------")
    print(yaml.dump(vars(arguments)))

    # Display script version info
    print("SCRIPT")
    print("------")
    print("git commit: {}".format(__get_commit(path_to_git_folder=os.path.dirname(script))))
    print("git origin: {}".format(__get_git_origin(path_to_git_folder=os.path.dirname(script))))
    print("Copying script to output folder...")
    try:
        # Copy the script and record the new location
        script_copy = os.path.abspath(shutil.copy(script, arguments.path_output))
        print("{} -> {}".format(script, script_copy))
        script = script_copy
    except shutil.SameFileError:
        print("Input and output folder are the same. Skipping copy.")
        pass
    except IsADirectoryError:
        print("Input folder is a directory (not a file). Skipping copy.")
        pass

    print("Setting execute permissions for script file {} ...".format(arguments.script))
    script_stat = os.stat(script)
    os.chmod(script, script_stat.st_mode | S_IEXEC)

    # Display data version info
    print("\nDATA")
    print("----")
    print("git commit: {}".format(__get_commit(path_to_git_folder=path_data)))
    print("git origin: {}\n".format(__get_git_origin(path_to_git_folder=path_data)))

    # Find subjects and process inclusion/exclusions
    subject_dirs = [f for f in os.listdir(path_data) if f.startswith(arguments.subject_prefix)]

    # Handle inclusion lists
    assert not ((arguments.include is not None) and (arguments.include_list is not None)),\
        'Only one of `include` and `include-list` can be used'

    if arguments.include is not None:
        subject_dirs = [f for f in subject_dirs if re.search(arguments.include, f) is not None]

    if arguments.include_list is not None:
        # TODO decide if we should warn users if one of their inclusions isn't around
        subject_dirs = [f for f in subject_dirs if f in arguments.include_list]

    # Handle exclusions
    assert not ((arguments.exclude is not None) and (arguments.exclude_list is not None)),\
        'Only one of `exclude` and `exclude-list` can be used'

    if arguments.exclude is not None:
        subject_dirs = [f for f in subject_dirs if re.search(arguments.exclude, f) is None]

    if arguments.exclude_list is not None:
        subject_dirs = [f for f in subject_dirs if f not in arguments.exclude_list]

    # Determine the number of jobs we can run simultaneously
    if arguments.jobs < 1:
        jobs = multiprocessing.cpu_count() + arguments.jobs
    else:
        jobs = arguments.jobs

    print("RUNNING")
    print("-------")
    print("Processing {} subjects in parallel. (Worker processes used: {}).".format(len(subject_dirs), jobs))

    # Run the jobs, recording start and end times
    start = datetime.datetime.now()

    # Trap errors to send an email if a script fails.
    try:
        with multiprocessing.Pool(jobs) as p:
            run_single_dir = functools.partial(run_single,
                                               script=script,
                                               script_args=arguments.script_args,
                                               path_segmanual=path_segmanual,
                                               path_data=path_data,
                                               path_data_processed=path_data_processed,
                                               path_results=path_results,
                                               path_log=path_log,
                                               path_qc=path_qc,
                                               itk_threads=arguments.itk_threads,
                                               continue_on_error=arguments.continue_on_error)
            results = list(p.imap(run_single_dir, subject_dirs))
    except Exception as e:
        if do_email:
            message = ('Oh no there has been the following error in your pipeline:\n\n'
                       '{}'.format(e))
            try:
                # The multiprocessing error is more significant than a potential
                # email error, so make sure it is the one that gets signalled.
                send_notification('sct_run_batch errored', message)
            except Exception:
                raise e

            raise e
        else:
            raise e

    end = datetime.datetime.now()

    # Check for failed subjects
    fails = [sd for (sd, ret) in zip(subject_dirs, results) if ret.returncode != 0]

    if len(fails) == 0:
        status_message = '\nHooray! Your batch completed successfully :-)\n'
    else:
        status_message = ('\nYour batch completed, but some subjects may not have '
                          'completed successfully; please consult the logs for:\n'
                          '{}\n'.format('\n'.join(fails)))
    print(status_message)

    # Display timing
    duration = end - start
    timing_message = ('Started: {} | Ended: {} | Duration: {}\n'.format(
        start.strftime('%Hh%Mm%Ss'),
        end.strftime('%Hh%Mm%Ss'),
        (datetime.datetime.utcfromtimestamp(0) + duration).strftime('%Hh%Mm%Ss')))
    print(timing_message)

    if do_email:
        send_notification('sct_run_batch: Run completed',
                          status_message + timing_message)

    open_cmd = 'open' if sys.platform == 'darwin' else 'xdg-open'

    print('To open the Quality Control (QC) report on a web-browser, run the following:\n'
          '{} {}/index.html'.format(open_cmd, path_qc))

    if arguments.zip:
        file_zip = 'sct_run_batch_{}'.format(time.strftime('%Y%m%d%H%M%S'))
        path_tmp = os.path.join(tempfile.mkdtemp(), file_zip)
        os.makedirs(os.path.join(path_tmp, file_zip))
        for folder in [path_log, path_qc, path_results]:
            shutil.copytree(folder, os.path.join(path_tmp, file_zip, os.path.split(folder)[-1]))
        shutil.make_archive(os.path.join(path_output, file_zip), 'zip', path_tmp)
        shutil.rmtree(path_tmp)
        print("\nOutput zip archive: {}.zip".format(os.path.join(path_output, file_zip)))

    reset_streams()
    batch_log.close()
        # Rename the GUI executables.
        if project == "GUI":
            linuxOutputFile = outputDirectory + "/Nexus-LU-Launcher"
            windowsOutputFile = outputDirectory + "/Nexus-LU-Launcher.exe"
            if os.path.exists(linuxOutputFile):
                os.remove(linuxOutputFile)
            elif os.path.exists(windowsOutputFile):
                os.remove(windowsOutputFile)
            if os.path.exists(outputDirectory + "/NLUL.GUI"):
                os.rename(outputDirectory + "/NLUL.GUI", linuxOutputFile)
            elif os.path.exists(outputDirectory + "/NLUL.GUI.exe"):
                os.rename(outputDirectory + "/NLUL.GUI.exe", windowsOutputFile)

        # Create the archive.
        shutil.make_archive(
            "bin/NLUL-" + project + "-" + platform[0], "zip",
            "NLUL." + project + "/bin/Release/" + dotNetVersion + "/" +
            platform[1] + "/publish")

# Clear the existing macOS GUI release.
if os.path.exists("bin/NLUL-GUI-macOS-x64.zip"):
    print("Clearing the macOS x64 NLUL-GUI release.")
    os.remove("bin/NLUL-GUI-macOS-x64.zip")

# Package the macOS release.
print("Packaging macOS release.")
dotNetVersion = os.listdir("NLUL.GUI/bin/Release/")[0]
for macOsBuild in MACOS_PACKAGE_BUILDS:
    shutil.copytree(
        "NLUL.GUI/bin/Release/" + dotNetVersion + "/" + macOsBuild[1] +
        "/publish", "bin/NLUL-GUI-" + macOsBuild[0] +
        "/Nexus LU Launcher.app/Contents/MacOS")
def lambda_handler(event, context):
    params = None
    logger.info('Event %s', event)
    OAUTH_token = event['context']['git-token']
    OutputBucket = event['context']['output-bucket']
    # temp_archive = '/tmp/archive.zip'
    # Identify git host flavour
    hostflavour = 'generic'
    if 'X-Hub-Signature' in event['params']['header'].keys():
        hostflavour = 'githubent'
    elif 'X-Gitlab-Event' in event['params']['header'].keys():
        hostflavour = 'gitlab'
    elif 'User-Agent' in event['params']['header'].keys():
        if event['params']['header']['User-Agent'].startswith(
                'Bitbucket-Webhooks'):
            hostflavour = 'bitbucket'
        elif event['params']['header']['User-Agent'].startswith(
                'GitHub-Hookshot'):
            hostflavour = 'github'
        elif 'Bitbucket-' in event['params']['header']['User-Agent']:
            hostflavour = 'bitbucket-server'
    elif event['body-json']['publisherId'] == 'tfs':
        hostflavour = 'tfs'

    headers = {}
    branch = 'master'
    if hostflavour == 'githubent':
        archive_url = event['body-json']['repository']['archive_url']
        owner = event['body-json']['repository']['owner']['name']
        name = event['body-json']['repository']['name']
        # replace the code archive download and branch reference placeholders
        archive_url = archive_url.replace('{archive_format}',
                                          'zipball').replace(
                                              '{/ref}', '/master')
        # add access token information to archive url
        archive_url = archive_url + '?access_token=' + OAUTH_token
    elif hostflavour == 'github':
        archive_url = event['body-json']['repository']['archive_url']
        owner = event['body-json']['repository']['owner']['login']
        name = event['body-json']['repository']['name']
        # replace the code archive download and branch reference placeholders
        branch_name = event['body-json']['ref'].replace('refs/heads/', '')
        archive_url = archive_url.replace('{archive_format}',
                                          'zipball').replace(
                                              '{/ref}', '/' + branch_name)
        # add access token information to archive url
        archive_url = archive_url + '?access_token=' + OAUTH_token
    elif hostflavour == 'gitlab':
        #https://gitlab.com/jaymcconnell/gitlab-test-30/repository/archive.zip?ref=master
        # str.strip() removes a set of characters, not a suffix, so trim '.git' explicitly
        archive_root = event['body-json']['project']['http_url']
        if archive_root.endswith('.git'):
            archive_root = archive_root[:-len('.git')]
        project_id = event['body-json']['project_id']
        branch = event['body-json']['ref'].replace('refs/heads/', '')
        archive_url = "https://gitlab.com/api/v4/projects/{}/repository/archive.zip".format(
            project_id)
        params = {'private_token': OAUTH_token, 'sha': branch}

        owner = event['body-json']['project']['namespace']
        name = event['body-json']['project']['name']

    elif hostflavour == 'bitbucket':
        branch = event['body-json']['push']['changes'][0]['new']['name']
        archive_url = event['body-json']['repository']['links']['html'][
            'href'] + '/get/' + branch + '.zip'
        owner = event['body-json']['repository']['owner']['username']
        name = event['body-json']['repository']['name']
        r = requests.post('https://bitbucket.org/site/oauth2/access_token',
                          data={'grant_type': 'client_credentials'},
                          auth=(event['context']['oauth-key'],
                                event['context']['oauth-secret']))
        if 'error' in r.json().keys():
            logger.error('Could not get OAuth token. %s: %s' %
                         (r.json()['error'], r.json()['error_description']))
            raise Exception('Failed to get OAuth token')
        headers['Authorization'] = 'Bearer ' + r.json()['access_token']
    elif hostflavour == 'tfs':
        archive_url = event['body-json']['resourceContainers']['account'][
            'baseUrl'] + 'DefaultCollection/' + event['body-json'][
                'resourceContainers']['project'][
                    'id'] + '/_apis/git/repositories/' + event['body-json'][
                        'resource']['repository']['id'] + '/items'
        owner = event['body-json']['resource']['pushedBy']['displayName']
        name = event['body-json']['resource']['repository']['name']
        pat_in_base64 = base64.encodestring(':%s' %
                                            event['context']['git-token'])
        headers['Authorization'] = 'Basic %s' % pat_in_base64
        headers['Authorization'] = headers['Authorization'].replace('\n', '')
        headers['Accept'] = 'application/zip'
    elif hostflavour == 'bitbucket-server':
        clone_urls = event['body-json']['repository']['links']['clone']
        http_clone_url = None
        for clone_url in clone_urls:
            if clone_url.get('name') == "http":
                http_clone_url = clone_url.get('href')
        if http_clone_url is None:
            raise Exception(
                "Could not find http clone url from the webhook payload")

        if len(event['body-json']['changes']) != 1:
            raise Exception("Could not handle the number of changes")
        change = event['body-json']['changes'][0]
        url_parts = urlparse(http_clone_url)
        owner = event['body-json']['repository']['project']['name']
        name = event['body-json']['repository']['name']
        archive_url = "{scheme}://{netloc}/rest/api/latest/projects/{project}/repos/{repo}/archive?at={hash}&format=zip".format(
            scheme=url_parts.scheme,
            netloc=url_parts.netloc
            if os.environ.get("SCM_HOSTNAME_OVERRIDE", '') == '' else
            os.environ.get("SCM_HOSTNAME_OVERRIDE"),
            project=owner,
            repo=name,
            hash=change['toHash'],
        )
        branch = change['refId'].replace('refs/heads/', '')
        secret = base64.b64encode(":".join(
            [event['context']['oauth-key'], event['context']['oauth-secret']]))
        headers['Authorization'] = 'Basic ' + secret

    # download the code archive via archive url
    logger.info('Downloading archive from %s' % archive_url)
    r = requests.get(archive_url,
                     verify=verify,
                     headers=headers,
                     params=params)
    f = StringIO(r.content)
    zip = ZipFile(f)
    path = '/tmp/code'
    zipped_code = '/tmp/zipped_code'
    try:
        shutil.rmtree(path)
        os.remove(zipped_code + '.zip')
    except:
        pass
    finally:
        os.makedirs(path)
    # Write to /tmp dir without any common prefixes
    zip.extractall(path, get_members(zip))

    # Create zip from /tmp dir without any common prefixes
    s3_archive_file = "%s/%s/%s/%s.zip" % (owner, name, branch, name)
    shutil.make_archive(zipped_code, 'zip', path)
    logger.info("Uploading zip to S3://%s/%s" %
                (OutputBucket, s3_archive_file))
    s3_client.upload_file(zipped_code + '.zip', OutputBucket, s3_archive_file)
    logger.info('Upload Complete')
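
# Note: get_members() is defined outside this excerpt. A minimal sketch of such
# a helper (an assumption about its behaviour, not the original implementation)
# strips the archive's common top-level folder from every entry on extraction:

import os


def get_members(zip_file):
    """Yield ZipInfo members with the common leading folder stripped."""
    names = zip_file.namelist()
    # e.g. 'repo-abc123/' -- the common leading path of every entry
    prefix = os.path.commonprefix(names)
    # Keep only the directory part of the common prefix
    prefix = prefix[:prefix.rfind('/') + 1]
    for member in zip_file.infolist():
        stripped = member.filename[len(prefix):]
        if stripped:
            member.filename = stripped
            yield member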
 def get_analysis_zip(self):
     shutil.make_archive(self.analysis_output_folder, 'zip', self.analysis_output_folder)
     return self.analysis_output_folder + ".zip"
Example #58
0
    required.remove('opencv-python')

# PyQt
# Naming conflict with PyPI package.
# `pyqt` package should be installed via conda instead
# cf. https://github.com/ContinuumIO/anaconda-issues/issues/1554
if PY3 and 'CONDA_PREFIX' in os.environ:
    required.remove('pyqt5; python_version >= "3"')

# compress psychojs to a zip file for packaging
# it only takes ~0.5 s, but you can skip it if you prefer
if ('-noJS' in argv) or not exists('psychojs') or ('clean' in argv):
    pass
else:
    import shutil
    shutil.make_archive(join('psychopy', 'psychojs'),
                        'zip', 'psychojs')

# regenerate __init__.py only if we're in the source repos (not in a source
# zip file)
try:
    from packaging import createInitFile   # won't exist in a sdist.zip
    writeNewInit = True
except ImportError:
    writeNewInit = False

if writeNewInit:
    # determine what type of dist is being created
    # (install and bdist might do compiling, and then the build platform is needed)
    for arg in argv:
        if arg.startswith('bdist') or arg.startswith('install'):
            dist = 'bdist'
 def get_pictures_zip(self):
     shutil.make_archive(self.pictures_output_folder, 'zip', self.pictures_output_folder)
     return self.pictures_output_folder + ".zip"
Example #60
0
    f1.close()
    f2 = open(os.getcwd() + "/a2/main.clj.html", "w+")
    f2.write(clojureSite)
    f2.close()
    f3 = open(os.getcwd() + "/a3/Main.scala.html", "w+")
    f3.write(scalaSite)
    f3.close()
    f4 = open(os.getcwd() + "/a4/main.pl.html", "w+")
    f4.write(prologSite)
    f4.close()
    f5 = open(os.getcwd() + "/a5/Main.py.html", "w+")
    f5.write(pythonSite)
    f5.close()


assignments = parser()
generateHTML(assignments)
email = "*****@*****.**"
shutil.make_archive('csc344', 'zip', os.getcwd())
msg = MIMEMultipart()
attach = open("csc344.zip", "rb")
msg['From'] = email
msg['To'] = email
part = MIMEApplication(attach.read(), Name="csc344.zip")
msg.attach(part)
part.add_header('Content-Disposition', "attachment; filename= " + "csc344.zip")
server = smtplib.SMTP_SSL('smtp.aol.com', 465)
server.login(email, "laxbro4life123")
server.sendmail(email, email, msg.as_string())
server.close()