Example #1
0
    def __init__(self, filename, UserData=None, new=None):
        """Open (or create) a document, staging zip content in a temp file.

        The on-disk format (zip vs. plain) is only decided at first save;
        until then ``isZip`` stays True.
        """
        self.filename = os.path.abspath(filename) if filename else None
        # initialize isZip, but leave it until first save to decide the file type
        self.isZip = True

        if new:
            # brand-new document: fresh temp file, empty screen
            self.tempfile = "%s/%i.exp~" % (UserData.GetTempDir(),
                                            random.randint(0, 1000))
            self.NewScreen()
        elif zipfile.is_zipfile(filename):
            if UserData:
                # existing zip with user settings: temp file named after it
                base = os.path.split(filename)[1]
                self.tempfile = "%s/%s%i.exp~" % (UserData.GetTempDir(), base,
                                                  random.randint(0, 1000))
            else:
                # no user data: fixed temp name, and remember the document
                # should not be written back as a zip
                self.tempfile = "tempfile.zip"
            self.StartTemp()
            self.ReadZip()
            if not UserData:
                self.isZip = False
        else:
            self.ReadFile()
Example #2
0
def addpoints_to_scan(scanid,req_format,parnames,points):
    """Upload scan points and their data payloads to a recast scan.

    For each point, a point request is created from its coordinates and a
    basic request is attached carrying the point's data archive.  A
    directory payload is zipped into a fresh temp archive first; an
    existing zip file is uploaded as-is.

    scanid     -- id of the scan the points belong to
    req_format -- format identifier forwarded to the basic request
    parnames   -- parameter names, zipped with each point's coordinates
    points     -- iterable of dicts with 'coordinates' and 'data' keys

    Returns (point_requests, basic_requests), two lists of summary dicts.
    (Python 2 code: uses print statements.)
    """
    point_requests = []
    basic_requests = []

    for i,p in enumerate(points):
        coordinates = p['coordinates']
        data = p['data']

        if os.path.isdir(data):
            # pack the directory into a temporary zip; member paths are
            # stored relative to the directory root
            _,archive = tempfile.mkstemp()
            with zipfile.ZipFile(archive,'w') as archivefile:
                for d,_,fl in os.walk(data):
                    for f in fl:
                        archivefile.write(os.path.join(d,f), arcname = os.path.relpath(os.path.join(d,f),data))
        elif zipfile.is_zipfile(data):
            # already a zip file -- upload directly
            archive = data
        else:
            # diagnostics before bailing out
            print data
            print os.path.isdir(data)
            print zipfile.is_zipfile(data)
            raise click.ClickException('point data needs to be zipfiles or directory')

        # map parameter names onto this point's coordinate values
        pointdict = dict(zip(parnames,coordinates))
        pr = recastapi.request.write.point_request_with_coords(scanid,pointdict)
        point_requests += [{'point':pointdict,'id':pr['id']}]
        br = recastapi.request.write.basic_request_with_archive(pr['id'],archive,req_format)
        basic_requests += [{'point':pointdict,'id':br['id'], 'point_request': pr['id']}]
        click.secho('uploaded {}/{} requests'.format(i+1,len(points)))
    return point_requests, basic_requests
	def Create(self, add_files=None):
		"""Create the zip archive at ``self.fn`` and populate it.

		add_files -- optional list of file paths to add.  A path is taken
		from the plain file system or, when its parent path is itself a
		zip file, its bytes are read out of that archive and re-written
		into the new one.
		"""
		# The mutable default ([]) was replaced by None to avoid the
		# shared-default-argument pitfall; behaviour is unchanged.
		if add_files is None:
			add_files = []

		### output zip file -- the context manager guarantees it is
		### closed even if adding one of the files raises.
		with zipfile.ZipFile(self.fn, "w") as zout:
			### keep only paths that exist on disk or live inside a zip
			for fn in filter(lambda f: os.path.exists(f) or zipfile.is_zipfile(os.path.dirname(f)), add_files):
				fn_dir_name, fn_base_name = os.path.split(fn)
				### if the file to add is inside a zip, decompress it first
				if zipfile.is_zipfile(fn_dir_name):
					with zipfile.ZipFile(fn_dir_name, 'r') as zin:
						# renamed from `buffer` (shadowed the builtin)
						data = zin.read(fn_base_name)
					zout.writestr(fn_base_name, data)
				else:
					zout.write(fn, fn_base_name)
Example #4
0
 def download_update(self):
     """Download the pending update package if one is available.

     Returns an error-message string on failure, None otherwise.  On a
     successful download ``update_available`` is cleared and logs are
     shipped; a corrupted (non-zip) download is removed again.
     (Python 2 code: ``urllib.urlretrieve``.)
     """
     if self.update_available:
         if os.path.exists(paths.UPDATE_FILE_PATH) and zipfile.is_zipfile(paths.UPDATE_FILE_PATH):
             self.logger.info('Update package already downloaded')
         else:
             self.logger.info('Downloading update package...')
             try:
                 urllib.urlretrieve(self.update_download_url, paths.UPDATE_FILE_PATH)
             except Exception as e:
                 error_message = 'Update download failed: error while downloading from %s\nDetails: %s' %\
                                     ( self.update_download_url, str(e) )
                 self.logger.warning(error_message)
                 return error_message
             else:
                 # a valid update package must be a zip archive
                 if zipfile.is_zipfile(paths.UPDATE_FILE_PATH):
                     self.logger.info('...update successfully downloaded!')
                     self.update_available = False
                     log.send_logs()
                 else:
                     error_message = 'Error: corrupted update package.'
                     self.logger.warning(error_message)
                     try:
                         os.remove(paths.UPDATE_FILE_PATH)
                     except OSError:
                         # narrowed from a bare except: only a failure of
                         # the removal itself should be swallowed here
                         self.logger.warning("Unable to remove corrupted update package.")
                     else:
                         self.logger.warning("Corrupted update package removed.")
                     return error_message
Example #5
0
def getZipFilePath(filePath, stopPath=""):
    """
    Detect if part or all of the given path points to a ZIP file

    @param filePath: string
        The full path to the resource

    @return: tuple(string, string)
        Returns a tuple with the following content:
        1. path to the ZIP file in OS format (using OS path separator)
        2. ZIP internal path to the requested file in ZIP format
    """
    assert isinstance(filePath, str), "Invalid file path %s" % filePath
    assert isinstance(stopPath, str), "Invalid stop path %s" % stopPath
    # normalize and force the OS path separator before probing
    filePath = normOSPath(filePath, True)
    if is_zipfile(filePath):
        # the whole path is the archive; no internal path remains
        return filePath, ""

    # climb towards the root, testing every ancestor down to stopPath
    candidate = filePath
    while len(candidate) > len(stopPath):
        if is_zipfile(candidate):
            return candidate, normZipPath(filePath[len(candidate):])
        parent = dirname(candidate)
        if parent == candidate:
            # reached the file-system root without finding an archive
            break
        candidate = parent
    raise IOError("Invalid ZIP path %s" % filePath)
Example #6
0
	def GetModule(filename):
		""" Return the module object loaded from a Python file path.

		Warning: the name of the Python file must match the name of the
		class defined inside it.

		The source may live inside a zip archive (either the file itself
		or its parent directory-archive), on the web, or be a plain local
		file.  (Python 2 code: `imp` module, old except syntax.)
		"""

		dir_name = os.path.dirname(filename)

		### if python_file is ...../toto.amd/Atomic_Model.py, then the parent dir is zipfile.
		if zipfile.is_zipfile(dir_name):
			zf = ZipManager.Zip(dir_name)
			return zf.GetModule()
		elif zipfile.is_zipfile(filename):
			zf = ZipManager.Zip(filename)
			return zf.GetModule()
		### if python file is on the web !
		elif filename.startswith(('http','https')):
			net = Net(filename)
			return net.GetModule()
		### pure python file
		else:

			### add path to sys.path so imp can find sibling modules
			if dir_name not in sys.path:
				sys.path.append(dir_name)

			### module name without the .py extension
			module_name = os.path.basename(filename).split('.py')[0]

			# find and load module
			try:
				f, fn, description = imp.find_module(module_name, [dir_name])
				module = imp.load_module(module_name, f, fn, description)
				f.close()
				return module

			### on failure the exc_info triple is returned instead of raised
			except Exception, info:
				return sys.exc_info()
Example #7
0
 def process_log_path(self, log_path):
     """
     Detect if log_path is a zip, then unzip it and return log's location.
     :param log_path:
     :return: log location - if the log_path is not a zip
              unzipped location - if log_path is a zip
              list of zipped logs - if log_path contains the zipped logs
     """
     if zipfile.is_zipfile(log_path):
         target_dir = os.path.dirname(os.path.abspath(log_path))
         # Extract next to the archive.  If the archive stores members
         # inside a folder, remember it so it can be cleaned up later and
         # returned as the log location.
         with zipfile.ZipFile(log_path, 'r') as archive:
             names = archive.namelist()
             extracted_folder = names[0].split('/')[0] if any('/' in n for n in names) else ''
             archive.extractall(target_dir)
         if extracted_folder:
             self.cleanup = True
         return os.path.join(target_dir, extracted_folder)

     # Not a zip itself: maybe a directory that holds zipped logs.
     entries = [os.path.join(log_path, name) for name in os.listdir(log_path)]
     archives = [entry for entry in entries if zipfile.is_zipfile(entry)]
     if archives:
         # recurse into every zip found in the directory
         return [self.process_log_path(entry) for entry in archives]
     return log_path
Example #8
0
    def walkzip(self, path):
        """Walks a path independent of whether it includes a zipfile or not"""
        if os.path.exists(path) and os.path.isdir(path):
            # plain directory: yield every file path relative to `path`
            base_len = len(path) + len(os.path.sep)
            for dirpath, _dirnames, filenames in os.walk(path):
                for name in filenames:
                    yield os.path.join(dirpath[base_len:], name)
            return

        # Otherwise find the zipfile component of `path`: try each prefix
        # ending at a separator, falling back to the whole path.
        zippath = None
        cut = path.find(os.path.sep)
        while cut > -1:
            if zipfile.is_zipfile(path[:cut]):
                zippath = path[:cut]
                break
            cut = path.find(os.path.sep, cut + 1)
        if zippath is None and zipfile.is_zipfile(path):
            zippath = path

        # Now yield the files inside the archive
        if zippath:
            archive = zipfile.ZipFile(zippath)
            prefix = path[len(zippath):].strip(os.path.sep)
            # a non-empty prefix must end in a separator to match members
            if prefix:
                prefix += os.path.sep
            for member in archive.namelist():
                # zip members always use '/' internally
                member = member.replace('/', os.path.sep)
                if member.startswith(prefix) and not member.endswith(os.path.sep):
                    # a file under the requested prefix
                    yield member[len(prefix):]
Example #9
0
def open_possibly_compressed_file(filename):
    """Open *filename* for text reading, transparently unpacking it.

    Plain files are opened directly; ``.gz`` files go through gzip; a ZIP
    archive must contain exactly one member, which is returned as a text
    stream.  Works on both Python 2 and 3.

    Raises IOError when the file is missing, when a ZIP archive holds
    more than one member, or when ZIPs are unsupported (Python < 2.6).
    """
    if not os.path.exists(filename):
        raise IOError("cannot find file `" + filename + "'")
    if sys.version_info[:2] < (2, 6) and zipfile.is_zipfile(filename):
        raise IOError("cannot unpack a ZIP file with Python %s" %
                      '.'.join(map(str, sys.version_info)))
    # is_zipfile may raise on exotic inputs; treat any failure as "not a
    # zip".  Narrowed from a bare except so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    try:
        is_zipfile = zipfile.is_zipfile(filename)
    except Exception:
        is_zipfile = False
    if is_zipfile:
        zf1 = zipfile.ZipFile(filename, "r")
        names = zf1.namelist()
        if len(names) != 1:
            zf1.close()  # don't leak the handle on the error path
            raise IOError("cannot compare with a zip file that contains "
                          "multiple files: `" + filename + "'")
        if sys.version_info < (3, 0):
            return zf1.open(names[0], 'r')
        else:
            # wrap the binary member stream for text-mode reading
            return io.TextIOWrapper(
                zf1.open(names[0], 'r'), encoding='utf-8', newline='')
    elif filename.endswith('.gz'):
        if sys.version_info < (3, 0):
            return gzip.open(filename, "r")
        elif sys.version_info[:2] == (3, 2):
            # MyGzipFile: presumably a 3.2 workaround wrapper defined
            # elsewhere in this project -- TODO confirm
            return io.TextIOWrapper(
                MyGzipFile(filename), encoding='utf-8', newline='')
        else:
            return io.TextIOWrapper(
                gzip.open(filename, 'r'), encoding='utf-8', newline='')
    else:
        return open(filename, "r")
Example #10
0
def read_class_path(class_path):
    '''Cache content of all jars.
    Begin with rt.jar

    Returns a (lookup_paths, jars, rt) triple:
      lookup_paths -- directories to search for loose .class files
      jars         -- mapping filled from every jar on the class path
      rt           -- mapping filled from rt.jar

    Raises Exception when rt.jar cannot be located or a class path entry
    is neither an existing directory nor a jar file.
    '''

    # folders for lookup for class files
    lookup_paths = []
    # content of all jars (name->path to jar)
    jars = {}
    # content of rt.jar
    rt = {}

    # first check a local rt.jar, then fall back to the JDK/JRE layout
    local_path = os.path.dirname(os.path.realpath(__file__))
    RT_JAR = os.path.join(local_path, "../rt/rt.jar")
    if not os.path.isfile(RT_JAR):
        JAVA_HOME = os.environ.get('JAVA_HOME')
        if JAVA_HOME is None:
            raise Exception("JAVA_HOME is not set")
        if not os.path.isdir(JAVA_HOME):
            raise Exception("JAVA_HOME must be a folder: %s" % JAVA_HOME)

        RT_JAR = os.path.join(JAVA_HOME, "lib/rt.jar")
        if not os.path.exists(RT_JAR) or os.path.isdir(RT_JAR):
            RT_JAR = os.path.join(JAVA_HOME, "jre/lib/rt.jar")
            if not os.path.exists(RT_JAR) or os.path.isdir(RT_JAR):
                raise Exception("rt.jar not found")

    if not zipfile.is_zipfile(RT_JAR):
        raise Exception("rt.jar is not a zip: %s" % RT_JAR)

    read_from_jar(RT_JAR, rt)

    current = os.getcwd()

    # entries may be separated by ':', ';' or ','; default to ':'
    splitter = ":"
    for candidate in (":", ";", ","):
        if candidate in class_path:
            splitter = candidate
            break
    cpaths = class_path.split(splitter)
    for p in cpaths:
        p = p.strip()
        path = os.path.join(current, p)
        if not os.path.exists(path):
            # BUG FIX: the message used to be passed as a second tuple
            # argument to Exception and was never %-formatted.
            raise Exception("Wrong class path entry: %s (path not found %s)"
                            % (p, path))
        if os.path.isdir(path):
            lookup_paths.append(path)
        elif zipfile.is_zipfile(path):
            read_from_jar(path, jars)
        else:
            raise Exception("Class path entry %s is not a jar file" % path)

    return (lookup_paths, jars, rt)
Example #11
0
    def __init__(self, original, modified):
        """Open the two epub/zip files that will be compared.

        Raises EpubDiffError when either path is missing or is not a
        valid zip archive.  (Python 2 code: list + range concatenation.)
        """
        # printable characters used for text/binary detection
        self.TEXTCHARS = ''.join(map(chr, [7,8,9,10,12,13,27] + range(0x20, 0x100)))
        self.ALLBYTES = ''.join(map(chr, range(256)))

        # change-type codes used throughout the diff log
        self.changelist = [
            (0, 'No Change'),
            (1, 'File Removed'),
            (2, 'File Added'),
            (3, 'Binary File Change'),
            (4, 'Text File Change (With Diff)'),
            (5, 'Text File Change (Without Diff)'),
        ]

        # guard clauses: both inputs must exist and be zip archives
        if not (os.path.exists(original) and os.path.exists(modified)):
            raise EpubDiffError("One or more files do not exist")
        if not (zipfile.is_zipfile(original) and zipfile.is_zipfile(modified)):
            raise EpubDiffError("One or more files are not Epub/Zip files")
        self.original = zipfile.ZipFile(original)
        self.modified = zipfile.ZipFile(modified)

        self.doclist = [(self.original, 0), (self.modified, 1)]
        self.manifest = {}
        self.difflog = []
Example #12
0
def subtitlesDownload(listOfId):
	"""Download English subtitles for each id and unpack the zip ones.

	Files are saved as ENG/sub<N>; zip downloads are extracted (skipping
	.nfo members) and plain downloads are renamed to .srt.  Uses the
	module-global counter ``pocitadlo`` to number the files.
	(Python 2 code: print statements and backtick repr syntax.)
	"""
	global pocitadlo
	print("Stahuji titulky k filmu s opacnym jazykem...")
	# download every subtitle file into the ENG folder
	for item in listOfId:
	  pocitadlo = pocitadlo + 1
	  download = "http://www.opensubtitles.org/cs/subtitleserve/sub/"+item
	  if not os.path.exists("ENG"):
	    print("Vytvarim slozku ENG")
	    os.makedirs("ENG")
	  urlObjDownload = urllib2.urlopen(download)
	  outFile = open("ENG/sub"+`pocitadlo`,"wb")
	  outFile.write(urlObjDownload.read())
	  # NOTE(review): is_zipfile is called on the still-open file object
	  # and the result is never used afterwards -- presumably leftover
	  # debugging; confirm before removing
	  if(zipfile.is_zipfile(outFile)):
	    isZipFile = True
	  else:
	    isZipFile = False
	  outFile.close()

	print "Rozbaluji stazene zip file s titulkama..."        
	pocitadlo = pocitadlo + 1
	# unpack each downloaded file: extract zips, rename the rest to .srt
	for i in range(1,pocitadlo):
		if(zipfile.is_zipfile("ENG/sub"+`i`)):
			zfile = zipfile.ZipFile("ENG/sub"+`i`)
			for name in zfile.namelist():			  
			  # skip .nfo info files
			  if(re.search(".*\.nfo$",name)):
			    continue
			  
			  (dirname, filename) = os.path.split(name)
			  zfile.extract(name, "ENG")
		else:
		  # not a zip: assume it is already a subtitle file
		  try:
		    os.rename("ENG/sub"+`i`, "ENG/sub"+`i`+".srt")
		  except:
		    pass
Example #13
0
File: zip.py Project: saxix/pip
 def list(self, options, args):
     """List zipped and unzipped packages found on ``self.paths()``.

     --list takes no positional arguments.  Every existing path is
     either a zipped egg (reported directly) or a package directory
     (site-packages / dist-packages / lib/python) whose entries are
     classified as zipped or unzipped and printed through ``logger``
     with manual indentation bookkeeping.
     """
     if args:
         raise InstallationError("You cannot give an argument with --list")
     for path in sorted(self.paths()):
         if not os.path.exists(path):
             continue
         basename = os.path.basename(path.rstrip(os.path.sep))
         # a path that is itself a zip file is a zipped egg
         if os.path.isfile(path) and zipfile.is_zipfile(path):
             if os.path.dirname(path) not in self.paths():
                 logger.notify("Zipped egg: %s" % display_path(path))
             continue
         # only recognised package directories are scanned
         if (
             basename != "site-packages"
             and basename != "dist-packages"
             and not path.replace("\\", "/").endswith("lib/python")
         ):
             continue
         logger.notify("In %s:" % display_path(path))
         logger.indent += 2
         zipped = []
         unzipped = []
         try:
             for filename in sorted(os.listdir(path)):
                 ext = os.path.splitext(filename)[1].lower()
                 # metadata entries are not packages
                 if ext in (".pth", ".egg-info", ".egg-link"):
                     continue
                 if ext == ".py":
                     logger.info("Not displaying %s: not a package" % display_path(filename))
                     continue
                 full = os.path.join(path, filename)
                 if os.path.isdir(full):
                     unzipped.append((filename, self.count_package(full)))
                 elif zipfile.is_zipfile(full):
                     zipped.append(filename)
                 else:
                     logger.info("Unknown file: %s" % display_path(filename))
             if zipped:
                 logger.notify("Zipped packages:")
                 logger.indent += 2
                 try:
                     for filename in zipped:
                         logger.notify(filename)
                 finally:
                     logger.indent -= 2
             else:
                 logger.notify("No zipped packages.")
             if unzipped:
                 # largest packages first when requested
                 if options.sort_files:
                     unzipped.sort(key=lambda x: -x[1])
                 logger.notify("Unzipped packages:")
                 logger.indent += 2
                 try:
                     for filename, count in unzipped:
                         logger.notify("%s  (%i files)" % (filename, count))
                 finally:
                     logger.indent -= 2
             else:
                 logger.notify("No unzipped packages.")
         finally:
             # always restore the outer indentation level
             logger.indent -= 2
Example #14
0
    def test_make_zipfile(self):
        """make_archive(..., 'zip') from a relative base name must create
        the archive at the absolute path and contain the expected entries
        (everything under root_dir, or only base_dir when given)."""
        # creating something to zip
        root_dir, base_dir = self._create_files()

        tmpdir2 = self.mkdtemp()
        # force shutil to create the directory
        os.rmdir(tmpdir2)
        # working with relative paths
        work_dir = os.path.dirname(tmpdir2)
        rel_base_name = os.path.join(os.path.basename(tmpdir2), 'archive')

        with support.change_cwd(work_dir):
            base_name = os.path.abspath(rel_base_name)
            res = make_archive(rel_base_name, 'zip', root_dir)

        self.assertEqual(res, base_name + '.zip')
        self.assertTrue(os.path.isfile(res))
        self.assertTrue(zipfile.is_zipfile(res))
        # archiving the whole root_dir includes the 'outer' file
        with zipfile.ZipFile(res) as zf:
            self.assertEqual(sorted(zf.namelist()),
                    ['dist/', 'dist/file1', 'dist/file2',
                     'dist/sub/', 'dist/sub/file3', 'dist/sub2/',
                     'outer'])

        with support.change_cwd(work_dir):
            base_name = os.path.abspath(rel_base_name)
            res = make_archive(rel_base_name, 'zip', root_dir, base_dir)

        self.assertEqual(res, base_name + '.zip')
        self.assertTrue(os.path.isfile(res))
        self.assertTrue(zipfile.is_zipfile(res))
        # restricting to base_dir excludes 'outer'
        with zipfile.ZipFile(res) as zf:
            self.assertEqual(sorted(zf.namelist()),
                    ['dist/', 'dist/file1', 'dist/file2',
                     'dist/sub/', 'dist/sub/file3', 'dist/sub2/'])
Example #15
0
def main(arg):
    """Inspect *arg*: a single zip file, or a directory scanned for zips.

    For every zip found, print its info listing (recursing into nested
    zips); corrupt archives are reported instead of aborting the walk.
    """
    if os.path.isdir(arg):
        for parent, dirnames, filenames in os.walk(arg):
            for filename in filenames:
                file_path = os.path.join(parent, filename)
                print(file_path)
                if zipfile.is_zipfile(file_path):
                    _inspect_zip(file_path)
    elif os.path.isfile(arg):
        if zipfile.is_zipfile(arg):
            _inspect_zip(arg)


def _inspect_zip(file_path):
    """Print the contents of one zip, recursing into any nested zips."""
    try:
        with zipfile.ZipFile(file_path, mode="r") as z:
            print_infos(z)
            if len(files_list) > 0:
                print_sub_zips(z)
                files_list.clear()
    except zipfile.BadZipFile as err:
        # BUG FIX: the original printed undefined names (`e`, and
        # `filePath` in the single-file branch), raising NameError the
        # moment a corrupt archive was encountered.
        print(file_path, err)
Example #16
0
def _get_layer_values(layer, upload_session, expand=0):
    """Sample up to 100 features from the uploaded layer.

    Locates the layer's shapefile (unzipping the uploaded archive into
    the session tempdir when needed), opens it with OGR and returns a
    list of per-feature property dicts.  With expand > 0 each value is
    wrapped as {'value': ..., 'binding': <field type name>}.

    NOTE(review): assumes upload_session.base_file[0].base_file points at
    the upload and that a '.shp' member exists in any zip found -- confirm
    against the upload pipeline.
    """
    layer_values = []
    if upload_session:
        absolute_base_file = upload_session.base_file[0].base_file
        tempdir = upload_session.tempdir

        if not os.path.isfile(absolute_base_file):
            # base file is gone: look for a zipped upload in the tempdir
            tmp_files = [f for f in os.listdir(tempdir) if os.path.isfile(os.path.join(tempdir, f))]
            for f in tmp_files:
                if zipfile.is_zipfile(os.path.join(tempdir, f)):
                    absolute_base_file = unzip_file(os.path.join(tempdir, f), '.shp', tempdir=tempdir)
                    absolute_base_file = os.path.join(tempdir,
                                                      absolute_base_file)
        elif zipfile.is_zipfile(absolute_base_file):
            # the upload itself is a zip: extract the shapefile from it
            absolute_base_file = unzip_file(upload_session.base_file[0].base_file,
                                            '.shp', tempdir=tempdir)
            absolute_base_file = os.path.join(tempdir,
                                              absolute_base_file)
        inDataSource = ogr.Open(absolute_base_file)
        lyr = inDataSource.GetLayer(str(layer.name))
        # cap the sample at the first 100 features
        limit = 100
        for feat in islice(lyr, 0, limit):
            feat_values = json_loads_byteified(feat.ExportToJson()).get('properties')
            for k in feat_values.keys():
                type_code = feat.GetFieldDefnRef(k).GetType()
                binding = feat.GetFieldDefnRef(k).GetFieldTypeName(type_code)
                # the string 'None' marks missing data; coerce it to 0
                feat_value = feat_values[k] if str(feat_values[k]) != 'None' else 0
                if expand > 0:
                    ff = {'value': feat_value, 'binding': binding}
                    feat_values[k] = ff
                else:
                    feat_values[k] = feat_value
            layer_values.append(feat_values)
    return layer_values
Example #17
0
def main():
    """CLI entry point: join, extract from, or convert comic archives.

    Mutually exclusive modes:
      -j/--join     validate that all inputs are zips, then join them
      -x/--extract  copy a page range into a new archive
      -r/-f         resize and/or convert images into a new archive
    Destructive steps are gated behind get_user_confirmation().
    """
    parser = argparse.ArgumentParser(description='Manipulate Comic Book archives (split, extract, trim)')
    ops_group = parser.add_mutually_exclusive_group()

    parser.add_argument('input', help="Path to comic book archive (cbz/cbr/zip/rar). Multiple files for join are allowed", default=None, nargs="+")
    ops_group.add_argument('-j', '--join', help="Join input files in specified order", action="store_true", default=False)
    ops_group.add_argument('-x', '--extract', help="Extract ranges to new archive. Format 3,4,10-19")
    parser.add_argument('-r', '--resize', help="Resize images e.g. 1600x1200, x1200 (height only), 1600x (width only) ", default=None)
    parser.add_argument('-f', '--iformat', help="Convert images to formart (png/jpg)", default=None)
    parser.add_argument('-o', '--output', help="Output filename")

    args=parser.parse_args()

    if args.join is True:
        # every input must be a valid zip before joining
        for file in args.input:
            if not zipfile.is_zipfile(file):
                print ("ERROR! Invalid zip file - ", file)
                sys.exit(-1)
        
        if args.output is None:
            args.output = generate_archive_name(args.input[0])
        if get_user_confirmation("Join files and create new archive?"):
            join_selected_archives(args.input, args.output)
    elif args.input is not None:
        # single-archive modes (extract / resize / convert)
        if len(args.input) > 1:
            print ("More than one input file specified. This is valid only with -j/--join switch.")
            sys.exit(-1)
        comic_file =  args.input[0]
        if zipfile.is_zipfile(comic_file):
            sorted_files = get_sorted_filelist(comic_file)
            
            if args.output is None:
                args.output = generate_archive_name(comic_file)
            if args.extract is not None:
                # map the range spec onto page numbers in the archive
                pages_2_extract = parse_range(args.extract, len(sorted_files))
                if len(pages_2_extract.keys()) == 0:
                    print ("Invalid range specification")
                else:
                    graphic = page_range_graphic(pages_2_extract, len(sorted_files))
                    print ("\n{} of {} pages will be extracted\n\n{}\n".format(len(pages_2_extract), len(sorted_files), graphic))
                    count = 0
                    selected_pages = []
                    # keep members whose 1-based position was selected
                    for file in sorted_files:
                        count += 1
                        if count in  pages_2_extract:
                            selected_pages.append(file)
                    if get_user_confirmation("Extract files and create new archive?"):
                        create_archive_from_extracted(comic_file, args.output, selected_pages, args.resize, args.iformat)

            elif args.resize or args.iformat is not None:
                if get_user_confirmation("Process files and create new archive?"):
                    create_archive_from_extracted(comic_file, args.output, sorted_files, args.resize, args.iformat)
        else:
            print ("ERROR! Invalid zip file - ", comic_file)

    print ("Done!\n")
Example #18
0
    def _move_data_file(self, saveloc, json_):
        """
        - Look at _state attribute of object.
        - Find all fields with 'isdatafile' attribute as True.
        - If there is a key in json_ corresponding with
          'name' of the fields with True 'isdatafile' attribute
        - then
          - move that datafile and
          - update the key in the json_ to point to new location
        """
        def _store(src):
            # Store one datafile in saveloc (zip member or plain copy)
            # and return its bare filename as the new relative reference.
            # This used to be duplicated for the list and scalar cases.
            d_fname = os.path.split(src)[1]

            if zipfile.is_zipfile(saveloc):
                # add datafile to zip archive (skip if already present)
                with zipfile.ZipFile(saveloc, 'a',
                                     compression=zipfile.ZIP_DEFLATED,
                                     allowZip64=self._allowzip64) as z:
                    if d_fname not in z.namelist():
                        z.write(src, d_fname)
            else:
                # move datafile to the saveloc directory
                if src != os.path.join(saveloc, d_fname):
                    shutil.copy(src, saveloc)

            return d_fname

        fields = self._state.get_field_by_attribute('isdatafile')

        for field in fields:
            if field.name not in json_:
                continue

            raw_paths = json_[field.name]

            if isinstance(raw_paths, list):
                # one entry per datafile: rewrite each to its bare name
                # so the reference is relative to saveloc
                for i, p in enumerate(raw_paths):
                    json_[field.name][i] = _store(p)
            else:
                # single data filename
                json_[field.name] = _store(raw_paths)

        return json_
	def GetAttr(self, row, col, kind):
		"""Return the grid cell attribute for (row, col).

		Columns 0 and 2 are read-only with distinctive fonts; value cells
		get a colour from list values starting with '#', and a pink
		background flags a missing file path or an invalid
		python_path/model_path pair.  (Python 2 code.)
		"""

		attr = gridlib.GridCellAttr()
		val = self.GetValue(row, col)

		### format font of attr
		if col == 0:
			attr.SetReadOnly(True)
			attr.SetFont(wx.Font(10, wx.SWISS, wx.NORMAL, wx.BOLD))
			#attr.SetBackgroundColour("light blue")
		elif col == 2:
			attr.SetReadOnly(True)
			attr.SetFont(wx.Font(10, wx.SWISS, wx.ITALIC, wx.NORMAL))
		else:
			### load color in cell for pen and fill
			if isinstance(val, list):
				### if an element of the list starts with '#', it is a colour
				for s in filter(lambda a: a.startswith('#'), map(str, val)):
					attr.SetBackgroundColour(s)
					break

		### TODO: improve -- bad_filename_path_flag does not take python_path
		### into account.  Think about how to keep the simulation disabled
		### depending on the validity of the two criteria below.

		### if the path doesn't exist, the background colour flags it
		try:

			### if the type of cell is string (Python 2: str or unicode)
			if isinstance(val, (str, unicode)):

				if col == 1:

					v = self.GetValue(row, 0)

					### if bad filename (for instance generator)
					m = re.match('[a-zA-Z]*(ile)[n|N](ame)[_-a-zA-Z0-9]*', v, re.IGNORECASE)

					### if filename matches and does not exist (ensuring that the filename has an extension)
					if m is not None and not os.path.exists(self.GetValue(row, 1)) and os.path.splitext(self.GetValue(row, 1))[-1] != '':
						self.bad_flag.update({v:False})
						attr.SetBackgroundColour("pink")

					### if the python path is not found
					if v == "python_path":
						### flag when the model is a python file whose path is missing,
						### or an amd/cmd archive whose model file is missing
						if (not os.path.exists(self.model.python_path) and not zipfile.is_zipfile(self.model.model_path)) or\
							(not os.path.exists(self.model.model_path) and zipfile.is_zipfile(self.model.model_path)):
							self.bad_flag.update({v:False})
							attr.SetBackgroundColour("pink")

			return attr

		### Python 2 except syntax; errors are reported, None is returned
		except Exception, info:
			sys.stderr.write(_('Error in GetAttr : %s'%info))
			return
Example #20
0
 def list(self, options, args):
     """Report zipped vs. unzipped packages across ``self.paths()``.

     Rejects positional arguments, then walks every known path: a path
     that is itself a zip file is a zipped egg; site-packages-style
     directories are scanned and their entries classified.  Output goes
     through ``logger`` with manual indent bookkeeping.
     """
     if args:
         raise InstallationError(
             'You cannot give an argument with --list')
     for path in sorted(self.paths()):
         if not os.path.exists(path):
             continue
         basename = os.path.basename(path.rstrip(os.path.sep))
         # a zip file on the path itself is a zipped egg
         if os.path.isfile(path) and zipfile.is_zipfile(path):
             if os.path.dirname(path) not in self.paths():
                 logger.notify('Zipped egg: %s' % display_path(path))
             continue
         # skip anything that is not a recognised package directory
         if (basename != 'site-packages' and basename != 'dist-packages'
             and not path.replace('\\', '/').endswith('lib/python')):
             continue
         logger.notify('In %s:' % display_path(path))
         logger.indent += 2
         zipped = []
         unzipped = []
         try:
             for filename in sorted(os.listdir(path)):
                 ext = os.path.splitext(filename)[1].lower()
                 # metadata files are not packages
                 if ext in ('.pth', '.egg-info', '.egg-link'):
                     continue
                 if ext == '.py':
                     logger.info('Not displaying %s: not a package' % display_path(filename))
                     continue
                 full = os.path.join(path, filename)
                 if os.path.isdir(full):
                     unzipped.append((filename, self.count_package(full)))
                 elif zipfile.is_zipfile(full):
                     zipped.append(filename)
                 else:
                     logger.info('Unknown file: %s' % display_path(filename))
             if zipped:
                 logger.notify('Zipped packages:')
                 logger.indent += 2
                 try:
                     for filename in zipped:
                         logger.notify(filename)
                 finally:
                     logger.indent -= 2
             else:
                 logger.notify('No zipped packages.')
             if unzipped:
                 # biggest packages first when --sort-files is given
                 if options.sort_files:
                     unzipped.sort(key=lambda x: -x[1])
                 logger.notify('Unzipped packages:')
                 logger.indent += 2
                 try:
                     for filename, count in unzipped:
                         logger.notify('%s  (%i files)' % (filename, count))
                 finally:
                     logger.indent -= 2
             else:
                 logger.notify('No unzipped packages.')
         finally:
             # restore the outer indentation even on errors
             logger.indent -= 2
Example #21
0
    def load_file_list(self,args):
        """ Find all images

        Collect image file paths from each argument (walking recursively
        or listing a single directory, per WALK_INSTEAD_LISTDIR) into
        self.files, then sort them according to the global g_sort mode.
        Zip archives are only reported, not handled yet.
        (Python 2 code: print statements.)
        """
        self.files = []
        self.index = 0

        for arg in args:
          print "arg:", arg
          if self.WALK_INSTEAD_LISTDIR:    
            # recursive walk: collect images from all sub-directories
            for directory, sub_directories, files in os.walk(arg):
                print "dir:%s sub:%s files:%s", (directory, sub_directories, files)
                for filename in files:
                    filepath = os.path.join(directory, filename)
                    if is_image(filepath):
                        self.files.append(filepath)
                        print "dirFile:", filename
                    elif zipfile.is_zipfile(filepath):
                        print 'TODO: handle zip files. %20s  %s' % (filepath, zipfile.is_zipfile(filepath))
                print "%d images."% len(self.files)
            if zipfile.is_zipfile(arg):
                print 'TODO: handle zip files. %20s' % (arg)
          else:
            print "LIST"
            # flat listing: only direct children of arg are considered
            for filename in os.listdir(arg):
                print "allfile:", filename
                if is_image(filename):
                    self.files.append(filename)
                    print "File:", filename
                elif zipfile.is_zipfile(filename):
                    print 'TODO: handle zip files. %20s  %s' % (filename, zipfile.is_zipfile(filename))

        print "TOTAL: %d images."% len(self.files)
        # sort in order of date of file
        if g_sort:
            if g_sort == "byDatetime":
                print "Sort by date/time:", g_sort
                self.files.sort(key=lambda s: os.stat(s).st_mtime)
            else:
                print "Sort by name/number:", g_sort
                # NOTE(review): getint is defined but never used as the
                # sort key; the active sort below is plain name order
                def getint(name):
                    basename = name.partition('.')
                    alpha, num = basename.split('_')
                    return int(num)
                self.files.sort(key=lambda s: s)
        else:
            print "No sort:", g_sort

        # debug: dump the final, sorted list
        for i in range(0, len(self.files)):
            print "sortfile:", self.files[i]
Example #22
0
def zip_test(fpath):
    """Classify a file by zip validity and move it accordingly.

    Moves `fpath` into the Complete directory when it is a valid zip
    archive, otherwise into the Damage directory.

    :param fpath: path of the file to test and relocate
    :return: True if `fpath` was a valid zip archive, else False
    """
    complete_dir = "/home/wang/Desktop/result/Complete"
    damage_dir = "/home/wang/Desktop/result/Damage"
    print(fpath)
    # Evaluate once: is_zipfile() reopens and scans the file on every call,
    # and the original called it twice (branch + return).
    is_valid = zipfile.is_zipfile(fpath)
    if is_valid:
        cut_and_paste_file(fpath, complete_dir)
    else:
        cut_and_paste_file(fpath, damage_dir)
    return is_valid
Example #23
0
def is_zip_path(path):
    """ Returns True if the path refers to a zip file (or a path inside one).

    Walks up the ancestor chain of `path` until an existing zip archive is
    found or a filesystem root is reached.
    """
    filepath = path
    while not is_zipfile(filepath) and \
              splitdrive(filepath)[1] != '\\' \
              and splitdrive(filepath)[1] != '/':
        parent = dirname(filepath)
        if parent == filepath:
            # dirname() reached a fixed point (e.g. '' for a relative path
            # with no remaining components). The original looped forever
            # here because '' is neither '\\' nor '/'.
            return False
        filepath = parent

    return is_zipfile(filepath)
Example #24
0
 def test_update_zips(self):
     """Regenerating the resume zips must not change cwd and must produce
     valid zip archives plus the per-major / per-year extraction dirs."""
     original_cwd = getcwd()
     update_resume_zips()
     # The zip builder must leave the process working directory untouched.
     self.assertEqual(getcwd(), original_cwd)
     for archive_name in ('TBP_resumes_by_major.zip', 'TBP_resumes_by_year.zip'):
         archive_path = sep.join([settings.MEDIA_ROOT, archive_name])
         self.assertTrue(isfile(archive_path))
         self.assertTrue(zipfile.is_zipfile(archive_path))
     self.assertTrue(exists(RESUMES_BY_MAJOR_LOCATION()))
     self.assertTrue(exists(RESUMES_BY_YEAR_LOCATION()))
Example #25
0
 def is_zipfile(data):
     """zipfile.is_zipfile for PY26 compatibility.

     Modern zipfile.is_zipfile accepts file-like objects; on Python 2.6
     it only takes a filename and raises TypeError for a buffer. In that
     case, spill the in-memory buffer to a temp file and test that.
     """
     try:
         return zipfile.is_zipfile(data)
     except TypeError:
         import os
         from tempfile import mkstemp
         # Keep the fd returned by mkstemp: the original discarded it,
         # leaking an open descriptor, and never deleted the temp file.
         fd, tmp = mkstemp()
         try:
             with os.fdopen(fd, 'wb') as f:
                 # NOTE(review): assumes data.getvalue() yields bytes
                 # (BytesIO-style); original used text mode 'w' which
                 # breaks for bytes on py3 — confirm callers.
                 f.write(data.getvalue())
             return zipfile.is_zipfile(tmp)
         finally:
             os.unlink(tmp)
Example #26
0
def list_py_file_paths(directory, safe_mode=True):
    """
    Traverse a directory and look for Python files.

    :param directory: the directory to traverse
    :type directory: unicode
    :param safe_mode: whether to use a heuristic to determine whether a file
    contains Airflow DAG definitions
    :return: a list of paths to Python files in the specified directory
    :rtype: list[unicode]
    """
    file_paths = []
    if directory is None:
        return []
    elif os.path.isfile(directory):
        # A single file is returned as-is, no filtering applied.
        return [directory]
    elif os.path.isdir(directory):
        patterns = []
        for root, dirs, files in os.walk(directory, followlinks=True):
            ignore_file = [f for f in files if f == '.airflowignore']
            if ignore_file:
                # Accumulate ignore regexes; 'with' guarantees the handle
                # is closed even if read() raises (original used bare
                # open/close).
                with open(os.path.join(root, ignore_file[0]), 'r') as file_handle:
                    patterns += [p for p in file_handle.read().split('\n') if p]
            for f in files:
                try:
                    file_path = os.path.join(root, f)
                    if not os.path.isfile(file_path):
                        continue
                    # Only .py files and zip archives are candidates.
                    _, file_ext = os.path.splitext(
                        os.path.split(file_path)[-1])
                    if file_ext != '.py' and not zipfile.is_zipfile(file_path):
                        continue
                    if any(re.findall(p, file_path) for p in patterns):
                        continue

                    # Heuristic that guesses whether a Python file contains an
                    # Airflow DAG definition.
                    might_contain_dag = True
                    if safe_mode and not zipfile.is_zipfile(file_path):
                        with open(file_path, 'rb') as file_handle:
                            content = file_handle.read()
                            might_contain_dag = all(
                                s in content for s in (b'DAG', b'airflow'))

                    if not might_contain_dag:
                        continue

                    file_paths.append(file_path)
                except Exception:
                    log = LoggingMixin().log
                    log.exception("Error while examining %s", f)
    return file_paths
Example #27
0
def test_cd():
    # .cd is a lazy WorkingDir wrapper; its repr is part of the tested API.
    assert repr(pth('/bogus').cd) == "pth.WorkingDir('/bogus')"

    # Record the expected call script with the Story mocking DSL: inside the
    # 'with' block, each `call(...) == value` line appears to register a
    # canned return value for that call (the `# returns` trailers mark this).
    # NOTE(review): statement order here defines the expected call sequence —
    # do not reorder.
    with Story(['zipfile.is_zipfile', 'os.chdir', 'os.getcwd', 'os.path.exists', 'os.stat']) as story:
        zipfile.is_zipfile('/bogus') == False  # returns
        os.stat(pth.WorkingDir('/bogus')) == os.stat_result((
            17407, 2621441, 2049, 43, 0, 0, 3805184, 1406286835, 1408573505, 1408573505))  # returns
        os.getcwd() == '/current'  # returns
        os.chdir(pth.WorkingDir('/bogus')) == None  # returns

    # Replay the story while invoking cd(); strict=False tolerates extra,
    # unrecorded calls made by the implementation.
    with story.replay(strict=False):
        pth('/bogus').cd()
Example #28
0
 def index(self, path):
     """Recursively index `path`: entries that are directories or zip
     archives are descended into; everything else (except '.hg') is
     handed to self.mapfile along with whether `path` itself is a zip."""
     inside_zip = zipfile.is_zipfile(path)
     pending = []
     for entry in self.list_files(path):
         if entry == ".hg":
             # Skip Mercurial metadata.
             continue
         child = path + "/" + entry
         if os.path.isdir(child) or zipfile.is_zipfile(child):
             pending.append(child)
         else:
             self.mapfile(child, inside_zip)
     # Recurse only after the current level is fully mapped.
     for child in pending:
         self.index(child)
    def clean(self):
        """Validate the uploaded files and unpack them into a temp dir.

        Flow: check each upload's extension (expanding one level of zip
        archives), verify shapefile part sets, write accepted files into a
        fresh temp directory, and keep only those the inspector can read.
        Validation failures are reported via self.add_error. Returns the
        cleaned_data dict with "file" set to the surviving file objects.
        """
        cleaned_data = super(UploadFileForm, self).clean()
        outputdir = tempfile.mkdtemp()
        files = self.files.getlist("file")
        validfiles = []

        # Create list of all potentially valid files, exploding first level zip files
        for file in files:
            if not validate_extension(file.name):
                self.add_error("file", "Filetype not supported.")
                continue

            if is_zipfile(file):
                with ZipFile(file) as zip:
                    for zipname in zip.namelist():
                        if not validate_extension(zipname):
                            self.add_error("file", "Filetype in zip not supported.")
                            continue
                        validfiles.append(zipname)
            else:
                validfiles.append(file.name)
        # Make sure shapefiles have all their parts
        if not validate_shapefiles_have_all_parts(validfiles):
            self.add_error("file", "Shapefiles must include .shp,.dbf,.shx,.prj")
        # Unpack all zip files and create list of cleaned file objects
        cleaned_files = []
        for file in files:
            if file.name in validfiles:
                # "wb": upload chunks are bytes; the original's text-mode
                # "w" corrupts (or rejects) binary content.
                with open(os.path.join(outputdir, file.name), "wb") as outfile:
                    for chunk in file.chunks():
                        outfile.write(chunk)
                cleaned_files.append(outfile)
            elif is_zipfile(file):
                with ZipFile(file) as zip:
                    # Loop variable renamed from 'zipfile', which shadowed
                    # the stdlib zipfile module.
                    for member_name in zip.namelist():
                        if member_name in validfiles:
                            with zip.open(member_name) as f:
                                # "wb" again: zip members are binary streams.
                                with open(os.path.join(outputdir, member_name), "wb") as outfile:
                                    shutil.copyfileobj(f, outfile)
                                    cleaned_files.append(outfile)

        # After moving files in place make sure they can be opened by inspector
        inspected_files = []
        for cleaned_file in cleaned_files:
            # Only .name of the (closed) handle is used from here on.
            cleaned_file_path = os.path.join(outputdir, cleaned_file.name)
            if not validate_inspector_can_read(cleaned_file_path):
                self.add_error("file", "Inspector could not read file {} or file is empty".format(cleaned_file_path))
                continue
            inspected_files.append(cleaned_file)

        cleaned_data["file"] = inspected_files
        return cleaned_data
Example #30
0
def unpack_file(filename, location, content_type, link):
    filename = os.path.realpath(filename)
<<<<<<< HEAD
    if (content_type == 'application/zip' or
            filename.endswith('.zip') or
            filename.endswith('.whl') or
            zipfile.is_zipfile(filename)):
=======
    if (content_type == 'application/zip'
            or filename.endswith('.zip')
            or filename.endswith('.whl')
            or zipfile.is_zipfile(filename)):
>>>>>>> bde4533e29dfedadf6bcf9d451baa615bc828a59
 def _setupSysPath(self):
     """Zip up the path extension, point the fixture at the archive, and
     delegate to the base class to install it on sys.path."""
     assert not self.pathSetUp
     archive_name = self.pathExtensionName + '.zip'
     zipit(self.pathExtensionName, archive_name)
     self.pathExtensionName = archive_name
     # Sanity-check that zipit produced a real zip archive.
     assert zipfile.is_zipfile(self.pathExtensionName)
     PathModificationTests._setupSysPath(self)
Example #32
0
def cached_path(
    url_or_filename,
    cache_dir=None,
    force_download=False,
    proxies=None,
    resume_download=False,
    user_agent: Union[Dict, str, None] = None,
    extract_compressed_file=False,
    force_extract=False,
    local_files_only=False,
) -> Optional[str]:
    """
    Given something that might be a URL (or might be a local path),
    determine which. If it's a URL, download the file and cache it, and
    return the path to the cached file. If it's already a local path,
    make sure the file exists and then return the path.
    Args:
        cache_dir: specify a cache directory to save the file to (overwrite the default cache dir).
        force_download: if True, re-download the file even if it's already cached in the cache dir.
        resume_download: if True, resume the download if an incompletely received file is found.
        user_agent: Optional string or dict that will be appended to the user-agent on remote requests.
        extract_compressed_file: if True and the path points to a zip or tar file, extract the compressed
            file in a folder along the archive.
        force_extract: if True when extract_compressed_file is True and the archive was already extracted,
            re-extract the archive and override the folder where it was extracted.

    Return:
        None in case of non-recoverable file (non-existent or inaccessible url + no cache on disk).
        Local path (string) otherwise
    """
    if cache_dir is None:
        cache_dir = TRANSFORMERS_CACHE
    # Normalize pathlib.Path inputs to plain strings.
    if isinstance(url_or_filename, Path):
        url_or_filename = str(url_or_filename)
    if isinstance(cache_dir, Path):
        cache_dir = str(cache_dir)

    if is_remote_url(url_or_filename):
        # URL, so get it from the cache (downloading if necessary)
        output_path = get_from_cache(
            url_or_filename,
            cache_dir=cache_dir,
            force_download=force_download,
            proxies=proxies,
            resume_download=resume_download,
            user_agent=user_agent,
            local_files_only=local_files_only,
        )
    elif os.path.exists(url_or_filename):
        # File, and it exists.
        output_path = url_or_filename
    elif urlparse(url_or_filename).scheme == "":
        # File, but it doesn't exist.
        raise EnvironmentError("file {} not found".format(url_or_filename))
    else:
        # Something unknown
        raise ValueError(
            "unable to parse {} as a URL or as a local path".format(
                url_or_filename))

    if extract_compressed_file:
        if not is_zipfile(output_path) and not tarfile.is_tarfile(output_path):
            return output_path

        # Path where we extract compressed archives
        # We avoid '.' in dir name and add "-extracted" at the end: "./model.zip" => "./model-zip-extracted/"
        output_dir, output_file = os.path.split(output_path)
        output_extract_dir_name = output_file.replace(".", "-") + "-extracted"
        output_path_extracted = os.path.join(output_dir,
                                             output_extract_dir_name)

        if os.path.isdir(output_path_extracted) and os.listdir(
                output_path_extracted) and not force_extract:
            return output_path_extracted

        # Prevent parallel extractions
        lock_path = output_path + ".lock"
        with FileLock(lock_path):
            shutil.rmtree(output_path_extracted, ignore_errors=True)
            os.makedirs(output_path_extracted)
            if is_zipfile(output_path):
                # The 'with' block closes the archive; the original's
                # explicit close() inside it was redundant.
                with ZipFile(output_path, "r") as zip_file:
                    zip_file.extractall(output_path_extracted)
            elif tarfile.is_tarfile(output_path):
                # Context manager ensures the tar handle is closed even if
                # extractall() raises (the original leaked it on error).
                with tarfile.open(output_path) as tar_file:
                    tar_file.extractall(output_path_extracted)
            else:
                raise EnvironmentError(
                    "Archive format of {} could not be identified".format(
                        output_path))

        return output_path_extracted

    return output_path
Example #33
0
from __future__ import print_function
import contextlib
import os
import sys
import json
import shutil
import tempfile
import optparse
import zipfile

# Try to detect if we're running from source via the buck repo by
# looking for the .arcconfig file.  If found, add the appropriate
# deps to our python path, so we can find the twitter libs and
# setuptools at runtime.  Also, locate the `pkg_resources` modules
# via our local setuptools import.
# sys.argv[0] not being a zip means we are running from source rather than
# from a packaged zip/PEX executable, so wire up the in-repo dependencies.
if not zipfile.is_zipfile(sys.argv[0]):
    # Remove twitter.common.python from the import path - it may be eagerly
    # loaded as part of site-packages.
    sys.modules.pop('twitter', None)
    sys.modules.pop('twitter.common', None)
    sys.modules.pop('twitter.common.python', None)

    # __file__ is assumed to sit 6 path components below the buck repo root.
    buck_root = os.sep.join(__file__.split(os.sep)[:-6])
    # Prepend the vendored twitter-commons and setuptools so they win over
    # any site-packages copies.
    sys.path.insert(
        0, os.path.join(buck_root,
                        'third-party/py/twitter-commons/src/python'))
    sys.path.insert(0, os.path.join(buck_root, 'third-party/py/setuptools'))

import pkg_resources

from twitter.common.python.pex_builder import PEXBuilder
Example #34
0
    def do_POST(self):
        """This method services the POST request typically from either the Tenant or the Cloud Verifier.

        Only tenant and cloudverifier uri's are supported. Both requests require a nonce parameter.
        The Cloud verifier requires an additional mask parameter.  If the uri or parameters are incorrect, a 400 response is returned.
        """
        rest_params = config.get_restful_params(self.path)

        # Only the /keys/ REST interface is handled here.
        if rest_params is None:
            config.echo_json_response(self, 405,
                                      "Not Implemented: Use /keys/ interface")
            return

        # Reject requests without a body up front.
        content_length = int(self.headers.get('Content-Length', 0))
        if content_length <= 0:
            logger.warning(
                'POST returning 400 response, expected content in message. url:  '
                + self.path)
            config.echo_json_response(self, 400, "expected content in message")
            return

        post_body = self.rfile.read(content_length)
        json_body = json.loads(post_body)

        # The key share arrives base64-encoded and RSA-encrypted under this
        # server's key pair.
        b64_encrypted_key = json_body['encrypted_key']
        decrypted_key = crypto.rsa_decrypt(self.server.rsaprivatekey,
                                           base64.b64decode(b64_encrypted_key))

        have_derived_key = False

        # 'ukey' and 'vkey' deliver the two key shares; after storing either
        # one, the server tries to derive the combined key K.
        # NOTE(review): the exact combination semantics live in add_U/add_V/
        # attempt_decryption — confirm there.
        if rest_params["keys"] == "ukey":
            self.server.add_U(decrypted_key)
            self.server.auth_tag = json_body['auth_tag']
            self.server.payload = json_body.get('payload', None)
            have_derived_key = self.server.attempt_decryption(self)
        elif rest_params["keys"] == "vkey":
            self.server.add_V(decrypted_key)
            have_derived_key = self.server.attempt_decryption(self)
        else:
            logger.warning('POST returning  response. uri not supported: ' +
                           self.path)
            config.echo_json_response(self, 400, "uri not supported")
            return
        logger.info('POST of %s key returning 200' %
                    (('V', 'U')[rest_params["keys"] == "ukey"]))
        config.echo_json_response(self, 200, "Success")

        # no key yet, then we're done
        if not have_derived_key:
            return

        # woo hoo we have a key
        # ok lets write out the key now
        secdir = secure_mount.mount(
        )  # confirm that storage is still securely mounted

        # clean out the secure dir of any previous info before we extract files
        if os.path.isdir("%s/unzipped" % secdir):
            shutil.rmtree("%s/unzipped" % secdir)

        # write out key file
        # NOTE(review): bare open/close rather than 'with'; also consider
        # whether the key file needs restrictive permissions — confirm.
        f = open(secdir + "/" + self.server.enc_keyname, 'w')
        f.write(base64.b64encode(self.server.K).decode())
        f.close()

        # stow the U value for later
        tpm.write_key_nvram(self.server.final_U)

        # optionally extend a hash of they key and payload into specified PCR
        tomeasure = self.server.K

        # if we have a good key, now attempt to write out the encrypted payload
        dec_path = "%s/%s" % (secdir,
                              config.get('cloud_agent', "dec_payload_file"))
        enc_path = "%s/encrypted_payload" % config.WORK_DIR

        dec_payload = None
        enc_payload = None
        if self.server.payload is not None:
            # Payload supplied with this request: decrypt it with K.
            dec_payload = crypto.decrypt(self.server.payload,
                                         bytes(self.server.K))

            enc_payload = self.server.payload
        elif os.path.exists(enc_path):
            # if no payload provided, try to decrypt one from a previous run stored in encrypted_payload
            with open(enc_path, 'rb') as f:
                enc_payload = f.read()
            try:
                dec_payload = crypto.decrypt(enc_payload, self.server.K)
                logger.info("Decrypted previous payload in %s to %s" %
                            (enc_path, dec_path))
            except Exception as e:
                # Stale payload that no longer decrypts under the newly
                # derived key: discard it instead of failing the request.
                logger.warning(
                    "Unable to decrypt previous payload %s with derived key: %s"
                    % (enc_path, e))
                os.remove(enc_path)
                enc_payload = None

        # also write out encrypted payload to be decrytped next time
        if enc_payload is not None:
            with open(enc_path, 'wb') as f:
                f.write(self.server.payload.encode('utf-8'))

        # deal with payload
        payload_thread = None
        if dec_payload is not None:
            # The decrypted payload is measured along with the key (below).
            tomeasure = tomeasure + dec_payload
            # see if payload is a zip
            zfio = io.BytesIO(dec_payload)
            if config.getboolean(
                    'cloud_agent',
                    'extract_payload_zip') and zipfile.is_zipfile(zfio):
                logger.info("Decrypting and unzipping payload to %s/unzipped" %
                            secdir)
                with zipfile.ZipFile(zfio, 'r') as f:
                    f.extractall('%s/unzipped' % secdir)

                # run an included script if one has been provided
                initscript = config.get('cloud_agent', 'payload_script')
                if initscript != "":

                    def initthread():
                        # Worker thread: run the payload script from the
                        # unzipped dir and stream its output into the log.
                        env = os.environ.copy()
                        env['AGENT_UUID'] = self.server.agent_uuid
                        proc = subprocess.Popen(["/bin/bash", initscript],
                                                env=env,
                                                shell=False,
                                                cwd='%s/unzipped' % secdir,
                                                stdout=subprocess.PIPE,
                                                stderr=subprocess.STDOUT)
                        while True:
                            line = proc.stdout.readline()
                            # NOTE(review): proc.stdout yields bytes here,
                            # so comparing to '' (str) can never match and
                            # this break may never fire on py3 — confirm
                            # intended or compare against b''.
                            if line == '' and proc.poll() is not None:
                                break
                            if line:
                                logger.debug("init-output: %s" % line.strip())
                        # should be a no-op as poll already told us it's done
                        proc.wait()

                    if not os.path.exists("%s/unzipped/%s" %
                                          (secdir, initscript)):
                        logger.info(
                            "No payload script %s found in %s/unzipped" %
                            (initscript, secdir))
                    else:
                        logger.info(
                            "Executing payload script: %s/unzipped/%s" %
                            (secdir, initscript))
                        payload_thread = threading.Thread(target=initthread)
            else:
                # Not a zip (or extraction disabled): write the raw payload.
                logger.info("Decrypting payload to %s" % dec_path)
                with open(dec_path, 'wb') as f:
                    f.write(dec_payload)
            zfio.close()

        # now extend a measurement of the payload and key if there was one
        pcr = config.getint('cloud_agent', 'measure_payload_pcr')
        if 0 < pcr < 24:
            logger.info("extending measurement of payload into PCR %s" % pcr)
            measured = tpm.hashdigest(tomeasure)
            tpm.extendPCR(pcr, measured)

        # Start the payload script only after measurement is recorded.
        if payload_thread is not None:
            payload_thread.start()

        return
Example #35
0
def open_stream(
    source: comma.typing.SourceType,
    encoding: str = None,
    no_request: bool = False,
) -> typing.Optional[typing.TextIO]:
    """
    Returns a seekable stream for text data that is properly decoded
    and ready to be read: The `source` can be actual data, a local file
    path, or a URL; it is possible to provide a stream that is compressed
    using ZIP. (This method will store all the data in memory.)
    """

    if source is None:
        return

    # local variable to keep track of the (most accurate for the user)
    # caption of the source
    internal_name = None

    # is this a STRING?
    if type(source) is str:
        source = typing.cast(typing.AnyStr, source)

        # multiline?
        if "\n" in source or "\r" in source:
            # Multiline strings are treated as literal data, not a path/URL.
            newline = comma.helpers.detect_line_terminator(sample=source,
                                                           default="\n")

            # change to a standard newline
            source = source.replace(newline, "\n")

            return io.StringIO(initial_value=source, newline="\n")

        internal_name = source

        # is this a FILE?
        local_path = is_local(location=source)
        if local_path is not None:
            # Open in binary; encoding is detected later from a sample.
            source = open(local_path, mode="rb")

        # is this a URL?
        elif not no_request and is_url(location=source):

            response = requests.get(url=source, allow_redirects=True)

            if not response.ok:
                return None

            if encoding is None:
                encoding = response.encoding

            # With a known encoding use the decoded text; otherwise keep
            # raw bytes so detection can happen below.
            if encoding is not None:
                source = io.StringIO(response.text)
            else:
                source = io.BytesIO(response.content)

        else:
            return None

    # is this BYTES?
    if type(source) is bytes:
        source = typing.cast(bytes, source)
        source = io.BytesIO(source)

    # is this a STREAM?
    if hasattr(source, "seekable"):

        # is it not seekable? if so, make it seekable
        if not source.seekable():

            # if not, read in all the data
            data = source.read()

            if type(data) is str:
                source = io.StringIO(data)

            elif type(data) is bytes:
                source = io.BytesIO(data)

            else:
                raise ValueError(
                    "provided source is neither StringIO nor BytesIO")

        # is it compressed? if so, unzip it
        if zipfile.is_zipfile(source):
            zipsource = zipfile.ZipFile(source, mode="r")

            names = zipsource.namelist()

            # Count members and remember the (last) CSV member, to decide
            # which single member to extract.
            count_total = 0
            count_csv = 0

            csv_filename = None

            for name in names:
                count_total += 1
                if os.path.splitext(name)[1].lower() == ".csv":
                    count_csv += 1
                    csv_filename = name

            if count_total == 1:
                # if only one file, we don't care if it is a CSV (we assume)
                data = zipsource.read(name=names[0])
                source = io.BytesIO(data)

            elif count_total > 1 and count_csv == 1:
                # if exactly one CSV, we know what to do
                data = zipsource.read(name=csv_filename)
                source = io.BytesIO(data)

            elif count_total == 0:
                raise ValueError(
                    "it seems the provided source is ZIP compressed; but "
                    "there are unknown issues unzipping it (or the archive "
                    "is empty)")

            else:
                # other situations are unclear
                raise ValueError("provided ZIP source is ambiguous, "
                                 "contains multiple files: {}".format(names))

    # if at this point, has not been converted to stream, error
    if not hasattr(source, "seekable"):
        return None

    # look at a sample and analyze
    source.seek(0)
    sample = source.read(MAX_SAMPLE_CHUNKSIZE)
    source.seek(0)  # fixed this bug with tests! :-)

    # detect encoding if bytestring
    if type(sample) is bytes:
        if encoding is None:
            encoding = comma.extras.detect_encoding(sample)
        source = io.TextIOWrapper(source, encoding=encoding)

    # try to add useful metadata
    # Best-effort only: not every stream type exposes a writable buffer.
    if internal_name is not None:
        try:
            source.buffer.name = internal_name
        except AttributeError:
            pass

    return source
Example #36
0
    def process_file(self, filepath, only_if_updated=True, safe_mode=True):
        """
        Given a path to a python module or zip file, this method imports
        the module and look for dag objects within it.

        :param filepath: path to a .py file or a zip archive of modules
        :param only_if_updated: skip re-parsing when the file's mtime matches
            the last recorded parse for this path
        :param safe_mode: when True, skip files whose raw bytes do not
            contain both b'DAG' and b'airflow'
        :return: list of DAG objects found (including subdags)
        """
        from airflow.models.dag import DAG  # Avoid circular import

        found_dags = []

        # if the source file no longer exists in the DB or in the filesystem,
        # return an empty list
        # todo: raise exception?
        if filepath is None or not os.path.isfile(filepath):
            return found_dags

        try:
            # This failed before in what may have been a git sync
            # race condition
            file_last_changed_on_disk = datetime.fromtimestamp(
                os.path.getmtime(filepath))
            if only_if_updated \
                    and filepath in self.file_last_changed \
                    and file_last_changed_on_disk == self.file_last_changed[filepath]:
                return found_dags

        except Exception as e:
            self.log.exception(e)
            return found_dags

        mods = []
        is_zipfile = zipfile.is_zipfile(filepath)
        if not is_zipfile:
            # Plain .py file: optionally apply the cheap content heuristic
            # before paying the cost of an import.
            if safe_mode and os.path.isfile(filepath):
                with open(filepath, 'rb') as f:
                    content = f.read()
                    if not all([s in content for s in (b'DAG', b'airflow')]):
                        self.file_last_changed[
                            filepath] = file_last_changed_on_disk
                        # Don't want to spam user with skip messages
                        if not self.has_logged:
                            self.has_logged = True
                            self.log.info(
                                "File %s assumed to contain no DAGs. Skipping.",
                                filepath)
                        return found_dags

            self.log.debug("Importing %s", filepath)
            org_mod_name, _ = os.path.splitext(os.path.split(filepath)[-1])
            # Hash the path into the module name so same-named DAG files in
            # different directories don't collide in sys.modules.
            mod_name = ('unusual_prefix_' +
                        hashlib.sha1(filepath.encode('utf-8')).hexdigest() +
                        '_' + org_mod_name)

            if mod_name in sys.modules:
                del sys.modules[mod_name]

            # Bound import time so a hanging top-level statement in a DAG
            # file cannot stall the whole DagBag.
            with timeout(
                    configuration.conf.getint('core',
                                              "DAGBAG_IMPORT_TIMEOUT")):
                try:
                    m = imp.load_source(mod_name, filepath)
                    mods.append(m)
                except Exception as e:
                    self.log.exception("Failed to import: %s", filepath)
                    self.import_errors[filepath] = str(e)
                    self.file_last_changed[
                        filepath] = file_last_changed_on_disk

        else:
            # Zip archive: only top-level .py/.pyc members are considered.
            zip_file = zipfile.ZipFile(filepath)
            for mod in zip_file.infolist():
                head, _ = os.path.split(mod.filename)
                mod_name, ext = os.path.splitext(mod.filename)
                if not head and (ext == '.py' or ext == '.pyc'):
                    if mod_name == '__init__':
                        self.log.warning("Found __init__.%s at root of %s",
                                         ext, filepath)
                    if safe_mode:
                        with zip_file.open(mod.filename) as zf:
                            self.log.debug("Reading %s from %s", mod.filename,
                                           filepath)
                            content = zf.read()
                            if not all(
                                [s in content for s in (b'DAG', b'airflow')]):
                                self.file_last_changed[filepath] = (
                                    file_last_changed_on_disk)
                                # todo: create ignore list
                                # Don't want to spam user with skip messages
                                if not self.has_logged:
                                    self.has_logged = True
                                    self.log.info(
                                        "File %s assumed to contain no DAGs. Skipping.",
                                        filepath)

                    if mod_name in sys.modules:
                        del sys.modules[mod_name]

                    try:
                        # Importing through sys.path lets importlib resolve
                        # the member directly out of the zip archive.
                        sys.path.insert(0, filepath)
                        m = importlib.import_module(mod_name)
                        mods.append(m)
                    except Exception as e:
                        self.log.exception("Failed to import: %s", filepath)
                        self.import_errors[filepath] = str(e)
                        self.file_last_changed[
                            filepath] = file_last_changed_on_disk

        # Scan every imported module's globals for DAG instances.
        for m in mods:
            for dag in list(m.__dict__.values()):
                if isinstance(dag, DAG):
                    if not dag.full_filepath:
                        dag.full_filepath = filepath
                        # NOTE(review): fileloc is only patched when
                        # full_filepath was unset (note the nesting) —
                        # confirm this asymmetry is intended.
                        if dag.fileloc != filepath and not is_zipfile:
                            dag.fileloc = filepath
                    try:
                        dag.is_subdag = False
                        self.bag_dag(dag, parent_dag=dag, root_dag=dag)
                        # Validate string schedule intervals as cron specs.
                        if isinstance(dag._schedule_interval,
                                      six.string_types):
                            croniter(dag._schedule_interval)
                        found_dags.append(dag)
                        found_dags += dag.subdags
                    except (CroniterBadCronError, CroniterBadDateError,
                            CroniterNotAlphaError) as cron_e:
                        self.log.exception("Failed to bag_dag: %s",
                                           dag.full_filepath)
                        self.import_errors[dag.full_filepath] = \
                            "Invalid Cron expression: " + str(cron_e)
                        self.file_last_changed[dag.full_filepath] = \
                            file_last_changed_on_disk
                    except AirflowDagCycleException as cycle_exception:
                        self.log.exception("Failed to bag_dag: %s",
                                           dag.full_filepath)
                        self.import_errors[dag.full_filepath] = str(
                            cycle_exception)
                        self.file_last_changed[dag.full_filepath] = \
                            file_last_changed_on_disk

        self.file_last_changed[filepath] = file_last_changed_on_disk
        return found_dags
Example #37
0
 def read_text(self, filename):
     """Return the UTF-8 decoded contents of the zip member matching
     `filename` when running from a zipped executable; None otherwise
     (or when no member matches)."""
     if not is_zipfile(sys.argv[0]):
         # Not running from a zip archive: nothing to look up.
         return None
     archive = ZipFileWithPermissions(sys.argv[0])
     for member in archive.namelist():
         if member and self._match_file(member, filename):
             return archive.read(member).decode(encoding="utf-8")
Example #38
0
def zipzip(zip_path, *args, **kwargs):  #pylint: disable=too-many-locals
    """creates or updates the zip file at `zip_path`
    with contents given by the `*args`, which can be
    paths to files and/or directories, glob definitons
    are not supported.

    If the zip file exists, new items will be added to it,
    otherwise the zip file will be newly created.

    If an item added already exists in the zipfile,
    the old item is replaced with the new one.

    If existing file is not zip, raises `ValueError` exception.

    Keyword arguments:
        exclude_dirs: directory paths whose subtrees are skipped.
        exclude_files: exact file paths skipped (top-level file items only).
        exclude_dir_types: directory-path suffixes to skip.
        exclude_file_types: file-name suffixes to skip.
        ignore_subpath: prefix stripped from top-level file items to form
            the archive name.
    """
    # Legacy knob forcing best compression.  NOTE(review): this mutates
    # module-level zlib state for the whole process — confirm callers rely
    # on it before switching to ZipFile(..., compresslevel=9).
    zlib.Z_DEFAULT_COMPRESSION = 9
    exclude_dirs = kwargs.get('exclude_dirs', list())
    exclude_files = kwargs.get('exclude_files', list())
    exclude_dir_types = kwargs.get('exclude_dir_types', list())
    exclude_file_types = kwargs.get('exclude_file_types', list())
    ignore_subpath = kwargs.get('ignore_subpath', '')

    if os.path.exists(zip_path):
        if not zipfile.is_zipfile(zip_path):
            raise ValueError('`zip_path` must be a zip file, if exists')

    def is_excluded_dir(dname):  #pylint: disable=missing-docstring
        # True when `dname` equals an excluded dir or lies under one.
        for ex_dir in exclude_dirs:
            if not dname.startswith(ex_dir):
                continue
            # A prefix hit only counts at a whole path component
            # ('foo' must not exclude 'foobar').  Bug fix: keep scanning
            # the remaining entries instead of returning the boundary-test
            # result for the first prefix hit, which silently ignored any
            # later entry that would have matched exactly.
            my_dl = len(dname)
            ex_dl = len(ex_dir)
            if my_dl == ex_dl or dname[ex_dl] == '/':
                return True
        return False

    with zipfile.ZipFile(zip_path, 'a', zipfile.ZIP_DEFLATED) as zip_file:
        for item in args:
            if os.path.isfile(item):
                if item in exclude_files:
                    continue
                with warnings.catch_warnings():
                    warnings.simplefilter('ignore')
                    # Strip `ignore_subpath` from the stored archive name
                    # when requested; fall back to the full path otherwise.
                    if ignore_subpath and item.startswith(ignore_subpath):
                        arcname = item[len(ignore_subpath):]
                        if arcname:
                            zip_file.write(item, arcname)
                        else:
                            zip_file.write(item)
                    else:
                        zip_file.write(item)

            elif os.path.isdir(item):
                # Helper hoisted out of this loop (it used to be redefined
                # on every os.walk iteration).
                for dir_path, _, file_names in os.walk(item):
                    if is_excluded_dir(dir_path):
                        continue

                    if any(dir_path.endswith(dirtype)
                           for dirtype in exclude_dir_types):
                        continue

                    for file_name in file_names:
                        if any(file_name.endswith(filetype)
                               for filetype in exclude_file_types):
                            continue
                        with warnings.catch_warnings():
                            warnings.simplefilter('ignore')
                            zip_file.write(os.path.join(dir_path, file_name))
Example #39
0
def iter_files_distros(path=None, repeated_distro='first'):
    """Yield (config parser, distro) pairs for entry_points.txt files on *path*.

    *path* defaults to ``sys.path``.  Each pair is a CaseSensitiveConfigParser
    loaded with one distribution's entry_points.txt, and a Distribution (or
    None when a name/version could not be parsed from the folder name).
    """
    if path is None:
        path = sys.path

    # Distributions found earlier in path will shadow those with the same name
    # found later. If these distributions used different module names, it may
    # actually be possible to import both, but in most cases this shadowing
    # will be correct.
    distro_names_seen = set()

    for folder in path:
        if folder.rstrip('/\\').endswith('.egg'):
            # Gah, eggs
            egg_name = osp.basename(folder)
            if '-' in egg_name:
                distro = Distribution(*egg_name.split('-')[:2])

                if (repeated_distro == 'first') \
                        and (distro.name in distro_names_seen):
                    continue
                distro_names_seen.add(distro.name)
            else:
                distro = None

            if osp.isdir(folder):
                ep_path = osp.join(folder, 'EGG-INFO', 'entry_points.txt')
                if osp.isfile(ep_path):
                    cp = CaseSensitiveConfigParser()
                    cp.read(ep_path)
                    yield cp, distro

            elif zipfile.is_zipfile(folder):
                # Bug fix: close the archive deterministically — the original
                # never closed the ZipFile (leaked on both the KeyError and
                # the yield paths).
                with zipfile.ZipFile(folder) as z:
                    try:
                        info = z.getinfo('EGG-INFO/entry_points.txt')
                    except KeyError:
                        # Same control flow as before: move on to the next
                        # folder (a zipped egg has no on-disk *.dist-info).
                        continue
                    cp = CaseSensitiveConfigParser()
                    with z.open(info) as f:
                        fu = io.TextIOWrapper(f)
                        cp.read_file(fu,
                            source=osp.join(folder, 'EGG-INFO', 'entry_points.txt'))
                yield cp, distro

        # Bug fix: dedicated loop variable instead of rebinding the `path`
        # parameter (the original shadowed it, which was fragile/confusing).
        for ep_file in itertools.chain(
            glob.iglob(osp.join(folder, '*.dist-info', 'entry_points.txt')),
            glob.iglob(osp.join(folder, '*.egg-info', 'entry_points.txt'))
        ):
            distro_name_version = osp.splitext(osp.basename(osp.dirname(ep_file)))[0]
            if '-' in distro_name_version:
                distro = Distribution(*distro_name_version.split('-', 1))

                if (repeated_distro == 'first') \
                        and (distro.name in distro_names_seen):
                    continue
                distro_names_seen.add(distro.name)
            else:
                distro = None
            cp = CaseSensitiveConfigParser()
            cp.read(ep_file)
            yield cp, distro
Example #40
0
def extract_archive(from_path, to_path=None, overwrite=False, logger=None):
    """Extract archive.

    Args:
        from_path: the path of the archive.
        to_path: the root path of the extracted files (directory of from_path)
        overwrite: overwrite existing files (False)
        logger: optional logger used for progress messages.

    Returns:
        List of paths to extracted files even if not overwritten.

    Raises:
        NotImplementedError: if the archive suffix is not supported.

    Examples:
    --------
        >>> url = 'http://www.quest.dcs.shef.ac.uk/wmt16_files_mmt/validation.tar.gz'
        >>> from_path = './validation.tar.gz'
        >>> to_path = './'
        >>> kdmt.download.download_from_url(url, from_path)
        >>> kdmt.file.extract_archive(from_path, to_path)
        >>> ['.data/val.de', '.data/val.en']

    """

    if to_path is None:
        # Default to extracting next to the archive itself.
        to_path = os.path.dirname(from_path)

    if from_path.endswith(('.tar.gz', '.tgz')):
        if logger:
            logger.info('Opening tar file {}.'.format(from_path))
        with tarfile.open(from_path, 'r') as tar:
            files = []
            for file_ in tar:
                file_path = os.path.join(to_path, file_.name)
                if file_.isfile():
                    files.append(file_path)
                    if os.path.exists(file_path):
                        if logger:
                            logger.info('{} already extracted.'.format(file_path))
                        # Existing files are still reported in the result but
                        # only re-extracted when overwrite is requested.
                        if not overwrite:
                            continue
                tar.extract(file_, to_path)
            if logger:
                logger.info('Finished extracting tar file {}.'.format(from_path))
            return files

    elif from_path.endswith('.zip'):
        assert zipfile.is_zipfile(from_path), from_path
        if logger:
            logger.info('Opening zip file {}.'.format(from_path))
        with zipfile.ZipFile(from_path, 'r') as zfile:
            files = []
            for file_ in zfile.namelist():
                file_path = os.path.join(to_path, file_)
                files.append(file_path)
                if os.path.exists(file_path):
                    if logger:
                        logger.info('{} already extracted.'.format(file_path))
                    if not overwrite:
                        continue
                zfile.extract(file_, to_path)
        # namelist() includes directory entries; report only real files.
        files = [f for f in files if os.path.isfile(f)]
        if logger:
            logger.info('Finished extracting zip file {}.'.format(from_path))
        return files

    elif from_path.endswith('.gz'):
        if logger:
            logger.info('Opening gz file {}.'.format(from_path))
        default_block_size = 65536
        filename = from_path[:-3]
        files = [filename]
        with gzip.open(from_path, 'rb') as gzfile, \
                open(filename, 'wb') as d_file:
            while True:
                block = gzfile.read(default_block_size)
                if not block:
                    break
                d_file.write(block)
            # Bug fix: the original issued one extra d_file.write(block)
            # after the loop; the loop only exits when `block` is empty,
            # so that trailing write was a redundant no-op and is removed.
        if logger:
            logger.info('Finished extracting gz file {}.'.format(from_path))
        return files

    else:
        raise NotImplementedError(
            "We currently only support tar.gz, .tgz, .gz and zip archives.")
Example #41
0
    def plcupload(self):
        u"""Upload the given project to the RevPi.
        @return True, on successful processing"""
        tup = self.lst_typeup.index(self.var_typeup.get())
        dirselect = ""
        dirtmp = None  # temporary directory for extracted archives
        filelist = []
        fileselect = None
        foldername = ""
        rscfile = None  # piCtory configuration file found inside an archive

        if tup == 0:
            # File: let the user pick individual Python files
            fileselect = tkfd.askopenfilenames(
                parent=self.master,
                title="Upload Python program...",
                initialdir=self.opt.get("plcupload_dir", homedir),
                filetypes=(("Python", "*.py"), (_("All files"), "*.*"))
            )
            if type(fileselect) == tuple and len(fileselect) > 0:
                for file in fileselect:
                    filelist.append(file)

        elif tup == 1:
            # Folder: upload a whole directory tree
            dirselect = tkfd.askdirectory(
                parent=self.master,
                title=_("Folder to upload"),
                mustexist=True,
                initialdir=self.opt.get("plcupload_dir", homedir)
            )

            # Remember the folder name so it can be recreated on the RevPi
            foldername = os.path.basename(dirselect)

            if type(dirselect) == str and dirselect != "":
                filelist = self.create_filelist(dirselect)

        elif tup == 2:
            # Zip archive
            fileselect = tkfd.askopenfilename(
                parent=self.master,
                title=_("Upload Zip archive..."),
                initialdir=self.opt.get("plcupload_file", ""),
                initialfile=self.revpi + ".zip",
                filetypes=(
                    (_("Zip archive"), "*.zip"), (_("All files"), "*.*")
                )
            )
            if type(fileselect) == str and fileselect != "":
                # Validate the zip file before extracting it
                if zipfile.is_zipfile(fileselect):
                    dirtmp = mkdtemp()
                    fhz = zipfile.ZipFile(fileselect)
                    fhz.extractall(dirtmp)
                    fhz.close()

                    filelist = self.create_filelist(dirtmp)
                    dirselect, rscfile = self.check_replacedir(dirtmp)

                else:
                    tkmsg.showerror(
                        _("Error"),
                        _("The specified file is not a ZIP archive."),
                        parent=self.master
                    )
                    return False

        elif tup == 3:
            # TarGz archive
            fileselect = tkfd.askopenfilename(
                parent=self.master,
                title=_("Upload TarGz archiv..."),
                initialdir=self.opt.get("plcupload_file", ""),
                initialfile=self.revpi + ".tar.gz",
                filetypes=(
                    (_("TGZ archive"), "*.tar.gz"), (_("All files"), "*.*")
                )
            )
            if type(fileselect) == str and fileselect != "":

                # Validate the tar file before extracting it
                if tarfile.is_tarfile(fileselect):
                    dirtmp = mkdtemp()
                    fht = tarfile.open(fileselect)
                    fht.extractall(dirtmp)
                    fht.close()

                    filelist = self.create_filelist(dirtmp)
                    dirselect, rscfile = self.check_replacedir(dirtmp)

                else:
                    tkmsg.showerror(
                        _("Error"),
                        _("The specified file is not a TAR archive."),
                        parent=self.master
                    )
                    return False

        # If no files were selected there is nothing to do
        if len(filelist) == 0:
            return True

        # Clean up on the RevPi before transfer, if selected
        if self.var_cleanup.get() and not self.xmlcli.plcuploadclean():
            tkmsg.showerror(
                _("Error"),
                _("There was an error deleting the files on the "
                    "Revolution Pi."),
                parent=self.master
            )
            return False

        # Read the currently configured program (for the uploaded flag)
        opt_program = self.xmlcli.get_config()
        opt_program = opt_program.get("plcprogram", "none.py")
        self.uploaded = True
        ec = 0  # error code: 0 ok, -1 remote rejected a file, -2 exception

        for fname in filelist:

            if fname == rscfile:
                continue

            # FIXME: error handling when reading files
            with open(fname, "rb") as fh:

                # Determine the name to send
                if dirselect == "":
                    sendname = os.path.basename(fname)
                else:
                    # Prepend the folder name to the file path for the RevPi
                    sendname = os.path.join(
                        foldername,
                        fname.replace(dirselect, "")[1:]
                    )

                # Check whether this file is configured as the start program
                if sendname == opt_program:
                    self.uploaded = False

                # Transfer the file (gzip-compressed over XML-RPC)
                try:
                    ustatus = self.xmlcli.plcupload(
                        Binary(gzip.compress(fh.read())), sendname)
                except Exception:
                    ec = -2
                    break

                if not ustatus:
                    ec = -1
                    break

        if ec == 0:
            tkmsg.showinfo(
                _("Success"),
                _("The PLC program was transferred successfully."),
                parent=self.master
            )

            if self.var_picup.get():
                if rscfile is not None:
                    self.setpictoryrsc(rscfile)
                else:
                    tkmsg.showerror(
                        _("Error"),
                        _("There is no piCtory configuration in this "
                            "archive."),
                        parent=self.master
                    )

            # Save settings for the next run
            if tup == 0:
                self.opt["plcupload_dir"] = os.path.dirname(fileselect[0])
            elif tup == 1:
                self.opt["plcupload_dir"] = dirselect
            else:
                self.opt["plcupload_file"] = os.path.dirname(fileselect)

            self.opt["typeup"] = self.var_typeup.get()
            self.opt["picup"] = self.var_picup.get()
            _savedefaults(self.revpi, self.opt)

        elif ec == -1:
            tkmsg.showerror(
                _("Error"),
                _("The Revolution Pi could not process some parts of the "
                    "transmission."),
                parent=self.master
            )

        elif ec == -2:
            tkmsg.showerror(
                _("Error"),
                _("Errors occurred during transmission"),
                parent=self.master
            )

        # Clean up the temp dir
        if dirtmp is not None:
            rmtree(dirtmp)

        # NOTE(review): transfer errors (ec != 0) still return True here —
        # only invalid archives or a failed cleanup return False; confirm
        # callers expect that.
        return True
Example #42
0
def VerificationZip(fichier=""):
    """Return True when *fichier* points to a valid zip archive."""
    est_zip = zipfile.is_zipfile(fichier)
    return est_zip
Example #43
0
    def __init__(self,
                 filename,
                 gdalDataset,
                 gdalMetadata,
                 fast=False,
                 fixgcp=True,
                 **kwargs):
        """Build VRT bands for a Sentinel-1 A/B SAFE product (zip or directory).

        Parameters
        ----------
        filename : str
            Path to the product; the basename must start with 'S1A' or 'S1B'.
            May be a zip archive or an unpacked SAFE directory.
        gdalDataset, gdalMetadata
            Accepted for mapper-interface compatibility; not read here.
        fast : bool
            When True, only metadata and geolocation are initialized and
            the constructor returns before any bands are created.
        fixgcp : bool
            When True (and not fast), geolocation data is corrected.

        Raises
        ------
        WrongMapperError
            If the file is not Sentinel-1A/1B or required member files
            (measurement/calibration/noise/annotation/manifest) are missing.
        NansatReadError
            If scipy is not available.
        """
        if not os.path.split(filename.rstrip('/'))[1][:3] in ['S1A', 'S1B']:
            raise WrongMapperError('%s: Not Sentinel 1A or 1B' % filename)

        if not IMPORT_SCIPY:
            raise NansatReadError(
                'Sentinel-1 data cannot be read because scipy is not installed'
            )

        if zipfile.is_zipfile(filename):
            # Zipped SAFE: reference members through GDAL's /vsizip/ handler.
            zz = zipfile.PyZipFile(filename)
            # Assuming the file names are consistent, the polarization
            # dependent data should be sorted equally such that we can use the
            # same indices consistently for all the following lists
            # THIS IS NOT THE CASE...
            mds_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'measurement/s1' in fn
            ]
            calibration_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/calibration-s1' in fn
            ]
            noise_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/calibration/noise-s1' in fn
            ]
            annotation_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'annotation/s1' in fn
            ]
            manifest_files = [
                '/vsizip/%s/%s' % (filename, fn) for fn in zz.namelist()
                if 'manifest.safe' in fn
            ]
            zz.close()
        else:
            # Unpacked SAFE directory: locate members directly on disk.
            mds_files = glob.glob('%s/measurement/s1*' % filename)
            calibration_files = glob.glob(
                '%s/annotation/calibration/calibration-s1*' % filename)
            noise_files = glob.glob('%s/annotation/calibration/noise-s1*' %
                                    filename)
            annotation_files = glob.glob('%s/annotation/s1*' % filename)
            manifest_files = glob.glob('%s/manifest.safe' % filename)

        if (not mds_files or not calibration_files or not noise_files
                or not annotation_files or not manifest_files):
            raise WrongMapperError(filename)

        # convert list of MDS files into dictionary. Keys - polarizations in upper case.
        mds_files = {
            os.path.basename(ff).split('-')[3].upper(): ff
            for ff in mds_files
        }
        polarizations = list(mds_files.keys())

        # read annotation files
        self.annotation_data = self.read_annotation(annotation_files)
        if not fast and fixgcp:
            self.correct_geolocation_data()

        # read manifest file
        manifest_data = self.read_manifest_data(manifest_files[0])

        # very fast constructor without any bands only with some metadata and geolocation
        self._init_empty(manifest_data, self.annotation_data)

        # skip adding bands in the fast mode and RETURN
        if fast:
            return

        # Open data files with GDAL
        gdalDatasets = {}
        for pol in polarizations:
            gdalDatasets[pol] = gdal.Open(mds_files[pol])

            if not gdalDatasets[pol]:
                raise WrongMapperError('%s: No Sentinel-1 datasets found' %
                                       mds_files[pol])

        # Check metadata to confirm it is Sentinel-1 L1
        metadata = gdalDatasets[polarizations[0]].GetMetadata()

        # create full size VRTs with incidenceAngle and elevationAngle
        annotation_vrts = self.vrts_from_arrays(
            self.annotation_data, ['incidenceAngle', 'elevationAngle'])
        self.band_vrts.update(annotation_vrts)

        # create full size VRTS with calibration LUT
        calibration_names = ['sigmaNought', 'betaNought']
        calibration_list_tag = 'calibrationVectorList'
        for calibration_file in calibration_files:
            pol = '_' + os.path.basename(calibration_file).split(
                '-')[4].upper()
            xml = self.read_vsi(calibration_file)
            calibration_data = self.read_calibration(xml, calibration_list_tag,
                                                     calibration_names, pol)
            calibration_vrts = self.vrts_from_arrays(calibration_data,
                                                     calibration_names, pol,
                                                     True, 1)
            self.band_vrts.update(calibration_vrts)

        # create full size VRTS with noise LUT
        # NOTE(review): if the XML contains neither <noiseVectorList> nor
        # <noiseRangeVectorList>, noise_list_tag/noise_name stay unbound and
        # the read_calibration call below raises NameError — confirm inputs
        # always contain one of the two tags.
        for noise_file in noise_files:
            pol = '_' + os.path.basename(noise_file).split('-')[4].upper()
            xml = self.read_vsi(noise_file)
            if '<noiseVectorList' in xml:
                noise_list_tag = 'noiseVectorList'
                noise_name = 'noiseLut'
            elif '<noiseRangeVectorList' in xml:
                noise_list_tag = 'noiseRangeVectorList'
                noise_name = 'noiseRangeLut'
            noise_data = self.read_calibration(xml, noise_list_tag,
                                               [noise_name], pol)
            noise_vrts = self.vrts_from_arrays(noise_data, [noise_name], pol,
                                               True, 1)
            self.band_vrts.update(noise_vrts)

        #### Create metaDict: dict with metadata for all bands
        metaDict = []
        bandNumberDict = {}
        bnmax = 0
        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'DN_%s' % pol
            # A dictionary of band numbers is needed for the pixel function
            # bands further down. This is not the best solution. It would be
            # better to have a function in VRT that returns the number given a
            # band name. This function exists in Nansat but could perhaps be
            # moved to VRT? The existing nansat function could just call the
            # VRT one...
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            band = gdalDatasets[pol].GetRasterBand(1)
            dtype = band.DataType
            metaDict.append({
                'src': {
                    'SourceFilename': mds_files[pol],
                    'SourceBand': 1,
                    'DataType': dtype,
                },
                'dst': {
                    'name': name,
                },
            })
        # add bands with metadata and corresponding values to the empty VRT
        self.create_bands(metaDict)
        '''
        Calibration should be performed as

        s0 = DN^2/sigmaNought^2,

        where sigmaNought is from e.g.
        annotation/calibration/calibration-s1a-iw-grd-hh-20140811t151231-20140811t151301-001894-001cc7-001.xml,
        and DN is the Digital Numbers in the tiff files.

        Also the noise should be subtracted.

        See
        https://sentinel.esa.int/web/sentinel/sentinel-1-sar-wiki/-/wiki/Sentinel%20One/Application+of+Radiometric+Calibration+LUT

        The noise correction/subtraction is implemented in an independent package "sentinel1denoised"
        See
        https://github.com/nansencenter/sentinel1denoised
        '''

        # Get look direction
        # NOTE(review): calibration_data here holds the values from the LAST
        # calibration file processed in the loop above — confirm this is the
        # intended source for the look-direction geometry.
        longitude, latitude = self.transform_points(
            calibration_data['pixel'].flatten(),
            calibration_data['line'].flatten())
        longitude.shape = calibration_data['pixel'].shape
        latitude.shape = calibration_data['pixel'].shape
        sat_heading = initial_bearing(longitude[:-1, :], latitude[:-1, :],
                                      longitude[1:, :], latitude[1:, :])
        look_direction = scipy.ndimage.interpolation.zoom(
            np.mod(sat_heading + 90, 360),
            (np.shape(longitude)[0] / (np.shape(longitude)[0] - 1.), 1))

        # Decompose, to avoid interpolation errors around 0 <-> 360
        look_direction_u = np.sin(np.deg2rad(look_direction))
        look_direction_v = np.cos(np.deg2rad(look_direction))
        look_u_VRT = VRT.from_array(look_direction_u)
        look_v_VRT = VRT.from_array(look_direction_v)
        lookVRT = VRT.from_lonlat(longitude, latitude)
        lookVRT.create_band([{
            'SourceFilename': look_u_VRT.filename,
            'SourceBand': 1
        }, {
            'SourceFilename': look_v_VRT.filename,
            'SourceBand': 1
        }], {'PixelFunctionType': 'UVToDirectionTo'})

        # Blow up to full size
        lookVRT = lookVRT.get_resized_vrt(self.dataset.RasterXSize,
                                          self.dataset.RasterYSize, 1)

        # Store VRTs so that they are accessible later
        self.band_vrts['look_u_VRT'] = look_u_VRT
        self.band_vrts['look_v_VRT'] = look_v_VRT
        self.band_vrts['lookVRT'] = lookVRT

        metaDict = []
        # Add bands to full size VRT
        for pol in polarizations:
            name = 'sigmaNought_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename': (self.band_vrts[name].filename),
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })
            name = 'noise_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': {
                    'SourceFilename':
                    self.band_vrts['%s_%s' % (noise_name, pol)].filename,
                    'SourceBand': 1
                },
                'dst': {
                    'name': name
                }
            })

        name = 'look_direction'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        metaDict.append({
            'src': {
                'SourceFilename': self.band_vrts['lookVRT'].filename,
                'SourceBand': 1
            },
            'dst': {
                'wkv': 'sensor_azimuth_angle',
                'name': name
            }
        })

        for pol in polarizations:
            dsPath, dsName = os.path.split(mds_files[pol])
            name = 'sigma0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol],
                }, {
                    'SourceFilename':
                    self.band_vrts['sigmaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_scattering_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })
            name = 'beta0_%s' % pol
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            metaDict.append({
                'src': [{
                    'SourceFilename': self.filename,
                    'SourceBand': bandNumberDict['DN_%s' % pol]
                }, {
                    'SourceFilename':
                    self.band_vrts['betaNought_%s' % pol].filename,
                    'SourceBand':
                    1
                }],
                'dst': {
                    'wkv':
                    'surface_backwards_brightness_coefficient_of_radar_wave',
                    'PixelFunctionType': 'Sentinel1Calibration',
                    'polarization': pol,
                    'suffix': pol,
                },
            })

        self.create_bands(metaDict)

        # Add incidence angle as band
        name = 'incidence_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['incidenceAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_incidence', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add elevation angle as band
        name = 'elevation_angle'
        bandNumberDict[name] = bnmax + 1
        bnmax = bandNumberDict[name]
        src = {
            'SourceFilename': self.band_vrts['elevationAngle'].filename,
            'SourceBand': 1
        }
        dst = {'wkv': 'angle_of_elevation', 'name': name}
        self.create_band(src, dst)
        self.dataset.FlushCache()

        # Add sigma0_VV
        # Derive a synthetic VV band from HH when only HH is present.
        if 'VV' not in polarizations and 'HH' in polarizations:
            name = 'sigma0_VV'
            bandNumberDict[name] = bnmax + 1
            bnmax = bandNumberDict[name]
            src = [{
                'SourceFilename': self.filename,
                'SourceBand': bandNumberDict['DN_HH'],
            }, {
                'SourceFilename': (self.band_vrts['sigmaNought_HH'].filename),
                'SourceBand':
                1,
            }, {
                'SourceFilename': self.band_vrts['incidenceAngle'].filename,
                'SourceBand': 1
            }]
            dst = {
                'wkv':
                'surface_backwards_scattering_coefficient_of_radar_wave',
                'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
                'polarization': 'VV',
                'suffix': 'VV'
            }
            self.create_band(src, dst)
            self.dataset.FlushCache()
Example #44
0
    def _addon_details(self, addon_path):
        """
        Returns a dictionary of details about the addon.

        :param addon_path: path to the add-on directory or XPI

        Returns::

            {'id':      u'*****@*****.**', # id of the addon
             'version': u'1.4',                # version of the addon
             'name':    u'Rainbow',            # name of the addon
             'unpack':  False }                # whether to unpack the addon
        """

        details = {'id': None, 'unpack': False, 'name': None, 'version': None}

        def get_namespace_id(doc, url):
            attributes = doc.documentElement.attributes
            namespace = ""
            for i in range(attributes.length):
                if attributes.item(i).value == url:
                    if ":" in attributes.item(i).name:
                        # If the namespace is not the default one remove 'xlmns:'
                        namespace = attributes.item(i).name.split(':')[1] + ":"
                        break
            return namespace

        def get_text(element):
            """Retrieve the text value of a given node"""
            rc = []
            for node in element.childNodes:
                if node.nodeType == node.TEXT_NODE:
                    rc.append(node.data)
            return ''.join(rc).strip()

        if not os.path.exists(addon_path):
            raise IOError('Add-on path does not exist: %s' % addon_path)

        try:
            if zipfile.is_zipfile(addon_path):
                # Bug 944361 - We cannot use 'with' together with zipFile because
                # it will cause an exception thrown in Python 2.6.
                try:
                    compressed_file = zipfile.ZipFile(addon_path, 'r')
                    manifest = compressed_file.read('install.rdf')
                finally:
                    compressed_file.close()
            elif os.path.isdir(addon_path):
                with open(os.path.join(addon_path, 'install.rdf'), 'r') as f:
                    manifest = f.read()
            else:
                raise IOError(
                    'Add-on path is neither an XPI nor a directory: %s' %
                    addon_path)
        except (IOError, KeyError) as e:
            raise AddonFormatError(str(e), sys.exc_info()[2])

        try:
            doc = minidom.parseString(manifest)

            # Get the namespaces abbreviations
            em = get_namespace_id(doc, 'http://www.mozilla.org/2004/em-rdf#')
            rdf = get_namespace_id(
                doc, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')

            description = doc.getElementsByTagName(rdf + 'Description').item(0)
            for node in description.childNodes:
                # Remove the namespace prefix from the tag for comparison
                entry = node.nodeName.replace(em, "")
                if entry in details.keys():
                    details.update({entry: get_text(node)})
            if details.get('id') is None:
                for i in range(description.attributes.length):
                    attribute = description.attributes.item(i)
                    if attribute.name == em + 'id':
                        details.update({'id': attribute.value})
        except Exception as e:
            raise AddonFormatError(str(e), sys.exc_info()[2])

        # turn unpack into a true/false value
        if isinstance(details['unpack'], str):
            details['unpack'] = details['unpack'].lower() == 'true'

        # If no ID is set, the add-on is invalid
        if details.get('id') is None:
            raise AddonFormatError('Add-on id could not be found.')

        return details
Example #45
0
    def __init__(self,file=''):
        """Probe a ROM package *file* and record its type on ``self``.

        *file* may be a fastboot (line-flash) package or a recovery-flashable
        zip.  Observed ``self.flag`` values set here: 1 invalid path,
        2 unsupported format, 3 needs external unpacking (payload.bin /
        MIUI tar / LG kdz-dz), 4 plain flashable zip, 5 functional flasher
        zip (Magisk/gapps/kernel).  Per-format booleans such as ``abflag``,
        ``ozip``, ``samsumgodinfile``, ``miuitar``, ``brotil``, ``newdat``,
        ``olnyimg`` and ``onlyfolder`` are also set.  Progress and any
        detected ROM fingerprint are printed to stdout (Chinese messages).
        """
        print('正在处理ROM信息...该过程需要1s-2min分钟不等')
        '''获取ROM信息 输入的文件可以是线刷包,也可以是卡刷包'''
        # NOTE(review): getsize() raises OSError for a missing path, so the
        # exists() check below can only catch the empty-string default.
        size=os.path.getsize(file)
        if os.path.exists(file)==False or size==0:
            print('E:请选择一个正确的文件!!!')
            self.flag=1  # invalid file path
            return
        self.file=file
        # A/B (system-as-root) payload handed to us directly by name.
        if file.find('payload.bin')>-1:
            self.abflag=True
            self.flag=3
            print('发现A/B(System As Root)更新文件(安卓10动态分区)')
            return
        # OPPO .ozip: confirm by magic bytes before treating as encrypted.
        if file.find('.ozip') > -1 and zipfile.is_zipfile(file)==True:
            with open(file,'rb') as fr:
                magic=fr.read(12)
                if magic==b"OPPOENCRYPT!" or magic[:2]==b"PK":
                    self.ozip=True
                    print('发现OPPO OZIP! 需要解密后才能读取ROM信息')
                fr.close()
                return
            # NOTE(review): unreachable — the return above always fires
            # inside the with-block.
            print('这个ROM可能不是OPPO OZIP?!')
        # Samsung Odin package (.tar.md5): parse type/version fields out of
        # the member name reported by get_saminfo().
        if file.find('.tar.md5') > -1 and tarfile.is_tarfile(file):
            self.samsumgodinfile=True
            a=str(get_saminfo(file))
            if a:
                a=a.replace("b'",'')
                a=a.replace(".tar\\n'",'')
                li=a.split(' ')
                a=li[2].split('_')
                print('ROM类型:'+a[0]+'\n版本:'+a[1]+'\n发行标志:'+a[5]+'\n固件类型:offical')
                print('发现三星odin线刷文件!')
                print('W:只有ROM类型为AP才支持解包出系统镜像')
                return
            print('Maybe:发现三星odin线刷文件?!')
        # MIUI fastboot tarball (.tgz): look for system/super images inside.
        if file.find('.tgz') > -1 and tarfile.is_tarfile(file):
            tar = tarfile.open(file, "r:gz")
            l=tar.getnames()
            for a in l:
                if a.find('system.img')>-1 :
                    self.flag=3
                    self.miuitar=True
                    print('Maybe:MIUI 线刷包找到')
                    return
                elif a.find('super.img')>-1:
                    self.flag=3
                    self.miuitar=True
                    self.super=True
                    print('Maybe:MIUI 线刷包找到')
                    return
        if zipfile.is_zipfile(file)==False:
            print('E:不支持的格式!!!!')
            self.flag=2
            return
        self.file=file
        z=zipfile.ZipFile(file)
        self.l=z.namelist()
        self.flag=4
        # NOTE(review): z is closed on the successful return paths below but
        # leaks on some fall-through paths.
        if 'system.img' in self.l:
            self.olnyimg=True

        if 'system/framework/framework.jar' in self.l:
            self.onlyfolder=True

        if 'system.new.dat.br' in self.l and 'system.transfer.list' in self.l:
            self.brotil=True

        if 'system.new.dat' in self.l and 'system.transfer.list' in self.l:
            self.newdat=True

        # The first line of system.transfer.list encodes the sdat format
        # version, which maps onto the Android release that built the zip.
        if 'system.transfer.list' in self.l:
            z.extract('system.transfer.list')
            f = open('system.transfer.list', 'r')
            v = int(f.readline())
            f.close()
            if v == 1:
                print('Android Lollipop 5.0 检测到!\n')
                self.androidVersion='Lollipop 5.0 API 21'
            elif v == 2:
                print('Android Lollipop 5.1 检测到!\n')
                self.androidVersion='Lollipop 5.1 API 22'
            elif v == 3:
                print('Android Marshmallow 6.x 检测到!\n')
                self.androidVersion='Marshmallow 6.x API 23'
            elif v == 4:
                print('Android Nougat 7.x / Oreo 8.x 或更高版本检测到!\n')
                self.androidVersion='Nougat 7.x or higher API 24+'
            os.remove('system.transfer.list')
        if 'payload.bin' in self.l:
            self.abflag=True
            self.flag=4
            print('发现A/B(System As Root)更新文件(安卓10动态分区)')

        # First choice for the fingerprint: the OTA metadata file.
        if 'META-INF/com/android/metadata' in self.l:
            z.extract('META-INF/com/android/metadata')
            f=open('META-INF/com/android/metadata', encoding='UTF-8')
            l=[]
            for i in f:l.append(i.strip())
            f.close()
            os.remove('META-INF/com/android/metadata')
            for i in l:
                x=i.split('=')
                if x[0]=='post-build':
                    text=x[1]
                    # Fingerprint format: brand/device/version/android/firmware/tags
                    self.info=text.split('/')
                    if len(self.info)==6:
                        print('ROM制造商:'+self.info[0]+'\n手机代号:'+self.info[1]+'\n版本:'+self.info[2]+'\nAndroid开发版本:'+self.info[3]+'\n固件版本:'+self.info[4]+'\n发行标志:'+self.info[5])
                        z.close()
                        return
                    else:
                        print('您的设备指纹可能已经被修改,无法获取ROM信息!!!')
        else:
            print('metadata文件不存在?!')
            z.close()
            

        # Fall back to any build.prop for the Android fingerprint.
        for names in self.l:
            if names.find('build.prop') > -1:
                try:z.extract(names)
                except:pass
                if os.path.exists(names):
                    f=open(names, encoding='UTF-8')
                    l=[]
                    for i in f:l.append(i.strip())
                    f.close()
                    os.remove(names)
                    for i in l:
                        x=i.split('=')
                        if x[0]=='ro.build.fingerprint':  # Android fingerprint key
                            text=x[1]
                            self.info=text.split('/')
                            if len(self.info)==6:
                                print('ROM制造商:'+self.info[0]+'\n手机代号:'+self.info[1]+'\n版本:'+self.info[2]+'\nAndroid开发版本:'+self.info[3]+'\n固件版本:'+self.info[4]+'\n发行标志:'+self.info[5])
                                z.close()
                                return
                            else:
                                print('您的设备指纹可能已经被修改,无法获取ROM信息!!!')

        # Last resort: parse the updater-script for a Target fingerprint, and
        # detect "functional" flasher zips (Magisk/gapps/AnyKernel etc.).
        if 'META-INF/com/google/android/updater-script' in self.l:
            z.extract('META-INF/com/google/android/updater-script')
            f=open('META-INF/com/google/android/updater-script', encoding='UTF-8')
            l=[]
            for i in f:l.append(i.strip())
            f.close()
            os.remove('META-INF/com/google/android/updater-script')
            for i in l:
                if 'ui_print("Target:' in i:
                    i=i.replace('ui_print("Target:','')
                    i=i.replace('");','')
                    i=i.replace(' ','')
                    self.info=i.split('/')
                    if len(self.info)==6:
                        print('ROM制造商:'+self.info[0]+'\n手机代号:'+self.info[1]+'\n版本:'+self.info[2]+'\nAndroid开发版本:'+self.info[3]+'\n固件版本:'+self.info[4]+'\n发行标志:'+self.info[5])
                        z.close()
                        return
                if (i.find('update-binary') > -1 and i.find('ummy') > -1) or  i.find('#MAGISK') > -1:
                    self.flag=5
                    print('发现该压缩包为功能性卡刷包!(Magisk/oepngapps/ak2/ak3/etc.')
                    z.close()
                    return
            print('W:无法从updater-script获取ROM信息!!')
        # NOTE(review): dead code — an earlier `if zipfile.is_zipfile(file)==False`
        # already returned, so the LG .kdz/.dz branches below can never run.
        if zipfile.is_zipfile(file)==False:
            if file.find('.kdz') > -1:
                print('Maybe:发现LG .kdz文件!\n正在测试是否为 .kdz文件...')
                if lg_kd_kdz(file).islgkdzfile():
                    self.lgkdz=True
                    self.flag=3
                    self.type=3
                    print('发现LG .kdz文件!')
                    return
                else:
                    print('这个文件可能不是LG .kdz文件?')
                    self.flag=2
                    return
            if file.find('.dz') > -1:
                print('Maybe:发现LG .dz文件!\n正在测试是否为 .dz文件...')
                if lg_kd_kdz(file).islgdzfile():
                    self.lgkd=True
                    self.flag=3
                    self.type=3
                    print('发现LG .dz文件!')
                else:
                    print('这个文件可能不是LG .dz文件?')
                    self.flag=2
                    return
            print('无效不可读格式?')
            self.flag=2
            return
        z.close()
Example #46
0
 def create_mod(self):
     """Create a new mod from the submitted multipart form.

     Expects ``name``, ``game``, ``short-description``, ``version``,
     ``game-version``, ``license``, an optional ``ckan`` flag in the form
     data, plus a ``zipball`` file upload.  Returns a dict with the new
     mod's url/id/name on success, or an ``{'error': True, 'reason': ...}``
     dict paired with an HTTP status (401 unauthenticated, 403 non-public
     profile, 400 validation failure).
     """
     if not current_user:
         return {'error': True, 'reason': 'You are not logged in.'}, 401
     if not current_user.public:
         return {
             'error': True,
             'reason': 'Only users with public profiles may create mods.'
         }, 403
     name = request.form.get('name')
     game = request.form.get('game')
     short_description = request.form.get('short-description')
     version = request.form.get('version')
     game_version = request.form.get('game-version')
     mod_license = request.form.get('license')  # renamed: don't shadow builtin
     ckan = request.form.get('ckan')
     zipball = request.files.get('zipball')
     # Validate: every field is mandatory.
     if not (name and short_description and version and game
             and game_version and mod_license and zipball):
         return {'error': True, 'reason': 'All fields are required.'}, 400
     # Validation, continued: enforce column-length limits up front.
     if (len(name) > 100 or len(short_description) > 1000
             or len(mod_license) > 128):
         return {
             'error': True,
             'reason': 'Fields exceed maximum permissible length.'
         }, 400
     # Normalise the ckan checkbox to a boolean (absent -> False).
     if ckan is None:
         ckan = False
     else:
         ckan = ckan.lower() in ("true", "yes", "on")
     test_game = Game.query.filter(Game.id == game).first()
     if not test_game:
         return {'error': True, 'reason': 'Game does not exist.'}, 400
     test_gameversion = GameVersion.query.filter(
         GameVersion.game_id == test_game.id).filter(
             GameVersion.friendly_version == game_version).first()
     if not test_gameversion:
         return {
             'error': True,
             'reason': 'Game version does not exist.'
         }, 400
     game_version_id = test_gameversion.id
     mod = Mod()
     mod.user = current_user
     mod.name = name
     mod.game_id = game
     mod.short_description = short_description
     mod.description = self.default_description
     mod.ckan = ckan
     mod.license = mod_license
     # Save zipball as storage/<user>_<uid>/<mod name>/<name>-<version>.zip
     filename = secure_filename(name) + '-' + secure_filename(
         version) + '.zip'
     base_path = os.path.join(
         secure_filename(current_user.username) + '_' +
         str(current_user.id), secure_filename(name))
     full_path = os.path.join(self.cfg['storage'], base_path)
     if not os.path.exists(full_path):
         os.makedirs(full_path)
     path = os.path.join(full_path, filename)
     if os.path.isfile(path):
         # We already have this version.
         # Remove it: the only reason it could exist on creation is an error.
         os.remove(path)
     zipball.save(path)
     if not zipfile.is_zipfile(path):
         os.remove(path)
         return {
             'error': True,
             'reason': 'This is not a valid zip file.'
         }, 400
     new_version = ModVersion(secure_filename(version), game_version_id,
                              os.path.join(base_path, filename))
     mod.versions.append(new_version)
     db.add(new_version)
     # Save database entry
     db.add(mod)
     db.commit()
     mod.default_version_id = new_version.id
     db.commit()
     # Reuse the Game row fetched above instead of re-querying it.
     session['game'] = test_game.id
     notify_ckan.delay(mod.id, 'create')
     return {
         'url': url_for("mods.mod", id=mod.id, mod_name=mod.name),
         "id": mod.id,
         "name": mod.name
     }
Example #47
0
 def jarOkay(path):
     """Return True when *path* is a readable ZIP (and therefore JAR) archive."""
     return bool(zipfile.is_zipfile(path))
Example #48
0
                for k, v in myconf.items():
                    if k == "order" and args.order is None:
                        args.order = v
                    if k == "reverse" and args.reverse is False:
                        args.reverse = v
                    if k == "repeat" and args.repeat is None:
                        args.repeat = int(v)
            except Exception as e:
                pass

            if args.order is None:
                args.order = "numeric"
            if args.repeat is None:
                args.repeat = 1

            if zipfile.is_zipfile(str(fp)):
                zf = zipfile.ZipFile(str(fp))
                for i in zf.infolist():
                    if not i.is_dir():
                        with zf.open(i.filename) as mf:
                            bin = mf.read()
                        arcname = i.filename.replace('/', "_")
                        p = Path(td) / Path(arcname)
                        print(p, len(bin))
                        p.write_bytes(bin)
                files = [p for p in Path(str(td)).glob(args.handle)]

            if fp.is_dir():
                for seq, p in enumerate(fp.glob(args.handle)):
                    if seq >= args.maxpage:
                        break
Example #49
0
def extract_archive(pkg_name, name, path):
    """Extract the archive at *path* into the contrib build dir for *pkg_name*.

    Dispatches to extract_tar / extract_zip by sniffing the archive format;
    unrecognised formats fall through (returning None).
    """
    dest = contrib_build_dir + '\\' + pkg_name
    if tarfile.is_tarfile(path):
        return extract_tar(dest, name, path)
    if zipfile.is_zipfile(path):
        return extract_zip(dest, name, path)
Example #50
0
def is_zip_file(content):
    """Return True when the byte string *content* holds a ZIP archive."""
    return zipfile.is_zipfile(BytesIO(content))
Example #51
0
    def __init__(self, dPath=None):
        """Open (or create) a content package rooted at *dPath*.

        *dPath* selects the behaviour: ``None`` creates a fresh temporary
        package directory; an existing directory is used as-is; an existing
        file is expanded as a zip archive, or otherwise treated as the path
        of an ``imsmanifest.xml`` file; a non-existent path is created as a
        directory.  An imsmanifest.xml document is then read or created.
        If construction fails at any point the partially built package is
        closed before the exception propagates.
        """
        self.tempDir = False
        # errorFlag stays True until the very end so the finally clause can
        # tell whether construction completed and clean up if it did not.
        errorFlag = True
        try:
            if dPath is None:
                #: the :py:class:`~pyslet.vfs.VirtualFilePath` to the package's directory
                self.dPath = vfs.defaultFS.mkdtemp('.d', 'imscpv1p2-')
                self.tempDir = True
                self.packageName = 'imscp'
            else:
                # Coerce plain strings to the virtual-file-path type.
                if type(dPath) in StringTypes:
                    dPath = vfs.defaultFS(dPath)
                self.dPath = dPath.abspath()
                head, tail = self.dPath.split()
                self.packageName = tail
                if self.dPath.isdir():
                    # existing directory
                    pass
                elif self.dPath.exists():
                    # is this a zip archive?
                    f = self.dPath.open("rb")
                    try:
                        if zipfile.is_zipfile(f):
                            # Use the stem (without .zip) as the package name.
                            name, ext = tail.splitext()
                            if ext.lower() == ".zip":
                                self.packageName = name
                            self.ExpandZip(f)
                        else:
                            # anything else must be a manifest file
                            self.dPath = head
                            mPath = tail
                            head, tail = self.dPath.split()
                            if str(mPath.normcase()) != 'imsmanifest.xml':
                                raise CPManifestError(
                                    "%s must be named imsmanifest.xml" %
                                    str(mPath))
                            self.packageName = str(tail)
                    finally:
                        f.close()
                else:
                    self.dPath.mkdir()
            # Debug aid: flag any dPath that escaped the vfs abstraction.
            if not isinstance(self.dPath, vfs.VirtualFilePath):
                import traceback
                traceback.print_stack()
            mPath = self.dPath.join('imsmanifest.xml')
            if mPath.exists():
                # Existing manifest: parse it and sanity-check the root.
                self.manifest = self.ManifestDocumentClass(
                    baseURI=str(uri.URI.from_virtual_path(mPath)))
                """The :py:class:`ManifestDocument` object representing the imsmanifest.xml file.
				
				The file is read (or created) on construction."""
                self.manifest.read()
                if not isinstance(self.manifest.root, Manifest):
                    raise CPManifestError(
                        "%s not a manifest file, found %s::%s " %
                        (mPath, self.manifest.root.ns,
                         self.manifest.root.xmlname))
            else:
                # No manifest yet: create a minimal IMS CP 1.2 manifest.
                self.manifest = self.ManifestDocumentClass(
                    root=Manifest,
                    baseURI=str(uri.URI.from_virtual_path(mPath)))
                self.manifest.root.set_id(
                    self.manifest.get_unique_id('manifest'))
                md = self.manifest.root.add_child(
                    self.manifest.root.MetadataClass)
                md.add_child(md.SchemaClass).set_value("IMS Content")
                md.add_child(md.SchemaVersionClass).set_value("1.2")
                self.manifest.create()
            self.SetIgnoreFiles(IGNOREFILES_RE)
            self.fileTable = {}
            """The fileTable is a dictionary that maps package relative file
			paths to the :py:class:`File` objects that represent them in the
			manifest.
			
			It is possible for a file to be referenced multiple times (although
			dependencies were designed to take care of most cases it is still
			possible for two resources to share a physical file, or even for a
			resource to contain multiple references to the same file.)  Therefore,
			the dictionary values are lists of :py:class:`File` objects.
	
			If a file path maps to an empty list then a file exists in the package
			which is not referenced by any resource.  In some packages it is commone
			for auxiliary files such as supporting schemas to be included in
			packages without a corresponding :py:class:`File` object so an empty
			list does not indicate that the file can be removed safely.  These files
			are still included when packaging the content package for
			interchange.
			
			Finally, if a file referred to by a :py:class:`File` object in the
			manifest is missing an entry is still created in the fileTable.  You
			can walk the keys of the fileTable testing if each file exists to
			determine if some expected files are missing from the package.
			
			The keys in fileTable are VirtualFilePath instances.  To convert a
			string to an appropriate instance use the :py:meth:`FilePath` method."""
            self.RebuildFileTable()
            errorFlag = False
        finally:
            if errorFlag:
                self.Close()
Example #52
0
 def __init__(self, archive_path: str):
     """Wrap *archive_path*, which must be an existing ZIP archive.

     NOTE(review): ``assert`` is stripped under ``python -O``, so the
     zip-format check silently disappears in optimised runs — confirm
     that is acceptable for this class.
     """
     assert zipfile.is_zipfile(archive_path)
     super().__init__(archive_path, depth=0)
Example #53
0
 def _has_distribution(self):
     """Return True when the running zip archive contains a matching file.

     Falls through (implicitly returning None) when ``sys.argv[0]`` is not
     a zip archive or no member name matches ``self._match_file``.
     """
     if is_zipfile(sys.argv[0]):
         archive = ZipFileWithPermissions(sys.argv[0])
         for member in archive.namelist():
             if member and self._match_file(member, ""):
                 return True
Example #54
0
def read_plugin(pathzipfile):
    ''' process uploaded plugin: import its botsindex module, write the
    index to the bots database, then unpack the remaining files to disk.
    Returns True when existing files were overwritten (for a GUI warning);
    raises botslib.PluginError on any failure. '''
    #test if valid zipfile
    if not zipfile.is_zipfile(pathzipfile):
        raise botslib.PluginError(_(u'Plugin is not a valid file.'))

    #read index file
    try:
        myzipimport = zipimport.zipimporter(pathzipfile)
        importedbotsindex = myzipimport.load_module('botsindex')
        pluglist = importedbotsindex.plugins[:]
        # Drop the module again so a later plugin upload re-imports fresh.
        if 'botsindex' in sys.modules:
            del sys.modules['botsindex']
    except:  # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit
        txt = botslib.txtexc()
        raise botslib.PluginError(
            _(u'Error in plugin. Nothing is written. Error:\n%(txt)s'),
            {'txt': txt})
    else:
        botsglobal.logger.info(_(u'Plugin is OK.'))
        botsglobal.logger.info(_(u'Start writing to database.'))

    #write content of index file to the bots database
    try:
        read_index2database(pluglist)
    except:
        txt = botslib.txtexc()
        raise botslib.PluginError(
            _(u'Error writing plugin to database. Nothing is written. Error:\n%(txt)s'
              ), {'txt': txt})
    else:
        botsglobal.logger.info(_(u'Writing to database is OK.'))

    #write files to the file system.
    botsglobal.logger.info(_(u'Start writing to files'))
    try:
        warnrenamed = False  #to report in GUI files have been overwritten.
        myzip = zipfile.ZipFile(pathzipfile, mode="r")
        orgtargetpath = botsglobal.ini.get('directories', 'botspath')
        # Strip a trailing path separator, but keep a bare drive root intact.
        if (orgtargetpath[-1:] in (os.path.sep, os.path.altsep)
                and len(os.path.splitdrive(orgtargetpath)[1]) > 1):
            orgtargetpath = orgtargetpath[:-1]
        for zipfileobject in myzip.infolist():
            # Skip bookkeeping files and compiled python byte-code.
            if zipfileobject.filename not in [
                    'botsindex.py', 'README', 'botssys/sqlitedb/botsdb',
                    'config/bots.ini'
            ] and os.path.splitext(
                    zipfileobject.filename)[1] not in ['.pyo', '.pyc']:
                #~ botsglobal.logger.info(u'Filename in zip "%s".',zipfileobject.filename)
                if zipfileobject.filename[0] == '/':
                    targetpath = zipfileobject.filename[1:]
                else:
                    targetpath = zipfileobject.filename
                #convert for correct environment: replace botssys, config, usersys in filenames
                if targetpath.startswith('usersys'):
                    targetpath = targetpath.replace(
                        'usersys',
                        botsglobal.ini.get('directories', 'usersysabs'), 1)
                elif targetpath.startswith('botssys'):
                    targetpath = targetpath.replace(
                        'botssys', botsglobal.ini.get('directories',
                                                      'botssys'), 1)
                elif targetpath.startswith('config'):
                    targetpath = targetpath.replace(
                        'config', botsglobal.ini.get('directories', 'config'),
                        1)
                targetpath = botslib.join(orgtargetpath, targetpath)
                #targetpath is OK now.
                botsglobal.logger.info(
                    _(u'    Start writing file: "%(targetpath)s".'),
                    {'targetpath': targetpath})

                if botslib.dirshouldbethere(os.path.dirname(targetpath)):
                    botsglobal.logger.info(
                        _(u'        Create directory "%(directory)s".'),
                        {'directory': os.path.dirname(targetpath)})
                if zipfileobject.filename[
                        -1] == '/':  #check if this is a dir; if so continue
                    continue
                if os.path.isfile(targetpath):  #check if file already exists
                    try:  #this ***sometimes*** fails. (python25, for static/help/home.html...only there...)
                        warnrenamed = True
                    except:
                        pass
                source = myzip.read(zipfileobject.filename)
                target = open(targetpath, "wb")
                target.write(source)
                target.close()
                botsglobal.logger.info(
                    _(u'        File written: "%(targetpath)s".'),
                    {'targetpath': targetpath})
    except:
        txt = botslib.txtexc()
        myzip.close()
        raise botslib.PluginError(
            _(u'Error writing files to system. Nothing is written to database. Error:\n%(txt)s'
              ), {'txt': txt})
    else:
        myzip.close()
        botsglobal.logger.info(_(u'Writing files to filesystem is OK.'))
        return warnrenamed
Example #55
0
    def clean(self):
        """Validate an uploaded layer and return the cleaned data.

        Accepts either a zip containing exactly one shapefile, or a base
        file plus separate .dbf/.shx/.prj/.xml sidecar uploads.  Raises
        forms.ValidationError when the upload is not a single consistent
        Shapefile or GeoTiff set.
        """
        cleaned = super(LayerUploadForm, self).clean()
        dbf_file = shx_file = prj_file = xml_file = None
        base_name = base_ext = None
        if zipfile.is_zipfile(cleaned["base_file"]):
            # Zip upload: gather the component filenames from the archive.
            filenames = zipfile.ZipFile(cleaned["base_file"]).namelist()
            for filename in filenames:
                name, ext = os.path.splitext(filename)
                if ext.lower() == '.shp':
                    if base_name is not None:
                        raise forms.ValidationError(
                            "Only one shapefile per zip is allowed")
                    base_name = name
                    base_ext = ext
                elif ext.lower() == '.dbf':
                    dbf_file = filename
                elif ext.lower() == '.shx':
                    shx_file = filename
                elif ext.lower() == '.prj':
                    prj_file = filename
                elif ext.lower() == '.xml':
                    xml_file = filename
            if base_name is None:
                raise forms.ValidationError(
                    "Zip files can only contain shapefile.")
        else:
            # Loose upload: component names come from the individual fields.
            base_name, base_ext = os.path.splitext(cleaned["base_file"].name)
            if cleaned["dbf_file"] is not None:
                dbf_file = cleaned["dbf_file"].name
            if cleaned["shx_file"] is not None:
                shx_file = cleaned["shx_file"].name
            if cleaned["prj_file"] is not None:
                prj_file = cleaned["prj_file"].name
            if cleaned["xml_file"] is not None:
                xml_file = cleaned["xml_file"].name

        if base_ext.lower() not in (".shp", ".tif", ".tiff", ".geotif",
                                    ".geotiff"):
            raise forms.ValidationError(
                "Only Shapefiles and GeoTiffs are supported. You uploaded a %s file"
                % base_ext)
        if base_ext.lower() == ".shp":
            # Shapefiles need .dbf and .shx, and all components must share
            # the same basename.
            if dbf_file is None or shx_file is None:
                raise forms.ValidationError(
                    "When uploading Shapefiles, .shx and .dbf files are also required."
                )
            dbf_name, __ = os.path.splitext(dbf_file)
            shx_name, __ = os.path.splitext(shx_file)
            if dbf_name != base_name or shx_name != base_name:
                raise forms.ValidationError(
                    "It looks like you're uploading "
                    "components from different Shapefiles. Please "
                    "double-check your file selections.")
            if prj_file is not None:
                if os.path.splitext(prj_file)[0] != base_name:
                    raise forms.ValidationError(
                        "It looks like you're "
                        "uploading components from different Shapefiles. "
                        "Please double-check your file selections.")
            if xml_file is not None:
                if os.path.splitext(xml_file)[0] != base_name:
                    if xml_file.find('.shp') != -1:
                        # force rename of file so that file.shp.xml doesn't
                        # overwrite as file.shp
                        if cleaned.get("xml_file"):
                            cleaned["xml_file"].name = '%s.xml' % base_name

        return cleaned
Example #56
0
 def _zip(self, path):
     '''return list of file names in zip'''
     if not zipfile.is_zipfile(path):
         return None
     z = zipfile.ZipFile(path)
     return z.namelist()
Example #57
0
    cls = AbstractMacProfile
    cls.__name__ = 'Mac' + profilename.replace('.', '_') + arch

    return cls

################################
# Track down the zip files
# Push them through the factory
# Check whether ProfileModifications will work

new_classes = []

# Walk every plugins package directory and build a profile class from each
# zipped profile found.  (Fixed: the original reused the name `path` for
# both the package dir and the walk dir, shadowing the outer loop variable,
# and computed os.path.join twice per file.)
for plugin_dir in set(plugins.__path__):
    for dirpath, _, files in os.walk(plugin_dir):
        for fn in files:
            zip_path = os.path.join(dirpath, fn)
            if zipfile.is_zipfile(zip_path):
                new_classes.append(MacProfileFactory(zipfile.ZipFile(zip_path)))

class MacOverlay(obj.ProfileModification):
    """Merge the shared ``mac_overlay`` into every mac profile."""
    # Apply only to mac profiles, before the basic object classes load.
    conditions = {'os': lambda x: x == 'mac'}
    before = ['BasicObjectClasses']

    def modification(self, profile):
        profile.merge_overlay(mac_overlay)

class MacObjectClasses(obj.ProfileModification):

    conditions = {'os': lambda x: x == 'mac'}
    before = ['BasicObjectClasses']

    def modification(self, profile):
Example #58
0
def _is_legacy_zip_format(filename):
    if zipfile.is_zipfile(filename):
        infolist = zipfile.ZipFile(filename).infolist()
        return len(infolist) == 1 and not infolist[0].is_dir()
    return False
Example #59
0
 def unzip(self):  # unpack a flashable ROM zip into system.img / directories
     """Unpack a flashable ("卡刷") ROM zip based on self.rominfo flags.

     Dispatches on the detected package layout and, for every terminal
     branch, prints a status message and exits the process via sys.exit.
     Side effects: extracts files into the current directory (or
     'flashable'), and may rebind self.file to an extracted artifact.
     """
     # flag==1 marks a package format that was not recognised — abort.
     if self.rominfo.flag==1:
         print('无效格式!!!')
         sys.exit(1)
     # Interactive confirmation unless running in quiet mode.
     if quiet==0:
         if input('是否解包卡刷包zip文件?y/n>>>')=='n':
             print('取消.')
             sys.exit(0)           
     # flag==5: "functional" flashable package — extract everything
     # into the 'flashable' directory and stop.
     if self.rominfo.flag==5:
         z=zipfile.ZipFile(self.file)
         z.extractall(path='flashable')
         z.close()
         print('功能性卡刷包解包完成.输出目录:flashable')
         print('Done.')
         sys.exit(0)
     # A/B (seamless) OTA package: pull out payload.bin and hand it to
     # abunpack() (presumably the payload extractor — confirm).
     if self.rominfo.abflag==True and zipfile.is_zipfile(self.file)==True:          
         z=zipfile.ZipFile(self.file)
         z.extract('payload.bin')
         z.close()
         self.file='payload.bin'
         self.abunpack()
         print('Done.')
         sys.exit(0)
     # Package ships the system partition as a plain folder: extract only
     # members whose names start with 'system'.
     if self.rominfo.onlyfolder==True:
         z=zipfile.ZipFile(self.file)
         for name in z.namelist() :
             if name.find('system')==0:
                 z.extract(name)
         z.close()
         print('Done.')
         sys.exit(0)
     # Package contains a raw system.img; optionally unpack the image too.
     if self.rominfo.olnyimg==True:
         z=zipfile.ZipFile(self.file)
         z.extract('system.img')
         z.close()
         if self.unpacktodir==1:
             self.file='system.img'
             self.imgunpack()
         print('Done.')
         sys.exit(0)
     # Brotli-compressed sparse data: decompress system.new.dat.br, then
     # rebuild system.img from the transfer list.
     if self.rominfo.brotil==True:
         z=zipfile.ZipFile(self.file)
         z.extract('system.transfer.list')
         z.extract('system.new.dat.br')
         z.close()
         self.brotli()
         self.newdatunpack()
         if self.unpacktodir==1:
             self.file='system.img'
             self.imgunpack()
         print('Done.')
         sys.exit(0)
     # Plain (uncompressed) system.new.dat: rebuild system.img directly.
     if self.rominfo.newdat==True:
         z=zipfile.ZipFile(self.file)
         z.extract('system.transfer.list')
         z.extract('system.new.dat')
         z.close()
         self.newdatunpack()
         if self.unpacktodir==1:
             self.file='system.img'
             self.imgunpack()
         print('Done.')
         sys.exit(0)
     # No recognised branch consumed the package; report and return.
     if self.unpacktodir==0:
         print('Done! 输出的到的目录: /')
         return
     else:pass
Example #60
0
 def update_mod(self, mod_id):
     """Handle an upload of a new version for mod *mod_id*.

     Validates the requester's permissions and the form fields, stores the
     uploaded zipball on disk, records a new ModVersion row, makes it the
     mod's default version, and notifies followers/CKAN. Returns a
     (json-dict, status) pair on error, or a dict with the mod URL and new
     version id on success.
     """
     # NOTE(review): current_user is typically a werkzeug LocalProxy, in
     # which case `== None` (delegating __eq__) is deliberate and
     # `is None` would never be true — confirm before "fixing".
     if current_user == None:
         return {'error': True, 'reason': 'You are not logged in.'}, 401
     mod = Mod.query.filter(Mod.id == mod_id).first()
     if not mod:
         return {'error': True, 'reason': 'Mod not found.'}, 404
     # A user may edit if they are an admin, the mod's owner, or an
     # accepted shared author.
     editable = False
     if current_user:
         if current_user.admin:
             editable = True
         if current_user.id == mod.user_id:
             editable = True
         if any([
                 u.accepted and u.user == current_user
                 for u in mod.shared_authors
         ]):
             editable = True
     if not editable:
         return {'error': True, 'reason': 'Not enought rights.'}, 401
     # Pull the submitted form fields; changelog and notify are optional.
     version = request.form.get('version')
     changelog = request.form.get('changelog')
     game_version = request.form.get('game-version')
     notify = request.form.get('notify-followers')
     zipball = request.files.get('zipball')
     if not version \
         or not game_version \
         or not zipball:
         # Client side validation means that they're just being pricks if they
         # get here, so we don't need to show them a pretty error reason
         # SMILIE: this doesn't account for "external" API use --> return a json error
         return {'error': True, 'reason': 'All fields are required.'}, 400
     # NOTE(review): this compares GameVersion.game_id to the Mod *class*
     # column rather than this mod's game_id — looks like it should be
     # `mod.game_id`; verify intended behavior before changing.
     test_gameversion = GameVersion.query.filter(
         GameVersion.game_id == Mod.game_id).filter(
             GameVersion.friendly_version == game_version).first()
     if not test_gameversion:
         return {
             'error': True,
             'reason': 'Game version does not exist.'
         }, 400
     game_version_id = test_gameversion.id
     # notify arrives as a string ("true"/"yes") or is absent entirely.
     if notify == None:
         notify = False
     else:
         notify = (notify.lower() == "true" or notify.lower() == "yes")
     # Build the on-disk location: <storage>/<user>_<id>/<mod>/<mod>-<ver>.zip
     filename = secure_filename(
         mod.name) + '-' + secure_filename(version) + '.zip'
     base_path = os.path.join(
         secure_filename(current_user.username) + '_' +
         str(current_user.id), secure_filename(mod.name))
     full_path = os.path.join(self.cfg['storage'], base_path)
     if not os.path.exists(full_path):
         os.makedirs(full_path)
     path = os.path.join(full_path, filename)
     # Reject duplicate version numbers before touching the filesystem.
     for v in mod.versions:
         if v.friendly_version == secure_filename(version):
             return {
                 'error':
                 True,
                 'reason':
                 'We already have this version. Did you mistype the version number?'
             }, 400
     # Replace any stale file at the target path, then persist the upload.
     if os.path.isfile(path):
         os.remove(path)
     zipball.save(path)
     # Validate after saving; delete the bad file so it doesn't linger.
     if not zipfile.is_zipfile(path):
         os.remove(path)
         return {
             'error': True,
             'reason': 'This is not a valid zip file.'
         }, 400
     # `version` is rebound from the form string to the ORM row here.
     version = ModVersion(secure_filename(version), game_version_id,
                          os.path.join(base_path, filename))
     version.changelog = changelog
     # Assign a sort index
     if len(mod.versions) == 0:
         version.sort_index = 0
     else:
         version.sort_index = max([v.sort_index for v in mod.versions]) + 1
     mod.versions.append(version)
     mod.updated = datetime.now()
     if notify:
         self.email.send_update_notification(mod, version, current_user)
     db.add(version)
     # First commit assigns version.id; second persists the default pointer.
     db.commit()
     mod.default_version_id = version.id
     db.commit()
     notify_ckan.delay(mod_id, 'update')
     return {
         'url': url_for("mods.mod", id=mod.id, mod_name=mod.name),
         "id": version.id
     }