Example #1
def l1b_metadata_before_noaa_15():
    try:
        os.stat('tmp/cache/n12gac10bit.l1b')
    except OSError:
        return 'skip'

    gdal.SetConfigOption('L1B_FETCH_METADATA', 'YES')
    gdal.SetConfigOption('L1B_METADATA_DIRECTORY', 'tmp')
    ds = gdal.Open('tmp/cache/n12gac10bit.l1b')
    gdal.SetConfigOption('L1B_FETCH_METADATA', None)
    gdal.SetConfigOption('L1B_METADATA_DIRECTORY', None)
    del ds

    f = open('tmp/n12gac10bit.l1b_metadata.csv', 'rb')
    l = f.readline().decode('ascii')
    if l != 'SCANLINE,NBLOCKYOFF,YEAR,DAY,MS_IN_DAY,FATAL_FLAG,TIME_ERROR,DATA_GAP,DATA_JITTER,INSUFFICIENT_DATA_FOR_CAL,NO_EARTH_LOCATION,DESCEND,P_N_STATUS,BIT_SYNC_STATUS,SYNC_ERROR,FRAME_SYNC_ERROR,FLYWHEELING,BIT_SLIPPAGE,C3_SBBC,C4_SBBC,C5_SBBC,TIP_PARITY_FRAME_1,TIP_PARITY_FRAME_2,TIP_PARITY_FRAME_3,TIP_PARITY_FRAME_4,TIP_PARITY_FRAME_5,SYNC_ERRORS,CAL_SLOPE_C1,CAL_INTERCEPT_C1,CAL_SLOPE_C2,CAL_INTERCEPT_C2,CAL_SLOPE_C3,CAL_INTERCEPT_C3,CAL_SLOPE_C4,CAL_INTERCEPT_C4,CAL_SLOPE_C5,CAL_INTERCEPT_C5,NUM_SOLZENANGLES_EARTHLOCPNTS\n':
        print(l)
        return 'fail'
    l = f.readline().decode('ascii')
    if l != '3387,0,1998,84,16966146,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102000,-4.130000,0.103000,-4.210000,-0.001677,1.667438,-0.157728,156.939636,-0.179833,179.775742,51\n':
        print(l)
        return 'fail'
    f.close()

    os.unlink('tmp/n12gac10bit.l1b_metadata.csv')

    return 'success'
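The set-then-reset pattern above leaks the config options if gdal.Open raises. A minimal sketch of the same calls wrapped in try/finally (the file name is a placeholder) keeps the reset unconditional:

gdal.SetConfigOption('L1B_FETCH_METADATA', 'YES')
gdal.SetConfigOption('L1B_METADATA_DIRECTORY', 'tmp')
try:
    ds = gdal.Open('example.l1b')  # placeholder path
finally:
    # always clear the options, even if Open() throws
    gdal.SetConfigOption('L1B_FETCH_METADATA', None)
    gdal.SetConfigOption('L1B_METADATA_DIRECTORY', None)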
Example #2
def ismount(path):
    """Test whether a path is a mount point"""
    try:
        s1 = os.stat(path)
        s2 = os.stat(os.path.join(path, '..'))
    except OSError as inst:
        raise CommandException(inst.errno, "os.stat")
    # standard mount-point test: a mount point sits on a different
    # device than its parent, or shares its parent's inode (root)
    return s1.st_dev != s2.st_dev or s1.st_ino == s2.st_ino
Example #3
 def can_retrieve_cache(self):
     if not getattr(self, "outputs", None):
         return None
     sig = self.signature()
     ssig = Utils.to_hex(self.uid()) + Utils.to_hex(sig)
     dname = os.path.join(self.generator.bld.cache_global, ssig)
     try:
         t1 = os.stat(dname).st_mtime
     except OSError:
         return None
     for node in self.outputs:
         orig = os.path.join(dname, node.name)
         try:
             shutil.copy2(orig, node.abspath())
             os.utime(orig, None)
         except (OSError, IOError):
             Logs.debug("task: failed retrieving file")
             return None
     try:
         t2 = os.stat(dname).st_mtime
     except OSError:
         return None
     if t1 != t2:
         return None
     for node in self.outputs:
         node.sig = sig
         if self.generator.bld.progress_bar < 1:
             self.generator.bld.to_log("restoring from cache %r\n" % node.abspath())
     self.cached = True
     return True
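The t1/t2 double-stat above is a cheap race check: if the cache directory's mtime changed while the outputs were being copied out, another process may have evicted or rewritten the entry, so the retrieval is discarded. A standalone sketch of the same idea (paths are placeholders):

import os
import shutil

def copy_if_stable(src_dir, name, dst):
    before = os.stat(src_dir).st_mtime
    shutil.copy2(os.path.join(src_dir, name), dst)
    # False means the directory changed mid-copy: treat as a cache miss
    return os.stat(src_dir).st_mtime == before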
Example #4
def l1b_clouds_after_noaa_15():
    try:
        os.stat('tmp/cache/n16gac10bit.l1b')
    except OSError:
        return 'skip'

    ds = gdal.Open('tmp/cache/n16gac10bit.l1b')
    md = ds.GetMetadata('SUBDATASETS')
    expected_md = {
      'SUBDATASET_2_NAME' : 'L1B_CLOUDS:"tmp/cache/n16gac10bit.l1b"',
      'SUBDATASET_2_DESC' : 'Clouds from AVHRR (CLAVR)'
    }
    for key in expected_md:
        if md[key] != expected_md[key]:
            print(md)
            return 'fail'
    ds = None

    ds = gdal.Open('L1B_CLOUDS:"tmp/cache/n16gac10bit.l1b"')
    cs = ds.GetRasterBand(1).Checksum()
    if cs != 0:
        gdaltest.post_reason('fail')
        print(cs)
        return 'fail'

    return 'success'
Example #5
def run_debugger(testname, pythonfile, pydb_opts='', args='',
                 outfile=None):
    global srcdir, builddir, pydir

    rightfile   = os.path.join(srcdir, 'data', "%s.right" % testname)

    os.environ['PYTHONPATH']=os.pathsep.join(sys.path)
    cmdfile     = os.path.join(srcdir, "%s.cmd"   % testname)
    outfile     = "%s.out" % testname
    outfile_opt = '--output=%s ' % outfile

    # print "builddir: %s, cmdfile: %s, outfile: %s, rightfile: %s" % \
    # (builddir, cmdfile, outfile, rightfile)

    if os.path.exists(outfile): os.unlink(outfile)

    cmd = "%s --command %s %s %s %s %s" % \
          (pydb_path, cmdfile, outfile_opt, pydb_opts, pythonfile, args)
    
    os.system(cmd)
    fromfile  = rightfile
    fromdate  = time.ctime(os.stat(fromfile).st_mtime)
    fromlines = open(fromfile).readlines()  # mode 'U' was removed in Python 3.11
    tofile    = outfile
    todate    = time.ctime(os.stat(tofile).st_mtime)
    tolines   = open(tofile).readlines()
    diff = list(difflib.unified_diff(fromlines, tolines, fromfile,
                                     tofile, fromdate, todate))
    if len(diff) == 0:
        os.unlink(outfile)
    for line in diff:
        print(line, end='')
    return len(diff) == 0
Example #6
    def populate_tree(self, tree, node):
        if tree.set(node, "type") != "directory":
            return

        path = tree.set(node, "path")
        tree.delete(*tree.get_children(node))

        parent = tree.parent(node)
        for p in sorted(os.listdir(path)):
            ptype = None
            p = os.path.join(path, p).replace("\\", "/")
            if os.path.isdir(p):
                ptype = "directory"
            elif os.path.isfile(p):
                ptype = "file"

            fname = os.path.split(p)[1]
            id = tree.insert(node, "end", text=fname, values=[p, ptype])

            if ptype == "directory":
                if fname not in (".", ".."):
                    tree.insert(id, 0, text="dummy")
                    tree.item(id, text=fname)
            elif ptype == "file":
                info = os.stat(p)           # one stat call for both fields
                size = info.st_size * 1e-3
                date = info.st_mtime        # fetched but not displayed here
                tree.set(id, "size", "%d Kb" % size)
Example #7
    def _stage_final_image(self):
        try:
            makedirs(self.__ensure_isodir() + "/LiveOS")

            self._resparse()

            if not self.skip_minimize:
                create_image_minimizer(self.__isodir + "/LiveOS/osmin.img", self._image, self.compress_type)

            if self.skip_compression:
                shutil.move(self._image, self.__isodir + "/LiveOS/ext3fs.img")
                if os.stat(self.__isodir + "/LiveOS/ext3fs.img").st_size >= 4*1024*1024*1024:
                    self._isofstype = "udf"
                    logging.warning("Switching to UDF due to size of LiveOS/ext3fs.img")
            else:
                makedirs(os.path.join(os.path.dirname(self._image), "LiveOS"))
                shutil.move(self._image,
                            os.path.join(os.path.dirname(self._image),
                                         "LiveOS", "ext3fs.img"))
                mksquashfs(os.path.dirname(self._image),
                           self.__isodir + "/LiveOS/squashfs.img",
                           self.compress_type)
                if os.stat(self.__isodir + "/LiveOS/squashfs.img").st_size >= 4*1024*1024*1024:
                    self._isofstype = "udf"
                    logging.warning("Switching to UDF due to size of LiveOS/squashfs.img")


            self.__create_iso(self.__isodir)
        finally:
            shutil.rmtree(self.__isodir, ignore_errors = True)
            self.__isodir = None
Example #8
def do_configtest(engine, alt_config):
    print('\n' + 79 * '-' + '\n')
    print('Config resolution order:')
    for path in config.get_config_resolution_order(alt_config):
        if os.path.exists(path):
            print(path)
        else:
            print(path + ' (not present)')
    print('\n' + 79 * '-' + '\n')
    print('Effective configuration:\n')
    print('# begin dosocs2 config\n')
    config.dump_to_file(sys.stdout)
    print('\n# end dosocs2 config')
    print('\n' + 79 * '-' + '\n')
    print('Testing specified scanner paths...')
    scanner_config_pattern = r'scanner_(.*?)_path'
    for key in sorted(config.config.keys()):
        result = re.match(scanner_config_pattern, key)
        if result:
            print(result.group(1) + ' ({})...'.format(config.config[key]), end='')
            try:
                os.stat(config.config[key])
            except EnvironmentError:
                print('does not exist or is inaccessible.')
            else:
                print('ok.')

    print('\n' + 79 * '-' + '\n')
    print('Testing database connection...', end='')
    sys.stdout.flush()
    with engine.begin() as conn:
        conn.execute('select 1;')
    print('ok.')
Example #9
	def recurse(self, *k, **kw):
		self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
		self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
		self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
		try:
			self.env.WAF = getattr(Context.g_module, 'waf')
		except AttributeError:
			try:
				os.stat('waf')
			except OSError:
				self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
			else:
				self.env.WAF = './waf'

		self.extract_groups_of_builds()
		self.setup_private_ssh_key()
		for k, v in self.vgroups.items():
			task = self(rule=rsync_and_ssh, always=True)
			task.env.login = self.variant_to_login(k)

			task.env.commands = []
			for opt, value in v.items():
				task.env.commands += value
			task.env.variant = task.env.commands[0].partition('_')[2]
			for opt, value in self.custom_options(k):
				task.env[opt] = value
		self.jobs = len(self.vgroups)
Example #10
File: build.py Project: CDees/dxr
def build_folder(tree, conn, folder, indexed_files, indexed_folders):
    """Build an HTML index file for a single folder."""
    # Create the subfolder if it doesn't exist:
    ensure_folder(os.path.join(tree.target_folder, folder))

    # Build the folder listing:
    # Name is either basename (or if that is "" name of tree)
    name = os.path.basename(folder) or tree.name

    # Generate list of folders and their mod dates:
    folders = [('folder',
                f,
                datetime.fromtimestamp(stat(os.path.join(tree.source_folder,
                                                         folder,
                                                         f)).st_mtime),
                # TODO: DRY with Flask route. Use url_for:
                _join_url(tree.name, 'source', folder, f))
               for f in indexed_folders]

    # Generate list of files:
    files = []
    for f in indexed_files:
        # Get file path on disk
        path = os.path.join(tree.source_folder, folder, f)
        file_info = stat(path)
        files.append((dxr.mime.icon(path),
                      f,
                      datetime.fromtimestamp(file_info.st_mtime),
                      file_info.st_size,
                      _join_url(tree.name, 'source', folder, f)))

    # Lay down the HTML:
    jinja_env = load_template_env(tree.config.temp_folder,
                                  tree.config.dxrroot)
    dst_path = os.path.join(tree.target_folder,
                            folder,
                            tree.config.directory_index)

    _fill_and_write_template(
        jinja_env,
        'folder.html',
        dst_path,
        {# Common template variables:
         'wwwroot': tree.config.wwwroot,
         'tree': tree.name,
         'tree_tuples': [(t.name,
                          browse_url(t.name, tree.config.wwwroot, folder),
                          t.description)
                         for t in tree.config.sorted_tree_order],
         'generated_date': tree.config.generated_date,
         'paths_and_names': linked_pathname(folder, tree.name),
         'filters': filter_menu_items(tree.config.filter_language),
         # Autofocus only at the root of each tree:
         'should_autofocus_query': folder == '',

         # Folder template variables:
         'name': name,
         'path': folder,
         'folders': folders,
         'files': files})
Example #11
def func(args):
    # Get modification times
    fromdate = time.ctime(os.stat(args.FILE1).st_mtime)
    todate = time.ctime(os.stat(args.FILE2).st_mtime)

    # Open fromfile
    try:
        with open(args.FILE1) as fd:  # 'U' mode was removed in Python 3.11
            fromlines = fd.readlines()
    except IOError:
        print("Error opening file " + args.FILE1, file=sys.stderr)
        return  # fromlines would otherwise be undefined below

    # Open tofile
    try:
        with open(args.FILE2) as fd:
            tolines = fd.readlines()
    except IOError:
        print("Error opening file " + args.FILE2, file=sys.stderr)
        return  # tolines would otherwise be undefined below

    # Create diff
    if args.unified:
        diff = difflib.unified_diff(fromlines, tolines, args.FILE1, args.FILE2,
                                    fromdate, todate, n=args.lines)
    elif args.ndiff:
        diff = difflib.ndiff(fromlines, tolines)
    elif args.html:
        diff = difflib.HtmlDiff().make_file(fromlines, tolines, args.FILE1,
                                            args.FILE2, context=args.context,
                                            numlines=args.lines)
    else:
        diff = difflib.context_diff(fromlines, tolines, args.FILE1, args.FILE2,
                                    fromdate, todate, n=args.lines)

    # we're using writelines because diff is a generator
    sys.stdout.writelines(diff)
Example #12
    def test_stage_package_gets_cached(self):
        self.run_snapcraft(["pull", "oneflatwithstagepackages"], "dump")

        # Verify the 'hello' deb package was cached.
        cache_dir = os.path.join(
            xdg.BaseDirectory.xdg_cache_home, "snapcraft", "stage-packages", "apt"
        )
        archive_dir = os.path.join("var", "cache", "apt", "archives")
        cached = glob.glob(os.path.join(cache_dir, "*", archive_dir, "hello*"))
        self.assertThat(cached, HasLength(1))
        cached_deb = cached[0]

        staged = glob.glob(
            os.path.join(
                "parts", "oneflatwithstagepackages", "ubuntu", "download", "hello*"
            )
        )
        self.assertThat(staged, HasLength(1))
        staged_deb = staged[0]

        # Verify that the staged and cached debs are the same file (hard
        # linked) by comparing inodes.
        cached_deb_inode = os.stat(cached_deb).st_ino
        self.assertThat(cached_deb_inode, Equals(os.stat(staged_deb).st_ino))

        # Now clean the part and pull again.
        self.run_snapcraft("clean", "dump")
        self.run_snapcraft(["pull", "oneflatwithstagepackages"], "dump")

        # Verify that the staged deb is _still_ the same one from the cache.
        self.assertThat(cached_deb_inode, Equals(os.stat(staged_deb).st_ino))
Example #13
def recursively_add_write_bit(inputdir):
    """
    Function to walk a directory tree, adding the write bit to every file
    and directory.  This is mostly useful right before deleting a tree of
    files extracted from an ISO, since those were all read-only to begin
    with.
    """
    for dirpath, dirnames, filenames in os.walk(inputdir):
        # If the path is a symlink, and it is an absolute symlink, this would
        # attempt to change the permissions of the *host* file, not the
        # file that is relative to here.  That is no good, and could be a
        # security problem if Oz is being run as root.  We skip all paths that
        # are symlinks; what they point to will be changed later on.
        if os.path.islink(dirpath):
            continue

        os.chmod(dirpath, os.stat(dirpath).st_mode|stat.S_IWUSR)
        for name in filenames:
            fullpath = os.path.join(dirpath, name)

            # we have the same guard for symlinks as above, for the same reason
            if os.path.islink(fullpath):
                continue

            try:
                # if there are broken symlinks in the ISO,
                # then the below might fail.  This probably
                # isn't fatal, so just allow it and go on
                os.chmod(fullpath, os.stat(fullpath).st_mode|stat.S_IWUSR)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise
Example #14
def savesTemplates(gabaritos):  # saves all the answer keys to disk, one csv file per class
    print()

    if randomTests == 0:  # non-random questions
        print("Warning: You chose in config.txt the option to generate non-random tests.")
        print("In this case, if you want to use the automatic correction using MCTest,")
        print("you must provide a file with the template or consider that the first test")
        print("in pdf file is a template.")
    
    else:

        files = []
        for g in gabaritos:
            files.append(g[0])
    
        for ff in sorted(set(files)):

            f = ff[:-4]+'[email protected]_GAB'

            past = f[10:]
            filename = past[past.find(barra):]
            past = mypathTex+barra+past[:past.find(barra)]
            try:
                os.stat(past)
            except OSError:
                os.mkdir(past)
            
            past += barra+folderQuestions
            try:
                os.stat(past)
            except OSError:
                os.mkdir(past)

            f = past+filename
            
            print("file saved with the answer keys of each student in the class:", f)

            #[n[0], n[1], gab, indexQuest, conteudo]
            with open(f, 'w') as csvfile:
                for gab in gabaritos:
                    if ff == gab[0]:
                        spamWriter = csv.writer(csvfile, delimiter=' ',quotechar=' ', quoting=csv.QUOTE_MINIMAL)
                        pathFile = gab[0]
                        if os.name=='nt': #windows
                            pathFile = pathFile.replace(barra,'/')
                        s = ''.join([x for x in str(gab[2])] )
                        s = s.strip('[')
                        s = s.strip(']')
                        s = s.replace(' ','')
                        s = s.strip()
                        
                        i = ''.join([x for x in str(gab[3])] )
                        i = i.strip('[')
                        i = i.strip(']')
                        i = i.replace(' ','')
                        i = i.strip()
                        
                        t = ''.join([x for x in str(gab[4])] )
                        
                        spamWriter.writerow([pathFile, ';',  gab[1],';', s, ';', i, ';',t])
Example #15
 def has_orig(orig_file, dir):
     "Check if orig tarball exists in dir"
     try:
         os.stat( os.path.join(dir, orig_file) )
     except OSError:
         return False
     return True
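The try/os.stat/except OSError probe above is a plain existence test; os.path.exists expresses it directly and likewise returns False when the stat fails (including on permission errors):

import os.path

def has_orig(orig_file, dir):
    "Check if orig tarball exists in dir"
    return os.path.exists(os.path.join(dir, orig_file))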
Example #16
def static(headers, root, filepath='.', index_files=None,
           special_extensions=None):
    """Reads a static file and returns a response object.

    If the file cannot be opened, ``None`` is returned.

    This function honours the ``If-Modified-Since`` header.

    :param headers: The request headers. These are used to decide whether to
        return ``HTTP 304`` when requesting a file the second time.

    :param str root: The root path to which `filepath` is a relative path.

    :param str filepath: The path of the resource. The resource is read using
        :func:`virtualtouchpad.resource.open_stream`.

    :param index_files: The names of files to use as index files. These are only
        used if ``filepath`` ends with ``'/'``, which is used to denote a
        directory.

    :param special_extensions: Extensions to strip when determining the MIME
        type.

    :return: a response

    :raises HTTPNotFound: if the resource does not exist
    """
    fullroot = os.path.join(ROOT, root)

    # Open the file and get its size
    try:
        response_headers, body = read(
            fullroot, filepath, index_files or [], special_extensions or [])

    except FileNotFoundError:
        raise HTTPNotFound()

    response_headers['Content-Length'] = str(len(body))

    # Check the file mtime; we use the egg file or the current binary
    try:
        st = os.stat(os.path.join(
            os.path.dirname(__file__),
            os.path.pardir,
            os.path.pardir))
    except OSError:
        st = os.stat(os.path.abspath(sys.argv[0]))
    response_headers['Last-Modified'] = email.utils.formatdate(st.st_mtime)

    if not IGNORE_CACHED and headers.get('if-modified-since'):
        if_modified_since = time.mktime(email.utils.parsedate(
            headers.get('if-modified-since').split(";")[0].strip()))
        if if_modified_since is not None \
                and if_modified_since >= int(st.st_mtime):
            # only short-circuit with 304 when the resource is unchanged
            response_headers['Date'] = time.strftime(
                '%a, %d %b %Y %H:%M:%S GMT',
                time.gmtime())
            return Response(status=304, headers=response_headers)

    return Response(status=200, body=body, headers=response_headers)
Example #17
def main(argv):
    start_time = datetime.datetime.now()
    parser = ArgumentParser()
    parser.add_argument('--number', '-n', default='10', help='Number of entries to show')
    parser.add_argument('--outfile', '-o', action='store_true', default=False, dest='out',
                        help='File to output full listing of files')
    parser.add_argument('paths', nargs='+', help='Directories to list size of.')  # without nargs, the loop below would iterate over characters
    args = parser.parse_args()

    out_file = 'disk-usage.json'
    files_dict = {}
    paths_to_do = args.paths
    with open(out_file, 'w') as out_file_handle:
        for path_to_do in paths_to_do:
            for curdir, subdir, files in os.walk(path_to_do):
                for file in files:
                    if os.path.exists(os.path.join(curdir, file)):
                        files_dict[os.path.join(curdir, file)] = os.stat(os.path.join(curdir, file)).st_size
                        out_file_handle.write(
                            '%s,%s\n' % (os.path.join(curdir, file), os.stat(os.path.join(curdir, file)).st_size))

    sorted_files = sorted(files_dict.items(), key=operator.itemgetter(1), reverse=True)
    nice_numbers = [(name, convert_bytes(size)) for name, size in sorted_files]

    end_time = datetime.datetime.now()
    delta_time = end_time - start_time

    print(delta_time)
    if args.number == 'all':
        pprint(nice_numbers)
    else:
        pprint(nice_numbers[:int(args.number)])
Example #18
def reorgdb(outname, fname, threshold, verbose):
    print("input=%s, output=%s, threshold=%d" % (fname, outname, threshold), file=stderr)
    # refuse to clobber an existing output db
    if os.path.exists(outname):
        raise IOError("file %r already exists." % outname)
    now = int(time.time())
    db = dbmopen(fname, "r")
    out = dbmopen(outname, "c")
    remain = 0
    filtered = 0
    for (k, v) in db.items():
        t = struct.unpack("<L", v)[0]
        if now - t < threshold:
            out[k] = v
            remain += 1
        else:
            filtered += 1
            if verbose:
                print("".join(["%02x" % ord(c) for c in k]), time.asctime(time.localtime(t)), "(out)")
    print("total: %d (remain: %d, filtered: %d)" % (remain + filtered, remain, filtered), file=stderr)
    db.close()
    out.close()
    return
Example #19
def compile(cflags, last_cflags_ts, objfile, srcfile):
    source_stat = os.stat(srcfile)
    header_stat = os.stat('uwsgi.h')
    try:
        if os.environ.get('UWSGI_FORCE_REBUILD', None):
            raise
        if source_stat[8] >= last_cflags_ts:
            raise
        if header_stat[8] >= last_cflags_ts:
            raise
        object_stat = os.stat(objfile)
        if object_stat[8] <= source_stat[8]:
            raise
        if object_stat[8] <= header_stat[8]:
            raise
        for profile in os.listdir('buildconf'):
            profile_stat = os.stat('buildconf/%s' % profile)
            if object_stat[8] <= profile_stat[8]:
                raise
        print("%s is up to date" % objfile)
        return
    except Exception:
        # each bare `raise` above (a RuntimeError when no exception is
        # active) lands here and falls through to the rebuild below
        pass
    cmdline = "%s -c %s -o %s %s" % (GCC, cflags, objfile, srcfile)
    push_command(objfile, cmdline)
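The [8] indexing above reaches into the stat tuple by position; index 8 is stat.ST_MTIME. Attribute access carries the same value and documents itself. A small check (the file name is a placeholder):

import os
import stat

st = os.stat('uwsgi.h')
assert st[stat.ST_MTIME] == st[8] == int(st.st_mtime)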
Example #20
    def test_permissions_warnings(self):
        """Make sure letsencrypt-auto properly warns about permissions problems."""
        # This test assumes that only the parent of the directory containing
        # letsencrypt-auto (usually /tmp) may have permissions letsencrypt-auto
        # considers insecure.
        with temp_paths() as (le_auto_path, venv_dir):
            le_auto_path = abspath(le_auto_path)
            le_auto_dir = dirname(le_auto_path)
            le_auto_dir_parent = dirname(le_auto_dir)
            install_le_auto(self.NEW_LE_AUTO, le_auto_path)

            run_letsencrypt_auto = partial(
                run_le_auto, le_auto_path, venv_dir,
                le_auto_args_str='--install-only --no-self-upgrade',
                PIP_FIND_LINKS=join(tests_dir(), 'fake-letsencrypt', 'dist'))
            # Run letsencrypt-auto once with current permissions to avoid
            # potential problems when the script tries to write to temporary
            # directories.
            run_letsencrypt_auto()

            le_auto_dir_mode = stat(le_auto_dir).st_mode
            le_auto_dir_parent_mode = S_IMODE(stat(le_auto_dir_parent).st_mode)
            try:
                # Make letsencrypt-auto happy with the current permissions
                chmod(le_auto_dir, S_IRUSR | S_IXUSR)
                sudo_chmod(le_auto_dir_parent, 0o755)

                self._test_permissions_warnings_about_path(le_auto_path, run_letsencrypt_auto)
                self._test_permissions_warnings_about_path(le_auto_dir, run_letsencrypt_auto)
            finally:
                chmod(le_auto_dir, le_auto_dir_mode)
                sudo_chmod(le_auto_dir_parent, le_auto_dir_parent_mode)
Example #21
    def forwards(self, orm):
        "Write your forwards migration here"
        for item in orm.AlbumConvertableItem.objects.all():
            try:
                image_path = item.image.image.path
            except Exception:
                image_path = os.path.join(settings.MEDIA_ROOT, item.thumbFilename)

            try:
                os.stat(image_path)
            except OSError as e:
                if e.errno != 2:  # 2 == errno.ENOENT: the image file is gone
                    raise
                else:
                    continue

            old_dir, filename = os.path.split(image_path)
            new_path = os.path.join('albums', str(item.parent.pk), str(item.pk))
            new_dir = os.path.join(settings.MEDIA_ROOT, new_path)

            try:
                os.makedirs(new_dir)
            except OSError as e:
                if e.errno != 17:  # 17 == errno.EEXIST
                    raise
                print("Directory %s already exists" % new_dir)

            print("Moving %s" % image_path)
            if image_path != os.path.join(new_dir, filename):
                shutil.move(image_path, new_dir)
            else:
                print("Skipping")

            item.preview = os.path.join(new_path, filename)
            item.save()
Example #22
def getStartAfterLogon():
	try:
		k = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, RUN_REGKEY)
		val = _winreg.QueryValueEx(k, u"nvda")[0]
		return os.stat(val) == os.stat(sys.argv[0])
	except (WindowsError, OSError):
		return False
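Comparing two full os.stat results, as above, requires every field to match (mode, inode, device, size, all three timestamps), which works as an identity test but is stricter than needed. Comparing device and inode is the usual "same file" test, and os.path.samefile wraps exactly that (note that inode numbers are not meaningful on every Windows/Python combination):

import os

def is_same_file(a, b):
    sa, sb = os.stat(a), os.stat(b)
    return (sa.st_dev, sa.st_ino) == (sb.st_dev, sb.st_ino)
    # equivalent: os.path.samefile(a, b)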
Example #23
def try_import():
    """
    Load the cuda_ndarray module if present and up to date.
    Return True if loaded correctly, otherwise return False.

    """
    cuda_files = (
        'cuda_ndarray.cu',
        'cuda_ndarray.cuh',
        'conv_full_kernel.cu',
        'cnmem.h',
        'cnmem.cpp',
        'conv_kernel.cu')
    stat_times = [os.stat(os.path.join(cuda_path, cuda_file))[stat.ST_MTIME]
                  for cuda_file in cuda_files]
    date = max(stat_times)
    if os.path.exists(cuda_ndarray_so):
        if date >= os.stat(cuda_ndarray_so)[stat.ST_MTIME]:
            return False
    try:
        # If we load a previously-compiled version, config.compiledir should
        # be in sys.path.
        sys.path[0:0] = [config.compiledir]
        import cuda_ndarray.cuda_ndarray
        del sys.path[0]
    except ImportError:
        return False
    return True
Example #24
    def test_lchflags_symlink(self):
        testfn_st = os.stat(test_support.TESTFN)

        self.assertTrue(hasattr(testfn_st, 'st_flags'))

        os.symlink(test_support.TESTFN, _DUMMY_SYMLINK)
        self.teardown_files.append(_DUMMY_SYMLINK)
        dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)

        # ZFS returns EOPNOTSUPP when attempting to set flag UF_IMMUTABLE.
        try:
            posix.lchflags(_DUMMY_SYMLINK,
                           dummy_symlink_st.st_flags | stat.UF_IMMUTABLE)
        except OSError as err:
            if err.errno != errno.EOPNOTSUPP:
                raise
            msg = 'chflag UF_IMMUTABLE not supported by underlying fs'
            self.skipTest(msg)

        try:
            new_testfn_st = os.stat(test_support.TESTFN)
            new_dummy_symlink_st = os.lstat(_DUMMY_SYMLINK)

            self.assertEqual(testfn_st.st_flags, new_testfn_st.st_flags)
            self.assertEqual(dummy_symlink_st.st_flags | stat.UF_IMMUTABLE,
                             new_dummy_symlink_st.st_flags)
        finally:
            posix.lchflags(_DUMMY_SYMLINK, dummy_symlink_st.st_flags)
Example #25
    def copy_to_build_dir(self):
        """ Copy the entire work directory (excluding templates and hidden files)
            to the build directory.
        """
        # What not to copy to _build directory.
        ignore_patterns = re.compile(r'\.+|.+\.pyc?|.+\.pl|.+_eq.+\.png')
        ignore_dirs     = ['_build', '_templates', '.git']

        # Walk through the directory and copy appropriate files.
        for dirpath, dirnames, filenames in os.walk(self.work_dir):

            # Remove excluded directories.
            dirnames[:] = [d for d in dirnames if d not in ignore_dirs]

            reldirpath = os.path.relpath(dirpath, self.work_dir)

            # I hate that this is necessary, but create any missing directories.
            for somedir in dirnames:
                somedir = os.path.join(self.build_dir, reldirpath, somedir)
                somedir = os.path.normpath(somedir)
                if not os.path.exists(somedir):
                    os.makedirs(somedir)

            for filename in filenames:
                # Ignore specified file patterns.
                if not re.match(ignore_patterns, filename):
                    src = os.path.normpath(os.path.join(self.work_dir,  reldirpath, filename))
                    dst = os.path.normpath(os.path.join(self.build_dir, reldirpath, filename))

                    # Only copy files that have been modified.
                    if not os.path.exists(dst) or os.stat(src).st_mtime > os.stat(dst).st_mtime:
                        print('Modified: ' + src)
                        shutil.copy2(src, dst)
Example #26
def configure(self):
	kdeconfig=self.find_program('kde4-config')
	prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip()
	fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
	try:os.stat(fname)
	except OSError:
		fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix
		try:os.stat(fname)
		except OSError:self.fatal('could not open %s'%fname)
	try:
		txt=Utils.readf(fname)
	except(OSError,IOError):
		self.fatal('could not read %s'%fname)
	txt=txt.replace('\\\n','\n')
	fu=re.compile('#(.*)\n')
	txt=fu.sub('',txt)
	setregexp=re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+"([^"]+)"\)')
	found=setregexp.findall(txt)
	for(_,key,val)in found:
		self.env[key]=val
	self.env['LIB_KDECORE']=['kdecore']
	self.env['LIB_KDEUI']=['kdeui']
	self.env['LIB_KIO']=['kio']
	self.env['LIB_KHTML']=['khtml']
	self.env['LIB_KPARTS']=['kparts']
	self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR]
	self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']]
	self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE'])
	self.find_program('msgfmt',var='MSGFMT')
Example #27
    def __really_writeIndex(self):
        """Do the actual work of writing the index for all subjobs"""
        import os

        all_caches = {}
        for sj_id in range(len(self)):
            if sj_id in self._cachedJobs.keys():
                this_cache = self._registry.getIndexCache( self.__getitem__(sj_id) )
                all_caches[sj_id] = this_cache
                disk_location = self.__get_dataFile(sj_id)
                all_caches[sj_id]['modified'] = os.stat(disk_location).st_ctime
            else:
                if sj_id in self._subjobIndexData.keys():
                    all_caches[sj_id] = self._subjobIndexData[sj_id]
                else:
                    this_cache = self._registry.getIndexCache( self.__getitem__(sj_id) )
                    all_caches[sj_id] = this_cache
                    disk_location = self.__get_dataFile(sj_id)
                    all_caches[sj_id]['modified'] = os.stat(disk_location).st_ctime

        try:
            from Ganga.Core.GangaRepository.PickleStreamer import to_file
            index_file = os.path.join(self._jobDirectory, self._subjob_master_index_name )
            index_file_obj = open( index_file, "w" )
            to_file( all_caches, index_file_obj )
            index_file_obj.close()
        except Exception as err:
            logger.debug( "cache write error: %s" % str(err) )
Example #28
def link_or_copy(src, dst):
    """Attempts to hardlink ``src`` to ``dst``, copying if the link fails.

    Attempts to maintain the semantics of ``shutil.copy``.

    Because ``os.link`` does not overwrite files, a unique temporary file
    will be used if the target already exists, then that file will be moved
    into place.
    """

    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))

    link_errno = link(src, dst)
    if link_errno == errno.EEXIST:
        if os.stat(src).st_ino == os.stat(dst).st_ino:
            # dst is already a hard link to the correct file, so we don't need
            # to do anything else. If we try to link and rename the file
            # anyway, we get duplicate files - see http://bugs.python.org/issue21876
            return

        new_dst = dst + "-temp-%04X" %(random.randint(1, 16**4), )
        try:
            link_or_copy(src, new_dst)
        except:
            try:
                os.remove(new_dst)
            except OSError:
                pass
            raise
        os.rename(new_dst, dst)
    elif link_errno != 0:
        # Either link isn't supported, or the filesystem doesn't support
        # linking, or 'src' and 'dst' are on different filesystems.
        shutil.copy(src, dst)
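A hypothetical use of link_or_copy, with a quick inode check to see which branch was taken (paths are placeholders):

import os

link_or_copy('cache/blob.bin', 'work/blob.bin')
same = os.stat('cache/blob.bin').st_ino == os.stat('work/blob.bin').st_ino
print('hardlinked' if same else 'copied')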
Example #29
def l1b_metadata_after_noaa_15():
    try:
        os.stat('tmp/cache/n16gac10bit.l1b')
    except OSError:
        return 'skip'

    gdal.SetConfigOption('L1B_FETCH_METADATA', 'YES')
    gdal.SetConfigOption('L1B_METADATA_DIRECTORY', 'tmp')
    ds = gdal.Open('tmp/cache/n16gac10bit.l1b')
    gdal.SetConfigOption('L1B_FETCH_METADATA', None)
    gdal.SetConfigOption('L1B_METADATA_DIRECTORY', None)
    del ds

    f = open('tmp/n16gac10bit.l1b_metadata.csv', 'rb')
    l = f.readline().decode('ascii')
    if l != 'SCANLINE,NBLOCKYOFF,YEAR,DAY,MS_IN_DAY,SAT_CLOCK_DRIF_DELTA,SOUTHBOUND,SCANTIME_CORRECTED,C3_SELECT,FATAL_FLAG,TIME_ERROR,DATA_GAP,INSUFFICIENT_DATA_FOR_CAL,NO_EARTH_LOCATION,FIRST_GOOD_TIME_AFTER_CLOCK_UPDATE,INSTRUMENT_STATUS_CHANGED,SYNC_LOCK_DROPPED,FRAME_SYNC_ERROR,FRAME_SYNC_DROPPED_LOCK,FLYWHEELING,BIT_SLIPPAGE,TIP_PARITY_ERROR,REFLECTED_SUNLIGHT_C3B,REFLECTED_SUNLIGHT_C4,REFLECTED_SUNLIGHT_C5,RESYNC,P_N_STATUS,BAD_TIME_CAN_BE_INFERRED,BAD_TIME_CANNOT_BE_INFERRED,TIME_DISCONTINUITY,REPEAT_SCAN_TIME,UNCALIBRATED_BAD_TIME,CALIBRATED_FEWER_SCANLINES,UNCALIBRATED_BAD_PRT,CALIBRATED_MARGINAL_PRT,UNCALIBRATED_CHANNELS,NO_EARTH_LOC_BAD_TIME,EARTH_LOC_QUESTIONABLE_TIME,EARTH_LOC_QUESTIONABLE,EARTH_LOC_VERY_QUESTIONABLE,C3B_UNCALIBRATED,C3B_QUESTIONABLE,C3B_ALL_BLACKBODY,C3B_ALL_SPACEVIEW,C3B_MARGINAL_BLACKBODY,C3B_MARGINAL_SPACEVIEW,C4_UNCALIBRATED,C4_QUESTIONABLE,C4_ALL_BLACKBODY,C4_ALL_SPACEVIEW,C4_MARGINAL_BLACKBODY,C4_MARGINAL_SPACEVIEW,C5_UNCALIBRATED,C5_QUESTIONABLE,C5_ALL_BLACKBODY,C5_ALL_SPACEVIEW,C5_MARGINAL_BLACKBODY,C5_MARGINAL_SPACEVIEW,BIT_ERRORS,VIS_OP_CAL_C1_SLOPE_1,VIS_OP_CAL_C1_INTERCEPT_1,VIS_OP_CAL_C1_SLOPE_2,VIS_OP_CAL_C1_INTERCEPT_2,VIS_OP_CAL_C1_INTERSECTION,VIS_TEST_CAL_C1_SLOPE_1,VIS_TEST_CAL_C1_INTERCEPT_1,VIS_TEST_CAL_C1_SLOPE_2,VIS_TEST_CAL_C1_INTERCEPT_2,VIS_TEST_CAL_C1_INTERSECTION,VIS_PRELAUNCH_CAL_C1_SLOPE_1,VIS_PRELAUNCH_CAL_C1_INTERCEPT_1,VIS_PRELAUNCH_CAL_C1_SLOPE_2,VIS_PRELAUNCH_CAL_C1_INTERCEPT_2,VIS_PRELAUNCH_CAL_C1_INTERSECTION,VIS_OP_CAL_C2_SLOPE_1,VIS_OP_CAL_C2_INTERCEPT_1,VIS_OP_CAL_C2_SLOPE_2,VIS_OP_CAL_C2_INTERCEPT_2,VIS_OP_CAL_C2_INTERSECTION,VIS_TEST_CAL_C2_SLOPE_1,VIS_TEST_CAL_C2_INTERCEPT_1,VIS_TEST_CAL_C2_SLOPE_2,VIS_TEST_CAL_C2_INTERCEPT_2,VIS_TEST_CAL_C2_INTERSECTION,VIS_PRELAUNCH_CAL_C2_SLOPE_1,VIS_PRELAUNCH_CAL_C2_INTERCEPT_1,VIS_PRELAUNCH_CAL_C2_SLOPE_2,VIS_PRELAUNCH_CAL_C2_INTERCEPT_2,VIS_PRELAUNCH_CAL_C2_INTERSECTION,VIS_OP_CAL_C3A_SLOPE_1,VIS_OP_CAL_C3A_INTERCEPT_1,VIS_OP_CAL_C3A_SLOPE_2,VIS_OP_CAL_C3A_INTERCEPT_2,VIS_OP_CAL_C3A_INTERSECTION,VIS_TEST_CAL_C3A_SLOPE_1,VIS_TEST_CAL_C3A_INTERCEPT_1,VIS_TEST_CAL_C3A_SLOPE_2,VIS_TEST_CAL_C3A_INTERCEPT_2,VIS_TEST_CAL_C3A_INTERSECTION,VIS_PRELAUNCH_CAL_C3A_SLOPE_1,VIS_PRELAUNCH_CAL_C3A_INTERCEPT_1,VIS_PRELAUNCH_CAL_C3A_SLOPE_2,VIS_PRELAUNCH_CAL_C3A_INTERCEPT_2,VIS_PRELAUNCH_CAL_C3A_INTERSECTION,IR_OP_CAL_C3B_COEFF_1,IR_OP_CAL_C3B_COEFF_2,IR_OP_CAL_C3B_COEFF_3,IR_TEST_CAL_C3B_COEFF_1,IR_TEST_CAL_C3B_COEFF_2,IR_TEST_CAL_C3B_COEFF_3,IR_OP_CAL_C4_COEFF_1,IR_OP_CAL_C4_COEFF_2,IR_OP_CAL_C4_COEFF_3,IR_TEST_CAL_C4_COEFF_1,IR_TEST_CAL_C4_COEFF_2,IR_TEST_CAL_C4_COEFF_3,IR_OP_CAL_C5_COEFF_1,IR_OP_CAL_C5_COEFF_2,IR_OP_CAL_C5_COEFF_3,IR_TEST_CAL_C5_COEFF_1,IR_TEST_CAL_C5_COEFF_2,IR_TEST_CAL_C5_COEFF_3,EARTH_LOC_CORR_TIP_EULER,EARTH_LOC_IND,SPACECRAFT_ATT_CTRL,ATT_SMODE,ATT_PASSIVE_WHEEL_TEST,TIME_TIP_EULER,TIP_EULER_ROLL,TIP_EULER_PITCH,TIP_EULER_YAW,SPACECRAFT_ALT\n':
        print(l)
        return 'fail'
    l = f.readline().decode('ascii')
    if l != '3406,0,2003,85,3275054,79,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052300,-2.015999,0.152800,-51.910000,499,0.052300,-2.015999,0.152800,-51.910000,498,0.052300,-2.015999,0.152800,-51.910000,498,0.051300,-1.942999,0.151000,-51.770000,500,0.051300,-1.942999,0.151000,-51.770000,500,0.051300,-1.942999,0.151000,-51.770000,500,0.000000,0.000000,0.000000,0.000000,0,0.000000,0.000000,0.000000,0.000000,0,0.000000,0.000000,0.000000,0.000000,0,2.488212,-0.002511,0.000000,2.488212,-0.002511,0.000000,179.546496,-0.188553,0.000008,179.546496,-0.188553,0.000008,195.236384,-0.201709,0.000006,195.236384,-0.201709,0.000006,0,0,0,0,0,608093,-0.021000,-0.007000,0.000000,862.000000\n':
        print(l)
        return 'fail'
    f.close()

    os.unlink('tmp/n16gac10bit.l1b_metadata.csv')

    return 'success'
Example #30
def check(self):
    """Check to see if assets need to be rebuilt.

    A non-zero exit status will be returned if any of the input files are
    newer (based on mtime) than their output file. This is intended to be used
    in pre-commit hooks.
    """
    from pprint import pprint
    self.log.debug('Checking:')
    needsupdate = False
    for bundle in self.environment:
        outputname = join(self.environment.directory, bundle.output)
        outputtime = None
        self.log.debug('  asset: %s', outputname)
        if not isfile(outputname):
            self.log.warning('%s does not exist', outputname)
            needsupdate = True
        else:
            outputtime = stat(outputname).st_mtime
            for filename in bundle.get_files():
                inputtime = stat(filename).st_mtime
                self.log.debug('    %s', filename)
                if inputtime > outputtime:
                    self.log.warning('%s is newer than %s', filename, outputname)
                    needsupdate = True
    if needsupdate:
        sys.exit(-1)
Example #31
def get_lat_lng(location):

    #Check to see if a location.json is in the config folder
    reload = False
    ipfallback = False
    today = datetime.today()  # gets current time
    latlng = []

    j = {}
    path = get_file("config/location.json")
    if os.path.isfile(path):
        try:
            j = json.load(open(path))
            msg = "json loaded OK"
            #Get the city, country and latlng from the loaded json
            latlng = [j["lat"],j["lng"]]
            #Check the age of the file, if it's older than 7 days, reload it.
            t = os.stat(path)[8]  # index 8 == stat.ST_MTIME
            filetime = datetime.fromtimestamp(t) - today
            if filetime.days <= -7:
                reload = True
            
            if reload:
                message = "location loaded from cache has expired, reloading...."
            else:
                message = "location loaded from cache (saved {} days ago): ".format(filetime.days) + j["city"] + ", "+ j["country"] + " " + str(latlng)

        except json.decoder.JSONDecodeError as e:
            msg = "Unable to load json: {0}".format(e)
            j = {}
            reload = True
    else:
        msg="Unable to open file {}".format(path)
        reload = True

    if reload:
        if len(location) > 0:

            g = geocoder.osm(location)

            if not g.ok:
                ipfallback = True
                message = "Unable to find [{}] with Open Street Map, used IP address to find your location is: ".format(location) + g.city + ","+ g.country + " " + str(g.latlng)
            else:
                latlng = g.latlng
                message = "location is: " + location + " " + str(g.latlng)
        else:
            ipfallback = True

        if ipfallback:
            g = geocoder.ip('me')
            if g.ok:
                latlng = g.latlng
                message = "location is: " + g.city + ","+ g.country + " " + str(g.latlng)
            else:
                # Get the location of the timezone from the /usr/share/zoneinfo/zone.tab

                try:
                    stream=os.popen("cat /usr/share/zoneinfo/zone.tab | grep $(cat /etc/timezone) | awk '{print $2}'")
                    get_tzlatlng=stream.read().rstrip() + "/"
                    loc=Location(get_tzlatlng)
                    latlng = [float(loc.lat.decimal),float(loc.lng.decimal)]
                except:
                    #If this hits, your rpi is foobarred and locale and timezone info is missing
                    #So, we will default to a Tragically Hip song lyric
                    #At the 100th meridian, where the great plains begin
                    latlng = [float(67.833333), float(-100)]
                    
                g.latlng = latlng
                message = "Unable to find location with open street maps or IP address, using lat/lon of your timezone, {}".format(str(latlng))

        if g.ok:
            #Dump the location to a file
            savefile = json.dumps(g.json, sort_keys=False, indent=4)
            try:
                with open(path,'w') as f:
                    try:
                        f.write(savefile)
                    except Exception as e:
                        print("Could not write {0}. Error Message: {1}".format(path,e))
            except Exception as e:
                print("Could not open {0} unable to save location.json. Error Message: {1}".format(path,e))


    return latlng,message
Example #32
with open(file_path, "w") as f:
    # Print header.
    f.write("# This shell script executes the Slurm jobs for data augmentation.\n")
    f.write("\n")

    # Loop over augmentations.
    for aug_str in augmentations:

        # Loop over instances.
        n_instances = augmentations[aug_str]
        for instance_id in range(n_instances):
            instance_str = str(instance_id)

            # Loop over recording units.
            for unit_str in units:
                # Define job name.
                job_name = "_".join(["003", aug_str, instance_str, unit_str])
                sbatch_str = "sbatch " + job_name + ".sbatch"

                # Write SBATCH command to shell file.
                f.write(sbatch_str + "\n")
            f.write("\n")
        f.write("\n")


# Grant permission to execute the shell file.
# https://stackoverflow.com/a/30463972
mode = os.stat(file_path).st_mode
mode |= (mode & 0o444) >> 2
os.chmod(file_path, mode)
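A worked example of the permission trick above: masking with 0o444 keeps just the three read bits, and shifting right by two moves each read bit onto the matching execute bit (within each octal digit, r = 4 becomes x = 1). Execute is therefore granted exactly where read was already allowed:

mode = 0o644                      # rw-r--r--
exec_bits = (mode & 0o444) >> 2   # 0o444 -> 0o111
assert exec_bits == 0o111
assert mode | exec_bits == 0o755  # rwxr-xr-x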
Example #33
def file_size(file_path):
    """ Return the file size in bytes """

    if os.path.isfile(file_path):
        file_info = os.stat(file_path)
        return convert_bytes(file_info.st_size)
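convert_bytes is not part of the snippet; a plausible helper matching how it is called here might look like this (an assumption, not the project's actual code):

def convert_bytes(num):
    # hypothetical stand-in for the undisplayed helper
    for unit in ('bytes', 'KB', 'MB', 'GB', 'TB'):
        if num < 1024.0:
            return '%3.1f %s' % (num, unit)
        num /= 1024.0
    return '%3.1f PB' % num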
Example #34
            os.path.basename(sys.argv[0]) +
            " <katalog_z_programem_run.sh> <sciezka_do_katalogu_ze_zbiorami_danych>"
        )
        exit(1)

    programdir_path = os.path.abspath(sys.argv[1])
    data_path = os.path.abspath(sys.argv[2])

    print("AUTORZY:")
    with open(os.path.join(programdir_path, 'autorzy.txt'), 'rt') as f:
        for l in f.readlines():
            print(l.strip())

    program_path = os.path.join(programdir_path, 'run.bat')
    os.chmod(program_path,
             os.stat(program_path).st_mode | 0o100)  # 0o100 == stat.S_IEXEC

    dirs = [('set0', 6), ('set1', 20), ('set2', 20), ('set3', 20),
            ('set4', 20), ('set5', 200), ('set6', 200), ('set7', 20),
            ('set8', 100)]

    print("WYNIKI:")
    total = []
    times = []
    for d in dirs:
        res, t = checkDir(programdir_path, os.path.join(data_path, d[0]), d[1])
        total.append(res)
        times.append(t)
        print(d[0], '=', res[:-1], 'score =', res[-1], "[%dsec]" % t)

    print("----")
Example #35
def get_rdev(file):
    r = os.stat(file).st_rdev
    return r >> 8, r & 255
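The shift-and-mask above assumes the historical layout of 8-bit major/minor device numbers; Linux has long used wider fields. os.major() and os.minor() decode st_rdev portably:

import os

def get_rdev_portable(file):
    r = os.stat(file).st_rdev
    return os.major(r), os.minor(r)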
Example #36
 def __init__(self, fname):
     self.fname = fname
     self.position = 0
     self.max_size = os.stat(fname).st_size
     self.gen_bytes = {}
     self.state = "OPEN"
Example #37
def prepare_impl(t, cwd, ver, wafdir):
	Options.tooldir = [t]
	Options.launch_dir = cwd

	# some command-line options can be processed immediately
	if '--version' in sys.argv:
		opt_obj = Options.Handler()
		opt_obj.curdir = cwd
		opt_obj.parse_args()
		sys.exit(0)

	# now find the wscript file
	msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE

	# in theory projects can be configured in an autotool-like manner:
	# mkdir build && cd build && ../waf configure && ../waf
	build_dir_override = None
	candidate = None

	lst = os.listdir(cwd)

	search_for_candidate = True
	if WSCRIPT_FILE in lst:
		candidate = cwd

	elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
		# autotool-like configuration
		calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
		if WSCRIPT_FILE in os.listdir(calldir):
			candidate = calldir
			search_for_candidate = False
		else:
			error('arg[0] directory does not contain a wscript file')
			sys.exit(1)
		build_dir_override = cwd

	# climb up to find a script if it is not found
	while search_for_candidate:
		if len(cwd) <= 3:
			break # stop at / or c:
		dirlst = os.listdir(cwd)
		if WSCRIPT_FILE in dirlst:
			candidate = cwd
		if 'configure' in sys.argv and candidate:
			break
		if Options.lockfile in dirlst:
			env = Environment.Environment()
			try:
				env.load(os.path.join(cwd, Options.lockfile))
			except Exception:
				error('could not load %r' % Options.lockfile)
			try:
				os.stat(env['cwd'])
			except OSError:
				candidate = cwd
			else:
				candidate = env['cwd']
			break
		cwd = os.path.dirname(cwd) # climb up

	if not candidate:
		# check if the user only wanted to display the help
		if '-h' in sys.argv or '--help' in sys.argv:
			warn('No wscript file found: the help message may be incomplete')
			opt_obj = Options.Handler()
			opt_obj.curdir = cwd
			opt_obj.parse_args()
		else:
			error(msg1)
		sys.exit(0)

	# We have found wscript, but there is no guarantee that it is valid
	try:
		os.chdir(candidate)
	except OSError:
		raise Utils.WafError("the folder %r is unreadable" % candidate)

	# define the main module containing the functions init, shutdown, ..
	Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))

	if build_dir_override:
		d = getattr(Utils.g_module, BLDDIR, None)
		if d:
			# test if user has set the blddir in wscript.
			msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
			warn(msg)
		Utils.g_module.blddir = build_dir_override

	# bind a few methods and classes by default

	def set_def(obj, name=''):
		n = name or obj.__name__
		if not n in Utils.g_module.__dict__:
			setattr(Utils.g_module, n, obj)

	for k in [dist, distclean, distcheck, clean, install, uninstall]:
		set_def(k)

	set_def(Configure.ConfigurationContext, 'configure_context')

	for k in ['build', 'clean', 'install', 'uninstall']:
		set_def(Build.BuildContext, k + '_context')

	# now parse the options from the user wscript file
	opt_obj = Options.Handler(Utils.g_module)
	opt_obj.curdir = candidate
	try:
		f = Utils.g_module.set_options
	except AttributeError:
		pass
	else:
		opt_obj.sub_options([''])
	opt_obj.parse_args()

	if not 'init' in Utils.g_module.__dict__:
		Utils.g_module.init = Utils.nada
	if not 'shutdown' in Utils.g_module.__dict__:
		Utils.g_module.shutdown = Utils.nada

	main()
Example #38
 def file_size(self):
     if not self.has_attr('file_stat'):
         self.set_attr('file_stat', os.stat(self.__path))
     return self.get_attr('file_stat').st_size
Example #39
def make_executable(path):
    import stat
    st = os.stat(path)
    os.chmod(path, st.st_mode | stat.S_IEXEC)
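stat.S_IEXEC sets only the owner's execute bit. A variant that mirrors a plain "chmod +x" more closely also sets group and other execute:

import os
import stat

def make_executable_for_all(path):
    st = os.stat(path)
    os.chmod(path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)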
Example #40
def reset(args: argparse.Namespace,
          context: Optional[AppContext] = None) -> int:
    """Clears the SecureDrop development applications' state, restoring them to
    the way they were immediately after running `setup_dev.sh`. This command:
    1. Erases the development sqlite database file.
    2. Regenerates the database.
    3. Erases stored submissions and replies from the store dir.
    """
    # Erase the development db file
    if not hasattr(config, 'DATABASE_FILE'):
        raise Exception("./manage.py doesn't know how to clear the db "
                        'if the backend is not sqlite')

    # we need to save some data about the old DB file so we can recreate it
    # with the same state
    try:
        stat_res = os.stat(config.DATABASE_FILE)
        uid = stat_res.st_uid
        gid = stat_res.st_gid
    except OSError:
        uid = os.getuid()
        gid = os.getgid()

    try:
        os.remove(config.DATABASE_FILE)
    except OSError:
        pass

    # Regenerate the database
    # 1. Create it
    subprocess.check_call(['sqlite3', config.DATABASE_FILE, '.databases'])
    # 2. Set permissions on it
    os.chown(config.DATABASE_FILE, uid, gid)
    os.chmod(config.DATABASE_FILE, 0o0640)

    if os.environ.get('SECUREDROP_ENV') == 'dev':
        # 3. Create the DB from the metadata directly when in 'dev' so
        # developers can test application changes without first writing
        # alembic migration.
        with context or app_context():
            db.create_all()
    else:
        # We have to override the hardcoded .ini file because during testing
        # the value in the .ini doesn't exist.
        ini_dir = os.path.dirname(
            getattr(config, 'TEST_ALEMBIC_INI', 'alembic.ini'))

        # 3. Migrate it to 'head'
        subprocess.check_call('cd {} && alembic upgrade head'.format(ini_dir),
                              shell=True)  # nosec

    # Clear submission/reply storage
    try:
        os.stat(args.store_dir)
    except OSError:
        pass
    else:
        for source_dir in os.listdir(args.store_dir):
            try:
                # Each entry in STORE_DIR is a directory corresponding
                # to a source
                shutil.rmtree(os.path.join(args.store_dir, source_dir))
            except OSError:
                pass
    return 0
Example #41
def retrieve(target=None,source=None,env=None):
    "Fetch data from the web"
    top = env.get('top')
    if top:
        folder = top + os.sep +env['dir']
    else:
        folder = env['dir']
    private = env.get('private')
    if sys.platform[:6] != 'cygwin':
        usedatapath = env.get('usedatapath')
    else:
        usedatapath = False
    if private:
        login = private['login']
        password = private['password']
        server = private['server']
        if not server:
            print('Cannot access proprietary data server')
            return 7
        try:
            session = ftplib.FTP(server,login,password)
            session.cwd(folder)
        except Exception:
            print('Could not establish connection with "%s/%s" ' % (server,
                                                                    folder))
            return 3
        for file in filter(lambda x: not os.path.abspath(x).startswith(env.path), 
                           map(str,target)):
            remote = os.path.basename(file)
            if usedatapath:
                localfile=env.path+remote
            else:
                localfile=file
            try:
                download = open(localfile,'wb')
                session.retrbinary('RETR '+remote,
                                   lambda x: download.write(x))
                download.close()
            except Exception:
                print('Could not download file "%s" ' % file)
                return 1
            if not os.stat(localfile)[6]:  # index 6 == st_size: zero-size file
                print('Could not download file "%s" ' % file)
                os.unlink(localfile)
                return 4
            if usedatapath:
                if os.path.isfile(file):
                    os.unlink(file)
                os.symlink(localfile,file)
        session.quit()
    else:
        server = env.get('server')
        if server == 'local':
            for file in map(str,target):
                remote = os.path.basename(file)  
                remote = os.path.join(folder,remote)
                try:
                    os.symlink(remote,file)
                except OSError:
                    print('Could not link file "%s" ' % remote)
                    os.unlink(file)
                    return 6
        else:
            for file in filter(lambda x: not os.path.abspath(x).startswith(env.path),
                               map(str,target)):
                remote = os.path.basename(file)  
                rdir =  '/'.join([server,folder,remote])
                if usedatapath:
                    localfile=env.path+remote
                else:
                    localfile=file
                try:
                    urllib.request.urlretrieve(rdir, localfile)  # urlretrieve lives in urllib.request on Python 3
                    if not os.stat(localfile)[6]:  # zero-size file
                        print('Could not download file "%s" ' % localfile)
                        os.unlink(localfile)
                        return 2
                except Exception:
                    print('Could not download "%s" from "%s" ' % (localfile, rdir))
                    return 5
                if usedatapath:
                    if os.path.isfile(file):
                        os.unlink(file)
                    os.symlink(localfile,file)
    return 0
Example #42
def _make_executable(path):
    mode = os.stat(path).st_mode
    mode |= (mode & 0o444) >> 2  # copy R bits to X
    print("* Making %s executeable..." % (path))
    os.chmod(path, mode)
Example #43
        # Copy the entire frozen app
        shutil.copytree(os.path.join(PATH, "build", exe_dir),
                        os.path.join(app_dir_path, "usr", "bin"))

        # Copy desktop integration wrapper (prompts users to install shortcut)
        launcher_path = os.path.join(app_dir_path, "usr", "bin", "openshot-qt")
        os.rename(os.path.join(app_dir_path, "usr", "bin", "launch-linux.sh"),
                  launcher_path)
        desktop_wrapper = os.path.join(app_dir_path, "usr", "bin",
                                       "openshot-qt.wrapper")
        shutil.copyfile("/home/ubuntu/apps/AppImageKit/desktopintegration",
                        desktop_wrapper)

        # Change permission of AppRun (and desktop.wrapper) file (add execute permission)
        st = os.stat(app_run_path)
        os.chmod(app_run_path, st.st_mode | stat.S_IEXEC)
        os.chmod(desktop_wrapper, st.st_mode | stat.S_IEXEC)
        os.chmod(launcher_path, st.st_mode | stat.S_IEXEC)

        # Create AppImage (OpenShot-%s-x86_64.AppImage)
        app_image_success = False
        for line in run_command(
                '/home/ubuntu/apps/AppImageKit/AppImageAssistant "%s" "%s"' %
            (app_dir_path, app_build_path)):
            output(line)
        app_image_success = os.path.exists(app_build_path)

        # Was the AppImage creation successful
        if not app_image_success or errors_detected:
            # AppImage failed
Example #44
def save_in_file(filepath, data):
    if filepath:
        with open(filepath, 'a') as file_handler:
            if os.stat(filepath).st_size == 0:
                print(BLOCK_SEPARATOR, file=file_handler)
            print(data, file=file_handler)
Example #45
def file_size(path):
    return os.stat(path).st_size
Example #46
import os, glob, subprocess

import postprocessor as pprocessor

the_files = sorted(list(set(glob.glob('*JPG') + glob.glob('*jpg'))))
the_files_list = ' '.join(the_files)
if the_files:
    project_file = the_files[0] + ".pto"
    the_script = """#!/usr/bin/env bash
pto_gen -o %s %s
""" % (project_file, the_files_list)
    
    the_script = the_script + """
cpfind --multirow --celeste -o %s %s
cpclean -o %s %s
linefind -o %s %s
autooptimiser -a -l -s -m -o %s %s
pano_modify --canvas=AUTO --crop=AUTO -o %s %s
# hugin_executor -s %s                              # Uncomment to stitch the panorama immediately
""" % tuple([project_file] * 11)
    
    script_file_name = os.path.splitext(the_files[0])[0] + '-pano.SH'
    with open(script_file_name, mode='w') as script_file:
        script_file.write(the_script)
    
    os.chmod(script_file_name, os.stat(script_file_name).st_mode | 0o111)    # or, in Bash, "chmod a+x SCRIPT_FILE_NAME"
    
    # pprocessor.run_shell_scripts()    # uncomment this line to automatically run all scripts in the directory.
else:
    raise IndexError('You must call panorama_script_generator.py in a folder with at least one .jpg or .JPG file;\n   current working directory is %s' % os.getcwd())
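Since every %s slot in the second template refers to the same project file, the substitution can be fed one repeated value. A minimal illustration of that pattern:

template = "cpclean -o %s %s"
print(template % tuple(["pano.pto"] * 2))  # -> cpclean -o pano.pto pano.pto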
Example #47
def cand_snv_allele_calling(database, infile, infile_full, infile_full_gt, infile_spec, cn):

    f = open(infile_spec, "r")

    all_variants = []

    for line in open(infile_full, "r"):
        all_variants.append(line.strip())

    if os.stat(infile).st_size == 0:
        # no supporting variants called: default to *2/*2
        cand_res = ['2.v1_2.v1']
        allele_res = "*2/*2"
        return ["".join(cand_res), "".join(cand_res), allele_res]

    # core_variants = []

    # for line in open(infile, "r"):
    #      line = line.strip()
    #      core_variants.append(line)

    # core_variants = ";".join(sorted(core_variants))

    core_variants = get_core_variants(infile, cn)

    # if int(cn) == 1:
    #     core_variants = core_variants.replace("~0/1", "~1/1")

    # else:
    #     pass

    all_var_gt = []
    for line in open(infile_full_gt, "r"):
        line = line.strip()
        all_var_gt.append(line)

    
    dbs = []

    for line in open(database, "r"):
        line = line.strip().split("\t")
        dbs.append(line)

    soln_list1 = []
    soln_list2 = []

    for record in dbs:
        record_core_var = ";".join(sorted(record[1].split(";")))
        if record_core_var == core_variants:
            soln_list1.append(record[0])
            soln_list2.append(record[2])

    #return soln_list1

    #print("\nResult:")

    def chkList(lst):
        # return "Equal" when every score matches the first one
        if not lst:
            return "Equal"
        return "Equal" if all(ele == lst[0] for ele in lst) else "Not equal"


    def format_allele(m_diplo):
        # "4.v11_74.v1" -> "*4/*74": keep each haplotype's number before ".v"
        hap1, hap2 = (part.split(".")[0] for part in m_diplo.split("_", 1))
        return "*" + hap1 + "/*" + hap2


    if len(soln_list1) == 1:
        diplo = "".join(soln_list1)
        allele_res = format_allele(diplo)
        return [soln_list1, diplo, allele_res]
        #print ("\nSupporting variants:")
        #print ("\n" + core_variants + "\n")


    elif len(soln_list1) == 2:
        diplo1 = soln_list1[0]
        diplo2 = soln_list1[1]
        diplo1_supp_var = soln_list2[0].split(";")
        diplo2_supp_var = soln_list2[1].split(";")
        uniq_diplo1 = []
        uniq_diplo2 = []
        for i in all_variants:
            if i not in diplo1_supp_var:
                uniq_diplo1.append(i)
        
            if i not in diplo2_supp_var:
                uniq_diplo2.append(i)

        #print("\nUnique variants in soln 1: {}".format(len(uniq_diplo1)))
        #print("\nUnique variants in soln 2: {}".format(len(uniq_diplo2)))
            
        if len(uniq_diplo1) < len(uniq_diplo2):
            allele_res = format_allele(diplo1)
            return [soln_list1, diplo1, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")

        elif len(uniq_diplo1) > len(uniq_diplo2):
            allele_res = format_allele(diplo2)
            return [soln_list1, diplo2, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")

        elif len(uniq_diplo1) == len(uniq_diplo2) and (diplo1 == "4.v11_74.v1" and diplo2 == "4.v12_1.v1"):
            res1 = [i for i in range(len(diplo2)) if diplo2.startswith("_", i)]
            res2 = [i for i in range(len(diplo2)) if diplo2.startswith(".", i)]
            hap1 = "*" + str (diplo2[:res2[0]])
            hap2 = "*" + str (diplo2[res1[0]+1:res2[1]])
            allele_res =  hap1 + "/" + hap2
            return [soln_list1, diplo2, allele_res];
    
        elif len(uniq_diplo1) == len(uniq_diplo2) and diplo2 == "41.v1_65.v1":
            res1 = [i for i in range(len(diplo2)) if diplo2.startswith("_", i)]
            res2 = [i for i in range(len(diplo2)) if diplo2.startswith(".", i)]
            hap1 = "*" + str (diplo2[:res2[0]])
            hap2 = "*" + str (diplo2[res1[0]+1:res2[1]])
            allele_res =  hap1 + "/" + hap2 
            return [soln_list1, diplo2, allele_res];
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")

        elif len(uniq_diplo1) == len(uniq_diplo2) and (diplo1 == "4.v1_6.v1" and diplo2 == "4.v4_6.v2") :
            res1 = [i for i in range(len(diplo1)) if diplo1.startswith("_", i)]
            res2 = [i for i in range(len(diplo1)) if diplo1.startswith(".", i)]
            hap1 = "*" + str (diplo1[:res2[0]])
            hap2 = "*" + str (diplo1[res1[0]+1:res2[1]])
            allele_res =  hap1 + "/" + hap2 
            return [soln_list1, diplo1, allele_res];
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")
    
    
        else:
            tiebreak1 = []
            tiebreak2 = []
            tiebreak3 = []
            score = []
            for line in f:
                line = line.strip().split()
                #print(line)
                if line[2] == core_variants:
                    tiebreak1.append(line[1])
                    tiebreak2.append(line[3])
                    tiebreak3.append(line[0])
            for full_dip in tiebreak2:
                diplo_supp_gt = full_dip.split(";")
                uniq_gt = []
                for i in all_var_gt:
                    if i not in diplo_supp_gt:
                        uniq_gt.append(i)
                score_dip = len(uniq_gt)
                score.append(score_dip)

            min_score = min(score)    
            #print(score)

            if chkList(score) == "Equal" and soln_list1[1] != "39.v1_4.v5":
                amb_soln_set = []
                for elem in soln_list1:
                    amb_soln_set.append(format_allele(elem))
                allele_res = " or ".join(amb_soln_set)
                return [soln_list1, allele_res]
                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")

            elif chkList(score) == "Equal" and soln_list1[1] == "39.v1_4.v5":
                elem = "39.v1_4.v5"
                res1 = [i for i in range(len(elem)) if elem.startswith("_", i)]
                res2 = [i for i in range(len(elem)) if elem.startswith(".", i)]
                hap1 = "*" + str (elem[:res2[0]])
                hap2 = "*" + str (elem[res1[0]+1:res2[1]])
                result_dip = hap1 + "/" + hap2
                return [soln_list1, elem, result_dip];
                #amb_soln_set.append(result_dip)
                #elem_pos = tiebreak1.index(elem)                                                                                                             
                #print ("Solution " + str(elem_pos) + ": " + result_dip)                                                                                      
                #print("\n" + result_dip)

                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")


            elif score.count(min_score) > 1 and soln_list1[1] == "39.v1_4.v5":
                amb_soln_set = []
                temp_set = [tiebreak1[0], tiebreak1[-1]]
                for elem in temp_set:
                    amb_soln_set.append(format_allele(elem))
                allele_res = " or ".join(amb_soln_set)
                return [soln_list1, allele_res]

                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")


            elif score.count(min_score) > 1 and soln_list1[0] == "1.v1_2.v1" and soln_list1[1] == "34.v1_39.v1":
                amb_soln_set = []
                temp_set = ["1.v1_2.v1", "34.v1_39.v1"]
                for elem in temp_set:
                    amb_soln_set.append(format_allele(elem))
                allele_res = " or ".join(amb_soln_set)
                return [soln_list1, allele_res]
                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")
        

            elif score.count(min_score) > 2:
                amb_soln_set = []
                temp_set = [tiebreak1[0], tiebreak1[-1]]
                for elem in temp_set:
                    amb_soln_set.append(format_allele(elem))
                allele_res = " or ".join(amb_soln_set)
                return [soln_list1, allele_res]
                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")


            else:
                minpos = score.index(min_score)
                best_diplo = tiebreak1[minpos]
                best_cand_haps = tiebreak3[minpos]
                allele_res = format_allele(best_diplo)
                return [soln_list1, best_cand_haps, allele_res]
                #print ("Supporting core variants:")
                #print ("\n" + core_variants + "\n")


    elif len(soln_list1) == 3:
        diplo1 = soln_list1[0]
        diplo2 = soln_list1[1]
        diplo3 = soln_list1[2]
        diplo1_supp_var = soln_list2[0].split(";") 
        diplo2_supp_var = soln_list2[1].split(";")
        diplo3_supp_var = soln_list2[2].split(";")
        uniq_diplo1 = []
        uniq_diplo2 = []
        uniq_diplo3 = []

        for i in all_variants:
            if i not in diplo1_supp_var:
                uniq_diplo1.append(i)

            if i not in diplo2_supp_var:
                uniq_diplo2.append(i)

            if i not in diplo3_supp_var:
                uniq_diplo3.append(i)


        if len(uniq_diplo1) < len(uniq_diplo2) and len(uniq_diplo1) < len(uniq_diplo3):
            allele_res = format_allele(diplo1)
            return [soln_list1, diplo1, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")

        elif len(uniq_diplo1) > len(uniq_diplo2) and len(uniq_diplo2) < len(uniq_diplo3):
            allele_res = format_allele(diplo2)
            return [soln_list1, diplo2, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")

        elif len(uniq_diplo1) > len(uniq_diplo2) and len(uniq_diplo2) > len(uniq_diplo3):
            allele_res = format_allele(diplo3)
            return [soln_list1, diplo3, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")


        elif len(uniq_diplo1) == len(uniq_diplo2) == len(uniq_diplo3) and diplo3 == "39.v1_4.v4":
            res1 = [i for i in range(len(diplo3)) if diplo3.startswith("_", i)]
            res2 = [i for i in range(len(diplo3)) if diplo3.startswith(".", i)]
            hap1 = "*" + str (diplo3[:res2[0]])
            hap2 = "*" + str (diplo3[res1[0]+1:res2[1]])
            allele_res = hap1 + "/" + hap2
            return [soln_list1, diplo3, allele_res]
            #print ("Supporting variants:")
            #print ("\n" + core_variants + "\n")


        elif len(uniq_diplo1) == len(uniq_diplo2) == len(uniq_diplo3) or (len(uniq_diplo1) != len(uniq_diplo2) == len(uniq_diplo3)) or (len(uniq_diplo1) == len(uniq_diplo2) != len(uniq_diplo3)):

            tiebreak1 = []
            tiebreak2 = []
            tiebreak3 = []
            score = []
            for line in f:
                line = line.strip().split()
                #print(line)                                                                                                                  
                if line[2] == core_variants:
                    tiebreak1.append(line[1])
                    tiebreak2.append(line[3])
                    tiebreak3.append(line[0])
            for full_dip in tiebreak2:
                diplo_supp_gt = full_dip.split(";")
                uniq_gt = []
                for i in all_var_gt:
                    if i not in diplo_supp_gt:
                        uniq_gt.append(i)
                score_dip = len(uniq_gt)
                score.append(score_dip)

            min_score = min(score)
            # print(score)
        
            if chkList(score) == "Equal":
                amb_soln_set = []
                for elem in tiebreak1:
                    amb_soln_set.append(format_allele(elem))
                allele_res = " or ".join(amb_soln_set)
                return [soln_list1, tiebreak1, allele_res]
                #print ("\nSupporting core variants:")
                #print ("\n" + core_variants + "\n")


            else:
                minpos = score.index(min_score)
                best_diplo = tiebreak1[minpos]
                best_cand_haps = tiebreak3[minpos]
                allele_res = format_allele(best_diplo)
                return [soln_list1, best_cand_haps, allele_res]
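The diplotype strings handled above all follow a "hapA.vN_hapB.vM" convention, and the repeated slicing simply recovers the star-allele pair. A standalone sketch of that parsing step (function name hypothetical):

def format_diplotype(diplo):
    # "4.v11_74.v1" -> "*4/*74"
    hap1, hap2 = (part.split(".")[0] for part in diplo.split("_", 1))
    return "*%s/*%s" % (hap1, hap2)

assert format_diplotype("4.v11_74.v1") == "*4/*74"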
Example #48
def get_size(filepath):
    return os.stat(filepath).st_size
Example #49
def clonePipeline(srcdir, destdir=None):
    '''clone a pipeline.

    Cloning entails creating a mirror of the source pipeline.
    Generally, data files are mirrored by linking. Configuration
    files and the pipeline database will be copied.

    Without modification of any files, building the cloned pipeline in
    `destdir` should not re-run any commands. However, on deleting
    selected files, the pipeline should run from the appropriate
    point.  Newly created files will not affect the original pipeline.

    Cloning pipelines permits sharing partial results between
    pipelines, for example for parameter optimization.

    Arguments
    ---------
    srcdir : string
        Source directory
    destdir : string
        Destination directory. If None, use the current directory.

    '''

    if destdir is None:
        destdir = os.path.curdir

    E.info("cloning pipeline from %s to %s" % (srcdir, destdir))

    copy_files = ("conf.py", "pipeline.ini", "csvdb")
    ignore_prefix = ("report", "_cache", "export", "tmp", "ctmp", "_static",
                     "_templates")

    def _ignore(p):
        for x in ignore_prefix:
            if p.startswith(x):
                return True
        return False

    for root, dirs, files in os.walk(srcdir):

        relpath = os.path.relpath(root, srcdir)
        if _ignore(relpath):
            continue

        for d in dirs:
            if _ignore(d):
                continue
            dest = os.path.join(destdir, relpath, d)
            os.mkdir(dest)
            # touch
            s = os.stat(os.path.join(root, d))
            os.utime(dest, (s.st_atime, s.st_mtime))

        for f in files:
            if _ignore(f):
                continue

            fn = os.path.join(root, f)
            dest_fn = os.path.join(destdir, relpath, f)
            if f in copy_files:
                shutil.copyfile(fn, dest_fn)
            else:
                # realpath resolves links - thus links will be linked to
                # the original target
                os.symlink(os.path.realpath(fn), dest_fn)
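A hedged usage sketch (paths hypothetical): data files in the clone end up as symlinks back to the originals, while conf.py, pipeline.ini and csvdb are real copies.

clonePipeline("/data/runs/run1", "/scratch/run1_clone")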
Example #50
def mtime(path):
    """shorthand for mtime"""
    return os.stat(path).st_mtime


def extract_data(delta=3):
    """
    This is the master function that calls subsequent functions
    to extract uwnd, vwnd, slp for the specified
    tide gauges
    
    delta: distance (in degrees) from the tide gauge
    """
    
    print('Delta =  {}'.format(delta), '\n')
    
    #defining the folders for predictors
    dir_in = "/lustre/fs0/home/mtadesse/MERRAv2/data"
    surge_path = "/lustre/fs0/home/mtadesse/obs_surge"
    csv_path = "/lustre/fs0/home/mtadesse/merraLocalized"
    
    #cd to the obs_surge dir to get TG information
    os.chdir(surge_path)
    tg_list = os.listdir()
    
    #cd to the predictor dir to get the year folders
    os.chdir(dir_in)
    years = os.listdir()
    
    #################################
    #looping through the year folders
    #################################
    
    #to mark the first csv
    firstCsv = True
    
    for yr in years:
        os.chdir(dir_in)
        #print(yr, '\n')
        os.chdir(os.path.join(dir_in, yr))

        ####################################
        #looping through the daily .nc files
        ####################################

        for dd in os.listdir():
            
            os.chdir(os.path.join(dir_in, yr)) #back to the predictor folder
            print(dd, '\n')
            
            #########################################
            #get netcdf components  - predictor file            
            #########################################

            nc_file = readnetcdf(dd)
            lon, lat, time, predSLP, predU10, predV10 = nc_file[:6]
                
            x = 75
            y = 76
            
            #looping through individual tide gauges
            for t in range(x, y):
                
                #the name of the tide gauge - for saving purposes
                # tg = tg_list[t].split('.mat.mat.csv')[0] 
                tg = tg_list[t]
                
                #extract lon and lat data from surge csv file
                #print(tg, '\n')
                os.chdir(surge_path)
                
                if os.stat(tg).st_size == 0:
                    print('\n', "This tide gauge has no surge data!", '\n')
                    continue
                
                surge = pd.read_csv(tg, header = None)
                #surge_with_date = add_date(surge)
        
                #define tide gauge coordinate(lon, lat)
                tg_cord = Coordinate(surge.iloc[0,0], surge.iloc[0,1])
                
                
                #find closest grid points and their indices
                close_grids = findPixels(tg_cord, delta, lon, lat)
                ind_grids = findindx(close_grids, lon, lat)                
                
                
                #loop through preds#
                #subset predictor on selected grid size
                predictors = {'slp': predSLP, 'wnd_u': predU10,
                              'wnd_v': predV10}
                
    
                for xx in predictors.keys():
                               
                    pred_new = subsetter(dd, predictors[xx], ind_grids, time)
                    
                    if xx == 'slp':
                        if firstCsv:
                            finalSLP = pred_new
                        else:    
                            finalSLP = pd.concat([finalSLP, pred_new], axis = 0)
                            print(finalSLP.shape)
                    elif xx == 'wnd_u':
                        if firstCsv:
                            finalUwnd = pred_new
                        else:
                            finalUwnd = pd.concat([finalUwnd, pred_new], axis = 0)
                    elif xx == 'wnd_v':
                        if firstCsv:
                            finalVwnd = pred_new
                            firstCsv = False
                        else:
                            finalVwnd = pd.concat([finalVwnd, pred_new], axis = 0)


        #create directories to save pred_new
        os.chdir(csv_path)
            
        #tide gauge directory
        tg_name_old = tg.split('.mat.mat.csv')[0]
        tg_name = '-'.join([str(t), tg_name_old])
        try:
            os.makedirs(tg_name)
            os.chdir(tg_name) #cd to it after creating it
        except FileExistsError:
            #directory already exists
            os.chdir(tg_name)
            
            
        #save as csv
        finalSLP.to_csv('slp.csv')
        finalUwnd.to_csv('wnd_u.csv')
        finalVwnd.to_csv('wnd_v.csv')
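The frames above are grown with the usual first-chunk/concat idiom. A minimal standalone sketch of that accumulation pattern (names hypothetical):

import pandas as pd

final = None
for chunk in (pd.DataFrame({'x': [i]}) for i in range(3)):
    final = chunk if final is None else pd.concat([final, chunk], axis=0)
print(final.shape)  # (3, 1)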
Example #52
def cachable(func=None, version=None, cache_dir=config['cache_dir'],
             fallback_cache_dirs=config['fallback_cache_dirs'],
             keepweakref=False, ignore=set(), verbose=True):
    """
    Decorator to mark long running functions, which should be saved to
    disk for a pickled version of their arguments.

    Arguments:
    func        - Shouldn't be supplied, but instead contains the
                  function, if the decorator is used without arguments
    version     - if given it is saved together with the arguments and
                  must be the same as the cache to be valid.
    cache_dir   - where to save the cached files, if it does not exist
                  it is created (default defined by ~/.vresutils.config)
    keepweakref - Also store a weak reference and return it instead of
                  rereading from disk (default: False).
    ignore      - Set of kwd arguments not to take into account.
    verbose     - Output cache hits and timing information (default:
                  True).
    """
    enable = os.path.isdir(cache_dir)

    if enable:
        st = os.stat(cache_dir)
        gid = st.st_gid
        # mode is bitmask of the same rights as the directory without
        # exec rights for anybody
        mode = stat.S_IMODE(st.st_mode) & ~(stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    # remove missing directories
    fallback_cache_dirs = [d for d in fallback_cache_dirs if os.path.isdir(d)]

    def deco(func):
        """
        Wrap function to check for a cached result. Everything is
        pickled to a file of the name
        cache_dir/<funcname>_param1_param2_kw1key.kw1val
        (it would be better to use np.save/np.load for numpy arrays)
        """

        cache_fn = func.__module__ + "." + func.__name__ + "_"
        if version is not None:
            cache_fn += _format_filename("ver" + str(version) + "_")

        if keepweakref:
            cache = weakref.WeakValueDictionary()

        def name(x):
            y = six.text_type(x)
            if len(y) > 40:
                y = sha1(y.encode('utf-8')).hexdigest()
            return y

        def load_from(fn, dn, try_latin=False):
            full_fn = os.path.join(dn, fn)
            if os.path.exists(full_fn):
                try:
                    with open(full_fn, 'rb') as f:
                        dn_label = os.path.basename(dn)
                        if try_latin:
                            dn_label += " (with forced encoding)"
                        with optional(
                                verbose,
                                timer("Serving call to {} from file {} of {}"
                                      .format(func.__name__, fn, dn_label))
                        ):
                            if try_latin:
                                return cPickle.load(f, encoding='latin-1')
                            else:
                                return cPickle.load(f)
                except Exception as e:
                    if not try_latin and isinstance(e, UnicodeDecodeError):
                        return load_from(fn, dn, try_latin=True)
                    print("Couldn't unpickle from %s: %s" % (fn, e.args[0]), file=sys.stderr)

        @wraps(func)
        def wrapper(*args, **kwds):
            recompute = kwds.pop('recompute', False)

            # Check if there is a cached version
            fn = cache_fn + _format_filename(
                '_'.join(name(a) for a in args) + '_' +
                '_'.join(name(k) + '.' + name(v)
                         for k,v in iteritems(kwds)
                         if k not in ignore) +
                '.pickle'
            )

            ret = None
            if not recompute:
                if keepweakref and fn in cache:
                    return cache[fn]

                ret = load_from(fn, cache_dir)

            if ret is None:
                if not recompute:
                    for fallback in fallback_cache_dirs:
                        ret = load_from(fn, fallback)
                        if ret is not None: break

                if ret is None:
                    with optional(
                            verbose,
                            timer("Caching call to {} in {}".format(func.__name__, fn))
                    ):
                        ret = func(*args, **kwds)
                try:
                    with open(os.path.join(cache_dir, fn), 'wb') as f:
                        if gid: os.fchown(f.fileno(), -1, gid)
                        if mode: os.fchmod(f.fileno(), mode)
                        cPickle.dump(ret, f, protocol=-1)
                except Exception as e:
                    print("Couldn't pickle to %s: %s" % (fn, e.args[0]), file=sys.stderr)

            if keepweakref and ret is not None:
                cache[fn] = ret

            return ret

        return wrapper

    if not enable:
        def deco(func):
            # logger.warn("Deactivating cache for function %s, since cache directory %s does not exist",
            #             func.__name__, cache_dir)
            return func

    if callable(func):
        return deco(func)
    else:
        return deco
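A minimal usage sketch (function and values hypothetical; assumes cache_dir exists so caching is enabled):

@cachable(version=1)
def expensive_sum(n):
    return sum(i * i for i in range(n))

expensive_sum(10**6)                   # computed, then pickled into cache_dir
expensive_sum(10**6)                   # second call is served from the pickle
expensive_sum(10**6, recompute=True)   # bypass the cache and recompute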
Example #53
def main():
    cli = odm.cli.CLI(['path', 'action', '--upload-user', '--upload-group', '--upload-path'], 'microsoft')
    client = cli.client
    dir_map = {}

    ts_start = datetime.datetime.now()
    retval = 0

    if cli.args.action in ('upload', 'verify'):
        count = 0
        size = 0

        if cli.args.upload_user:
            container = odm.ms365.User(
                client,
                client.mangle_user(cli.args.upload_user),
            )
        elif cli.args.upload_group:
            container = odm.ms365.Group(
                client,
                client.mangle_user(cli.args.upload_group),
            )
        else:
            # neither flag given: container would otherwise be undefined below
            cli.logger.error('Either --upload-user or --upload-group is required')
            sys.exit(1)
        upload_dir = container.drive.root

        if cli.args.upload_path:
            for tok in cli.args.upload_path.split('/'):
                if upload_dir:
                    upload_dir = upload_dir.get_folder(tok, cli.args.action == 'upload')

        dir_map['.'] = upload_dir

        for root, dirs, files in os.walk(cli.args.path):
            parent = os.path.relpath(root, cli.args.path)
            for dname in dirs:
                relpath = os.path.relpath('/'.join((root, dname)), cli.args.path)
                cli.logger.info('Working on folder %s', relpath)
                if dir_map[parent]:
                    dir_map[relpath] = dir_map[parent].get_folder(dname, cli.args.action == 'upload')
                else:
                    dir_map[relpath] = None

            for fname in files:
                count += 1
                fpath = '/'.join((root, fname))
                stat = os.stat(fpath)
                size += stat.st_size
                relpath = os.path.relpath(fpath, cli.args.path)
                cli.logger.info('Working on file %s', relpath)
                if cli.args.action == 'upload':
                    attempt = 0
                    result = False
                    while attempt < 3 and not result:
                        attempt += 1
                        result = dir_map[parent].upload_file(fpath, fname)
                    if not result:
                        cli.logger.warning('Failed to upload %s', relpath)
                        retval = 1
                elif dir_map[parent]:
                    existing = dir_map[parent].verify_file(fpath, fname)
                    if existing:
                        cli.logger.info('Verified %s', relpath)
                    else:
                        cli.logger.warning('Failed to verify %s', relpath)
                        retval = 1
                else:
                    cli.logger.warning('Failed to verify %s: parent folder does not exist', relpath)
                    retval = 1

        cli.logger.info(
            '%.2f MiB across %s items, elapsed time %s',
            size / (1024 ** 2),
            count,
            datetime.datetime.now() - ts_start,
        )

    else:
        print('Unsupported action {}'.format(cli.args.action), file=sys.stderr)
        retval = 1

    sys.exit(retval)
Example #54
# suppress deprecation warnings
import warnings
warnings.simplefilter("ignore", DeprecationWarning)

# switch the working directory to the script's own directory
pathProgramm = os.path.dirname(sys.argv[0])
os.chdir(pathProgramm)

file_path = "scikit_learn_data/mldata/"

if not os.path.exists(file_path + 'mnist-original.mat'):

    directory = os.path.dirname(file_path)

    try:
        os.stat(directory)
    except OSError:
        os.makedirs(directory)

    print('Beginning mnist-original.mat download ...')

    url = 'https://raw.githubusercontent.com/amplab/datascience-sp14/master/lab7/mldata/mnist-original.mat'
    urllib.request.urlretrieve(url, file_path + 'mnist-original.mat')
    if not os.path.exists(file_path + 'mnist-original.mat'):
        exit()

# Load the dataset from mldata.org
print('Loading MNIST Original...')
mnist = datasets.fetch_mldata('MNIST Original', data_home='scikit_learn_data')

# Get the data array and labels
Example #55
import os
import stat
import sys

# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(
        os.path.join('/opt/ros/kinetic/share/catkin/cmake',
                     'catkinConfig.cmake.in')):
    sys.path.insert(
        0, os.path.join('/opt/ros/kinetic/share/catkin/cmake', '..', 'python'))
try:
    from catkin.environment_cache import generate_environment_script
except ImportError:
    # search for catkin package in all workspaces and prepend to path
    for workspace in "/opt/ros/kinetic".split(';'):
        python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
        if os.path.isdir(os.path.join(python_path, 'catkin')):
            sys.path.insert(0, python_path)
            break
    from catkin.environment_cache import generate_environment_script

code = generate_environment_script('/home/yuyu/Openni/devel/env.sh')

output_filename = '/home/yuyu/Openni/build/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
    #print('Generate script for cached setup "%s"' % output_filename)
    f.write('\n'.join(code))

mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
Example #56
def move_run_dir(runid, from_dir, to_dir):
  """
  
  \author   M.Frank
  \version  1.0
  \date     30/06/2002
  """
  import os
  from_dir_entry = from_dir + os.sep + str(runid)
  to_dir_entry   = to_dir   + os.sep + str(runid)

  # Check that the source directory exists (os.stat raises if it does not):
  os.stat(from_dir_entry)
  # Refuse to overwrite an existing target directory:
  try:
    os.stat(to_dir_entry)
  except OSError:
    pass
  else:
    raise Exception('Todo entry ' + to_dir_entry + ' already exists!')

  # Now move it:
  try:
    os.rename(from_dir_entry,to_dir_entry)
  except OSError as e:
    if e.errno == 39:  # OSError: [Errno 39] Directory not empty
      os.system('rm -rf ' + to_dir_entry)
      os.rename(from_dir_entry, to_dir_entry)
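A hedged usage sketch (run id and directory names hypothetical):

move_run_dir(4711, '/daq/todo', '/daq/done')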
Example #57
def main(args) -> None:
    # load data
    categories, observations, confidences, idx2category, category2idx, labels = prepare_data(
        DATAFILE_LIST[args.dataset], False)
    # train a ground_truth ece model
    if args.ground_truth_type == 'bayesian':
        ground_truth_model = SumOfBetaEce(num_bins=args.num_bins,
                                          pseudocount=args.pseudocount)
    else:
        ground_truth_model = SumOfBetaEce(num_bins=args.num_bins,
                                          pseudocount=1e-3)
    ground_truth_model.update_batch(confidences, observations)

    results = np.zeros((args.num_runs, len(N_list), 5))

    for run_id in range(args.num_runs):

        tmp = list(zip(confidences, observations))
        random.shuffle(tmp)
        confidences, observations = zip(*tmp)

        model = SumOfBetaEce(num_bins=args.num_bins,
                             pseudocount=args.pseudocount)

        for i in range(len(N_list)):
            tmp = 0 if i == 0 else N_list[i - 1]
            model.update_batch(confidences[tmp:N_list[i]],
                               observations[tmp:N_list[i]])

            results[run_id, i, 0] = N_list[i]
            results[run_id, i, 1] = model.eval
            results[run_id, i, 2] = model.frequentist_eval
            results[run_id, i, 3] = model.calibration_estimation_error(
                ground_truth_model, args.weight_type)
            results[run_id, i,
                    4] = model.frequentist_calibration_estimation_error(
                        ground_truth_model, args.weight_type)

    results_mean = np.mean(results, axis=0)
    results_std = np.std(results, axis=0)

    # use a local name: rebinding the module-level OUTPUT_DIR inside the
    # function would raise UnboundLocalError
    output_dir = OUTPUT_DIR
    if args.weight_type == 'online':
        output_dir += "online_weights/"
    try:
        os.stat(output_dir)
    except OSError:
        os.mkdir(output_dir)

    if args.ground_truth_type == 'frequentist':
        filename_mean = output_dir + "frequentist_ground_truth_%s_pseudocount%d.csv" % (
            args.dataset, args.pseudocount)
        filename_std = output_dir + "frequentist_ground_truth_%s_pseudocount%d_std.csv" % (
            args.dataset, args.pseudocount)
    else:
        filename_mean = output_dir + "bayesian_ground_truth_%s_pseudocount%d.csv" % (
            args.dataset, args.pseudocount)
        filename_std = output_dir + "bayesian_ground_truth_%s_pseudocount%d_std.csv" % (
            args.dataset, args.pseudocount)

    header = 'N, bayesian_ece, frequentist_ece, bayesian_estimation_error, frequentist_estimation_error'
    np.savetxt(filename_mean, results_mean, delimiter=',', header=header)
    np.savetxt(filename_std, results_std, delimiter=',', header=header)
Example #58
    async def validate_rsync_task(self, data, schema):
        verrors = ValidationErrors()

        # Windows users can have spaces in their usernames
        # http://www.freebsd.org/cgi/query-pr.cgi?pr=164808

        username = data.get('user')
        if ' ' in username:
            verrors.add(f'{schema}.user', 'User names cannot have spaces')
            raise verrors

        user = None
        with contextlib.suppress(KeyError):
            user = await self.middleware.call('dscache.get_uncached_user', username)

        if not user:
            verrors.add(f'{schema}.user', f'Provided user "{username}" does not exist')
            raise verrors

        remote_host = data.get('remotehost')
        if not remote_host:
            verrors.add(f'{schema}.remotehost', 'Please specify a remote host')

        if data.get('extra'):
            data['extra'] = ' '.join(data['extra'])
        else:
            data['extra'] = ''

        mode = data.get('mode')
        if not mode:
            verrors.add(f'{schema}.mode', 'This field is required')

        remote_module = data.get('remotemodule')
        if mode == 'MODULE' and not remote_module:
            verrors.add(f'{schema}.remotemodule', 'This field is required')

        if mode == 'SSH':
            remote_port = data.get('remoteport')
            if not remote_port:
                verrors.add(f'{schema}.remoteport', 'This field is required')

            remote_path = data.get('remotepath')
            if not remote_path:
                verrors.add(f'{schema}.remotepath', 'This field is required')

            search = os.path.join(user['pw_dir'], '.ssh', 'id_[edr]*')
            exclude_from_search = os.path.join(user['pw_dir'], '.ssh', 'id_[edr]*pub')
            key_files = set(glob.glob(search)) - set(glob.glob(exclude_from_search))
            if not key_files:
                verrors.add(
                    f'{schema}.user',
                    'In order to use rsync over SSH you need a user'
                    ' with a private key (DSA/ECDSA/RSA) set up in home dir.'
                )
            else:
                for file in glob.glob(search):
                    if '.pub' not in file:
                        # file holds a private key and its permissions should be 600
                        if os.stat(file).st_mode & 0o077 != 0:
                            verrors.add(
                                f'{schema}.user',
                                f'Permissions {oct(os.stat(file).st_mode & 0o777)} for {file} are too open. Please '
                                f'correct them by running chmod 600 {file}'
                            )

            if (
                data.get('validate_rpath') and
                remote_path and
                remote_host and
                remote_port
            ):
                if '@' in remote_host:
                    remote_username, remote_host = remote_host.rsplit('@', 1)
                else:
                    remote_username = username

                try:
                    with (await asyncio.wait_for(asyncssh.connect(
                            remote_host,
                            port=remote_port,
                            username=remote_username,
                            client_keys=key_files,
                            known_hosts=None
                    ), timeout=5)) as conn:

                        await conn.run(f'test -d {shlex.quote(remote_path)}', check=True)

                except asyncio.TimeoutError:

                    verrors.add(
                        f'{schema}.remotehost',
                        'SSH timeout occurred. Remote path cannot be validated.'
                    )

                except OSError as e:

                    if e.errno == 113:
                        verrors.add(
                            f'{schema}.remotehost',
                            f'Connection to the remote host {remote_host} on port {remote_port} failed.'
                        )
                    else:
                        verrors.add(
                            f'{schema}.remotehost',
                            e.__str__()
                        )

                except asyncssh.DisconnectError as e:

                    verrors.add(
                        f'{schema}.remotehost',
                        f'Disconnect Error[ error code {e.code} ] was generated when trying to '
                        f'communicate with remote host {remote_host} and remote user {remote_username}.'
                    )

                except asyncssh.ProcessError as e:

                    if e.code == '1':
                        verrors.add(
                            f'{schema}.remotepath',
                            'The Remote Path you specified does not exist or is not a directory. '
                            'Either create one yourself on the remote machine or uncheck the '
                            'validate_rpath field'
                        )
                    else:
                        verrors.add(
                            f'{schema}.remotepath',
                            f'Connection to Remote Host was successful but failed to verify '
                            f'Remote Path. {e.__str__()}'
                        )

                except asyncssh.Error as e:

                    if e.__class__.__name__ in e.__str__():
                        exception_reason = e.__str__()
                    else:
                        exception_reason = e.__class__.__name__ + ' ' + e.__str__()
                    verrors.add(
                        f'{schema}.remotepath',
                        f'Remote Path could not be validated. An exception was raised. {exception_reason}'
                    )
            elif data.get('validate_rpath'):
                verrors.add(
                    f'{schema}.remotepath',
                    'Remote path could not be validated because of missing fields'
                )

        data.pop('validate_rpath', None)

        # Keeping compatibility with legacy UI
        for field in ('mode', 'direction'):
            data[field] = data[field].lower()

        return verrors, data
Example #59
def autoconf():
	#Configuration for LINUX 
	if xbmc.getCondVisibility('system.platform.linux') and not xbmc.getCondVisibility('system.platform.Android') and not settings.getSetting('force_android') == "true":
		print("Detected OS: Linux")
		#Linux Armv6
		if os.uname()[4] == "armv6l":
			try:
				if re.search(os.uname()[1],"openelec",re.IGNORECASE): acestream_rpi = acestream_openelec_raspberry
				elif re.search(os.uname()[1],"raspbmc",re.IGNORECASE): acestream_rpi = acestream_generic_raspberry
				elif os.path.isfile("/etc/xbian_version"): acestream_rpi = acestream_generic_raspberry
				elif "ARCH" in os.uname()[2]:
					acestream_rpi = acestream_generic_raspberry
					settings.setSetting('python_cmd',value='python2')
				else:
					mensagemok(translate(40000),translate(400007),translate(400008))
					OS_list = ["OpenELEC","Raspbmc","Xbian","Pipplware","Arch Linux Arm"]
					url_packagerpi_list = [acestream_openelec_raspberry, acestream_generic_raspberry, acestream_generic_raspberry,acestream_generic_raspberry, acestream_generic_raspberry]
					OS_Rpi_choose = xbmcgui.Dialog().select
					choose=OS_Rpi_choose('Select your OS',OS_list)
					if choose > -1:
						acestream_rpi= url_packagerpi_list[choose]
						if OS_list[choose] == "Arch Linux Arm": settings.setSetting('python_cmd',value='python2')
			except: acestream_rpi = ""
			print("Detected linux armv6 - possible Raspberry PI")
			#Sop

			SPSC_KIT = os.path.join(addonpath,sopcast_raspberry.split("/")[-1])
			download_tools().Downloader(sopcast_raspberry,SPSC_KIT,translate(40025),translate(40000))
			import tarfile            
			if tarfile.is_tarfile(SPSC_KIT):
				path_libraries = os.path.join(pastaperfil,"sopcast")
				download_tools().extract(SPSC_KIT,path_libraries)
				xbmc.sleep(500)
				download_tools().remove(SPSC_KIT)

			#Ace
			SPSC_KIT = os.path.join(addonpath,acestream_rpi.split("/")[-1])
			download_tools().Downloader(acestream_rpi,SPSC_KIT,translate(40026),translate(40000))
        
			if tarfile.is_tarfile(SPSC_KIT):
				path_libraries = os.path.join(pastaperfil,"acestream")
				download_tools().extract(SPSC_KIT,path_libraries)
				xbmc.sleep(500)
				download_tools().remove(SPSC_KIT)

			settings.setSetting('autoconfig',value='false')


                elif os.uname()[4] == "armv7l":
			if re.search(os.uname()[1],"openelec",re.IGNORECASE):
				OS_Choose = "OpenELEC"
			elif os.path.isfile("/etc/xbian_version"):
				OS_Choose = "Xbian"
			else:
				mensagemok(translate(40000),translate(40109),translate(40110))
				OS_list = ["MXLinux","OpenELEC","Xbian"]
				choose = xbmcgui.Dialog().select('Select your OS',OS_list)
				if choose > -1:
					OS_Choose = OS_list[choose]

			#Linux armv7 configuration according to platform

			#MXLINUX
                	if OS_Choose == "MXLinux":
				acestream_installed = False
				sopcast_installed = False
               			print("Detected MXLinux armv7")
               			SPSC_KIT = os.path.join(addonpath,sopcast_raspberry.split("/")[-1])
               			download_tools().Downloader(sopcast_raspberry,SPSC_KIT,translate(40025),translate(40000))
               			import tarfile
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"sopcast")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					sopcast_installed = True

				SPSC_KIT = os.path.join(addonpath,acestream_mxlinux.split("/")[-1])
				download_tools().Downloader(acestream_mxlinux,SPSC_KIT,translate(40026),translate(40000))
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"acestream")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					acestream_installed = True
				if acestream_installed and sopcast_installed:
					settings.setSetting('autoconfig',value='false')	

			#OPENELEC

                	if OS_Choose == "OpenELEC":
                		import tarfile
				acestream_installed = False
				sopcast_installed = False
                		print("Openelec armv7 platform detected")
                		SPSC_KIT = os.path.join(addonpath,sopcast_raspberry.split("/")[-1])
                		download_tools().Downloader(sopcast_raspberry,SPSC_KIT,translate(40025),translate(40000))
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"sopcast")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					sopcast_installed = True
				SPSC_KIT = os.path.join(addonpath,acestream_armv7_openelec.split("/")[-1])
				download_tools().Downloader(acestream_armv7_openelec,SPSC_KIT,translate(40026),translate(40000))
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"acestream")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					acestream_installed = True
				if acestream_installed and sopcast_installed:
					settings.setSetting('autoconfig',value='false')	

			#XBIAN
               		if OS_Choose == "Xbian":
               			import tarfile
				acestream_installed = False
				sopcast_installed = False
               			print("Xbian armv7 platform detected")
               			SPSC_KIT = os.path.join(addonpath,sopcast_raspberry.split("/")[-1])
               			download_tools().Downloader(sopcast_raspberry,SPSC_KIT,translate(40025),translate(40000))
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"sopcast")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					sopcast_installed = True
				SPSC_KIT = os.path.join(addonpath,acestream_armv7_xbian.split("/")[-1])
				download_tools().Downloader(acestream_armv7_xbian,SPSC_KIT,translate(40026),translate(40000))
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"acestream")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
					acestream_installed = True
				if acestream_installed and sopcast_installed:
					settings.setSetting('autoconfig',value='false')	


			
		elif (os.uname()[4] == "x86_64" and re.search(os.uname()[1],"openelec",re.IGNORECASE)) or settings.getSetting('openelecx86_64') == "true":
			settings.setSetting('openelecx86_64',value='true')
			print("Detected OpenELEC x86_64")
			SPSC_KIT = os.path.join(addonpath,openelecx86_64_package.split("/")[-1])
			download_tools().Downloader(openelecx86_64_package,SPSC_KIT,translate(40112),translate(40000))
			import tarfile
			if tarfile.is_tarfile(SPSC_KIT):
				download_tools().extract(SPSC_KIT,pastaperfil)
				xbmc.sleep(500)
				download_tools().remove(SPSC_KIT)
			settings.setSetting('autoconfig',value='false')

		elif (os.uname()[4] == "i386" and re.search(os.uname()[1],"openelec",re.IGNORECASE)) or (os.uname()[4] == "i686" and re.search(os.uname()[1],"openelec",re.IGNORECASE)) or settings.getSetting('openeleci386') == "true":
			settings.setSetting('openeleci386',value='true')
			print("Detected OpenELEC i386")
			SPSC_KIT = os.path.join(addonpath,openeleci386_package.split("/")[-1])
			download_tools().Downloader(openeleci386_package,SPSC_KIT,translate(40112),translate(40000))
			import tarfile
			if tarfile.is_tarfile(SPSC_KIT):
				download_tools().extract(SPSC_KIT,pastaperfil)
				xbmc.sleep(500)
				download_tools().remove(SPSC_KIT)
			settings.setSetting('autoconfig',value='false')		
	
		else:
			if os.uname()[4] == "x86_64":
				opcao= xbmcgui.Dialog().yesno(translate(40000), translate(40113))
				if opcao: 
					settings.setSetting('openelecx86_64',value='true')
					autoconf()
			elif os.uname()[4] == "i386" or os.uname()[4] == "i686":
				opcao= xbmcgui.Dialog().yesno(translate(40000), translate(600023))
				if opcao: 
					settings.setSetting('openeleci386',value='true')
					autoconf()

			else: mensagemok(translate(40000),translate(40056))
			

			#Linux but not openelec i386 nor openelec x86_64 - General Linux platforms configuration
			
			if settings.getSetting('openeleci386') == "false" and settings.getSetting('openelecx86_64') == "false":

				print("Detected Other Linux i386 Plataform")

				#Sop
				#Download and extract sopcast-bundle
				SPSC_KIT = os.path.join(addonpath,sopcast_linux_generico.split("/")[-1])
				download_tools().Downloader(sopcast_linux_generico,SPSC_KIT,translate(40025),translate(40000))
				import tarfile
				if tarfile.is_tarfile(SPSC_KIT):
					path_libraries = os.path.join(pastaperfil,"sopcast")
					download_tools().extract(SPSC_KIT,path_libraries)
					xbmc.sleep(500)
					download_tools().remove(SPSC_KIT)
				#set every single file from the bundle as executable
				path_libraries = os.path.join(pastaperfil,"sopcast")
				dirs, files = xbmcvfs.listdir(path_libraries)
				for ficheiro in files:
					binary_path = os.path.join(path_libraries,ficheiro)
					st = os.stat(binary_path)
					import stat
					os.chmod(binary_path, st.st_mode | stat.S_IEXEC)
				path_libraries = os.path.join(path_libraries,"lib")
				dirs, files = xbmcvfs.listdir(path_libraries)
				for ficheiro in files:
					binary_path = os.path.join(path_libraries,ficheiro)
					st = os.stat(binary_path)
					import stat
					os.chmod(binary_path, st.st_mode | stat.S_IEXEC)
	   		 
				#Ace
	   		 
				if os.uname()[4] == "x86_64":
					ACE_KIT = os.path.join(addonpath,acestream_linux_x64_generic.split("/")[-1])
					download_tools().Downloader(acestream_linux_x64_generic,ACE_KIT,translate(40026),translate(40000))
					import tarfile
					if tarfile.is_tarfile(ACE_KIT):
						download_tools().extract(ACE_KIT,pastaperfil)
						xbmc.sleep(500)
						download_tools().remove(ACE_KIT)
				
					settings.setSetting('autoconfig',value='false')
					
				elif os.uname()[4] == "i386" or os.uname()[4] == "i686":
					ACE_KIT = os.path.join(addonpath,acestream_linux_i386_generic.split("/")[-1])
					download_tools().Downloader(acestream_linux_i386_generic,ACE_KIT,translate(40026),translate(40000))
					import tarfile
					if tarfile.is_tarfile(ACE_KIT):
						download_tools().extract(ACE_KIT,pastaperfil)
						xbmc.sleep(500)
						download_tools().remove(ACE_KIT)
				
					settings.setSetting('autoconfig',value='false')


	elif xbmc.getCondVisibility('system.platform.windows'):
		print("Detected OS: Windows")
		if not xbmcvfs.exists(pastaperfil): xbmcvfs.mkdir(pastaperfil)
		#Sop
		import ctypes
                is_admin=ctypes.windll.shell32.IsUserAnAdmin() != 0
                if is_admin == False:
                    mensagemok(translate(40000),translate(40158),translate(40159))
                else:
                    import subprocess
                    cmd = ['sc','delete','sopcastp2p']
                    proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True)
                    for line in proc.stdout:
                        print("cmd out: " + line.rstrip())
                    xbmc.sleep(1000)
                    ret = mensagemprogresso.create(translate(40000),translate(40000))
                    mensagemprogresso.update(0,translate(40160),"  ")
                    xbmc.sleep(1000)
                    import _winreg
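                    # read the SopCast install path from the registry (_winreg is the Python 2 name; Python 3 renamed it winreg)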
                    aReg = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
                    try:
                        aKey = _winreg.OpenKey(aReg, r'SOFTWARE\SopCast\Player\InstallPath',0, _winreg.KEY_READ)
                        name, value, type = _winreg.EnumValue(aKey, 0)
                        sopcast_executable = value
                        print("Installation executable of sopcast was found: " + sopcast_executable)
                        _winreg.CloseKey(aKey)
                        mensagemprogresso.update(10,translate(40160),translate(40161))
                    except:
                        sopcast_executable = ""
                        mensagemok(translate(40000),translate(40162),translate(40163))
                    if not sopcast_executable: pass
                    else:
                        xbmc.sleep(1000)
                        mensagemprogresso.update(20,translate(40164),"  ")
                        xbmc.sleep(1000)
                        print ("Getting windows users IDS")
                        aReg = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
                        aKey = _winreg.OpenKey(aReg, r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList')
                        users = []
                        for i in range(1024):
                            try:
                                asubkey=_winreg.EnumKey(aKey,i)
                                print(asubkey)
                                aKeydois = _winreg.OpenKey(aReg, os.path.join(r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList',asubkey))
                                val=_winreg.QueryValueEx(aKeydois, "ProfileImagePath")
                                try:
                                    print(val[0])
                                except:
                                    print("Notice: User with strange characters, print cmd ignored.")
                                if "Windows" in val[0] or "%systemroot%" in val[0]:
                                    pass
                                else:
                                    users.append(asubkey)
                            except:
                                pass
                        if not users:
                            mensagemok(translate(40000),translate(40165))
                        else:
                            mensagemprogresso.update(30,translate(40164),translate(40161))
                            xbmc.sleep(200)
                            mensagemprogresso.update(30,translate(40166),"   ")
                            xbmc.sleep(1000)
                            print("System Users", users)
                            srvany_final_location = os.path.join(sopcast_executable.replace("SopCast.exe",""),"srvany.exe")
                            srvany_download_location = os.path.join(addonpath,"srvany.exe")
                            srvanytgz_download_location = os.path.join(addonpath,"srvany.tar.gz")                            
                            download_tools().Downloader(srvany_executable,srvanytgz_download_location,translate(40167),translate(40000)) 
                            xbmc.sleep(1000)
                            import tarfile
                            if tarfile.is_tarfile(srvanytgz_download_location):
                                path_libraries = addonpath
                                download_tools().extract(srvanytgz_download_location,path_libraries)
                                xbmcvfs.copy(srvany_download_location,srvany_final_location)
                                download_tools().remove(srvanytgz_download_location)
                                download_tools().remove(srvany_download_location)
                            xbmc.sleep(1000)
                            ret = mensagemprogresso.create(translate(40000),translate(40000))
                            xbmc.sleep(200)
                            mensagemprogresso.update(35,translate(40168),"  ")
                            xbmc.sleep(1000)
                            import subprocess
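                            # srvany.exe is a generic service wrapper: registering it with 'sc create' lets an ordinary executable (here SopCast.exe) run as a Windows service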
                            cmd = ['sc','create','sopcastp2p','binpath=',os.path.join(sopcast_executable.replace("SopCast.exe",""),'srvany.exe')]
                            proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True)
                            servicecreator = False
                            for line in proc.stdout:
                                print ("cmd out: " + line.rstrip())
                                servicecreator = True
                            if servicecreator == False:
                                mensagemok(translate(40000),translate(40169))
                            else:
                                mensagemprogresso.update(40,translate(40168),translate(40161))
                                xbmc.sleep(1000)
                                mensagemprogresso.update(45,translate(40170),"  ")
                                xbmc.sleep(1000)
                                print("Trying to modify regedit....")
                                try:
                                    aReg = _winreg.ConnectRegistry(None,_winreg.HKEY_LOCAL_MACHINE)
                                    key = _winreg.CreateKey(aReg, r'SYSTEM\CurrentControlSet\Services\sopcastp2p\Parameters')
                                    _winreg.SetValueEx(key, 'AppDirectory', 0, _winreg.REG_SZ, os.path.join(sopcast_executable.replace("SopCast.exe","")))
                                    _winreg.SetValueEx(key, 'Application', 0, _winreg.REG_SZ, os.path.join(sopcast_executable.replace("SopCast.exe",""),"SopCast.exe"))
                                    _winreg.SetValueEx(key, 'AppParameters', 0, _winreg.REG_SZ, "sop://")
                                    mensagemprogresso.update(50,translate(40170), translate(40161))
                                    regedit = True
                                except:
                                    mensagemok(translate(40000),translate(40171))
                                    regedit = False
                                if regedit == False: pass
                                else:
                                    xbmc.sleep(1000)
                                    mensagemprogresso.update(50,translate(40172), "   ")
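                                    # 'sc sdshow' prints the service security descriptor in SDDL; an allow ACE (A;;RPWPCR;;;<sid>) granting start/stop rights is spliced in before the SACL ("S:(") for each user found above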
                                    cmd = ['sc','sdshow','sopcastp2p']
                                    proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True)
                                    lines = []
                                    for line in proc.stdout:
                                        print(line.rstrip())
                                        if line.rstrip() != "" and "(" in line.rstrip(): lines.append(line.rstrip())
                                        else: pass
                                    if len(lines) != 1: mensagemok(translate(40000),translate(40173))
                                    else:
                                        linha_arr = []
                                        for user in users:
                                            linha_arr.append('(A;;RPWPCR;;;' + user + ')')
                                        linha_add = ''
                                        for linha in linha_arr:
                                            linha_add += linha
                                        print("line peace to add: " + linha_add)
                                        linha_final = lines[0].replace("S:(",linha_add + "S:(")
                                        print("Final line: " + linha_final)
                                        permissions = False
                                        xbmc.sleep(500)
                                        mensagemprogresso.update(60,translate(40172), translate(40161))
                                        xbmc.sleep(500)
                                        mensagemprogresso.update(60,translate(40174), "   ")
                                        cmd = ['sc','sdset','sopcastp2p',linha_final]
                                        proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True)
                                        for line in proc.stdout:
                                            print(line.rstrip())
                                            permissions = True
                                        if permissions == False: mensagemok(translate(40000),translate(40175))
                                        else:
                                            mensagemprogresso.update(70,translate(40174), translate(40161))
                                            xbmc.sleep(1000)
                                            mensagemprogresso.update(70,translate(40176), "   ")
                                            print("Trying to set sopcastp2p service regedit permissions...")
                                            download_tools().Downloader(srvany_permissions,os.path.join(pastaperfil,"sopcastp2p-permissions.txt"),translate(40177),translate(40000))
                                            xbmc.sleep(500)
                                            ret = mensagemprogresso.create(translate(40000),translate(40000))
                                            xbmc.sleep(500)
                                            mensagemprogresso.update(80,translate(40178), "   ")
                                            xbmc.sleep(1000)
                                            cmd = ['regini',os.path.join(pastaperfil,"sopcastp2p-permissions.txt")]
                                            proc = subprocess.Popen(cmd,stdout=subprocess.PIPE,shell=True)
                                            for line in proc.stdout:
                                                print(line.rstrip())
                                            mensagemprogresso.update(90,translate(40178), translate(40178))
                                            mensagemprogresso.update(100,translate(40179), "   ")
                                            xbmc.sleep(2000)
                                            mensagemprogresso.close()
		#Ace
		SPSC_KIT = os.path.join(addonpath,acestream_windows.split("/")[-1])
		download_tools().Downloader(acestream_windows,SPSC_KIT,translate(40026),translate(40000))
		import shutil
		if xbmcvfs.exists(os.path.join(pastaperfil,"acestream")):
			shutil.rmtree(os.path.join(pastaperfil,"acestream"))
		if xbmcvfs.exists(os.path.join(pastaperfil,"player")):
			shutil.rmtree(os.path.join(pastaperfil,"player"))
		import tarfile
		if tarfile.is_tarfile(SPSC_KIT):
			path_libraries = os.path.join(pastaperfil)
			download_tools().extract(SPSC_KIT,path_libraries)
			download_tools().remove(SPSC_KIT)
		settings.setSetting('autoconfig',value='false')
    
	elif xbmc.getCondVisibility('System.Platform.OSX'):
		print("Detected OS: Mac OSX")
		available = False
		if os.uname()[-1] == "x86_64":
			mac_package = osx_x86_64
			available = True
		elif os.uname()[-1] == "i386":
			mac_package = osx_i386
			available = True
		else:
			available = False
		if available == True:		
			if not xbmcvfs.exists(pastaperfil):
				xbmcvfs.mkdir(pastaperfil)		
			MAC_KIT = os.path.join(addonpath,mac_package.split("/")[-1])
			download_tools().Downloader(mac_package,MAC_KIT,translate(40112),translate(40000))
			import tarfile
			if tarfile.is_tarfile(MAC_KIT):
				path_libraries = os.path.join(pastaperfil)
				download_tools().extract(MAC_KIT,pastaperfil)
				download_tools().remove(MAC_KIT)
				sp_sc_auth = os.path.join(pastaperfil,"sopcast","sp-sc-auth")
				st = os.stat(sp_sc_auth)
				import stat
				os.chmod(sp_sc_auth, st.st_mode | stat.S_IEXEC)
				settings.setSetting('autoconfig',value='false')
		else:
			mensagemok(translate(40000),translate(600014))
			sys.exit(0)
				
	elif xbmc.getCondVisibility('System.Platform.Android') or settings.getSetting('force_android') == "true":

		print("Detected OS: Android")
		#Sopcast configuration
		print("Starting SopCast Configuration")

		#Moving sopclient to ext4 hack - tks steeve from xbmctorrent

		sopclient_builtin_location = os.path.join(addonpath,"resources","binaries","sopclient")

		#Hack to get current xbmc app id
		xbmcfolder=xbmc.translatePath(addonpath).split("/")

		i = 0
		found = False
		sopcast_installed = False
		
		for folder in xbmcfolder:
			if folder.count('.') >= 2 and folder != addon_id :
				found = True
				break
			else:
				i+=1

		if found == True:
			uid = os.getuid()
			app_id = xbmcfolder[i]
			xbmc_data_path = os.path.join("/data", "data", app_id)
			if os.path.exists(xbmc_data_path) and uid == os.stat(xbmc_data_path).st_uid:
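				# the binary must live in the app-private data dir: /sdcard is normally mounted noexec, so a client copied there could not be executed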
				android_binary_dir = os.path.join(xbmc_data_path, "files", "plugin.video.p2p-streams")
				if not os.path.exists(android_binary_dir):
					os.makedirs(android_binary_dir)
				android_binary_path = os.path.join(android_binary_dir, "sopclient")
				if not os.path.exists(android_binary_path) or os.path.getsize(android_binary_path) != os.path.getsize(sopclient_builtin_location):
					import shutil
					shutil.copy2(sopclient_builtin_location, android_binary_path)
				binary_path = android_binary_path
				import stat
				st = os.stat(binary_path)
				os.chmod(binary_path, st.st_mode | stat.S_IEXEC)
				settings.setSetting('android_sopclient',value=binary_path)
				opcao= xbmcgui.Dialog().yesno(translate(40000), translate(50011),translate(50012))
				if not opcao:
					settings.setSetting('external_sopcast',value='1')
					settings.setSetting('force_android',value='true')
					sopcast_installed = True
					mensagemok(translate(40000),translate(50014))
				else:
					mensagemok(translate(40000),translate(50013))
					if xbmcvfs.exists(os.path.join("sdcard","Download")):
						pasta = os.path.join("sdcard","Download")
						sopfile = os.path.join("sdcard","Download",sopcast_apk.split("/")[-1])
					else:
						dialog = xbmcgui.Dialog()
						pasta = dialog.browse(int(0), translate(40190), 'myprograms')
						sopfile = os.path.join(pasta,sopcast_apk.split("/")[-1])
					download_tools().Downloader(sopcast_apk,sopfile,translate(40073),translate(40000))
					import tarfile
					if tarfile.is_tarfile(sopfile):
						download_tools().extract(sopfile,pasta)
						download_tools().remove(sopfile)
					mensagemok(translate(40000),translate(50015),pasta,translate(50016))
					sopcast_installed = True
					settings.setSetting('external_sopcast',value='0')
					mensagemok(translate(40000),translate(50014))

		else:
			mensagemok(translate(40000),translate(50017))

		#acestream config for android

		if sopcast_installed == True:
			mensagemok(translate(40000),translate(50018),translate(50019),translate(50020))
			if xbmcvfs.exists(os.path.join("sdcard","Download")):
				pasta = os.path.join("sdcard","Download")
				acefile = os.path.join("sdcard","Download",acestreamengine_apk.split("/")[-1])
			else:
				dialog = xbmcgui.Dialog()
				pasta = dialog.browse(int(0), translate(40190), 'myprograms')
				acefile = os.path.join(pasta,acestreamengine_apk.split("/")[-1])
			download_tools().Downloader(acestreamengine_apk,acefile,translate(40072),translate(40000))
			import tarfile
			if tarfile.is_tarfile(acefile):
				download_tools().extract(acefile,pasta)
				download_tools().remove(acefile)
			xbmc.sleep(2000)
			mensagemok(translate(40000),translate(50021),pasta,translate(50016))
			mensagemok(translate(40000),translate(50022))
			mensagemok(translate(40000),translate(50023),translate(50024),translate(50025))
			settings.setSetting('autoconfig',value='false')	
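
The code above repeats one pattern for every platform: download a tarball, extract it, then flip the execute bit on each unpacked file with os.stat and stat.S_IEXEC. A minimal standalone sketch of that pattern, assuming plain tarballs on a local filesystem (make_executable and extract_and_mark_executable are illustrative names, not part of the addon):

import os
import stat
import tarfile

def make_executable(path):
    # add the owner execute bit while preserving the rest of the mode
    st = os.stat(path)
    os.chmod(path, st.st_mode | stat.S_IEXEC)

def extract_and_mark_executable(archive, dest):
    # extract a tarball, then mark every extracted regular file executable
    if not tarfile.is_tarfile(archive):
        return False
    tar = tarfile.open(archive)
    try:
        tar.extractall(dest)
    finally:
        tar.close()
    for root, _dirs, files in os.walk(dest):
        for name in files:
            make_executable(os.path.join(root, name))
    return True
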
Example #60
0
def get_system_ca_bundle_path(settings):
    """
    Get the filesystem path to the system CA bundle. On Linux it looks in a
    number of predefined places; on OS X, however, it has to be programmatically
    exported from the SystemRootCertificates.keychain. Windows does not ship
    with a CA bundle, but since we use WinINet on Windows, we don't need to
    worry about CA certs.

    :param settings:
        A dict to look in for the `debug` key

    :return:
        The full filesystem path to the .ca-bundle file, or False on error
    """

    hours_to_cache = 7 * 24

    platform = sys.platform
    debug = settings.get('debug')

    ca_path = False

    if platform == 'win32' or platform == 'darwin':
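        # trust_list does the platform-specific export from the OS trust store; _ca_path() reports where the exported .ca-bundle will live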
        ensure_ca_bundle_dir()
        ca_path, _ = trust_list._ca_path(ca_bundle_dir)

        exists = os.path.exists(ca_path)
        is_empty = False
        is_old = False
        if exists:
            stats = os.stat(ca_path)
            is_empty = stats.st_size == 0
            # The bundle is old if it is a week or more out of date
            is_old = stats.st_mtime < time.time() - (hours_to_cache * 60 * 60)

        if not exists or is_empty or is_old:
            cert_callback = None
            if debug:
                console_write(u'''
                    Generating new CA bundle from system keychain
                    ''')
                cert_callback = print_cert_subject
            trust_list.get_path(ca_bundle_dir,
                                hours_to_cache,
                                cert_callback=cert_callback)
            if debug:
                console_write(
                    u'''
                    Finished generating new CA bundle at %s (%d bytes)
                    ''', (ca_path, os.stat(ca_path).st_size))

        elif debug:
            console_write(
                u'''
                Found previously exported CA bundle at %s (%d bytes)
                ''', (ca_path, os.stat(ca_path).st_size))

    # Linux
    else:
        # Common CA cert paths
        paths = [
            '/usr/lib/ssl/certs/ca-certificates.crt',
            '/etc/ssl/certs/ca-certificates.crt',
            '/etc/ssl/certs/ca-bundle.crt', '/etc/pki/tls/certs/ca-bundle.crt',
            '/etc/ssl/ca-bundle.pem', '/usr/local/share/certs/ca-root-nss.crt',
            '/etc/ssl/cert.pem'
        ]
        # First try SSL_CERT_FILE
        if 'SSL_CERT_FILE' in os.environ:
            paths.insert(0, os.environ['SSL_CERT_FILE'])
        for path in paths:
            if os.path.exists(path) and os.path.getsize(path) > 0:
                ca_path = path
                break

        if debug and ca_path:
            console_write(
                u'''
                Found system CA bundle at %s (%d bytes)
                ''', (ca_path, os.stat(ca_path).st_size))

    return ca_path
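
Example #60 regenerates the exported bundle only when os.stat says it is stale: the file is missing, empty, or older than the cache window. The same freshness test isolated into a helper (is_fresh and max_age_hours are illustrative names, not Package Control API):

import os
import time

def is_fresh(path, max_age_hours):
    # True if path exists, is non-empty, and is newer than max_age_hours
    try:
        stats = os.stat(path)
    except OSError:
        return False
    if stats.st_size == 0:
        return False
    return stats.st_mtime >= time.time() - max_age_hours * 60 * 60

The regeneration step then runs only when is_fresh() returns False, mirroring the "not exists or is_empty or is_old" condition above.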