Example #1
def load_hashes():
    '''
    Loads icon hashes from the icon cache file on disk.
    '''

    assert 'hashes' not in globals()  # make sure only called once
    global hashes
    global cache_path

    hashes = {}

    # save icon hashes on exit
    wx.GetApp().PreShutdown.append(write_hashes)

    cache_path = pathjoin(stdpaths.userlocaldata, 'cache')

    if not pathexists(cache_path):
        os.makedirs(cache_path)

    elif pathexists(cache_path):
        hash_filename = pathjoin(cache_path, ICON_HASH_FILE)

        if not pathexists(hash_filename):
            log.info('no icon hash file %r found', hash_filename)
        else:
            try:
                with file(hash_filename, 'rb') as f:
                    hashes = cPickle.load(f)
                    return
            except Exception:
                log.critical('error loading icon hashes from %r',
                             hash_filename)
                print_exc()
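Note that file() and cPickle are Python 2 built-ins; a minimal Python 3 sketch of the same load step (the helper name is illustrative, hash_filename as above) might look like this:

import pickle

def _load_hash_file(hash_filename):
    # Python 3: open() replaces file(), pickle replaces cPickle
    with open(hash_filename, 'rb') as f:
        return pickle.load(f)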
Example #2
def CREATE_DIR(OUTDIR):
    if not pathexists(pathjoin(OUTDIR)):
        osmkdir(pathjoin(OUTDIR))

    for DIR in ["IdListDIR", "IdListDIR/disease", "IdListDIR/query"]:
        if not pathexists(pathjoin(OUTDIR, DIR)):
            osmkdir(pathjoin(OUTDIR, DIR))
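The exists-then-mkdir pattern above can race with another process creating the same directory; on Python 3.2+ the check can be dropped entirely. A minimal sketch using plain standard-library names instead of the aliases used above:

import os

def create_dirs(outdir):
    # exist_ok=True makes makedirs idempotent, so no prior exists() check is needed
    for sub in ("IdListDIR", "IdListDIR/disease", "IdListDIR/query"):
        os.makedirs(os.path.join(outdir, sub), exist_ok=True)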
Example #3
def collect_batch(imgs,labs,batch_idx=[],imgs_out=[],labs_out=[],
                  outf=None,verbose=0):
    # collect img_out,lab_out from collection imgs
    imgshape = imgs[0].shape
    nbatch = len(batch_idx)
    if nbatch==0:
        nbatch = len(labs)
        batch_idx = range(nbatch)
    if len(imgs_out)!=nbatch:
        imgs_out = np.zeros([nbatch]+list(imgshape),dtype=imgs[0].dtype)
        labs_out = np.zeros([nbatch,labs.shape[1]],dtype=labs[0].dtype)
    batch_iter = enumerate(batch_idx)
    if verbose:
        pmsg = 'Loading %d images into memory'%nbatch
        pbar = progressbar(pmsg,nbatch)
        batch_iter = pbar(batch_iter)
    for i,idx in batch_iter:
        imgs_out[i] = imgs[idx]
        labs_out[i] = labs[idx]

    if outf:
        outbase,outext = splitext(outf)
        if len(outext)==0:
            outext='.npy'
        outdatf = outbase+'_X'+outext
        if not pathexists(outdatf):
            np.save(outdatf, imgs_out, allow_pickle=False, fix_imports=True)
            print('saved',outdatf)
        outlabf = outbase+'_y'+outext
        if not pathexists(outlabf):
            np.save(outlabf, labs_out, allow_pickle=False, fix_imports=True)
            print('saved',outlabf)        
        
    return imgs_out,labs_out
Example #4
def placeArtifact(artifact_file, repo_dirname, org, module, revision, status="release", meta={}, deps=[], supplied_ivy_file=None, scala=None, override=None, override_dir_only=False):
    if scala is not None:
        module = module + "_%s" % scala
    jarmodule = module
    if override is not None:
        org, module = override
        if not override_dir_only:
            jarmodule = module
    repo_dir = realpath(repo_dirname)
    artifact_dir = pathjoin(*[repo_dir] + [org] + [module, revision])
    ivyxml_path = pathjoin(artifact_dir, "ivy.xml")
    artifact_repo_path = pathjoin(artifact_dir, "%s-%s.jar" % (jarmodule, revision))
    
    if not pathexists(artifact_dir):
        makedirs(artifact_dir)
    
    ivyxml_file = open(ivyxml_path, "w")
    if supplied_ivy_file is None:
        writeIvyXml(org, module, revision, status, ivyxml_file, meta=meta, deps=deps)
    else:
        copyfile(supplied_ivy_file, ivyxml_path)
    
    if pathexists(artifact_repo_path):
        rmfile(artifact_repo_path)
    
    symlink(artifact_file, artifact_repo_path)
Example #6
def edit_file(filename):
    """Open the filename in the editor.

    Actually does the whole pattern of opening a temp file.
    """
    _, extension = splitext(filename)
    from tempfile import mkstemp
    try:
        fd, tempfilename = mkstemp(suffix=".conf", text=True)
        data = CFG_TEMPLATE
        if pathexists(filename):
            with open(filename) as srcfd:
                data = srcfd.read()

        os.write(fd, data.encode())  # os.write() expects bytes, not str
        os.close(fd)
    except Exception as e:
        print("problem making temp file: " + str(e), file=sys.stderr)
    else:
        editor = os.environ.get("VISUAL", os.environ.get("EDITOR", "editor"))
        try:
            p = Popen([editor, tempfilename])
            p.wait()
        except Exception as e:
            print("problem running editor", file=sys.stderr)
        else:
            # Copy the temp file in
            if pathexists(filename):
                os.remove(filename)
            os.rename(tempfilename, filename)
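One caveat with the final os.rename() call: it fails with a cross-device error when the temp directory lives on a different filesystem from the target; shutil.move() is the usual fallback. A sketch of that last step only:

import os
import shutil

def replace_file(tempfilename, filename):
    # shutil.move falls back to copy-and-delete when a plain rename is impossible
    if os.path.exists(filename):
        os.remove(filename)
    shutil.move(tempfilename, filename)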
Example #7
def load_hashes():
    '''
    Loads icon hashes from the icon cache file on disk.
    '''

    assert 'hashes' not in globals()     # make sure only called once
    global hashes
    global cache_path

    hashes = {}

    # save icon hashes on exit
    wx.GetApp().PreShutdown.append(write_hashes)

    cache_path = pathjoin(stdpaths.userlocaldata, 'cache')

    if not pathexists(cache_path):
        os.makedirs(cache_path)

    elif pathexists(cache_path):
        hash_filename = pathjoin(cache_path, ICON_HASH_FILE)

        if not pathexists(hash_filename):
            log.info('no icon hash file %r found', hash_filename)
        else:
            try:
                with file(hash_filename, 'rb') as f:
                    hashes = cPickle.load(f)
                    return
            except Exception:
                log.critical('error loading icon hashes from %r', hash_filename)
                print_exc()
Example #8
def InitConfig():
    # BACKUPFILES contains all files and folders to back up, for wildcard entries ALWAYS use eEnv_resolve_multi!
    BACKUPFILES = ['/etc/enigma2/', '/etc/CCcam.cfg', '/usr/keys/',
     '/etc/davfs2/', '/etc/tuxbox/config/', '/etc/auto.network', '/etc/feeds.xml', '/etc/machine-id', '/etc/rc.local',
     '/etc/openvpn/', '/etc/ipsec.conf', '/etc/ipsec.secrets', '/etc/ipsec.user', '/etc/strongswan.conf', '/etc/vtuner.conf',
     '/etc/default/crond', '/etc/dropbear/', '/etc/default/dropbear', '/home/', '/etc/samba/', '/etc/fstab', '/etc/inadyn.conf',
     '/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf',
     '/etc/wpa_supplicant.wlan0.conf', '/etc/wpa_supplicant.wlan1.conf', '/etc/resolv.conf', '/etc/enigma2/nameserversdns.conf', '/etc/default_gw', '/etc/hostname', '/etc/epgimport/', '/etc/exports',
     '/etc/enigmalight.conf', '/etc/volume.xml', '/etc/enigma2/ci_auth_slot_0.bin', '/etc/enigma2/ci_auth_slot_1.bin',
     '/usr/lib/enigma2/python/Plugins/Extensions/VMC/DB/',
     '/usr/lib/enigma2/python/Plugins/Extensions/VMC/youtv.pwd',
     '/usr/lib/enigma2/python/Plugins/Extensions/VMC/vod.config',
     '/usr/share/enigma2/MetrixHD/skinparts/',
     '/usr/share/enigma2/display/skin_display_usr.xml',
     '/usr/share/enigma2/display/userskin.png',
     '/usr/lib/enigma2/python/Plugins/Extensions/SpecialJump/keymap_user.xml',
     '/usr/lib/enigma2/python/Plugins/Extensions/MP3Browser/db',
     '/usr/lib/enigma2/python/Plugins/Extensions/MovieBrowser/db',
     '/usr/lib/enigma2/python/Plugins/Extensions/TVSpielfilm/db', '/etc/ConfFS',
     '/etc/rc3.d/S99tuner.sh',
     '/usr/bin/enigma2_pre_start.sh',
     eEnv.resolve("${datadir}/enigma2/keymap.usr"),
     eEnv.resolve("${datadir}/enigma2/keymap_usermod.xml")]\
     + eEnv_resolve_multi("${sysconfdir}/opkg/*-secret-feed.conf")\
     + eEnv_resolve_multi("${datadir}/enigma2/*/mySkin_off")\
     + eEnv_resolve_multi("${datadir}/enigma2/*/mySkin")\
     + eEnv_resolve_multi("${datadir}/enigma2/*/skin_user_*.xml")\
     + eEnv_resolve_multi("/etc/*.emu")\
     + eEnv_resolve_multi("${sysconfdir}/cron*")\
     + eEnv_resolve_multi("${sysconfdir}/init.d/softcam*")\
     + eEnv_resolve_multi("${sysconfdir}/init.d/cardserver*")\
     + eEnv_resolve_multi("${sysconfdir}/sundtek.*")\
     + eEnv_resolve_multi("/usr/sundtek/*")\
     + eEnv_resolve_multi("/opt/bin/*")\
     + eEnv_resolve_multi("/usr/script/*")

    # Drop non-existent paths from the list
    tmpfiles = []
    for f in BACKUPFILES:
        if pathexists(f):
            tmpfiles.append(f)
    backupset = tmpfiles

    config.plugins.configurationbackup = ConfigSubsection()
    if boxtype in ('maram9', 'classm', 'axodin', 'axodinc', 'starsatlx',
                   'genius', 'evo', 'galaxym6') and not pathexists(
                       "/media/hdd/backup_%s" % boxtype):
        config.plugins.configurationbackup.backuplocation = ConfigText(
            default='/media/backup/', visible_width=50, fixed_size=False)
    else:
        config.plugins.configurationbackup.backuplocation = ConfigText(
            default='/media/hdd/', visible_width=50, fixed_size=False)
    config.plugins.configurationbackup.backupdirs_default = NoSave(
        ConfigLocations(default=backupset))
    config.plugins.configurationbackup.backupdirs = ConfigLocations(
        default=[]
    )  # 'backupdirs_addon' is called 'backupdirs' for backwards compatibility, holding the user's old selection, duplicates are removed during backup
    config.plugins.configurationbackup.backupdirs_exclude = ConfigLocations(
        default=[])
    return config.plugins.configurationbackup
Example #9
def main(zipfile, source):
    '''(str, str) Accepts 2 file paths
    Returns nothing
    For each version of source, store code and output on disk in HTML format
    '''
    zipfile = zipfile.replace('.zip', '')
    zipdir = pathjoin(ziproot, basename(zipfile))
    savedir = pathjoin(saveroot, basename(zipfile))

    if not isdir(zipdir):
        makedirs(zipdir)
    if not isdir(savedir):
        makedirs(savedir)

    run([unzip_cmd, '-u', '-o', '-q', '-d', zipdir, zipfile])  # unzip automatically adds extension
#    run(['chmod', '-R', '777', zipdir])

    previous = None
    for version in sorted(listdir(zipdir)):
        original = pathjoin(zipdir, version)
        saves = pathjoin(savedir, version)

        # overwrite existing
        if isdir(saves):
            rmtree(saves)
        makedirs(saves)

        # not required
        compile_output = ''
        if pathexists(pathjoin(original, compile_snap)):
            try:
                compile_output = run([sh_cmd, compile_snap],
                        stdout=PIPE, cwd=original).stdout.decode()
            except FileNotFoundError:
                pass  # Shouldn't happen but just in case

        output = ''
        if pathexists(pathjoin(original, run_snap)):
            try:
                output = run([sh_cmd, run_snap],
                        stdout=PIPE, cwd=original).stdout.decode()
                # output = '\n'.join(i.decode() for i in output)
            except FileNotFoundError:
                pass  # Shouldn't happen but just in case
            except OSError as e:
                output = 'OSError in snap %s\n%s' % (version, e)

        pygments.highlight(output, BashLexer(), HtmlFormatter(),
                open(pathjoin(saves, 'output.html'), 'x'))
        code = ''.join(open(pathjoin(original, source)).readlines())
        code = pygments.highlight(code, get_lexer_for_filename(source),
                HtmlFormatter(), open(pathjoin(saves, 'code.html'), 'x'))

        # add bolding
        if previous is not None:
            diff_result = file_diff(pathjoin(previous, source), pathjoin(original, source))
            add_strongs(diff_result, pathjoin(saves, 'code.html'))

        previous = original
Example #10
	def checkPlugins(self):
		if pathexists("/tmp/installed-list.txt"):
			if pathexists("/media/hdd/images/config/noplugins") and config.misc.firstrun.value:
				self.userRestoreScript()
			else:
				self.session.openWithCallback(self.userRestoreScript, installedPlugins)
		else:
			self.userRestoreScript()
Example #11
def fill_venv(repo, cfg=None):
    """Install packages into the venv.

    Makes the venv if it needs to.
    """
    venvdir = _get_active_venv(repo)
    if not venvdir or not pathexists(venvdir):
        venvdir = make_venv(repo)

    if cfg is None:
        cfg = get_config(repo)

    installed_list = {}

    # Install each package in the venv
    venviron = venvdir
    packages = cfg.items("packages")
    for package in packages:
        package_name = package[1] or package[0]

        # Skip the package if the requested version is already installed
        if VIRTUALENV_VERSION_PEEKING:
            if not installed_list:
                installed_list = _pip_freeze(repo, venvdir) 
            packagespec = PIP_VERSION_RE.match(package_name)
            if packagespec:
                if packagespec.group("version") == installed_list[packagespec.group("packagename")]:
                    continue

        # Check whether pip can't install it.
        if package[0] in FORCE_EASY_INSTALL or package_name in FORCE_EASY_INSTALL:
            ez_command = "easy_install %s" % package_name
            ez = _venvsh(repo, venvdir, ez_command)
        else:
            # Use pip to install into the venv
            cmd = "pip install -v"

            # Some things that you can specify about pip with veh config
            if (cfg.has_option('pip', 'always-upgrade')
                and cfg.getboolean('pip', 'always-upgrade')):
                ## FIXME we've turned this off now we're using pip inside the venv
                #cmd += ' --upgrade'
                pass

            if cfg.has_option("pip", "download-cache"):
                cachedir = expanduser(cfg.get("pip", "download-cache"))
                try:
                    if not pathexists(cachedir):
                        os.mkdir(cachedir)
                except:
                    print >>sys.stderr, "%s does not exist but cannot be created" % cachedir
                else:
                    cmd += " --download-cache=%s" % cachedir

            pip_command = cmd + " " + package_name
            pip = _venvsh(repo, venvdir, pip_command)
Example #12
def fill_venv(repo, cfg=None):
    """Install packages into the venv.

    Makes the venv if it needs to.
    """
    venvdir = _get_active_venv(repo)
    if not venvdir or not pathexists(venvdir):
        venvdir = make_venv(repo)

    if cfg is None:
        cfg = get_config(repo)

    installed_list = {}

    # Install each package in the venv
    venviron = venvdir
    packages = cfg.items("packages")
    for package in packages:
        package_name = package[1] or package[0]

        # Skip the package if the requested version is already installed
        if VIRTUALENV_VERSION_PEEKING:
            if not installed_list:
                installed_list = _pip_freeze(repo, venvdir) 
            packagespec = PIP_VERSION_RE.match(package_name)
            if packagespec:
                if packagespec.group("version") == installed_list[packagespec.group("packagename")]:
                    continue

        # Check whether pip can't install it.
        if package[0] in FORCE_EASY_INSTALL or package_name in FORCE_EASY_INSTALL:
            ez_command = "easy_install %s" % package_name
            ez = _venvsh(repo, venvdir, ez_command)
        else:
            # Use pip to install into the venv
            cmd = "pip install -v"

            # Some things that you can specify about pip with veh config
            if (cfg.has_option('pip', 'always-upgrade')
                and cfg.getboolean('pip', 'always-upgrade')):
                ## FIXME we've turned this off now we're using pip inside the venv
                #cmd += ' --upgrade'
                pass

            if cfg.has_option("pip", "download-cache"):
                cachedir = expanduser(cfg.get("pip", "download-cache"))
                try:
                    if not pathexists(cachedir):
                        os.mkdir(cachedir)
                except:
                    print("%s does not exist but cannot be created" % cachedir, file=sys.stderr)
                else:
                    cmd += " --download-cache=%s" % cachedir

            pip_command = cmd + " " + package_name
            pip = _venvsh(repo, venvdir, pip_command)
Example #13
 def __copy_file(self, path):
     color_path = path + self.__path_color
     codestyle_path = path + self.__path_codestyle
     keymap_path = path + self.__path_keymap
     if pathexists(color_path):
         shcopy(self.__file_color, color_path + self.__file_color)
     if pathexists(codestyle_path):
         shcopy(self.__file_codestyle, codestyle_path + self.__file_codestyle)
     if pathexists(keymap_path):
         shcopy(self.__file_keymap, keymap_path + self.__file_keymap)
Example #14
	def doBackup(self):
		self.save_shutdownOK = config.usage.shutdownOK.value
		config.usage.shutdownOK.setValue(True)
		config.usage.shutdownOK.save()
		configfile.save()
		try:
			if config.plugins.softwaremanager.epgcache.value:
				eEPGCache.getInstance().save()
		except:
			pass
		try:
			if pathexists(self.backuppath) == False:
				makedirs(self.backuppath)
			InitConfig()
			self.backupdirs = " ".join(f.strip("/") for f in config.plugins.configurationbackup.backupdirs_default.value)
			for f in config.plugins.configurationbackup.backupdirs.value:
				if not f.strip("/") in self.backupdirs:
					self.backupdirs += " " + f.strip("/")
			if not "tmp/installed-list.txt" in self.backupdirs:
				self.backupdirs += " tmp/installed-list.txt"
			if not "tmp/changed-configfiles.txt" in self.backupdirs:
				self.backupdirs += " tmp/changed-configfiles.txt"
			if not "tmp/passwd.txt" in self.backupdirs:
				self.backupdirs += " tmp/passwd.txt"
			if not "tmp/groups.txt" in self.backupdirs:
				self.backupdirs += " tmp/groups.txt"

			ShellCompatibleFunctions.backupUserDB()
			pkgs = ShellCompatibleFunctions.listpkg(type="user")
			installed = open("/tmp/installed-list.txt", "w")
			installed.write('\n'.join(pkgs))
			installed.close()
			cmd2 = "opkg list-changed-conffiles > /tmp/changed-configfiles.txt"
			cmd3 = "tar -C / -czvf " + self.fullbackupfilename
			for f in config.plugins.configurationbackup.backupdirs_exclude.value:
				cmd3 = cmd3 + " --exclude " + f.strip("/")
			for f in BLACKLISTED:
				cmd3 = cmd3 + " --exclude " + f.strip("/")
			cmd3 = cmd3 + " " + self.backupdirs
			cmd = [cmd2, cmd3]
			if pathexists(self.fullbackupfilename):
				dt = str(date.fromtimestamp(stat(self.fullbackupfilename).st_ctime))
				self.newfilename = self.backuppath + "/" + dt + '-' + self.backupfile
				if pathexists(self.newfilename):
					remove(self.newfilename)
				rename(self.fullbackupfilename, self.newfilename)
			if self.finished_cb:
				self.session.openWithCallback(self.finished_cb, Console, title=_("Backup is running..."), cmdlist=cmd, finishedCallback=self.backupFinishedCB, closeOnSuccess=True)
			else:
				self.session.open(Console, title=_("Backup is running..."), cmdlist=cmd, finishedCallback=self.backupFinishedCB, closeOnSuccess=True)
		except OSError:
			if self.finished_cb:
				self.session.openWithCallback(self.finished_cb, MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout=10)
			else:
				self.session.openWithCallback(self.backupErrorCB, MessageBox, _("Sorry, your backup destination is not writeable.\nPlease select a different one."), MessageBox.TYPE_INFO, timeout=10)
Example #15
def main():
    versions = get_versions()
    latest = versions[0]
    if pathexists(pathjoin("./patch", latest['ver'])):
        print("No updates!")
        return 0
    else:
        workdir = tempfile.mkdtemp()
        print("Work Directory:{0}".format(workdir))
        latest_file, _ = download_apk(latest, workdir)
        lastest_d_path = pathjoin(workdir, latest["ver"])
        for item in versions[1:]:
            if pathexists(pathjoin("./patch", item['ver'])):
                break
        else:
            print("Nothing Comparable!")
            # clean up
            # 1. delete latest_file 2. delete workdir
            return 1
        cmpver = item
        cmpver_file, _ = download_apk(cmpver, workdir)
        cmpver_d_path = pathjoin(workdir, cmpver["ver"])
        apktool_file, _ = download_apktool(workdir)
        ret = subprocess.call([
            "java", "-jar", apktool_file, "d", "-f", "-o", lastest_d_path,
            latest_file
        ])
        if ret != 0:
            print("Decompiled Error!")
            # cleanup
            return 1
        ret = subprocess.call([
            "java", "-jar", apktool_file, "d", "-f", "-o", cmpver_d_path,
            cmpver_file
        ])
        if ret != 0:
            print("Decompiled Error!")
            # cleanup
            return 1

        cmp_smali_patch = pathjoin("./patch", cmpver["ver"], "smali.patch")
        noticing_smali = get_noticing_smali(cmp_smali_patch)
        if not check_stable(noticing_smali, lastest_d_path, cmpver_d_path):
            # cleanup
            return 1
        latest_lib_path = pathjoin(lastest_d_path, "lib")
        latest_lib_kai_path = pathjoin(lastest_d_path, "lib_kai")
        copytree(latest_lib_path, latest_lib_kai_path)
        # for loop
        # make patch/latest dir and copy cmp_smali_patch & generate so patch

    pass
Example #16
def check_stable(noticing_smali, lastest_d_path, cmpver_d_path):
    for smali in noticing_smali:
        dst = pathjoin(lastest_d_path, smali)
        src = pathjoin(cmpver_d_path, smali)
        if not pathexists(dst) or not pathexists(src):
            return False
        with open(dst, "r") as f1:
            content1 = f1.read()
            with open(src, "r") as f2:
                content2 = f2.read()
                if content2 != content1:
                    return False
    return True
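The byte-for-byte comparison above can also be delegated to the standard library's filecmp module; a sketch of the same check, keeping the original parameter names:

import filecmp
from os.path import join as pathjoin, exists as pathexists

def check_stable_filecmp(noticing_smali, lastest_d_path, cmpver_d_path):
    for smali in noticing_smali:
        dst = pathjoin(lastest_d_path, smali)
        src = pathjoin(cmpver_d_path, smali)
        if not pathexists(dst) or not pathexists(src):
            return False
        # shallow=False forces a content comparison rather than an os.stat() check
        if not filecmp.cmp(src, dst, shallow=False):
            return False
    return True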
Example #17
 def on_match(m):
     url = m.group(1)
     if url:
         src = pathjoin(source_root, url.lstrip('/'))
         if pathexists(src):
             _, ext = os.path.splitext(src)
             hash = hashlib.md5(src.encode()).hexdigest()
             relpath = pathjoin(os.path.basename(source_root), hash) + ext
             dst = pathjoin(MEDIA_ROOT, relpath)
             if not pathexists(os.path.dirname(dst)):
                 os.makedirs(os.path.dirname(dst))
             shutil.copyfile(src, dst)
             url = pathjoin(MEDIA_URL, relpath)
     return url
Example #18
 def __init__(self, config_map=None, environ=None, default_config=None):
     if config_map is None:
         config_map = DEFAULT_CONFIG_MAP
     if environ is None:
         environ = os.environ
     if default_config is None:
         default_config = DEFAULT_CONFIG_MAP
     self.environ = environ
     config = ConfigParser()
     if isinstance(config_map, str):
         self.path = path = config_map
         with Pfx(path):
             read_ok = False
             if pathexists(path):
                 try:
                     config.read(path)
                 except OSError as e:
                     error("read error: %s", e)
                 else:
                     read_ok = True
             else:
                 warning("missing config file")
         if not read_ok:
             warning("falling back to default configuration")
             config.read_dict(default_config)
     else:
         self.path = None
         config.read_dict(config_map)
     self.map = config
     self._clause_stores = {}  # clause_name => Result->Store
     self._lock = Lock()
Example #19
def choose(basepath, preferred_indexclass=None):
  ''' Choose an indexclass from a `basepath` with optional preferred indexclass.
      This prefers an existing index if present.
  '''
  global _CLASSES  # pylint: disable=global-statement
  global _BY_NAME  # pylint: disable=global-statement
  if preferred_indexclass is not None:
    if isinstance(preferred_indexclass, str):
      indexname = preferred_indexclass
      try:
        preferred_indexclass = _BY_NAME[indexname]
      except KeyError:
        warning("ignoring unknown indexclass name %r", indexname)
        preferred_indexclass = None
  indexclasses = list(_CLASSES)
  if preferred_indexclass:
    indexclasses.insert(0, (preferred_indexclass.NAME, preferred_indexclass))
  # look for a preexisting index
  for indexname, indexclass in indexclasses:
    if not indexclass.is_supported():
      continue
    indexpath = indexclass.pathof(basepath)
    if pathexists(indexpath):
      return indexclass
  # otherwise choose the first supported index
  for indexname, indexclass in indexclasses:
    if not indexclass.is_supported():
      continue
    return indexclass
  raise ValueError(
      "no supported index classes available: tried %r" % (indexclasses,)
  )
Example #20
def get_config(repo, rev=None):
    """Get the config from the veh root.

Using a specified rev only works on Mercurial repos right now."""
    if not rev:
        repo_root = find_root_with_file(".veh.conf", repo)
        cfgfile = os.path.join(repo_root, '.veh.conf')
        if not pathexists(cfgfile):
            raise ConfigMissing(cfgfile)

        with open(cfgfile) as fd:
            cfg = ConfigParser()
            cfg.readfp(fd, '.veh.conf')

        return cfg

    else:
        # This obviously needs fixing to be DVCS agnostic

        from mercurial import hg, ui, error
        # NOTE: rev = None in the mercurial api will give you the working dir.
        u = ui.ui()
        try:
            repo = hg.repository(u, repo)
        except error.RepoError as e:
            # repo not found
            raise
        try:
            cfgdata = repo[rev]['.veh.conf'].data()
        except error.RepoLookupError as e:
            # revision not found
            raise
Example #21
 def test_reload(self):
     """Ensure reload command is run (instead of HUP) when provided."""
     reload = '/bin/touch %s' % self.sig
     with ReloadConf(self.dir, self.file, '/bin/sleep 1',
                     reload=reload) as rc:
         rc.poll()
         # Command should now be running.
         self.assertTrue(rc.check_command())
         self.assertFalse(pathexists(self.sig))
         # Write out "config" file.
         with open(pathjoin(self.dir, basename(self.file)), 'wb') as f:
             f.write(b'foo')
         # Reload command should be executed.
         rc.poll()
         time.sleep(0.1)
         self.assertTrue(pathexists(self.sig))
Example #22
 def _genSrt(self):
     with open(self.outPath, 'wt') as fp:
         idx = -1
         for index, chunk in enumerate(self.chunks):
             text = chunk.text.strip().strip('。').strip()
             if not text:
                 continue
             fpath = splitext(chunk.fpath)[0] + '-zh.txt'
             text_zh = ''
             if pathexists(fpath):
                 with open(fpath, 'rt') as fp_zh:
                     text_zh = fp_zh.read()
             idx += 1
             hour, minute, sec, ms = self.parseHourMinuteSecondMs(
                 chunk.start)
             hour_e, minute_e, sec_e, ms_e = self.parseHourMinuteSecondMs(
                 chunk.end)
             fp.write('%s\n' % (idx + 1))
             fp.write(
                 '%02d:%02d:%02d,%03d --> %02d:%02d:%02d,%03d\n' %
                 (hour, minute, sec, ms, hour_e, minute_e, sec_e, ms_e))
             if text_zh:
                 fp.write(text_zh)
             else:
                 fp.write(text)
             fp.write('\n\n')
     print('gen srt: %s' % self.outPath)
Example #23
    def initialScript(self):
        '''
        @return: QString
        '''
        if not self._loaded:
            self.loadSettings()

        if not self._regenerateScript:
            return self._initialScript

        self._regenerateScript = False
        self._initialScript = ''

        pages = []

        for page in self._pages:
            imgSource = self._genFileNameByUrl(page.url)

            if not pathexists(imgSource):
                imgSource = 'qrc:html/loading.gif'

                if not page.isValid():
                    imgSource = ''
            else:
                imgSource = gVar.appTools.pixmapToDataUrl(QPixmap(imgSource)).toString()

            map_ = {}
            map_['url'] = page.url
            map_['title'] = page.title
            map_['img'] = imgSource
            pages.append(map_)

        self._initialScript = jdumps(pages)
        return self._initialScript
Example #24
    def work(self):
        moduleHPaths = []
        for root, dirs, files in os.walk(self.rootdir):
            for fname in files:
                if fname.startswith('Module') and fname.endswith('.h'):
                    moduleHPaths.append(pathjoin(root, fname))

        genMap = OrderedDict()
        for fpath in moduleHPaths:
            genMap[fpath] = self.parseHFileFuncInfos(fpath)

        allFuncInfos = []
        chunks = []
        for fpath, funcInfos in genMap.items():
            outPath = pathjoin('gen', basename(fpath))
            chunk = self.genFileChunk(fpath, funcInfos, outPath)
            chunks.append(chunk)
            allFuncInfos.extend(funcInfos)

        allFuncInfos.sort(key=lambda x: x['szName'].upper())
        fileContent = self.genFile(chunks, allFuncInfos)
        if not pathexists('src/gen'):
            os.makedirs('src/gen')

        with open('src/gen/ModulesGen.h', 'wt') as fp:
            fp.write(fileContent)
Example #25
def array2img(outf,img,mapinfostr=None,bandnames=None,**kwargs):
    outhdrf = outf+'.hdr'
    if pathexists(outf) and not kwargs.pop('overwrite',False):
        warn('unable to save array: file "%s" exists and overwrite=False'%outf)
        return
        
    img = np.atleast_3d(img)
    outmeta = dict(samples=img.shape[1], lines=img.shape[0], bands=img.shape[2],
                   interleave='bip')
    
    outmeta['file type'] = 'ENVI'
    outmeta['byte order'] = 0
    outmeta['header offset'] = 0
    outmeta['data type'] = envitypecode(img.dtype)

    if mapinfostr:
        outmeta['map info'] = mapinfostr

    if bandnames:
        outmeta['band names'] = '{%s}'%", ".join(bandnames)
        
    outmeta['data ignore value'] = -9999

    outimg = createimg(outhdrf,outmeta)
    outmm = openimgmm(outimg,writable=True)
    outmm[:] = img
    outmm = None # flush output to disk
    print('saved %s array to %s'%(str(img.shape),outf))
Example #26
def download_from_url(url, dest_file):
    """
    Attempt to download file specified by url to 'dest_file'

    Raises:

        WrongFileTypeException

            when content-type is not in the supported types or cannot
            be derived from the URL

        FileExistsException

            If the filename (derived from the URL) already exists in
            the destination directory.

        HTTPError

            ...
    """
    # Don't download files multiple times!
    if pathexists(dest_file):
        raise FileExistsException('URL [%s] already downloaded.' % url)

    response = request(url)
    info = response.info()
    actual_url = response.url
    if actual_url == 'http://i.imgur.com/removed.png':
        raise HTTPError(actual_url, 404, "Imgur suggests the image was removed", None, None)

    # Work out file type either from the response or the url.
    if 'content-type' in list(info.keys()):
        filetype = info['content-type']
    elif url.endswith('.jpg') or url.endswith('.jpeg'):
        filetype = 'image/jpeg'
    elif url.endswith('.png'):
        filetype = 'image/png'
    elif url.endswith('.gif'):
        filetype = 'image/gif'
    elif url.endswith('.mp4'):
        filetype = 'video/mp4'
    elif url.endswith('.webm'):
        filetype = 'video/webm'
    else:
        filetype = 'unknown'

    # Only try to download acceptable image types
    if filetype not in ['image/jpeg', 'image/png', 'image/gif', 'video/webm', 'video/mp4']:
        raise WrongFileTypeException('WRONG FILE TYPE: %s has type: %s!' % (url, filetype))

    filedata = response.read()
    filehandle = open(dest_file, 'wb')
    filehandle.write(filedata)
    filehandle.close()

    #converting png to jpg so that images can be trained in CNN
    if filetype == 'image/png':
        im = Image.open(dest_file)
        im_jpg = im.convert('RGB')
        im_jpg.save(dest_file.split('.')[0] + '.jpg')
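The chain of endswith() checks used as a fallback can be expressed with the standard library's mimetypes module; a small sketch (the function name is illustrative, and the response's content-type header should still take precedence):

import mimetypes

def filetype_from_url(url, default='unknown'):
    # guess_type() maps '.jpg' -> 'image/jpeg', '.png' -> 'image/png', and so on
    guessed, _ = mimetypes.guess_type(url)
    return guessed or default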
Example #27
def guess(update, context):
    quest = propose_image_to_guess(update.effective_user.id)
    if not quest:
        return context.bot.send_message(
            chat_id=update.effective_chat.id,
            text=f"Sorry, no images for you to guess",
            reply_markup=config.keyboard.action)

    qid, _, img_code, association, user_name = quest
    tile_ids, gt = image.from_repr(img_code)
    imset_name = f"{len(gt)}/{len(tile_ids)}"
    imset = get_imset(imset_name)
    if imset.shape[0] * imset.shape[1] != len(tile_ids):
        return context.bot.send_message(chat_id=update.effective_chat.id,
                                        text=f"Something went wrong",
                                        reply_markup=config.keyboard.action)

    fname = image.guess_imagename(tile_ids)
    if not pathexists(fname):
        fname = image.generate_image(tile_ids, imset.shape)
    context.chat_data['guess'] = {
        'qid': qid,
        'tile_ids': img_code,
        'gt': gt,
        'guess': []
    }

    caption = f"{user_name} says “{association} {len(gt)}”"
    keyboard = config.keyboard.get(imset_name, config.keyboard.empty)
    context.bot.send_photo(chat_id=update.effective_chat.id,
                           photo=open(fname, 'rb'),
                           caption=caption,
                           reply_markup=keyboard)
Example #28
def get_file_type(text):
    if not pathexists(text):
        # FIXME: Real test for URL here
        return shortcuts.SHORTCUT_TYPE_URL
    if isdir(text):
        return shortcuts.SHORTCUT_TYPE_FOLDER
    if not isfile(text) and not islink(text):
        return shortcuts.SHORTCUT_TYPE_DOCUMENT
    filename = text
    # Sample file contents
    with open(filename, "r") as fd:
        sample = fd.read(128)
    # Guess if it's executable
    can_execute = False
    content_type = None
    try:
        content_type = gio.content_type_guess(
            filename, sample, want_uncertain=False
        )  # IGNORE:E1101 @UndefinedVariable Keep PyLint and PyDev happy
        can_execute = gio.content_type_can_be_executable(
            content_type
        )  # IGNORE:E1101 @UndefinedVariable Keep PyLint and PyDev happy
    except Exception as e:
        logging.error("Error guessing file type: %s", e)
    if not can_execute:
        if os.access(filename, os.X_OK):
            return shortcuts.SHORTCUT_TYPE_EXECUTABLE
    if can_execute and os.access(filename, os.X_OK):
        return shortcuts.SHORTCUT_TYPE_EXECUTABLE
    return shortcuts.SHORTCUT_TYPE_DOCUMENT
Example #29
def load_config(config_path, config={}):
    if not pathexists(config_path):
        sys.stderr.write('Can not get config file %s\n' % config_path)
    else:
        with open(config_path) as fp:
            item = jloads(fp.read())
        config.update(item)
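An equivalent loader that avoids both the separate existence check and the mutable default argument, catching FileNotFoundError instead; a sketch, assuming Python 3 and that jloads is json.loads:

import json
import sys

def load_config(config_path, config=None):
    config = {} if config is None else config
    try:
        with open(config_path) as fp:
            config.update(json.load(fp))
    except FileNotFoundError:
        sys.stderr.write('Can not get config file %s\n' % config_path)
    return config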
Example #30
def get_imagemap(imgid,hdr_path,hdr_pattern,verbose=False):
    import os
    from glob import glob
    from os.path import join as pathjoin, exists as pathexists, splitext
    
    if not pathexists(hdr_path):
        warn('hdr_path "%s" not found'%hdr_path)            
        return None
    
    # remove .hdr from suffix if it's there
    hdr_files  = glob(pathjoin(hdr_path,hdr_pattern))
    msgtup=(imgid,hdr_path,hdr_pattern)
    if len(hdr_files)==0:
        warn('No hdr for "%s" in "%s" matching pattern "%s"'%msgtup)
        return None
    hdrf = hdr_files[0]
    if len(hdr_files)>1:
        hdrf = None
        for hdr_file in hdr_files:
            if imgid in hdr_file:
                hdrf = hdr_file
                break
        if not hdrf:
            warn('No hdr for "%s" in "%s" matching pattern "%s"'%msgtup)
            return None
        
        msg = 'Multiple .hdr files for "%s" in "%s" matching pattern "%s"'%msgtup
        msg += ', using file "%s"'%hdrf
        warn(msg)
    imgf = hdrf.replace('.hdr','')
    imgmap = mapinfo(openimg(imgf,hdrf=hdrf),astype=dict)
    imgmap['rotation'] = -imgmap['rotation']               

    return imgmap
Example #31
 def do_cat(self, arg):
     """Cat the veh config file"""
     root = self._getroot()
     cfgfile = "%s/.veh.conf" % root
     if pathexists(cfgfile):
         with open(cfgfile) as fd:
             cfgcontent = fd.read()
             sys.stdout.write(cfgcontent)
Example #32
 def startDelete(self, ret=False):
     if ret == True:
         self.exe = True
         print("removing:", self.val)
         if pathexists(self.val) == True:
             remove(self.val)
         self.exe = False
         self.fill_list()
Example #33
def compute_mean(X_train,X_test,meanf):
    if pathexists(meanf):
        return loadmat(meanf)['mean_image']
    mean_image = np.sum(X_train,axis=0)+np.sum(X_test,axis=0)
    mean_image /= X_train.shape[0]+X_test.shape[0]
    
    savemat({'mean_image':mean_image},meanf)
    return mean_image
Example #34
 def getFileName(self, path):
     if isfile(path):
         return basename(path)
     if isdir(path):
         return ''
     if pathexists(dirname(path)):
         return basename(path)
     return ''
Example #35
def _parse_key(key):
    if not key:
        return

    if pathexists(key):
        key = open(key, 'rb').read()

    return key
Example #36
    def _updateCurrentProfile(self):
        '''
        @brief: update profile by version
        '''
        profileDir = DataPaths.currentProfilePath()
        if not pathexists(profileDir):
            makedirs(profileDir)

        versionFile = pathjoin(profileDir, 'version')
        if pathexists(versionFile):
            with open(versionFile, 'rt') as fp:
                profileVersion = fp.read()
                self._updateProfile(const.VERSION, profileVersion.strip())
        else:
            self._copyDataToProfile()
        with open(versionFile, 'wt') as fp:
            fp.write(const.VERSION)
Example #37
def venv(repo, cfg=None):
    """Make the repos venv"""

    venvdir = _get_active_venv(repo)
    if not venvdir or not pathexists(venvdir):
        fill_venv(repo, cfg=cfg)
        venvdir = _get_active_venv(repo)
    return venvdir
Example #38
    def testUnpackZipCond(self):
        bctx = BuildContext("tests/pccts/pspec.xml")
        url = uri.URI(bctx.spec.source.archive.uri)
        targetDir = bctx.pkg_work_dir()
        filePath = join(ctx.config.archives_dir(), url.filename())

        # check cached
        if util.sha1_file(filePath) != bctx.spec.source.archive.sha1sum:
            fetch = fetcher.Fetcher(bctx.spec.source.archive.uri, targetDir)
            fetch.fetch()
        assert bctx.spec.source.archive.type == "zip"

        achv = archive.Archive(filePath, bctx.spec.source.archive.type)
        achv.unpack_files(["pccts/history.txt"], targetDir)
        assert pathexists(targetDir + "/pccts")
        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
Example #39
 def get_object_api_doc(name_or_path):
     """returns the epydoc parse-only APIDoc object for the python file or object"""
     if pathexists(name_or_path):
         return parse_docs(filename=name_or_path)
     else:
         if valid_dotted_name(name_or_path):
             return parse_docs(name=name_or_path)
     raise IOError("No such file %s" % name_or_path)
Example #40
    def testUnpackZipCond(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp'
        achv = sourcearchive.SourceArchive(spec, targetDir)
        url = uri.URI(spec.source.archive.uri)
        filePath = join(ctx.config.archives_dir(), url.filename())

        # check cached
        if util.sha1_file(filePath) != spec.source.archive.sha1sum:
            fetch = fetcher.Fetcher(spec.source.archive.uri, targetDir)
            fetch.fetch()
        assert spec.source.archive.type == "zip"

        achv = archive.Archive(filePath, spec.source.archive.type)
        achv.unpack_files(["pccts/history.txt"], targetDir)
        assert pathexists(targetDir + "/pccts")
        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
Example #41
    def testUnpackZip(self):
        spec = SpecFile("tests/pccts/pspec.xml")
        targetDir = '/tmp/pisitest'

        assert spec.source.archive.type == "zip"

        achv = sourcearchive.SourceArchive(spec, targetDir)
        achv.fetch(interactive=False)
        achv.unpack(clean_dir=True)

        assert pathexists(targetDir + "/pccts")

        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")
Example #42
def edit(repo):
    """Edit the veh config file

    Opens VISUAL on the veh config file."""
    cfgfile = "%s/.veh.conf" % repo
    if pathexists(cfgfile):
        edit_file(cfgfile)
    else:
        print >>sys.stderr, "%s does not exist" % cfgfile
Example #43
def doesfileexist(_filename: str) -> bool:
    """Test that the specified file exists"""
    if not pathexists(_filename) or not isfile(_filename):
        return False
    if isdir(_filename):
        return False
    if not fileaccess(_filename, R_OK):
        return False
    return True
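Since os.path.isfile() already returns False for missing paths and for directories, the same predicate can be written more compactly; a sketch:

from os import access, R_OK
from os.path import isfile

def doesfileexist(filename: str) -> bool:
    # isfile() implies the path exists and is not a directory
    return isfile(filename) and access(filename, R_OK)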
Example #44
    def testUnpackZip(self):
        bctx = BuildContext("tests/pccts/pspec.xml")

        assert bctx.spec.source.archive.type == "zip"

        achv = sourcearchive.SourceArchive(bctx)
        achv.fetch(interactive=False)
        achv.unpack(clean_dir=True)

        targetDir = bctx.pkg_work_dir()
        assert pathexists(targetDir + "/pccts")

        testfile = targetDir + "/pccts/history.txt"
        assert pathexists(testfile)
    
        # check file integrity
        self.assertEqual(util.sha1_file(testfile),
             "f2be0f9783e84e98fe4e2b8201a8f506fcc07a4d")
Example #45
 def do_check(self, arg):
     """Report whether the repository has an associated veh.
     """
     root = self._getroot()
     vehenv = _get_active_venv(root)
     if vehenv and pathexists(vehenv):
         print root
     else:
         return 1
Example #46
def _loadcookies():
    """ Load cookies from COOKIE_FILE in to `session`.

    """
    if not pathexists(COOKIE_FILE):
        return
    with file(COOKIE_FILE, 'r') as f:
        cookiedict = json.loads(f.read())
        session.cookies = requests.cookies.cookiejar_from_dict(cookiedict)
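file() is Python 2 only; a Python 3 sketch of the same cookie loading, written to take the session and cookie path as parameters so it is self-contained:

import json
from os.path import exists as pathexists

import requests

def load_cookies(session, cookie_file):
    # Skip silently when no cookie file has been written yet
    if not pathexists(cookie_file):
        return
    with open(cookie_file, 'r') as f:
        cookiedict = json.load(f)
    session.cookies = requests.cookies.cookiejar_from_dict(cookiedict)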
Example #47
def get_app_data(path, ensure_root_exists=True):
    fpath = pathjoin(APPLICATION_DATA_ROOT, path)
    if ensure_root_exists:
        if fpath.endswith('/'):
            root = fpath
        else:
            root = dirname(fpath)
        if not pathexists(root):
            os.makedirs(root)
    return fpath
Example #48
def get_config(repo, rev=None):
    """Get the config from the veh root.

    We try and work out what version control system is being used from
    the location of the veh config file.

    We currently only support Mercurial and GIT.

    The veh.conf MUST exist in the file system though we might read a
    completely different version of course."""

    repo_root = find_root_with_file(".veh.conf", repo)
    cfgfile = os.path.join(repo_root, '.veh.conf')
    if not pathexists(cfgfile):
        raise ConfigMissing(cfgfile)

    if not rev:
        with open(cfgfile) as fd:
            cfg = ConfigParser()
            cfg.readfp(fd, '.veh.conf')
        return cfg
    else:
        # FIXME: Moar DVCS! Moar!
        repo_command = None
        if pathexists(os.path.join(repo_root, ".git")):
            repo_command = "git show %(rev)s:%(path)s" % {
                "rev": rev,
                "path": ".veh.conf"
                }
        elif pathexists(os.path.join(repo_root, ".hg")):
            repo_command = "hg cat -r %(rev)s %(path)s" % {
                "rev": rev,
                "path": ".veh.conf"
                }

        if not repo_command:
            # FIXME we need to mark that this is a version problem?
            raise ConfigMissing(cfgfile)

        cfgdata = subprocess.check_output(repo_command.split(" "))
        cfg = ConfigParser()
        cfg.readfp(StringIO(cfgdata), '.veh.conf')
        return cfg
Example #49
def init_clf(X_train,y_train,trainfn,clf_file,**kwargs):
    from os.path import exists as pathexists

    retrain = kwargs.pop('retrain',False)
    if not pathexists(clf_file) or retrain:
        clf = trainfn(X_train,y_train,**kwargs)
        _ = save_classifier(clf,clf_file)
    else:
        clf = load_classifier(clf_file)
    return clf
Example #50
def load_file_cache(obj, user=False):
    cache_path = get_obj_cache_path(obj, user)

    # If this object has never been cached, return an empty dict
    if not pathexists(cache_path):
        return None

    # Read in and return the raw cache contents.
    with file(cache_path, 'rb') as f:
        return f.read()
Example #51
def download_from_url(url, dest_file):
    """
    Attempt to download file specified by url to 'dest_file'

    Raises:

        WrongFileTypeException

            when content-type is not in the supported types or cannot
            be derived from the URL

        FileExistsException

            If the filename (derived from the URL) already exists in
            the destination directory.

        HTTPError

            ...
    """
    # Don't download files multiple times!
    if pathexists(dest_file):
        raise FileExistsException('URL [%s] already downloaded.' % url)

    response = request(url)
    info = response.info()
    actual_url = response.url
    if actual_url == 'http://i.imgur.com/removed.png':
        raise HTTPError(actual_url, 404, "Imgur suggests the image was removed", None, None)

    # Work out file type either from the response or the url.
    if 'content-type' in info.keys():
        filetype = info['content-type']
    elif url.endswith('.jpg') or url.endswith('.jpeg'):
        filetype = 'image/jpeg'
    elif url.endswith('.png'):
        filetype = 'image/png'
    elif url.endswith('.gif'):
        filetype = 'image/gif'
    elif url.endswith('.mp4'):
        filetype = 'video/mp4'
    elif url.endswith('.webm'):
        filetype = 'video/webm'
    else:
        filetype = 'unknown'

    # Only try to download acceptable image types
    if filetype not in ['image/jpeg', 'image/png', 'image/gif', 'video/webm', 'video/mp4']:
        raise WrongFileTypeException('WRONG FILE TYPE: %s has type: %s!' % (url, filetype))

    filedata = response.read()
    filehandle = open(dest_file, 'wb')
    filehandle.write(filedata)
    filehandle.close()
Example #52
def install(repo):
    """Install a veh config file

    Opens VISUAL (and EDITOR if it can't find that) with a template.
    """
    # TODO I think this should commit really. For now, it doesn't.
    # TODO we need to add the line to hgignore for the state file
    cfgfile = "%s/.veh.conf" % repo
    if pathexists(cfgfile):
        raise Exists(cfgfile)
    edit_file(cfgfile)
Example #53
def ensurefileexists(_filename: str) -> None:
    """Ensure that the specified file exists; if not, then raise an exception"""
    if not pathexists(_filename) or not isfile(_filename):
        stderr.write(_filename + ': The specified file is non-readable or non-existent!\n')
    elif isdir(_filename):
        stderr.write(_filename + ': This "file" is actually a directory!\n')
    elif not fileaccess(_filename, R_OK):
        stderr.write(r'Permission Error: Unable to read "' + _filename + '"!\n')
    else:
        return
    raise SystemExit(1)
Example #54
    def __copy_file(self):

        for base in self.__base__:
            for os_path in self.__apps__:
                __path = self.__home__ + base + os_path

                if pathexists(__path):
                    for i in range(0, 3):
                        file_path = __path + self.__paths__[i] + self.__files__[i]
                        shcopy(self.__files__[i], file_path)
                        print(file_path)
Example #55
def convert(plistpath, dest=None):
    imagepath = re.sub(r'\.plist$', '.png', plistpath)
    plist = readPlist(plistpath)
    image = Image.open(imagepath) 
    if dest and not pathexists(dest):
        makedirs(dest)
    for filename, v in plist['frames'].items():
        if dest:
            filename = pathjoin(dest, filename)
        print 'save', filename
        convert_each(image, filename, v)
    print 'done'
Example #56
def parse_and_download(url):
    if "comico." not in url:
        print("url is not comico.jp or comico.com.tw")
        return
    titleNo, articleNo = parse_url_for_title_and_episode(url)
    target_dir = "%s-%s" % (titleNo, articleNo)
    print("creating directory: %s" % target_dir)
    if not pathexists(target_dir):
        mkdir(target_dir)
    assert isdir(target_dir)
    image_urls = parse_image_url(url)
    for img_url in image_urls:
        download_into(target_dir, img_url, url)
Example #57
def save_file_cache(obj, data, user=False):
    if not getattr(obj, '_disk_cacheable', True):
        return
    cache_path = get_obj_cache_path(obj, user)

    # Ensure that the location for the cache file exists.
    cache_head = pathsplit(cache_path)[0]
    if not pathexists(cache_head):
        os.makedirs(cache_head)

    # Pickle, compress, and write out.
    with file(cache_path, 'wb') as f:
        f.write(data)
Example #58
def run_extraction(conf, artifact2extraction, resdir):
    """
    Runs the extraction process for the list of given parameters.

    :param conf: the Codeface configuration object
    :param artifact2extraction: a list of pairs (kind of artifact to extract, extraction-process name);
           e.g., [('Feature', 'author2feature')]
    :param resdir: the Codeface results dir, where output files are written
    """

    log.info("%s: Extracting data" % conf["project"])

    # initialize database manager with given configuration
    dbm = DBManager(conf)

    # get setting for current combination
    project = conf["project"]
    project_resdir = conf["repo"]
    revs = conf["revisions"]
    tagging = conf["tagging"]
    project_resdir = pathjoin(resdir, project_resdir, tagging)

    # for all revisions of this project
    for i in range(len(revs) - 1):
        start_rev = revs[i]
        end_rev = revs[i + 1]

        # print (project, tagging, kind, start_rev, end_rev)

        # results directory for current revision
        range_resdir = pathjoin(project_resdir, "{0}-{1}".format(start_rev, end_rev))
        if not pathexists(range_resdir):
            makedirs(range_resdir)

        # get the list of authors in this project
        get_list_of_authors(dbm, project, range_resdir)

        # for all kinds of artifacts that have been analyzed for the current tagging
        for (artifact, extraction) in artifact2extraction:
            log.info("%s: Extracting data: %s" % (conf["project"], extraction))

            # extract the author--artifact list
            get_artifacts_per_author(dbm, project, tagging, extraction, end_rev, artifact, range_resdir)

            # get co-changed artifacts (= artifacts per commit)
            get_cochanged_artifacts(dbm, project, tagging, end_rev, artifact, range_resdir)

        # extract mailing-list analysis (associated with proximity/feature projects!)
        if tagging == 'proximity' or tagging == 'feature':
            log.info("%s: Extracting mailing network for version '%s'" % (conf["project"], end_rev))
            get_mailing_authors(dbm, project, tagging, end_rev, range_resdir)