Example 1
    def generate(self, domain, force=False):
        domain, fdomain = self.sanitize_domain(domain)

        if self.check_cert(domain) and not force:
            SCLogger.tls.debug('Certificate for {0} is ok'.format(domain))
            return True, fdomain

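        # Filesystem artifacts produced below: private key, CSR, and the signed certificate.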
        ssl_key = pathjoin(self.key_dir, fdomain + '.key')
        ssl_cert = pathjoin(self.key_dir, fdomain + '.crt')
        ssl_csr = pathjoin(self.key_dir, fdomain + '.csr')

        if not self.check_cert(domain):
            self.cleanup(domain)

        SCLogger.tls.debug(
            'Generate new certificate for {0}'.format(domain))
        ssl_subj = self.OPENSSL_SUBJECT_FORMAT.format(
            Config.tls.subj_country,
            Config.tls.subj_state,
            Config.tls.subj_locality,
            Config.tls.subj_organization,
            Config.tls.subj_ounit,
            domain,
            Config.tls.subj_email,
        )
        cmd_key = self.OPENSSL_NEWKEY_FORMAT.format(
            self.ca_cnf,
            ssl_key,
            ssl_csr,
            ssl_subj,
            self.obin,
        )
        if Config.tls.debug_openssl_cmd:
            SCLogger.tls.debug('OpenSSL cmd key: {0}'.format(cmd_key))
        ssl_log_key = Utils.popen_process(cmd_key)
        cmd_cert = self.OPENSSL_CASIGN_FORMAT.format(
            self.ca_crt,
            self.ca_key,
            self.ca_ser,
            ssl_csr,
            ssl_cert,
            self.validity_days,
            self.obin,
        )
        if Config.tls.debug_openssl_cmd:
            SCLogger.tls.debug('OpenSSL cmd cert: {0}'.format(cmd_cert))
        ssl_log_cert = Utils.popen_process(cmd_cert)

        cert_check = self.check_cert(domain)

        if cert_check:
            SCLogger.tls.info('Certificate for {0} generated'.format(domain))
        else:
            ssl_log_key_full = Utils.popen_fulloutput(ssl_log_key)
            ssl_log_cert_full = Utils.popen_fulloutput(ssl_log_cert)
            SCLogger.tls.critical('Certificate for {0} has NOT been generated'.format(domain))
            SCLogger.tls.critical('OpenSSL output for {0}.key:\n{1}'.format(domain, ssl_log_key_full))
            SCLogger.tls.critical('OpenSSL output for {0}.crt:\n{1}'.format(domain, ssl_log_cert_full))

        return cert_check, fdomain
Example 2
def test_function():
    # Insert a date in the DateTime function below to compare the new and old databases before a
    # specific date.
    datecheckbefore = DateTime().secs

    glimmondb.recreate_db(glimmondbfile='glimmondb_testing.sqlite3')
    copy('./testing_data/glimmondb_testing.sqlite3', './glimmondb_testing_backup.sqlite3')

    new_all_limits, new_all_states, new_all_versions = querydatabase(pathjoin(glimmondb.DBDIR, 
        'glimmondb_testing.sqlite3'), datecheckbefore)

    old_all_limits, old_all_states, old_all_versions = querydatabase(pathjoin(glimmondb.DBDIR, 
        'glimmondb.sqlite3'), datecheckbefore)

    saveoutputs(new_all_limits, new_all_states, new_all_versions, prestr='new')
    saveoutputs(old_all_limits, old_all_states, old_all_versions, prestr='old')

    newlimithash, newstatehash, newversionhash = gethashes(new_all_limits, new_all_states, 
                                                           new_all_versions)
    oldlimithash, oldstatehash, oldversionhash = gethashes(old_all_limits, old_all_states,
                                                           old_all_versions)

    print('newlimithash = {}\nnewstatehash = {}\nnewversionhash = {}'.format(newlimithash,
                                                                             newstatehash,
                                                                             newversionhash))
    print('oldlimithash = {}\noldstatehash = {}\noldversionhash = {}'.format(oldlimithash,
                                                                             oldstatehash,
                                                                             oldversionhash))

    assert oldlimithash == newlimithash
    assert oldstatehash == newstatehash
    assert oldversionhash == newversionhash

    print('Test Completed {}'.format(DateTime().caldate))
Example 3
def prepare_environment_variables():
    if "nt" == os.name:
        section = "Windows"
        vs = cp.get("Windows", "VisualStudio9")
        sdk = cp.get("Windows", "WindowsSDK7")
        pypyextlibs = cp.get("Windows", "pypyextlibs")
        paths = [pathjoin(vs, "VC"), pathjoin(vs, "VC", "atlmfc"), pypyextlibs, sdk]
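        # The MSVC toolchain reads INCLUDE/LIB/LIBPATH and Path from the environment,
        # so point them at Visual Studio, ATL/MFC, the SDK, and the prebuilt deps.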
        os.environ['INCLUDE'] = ";".join([pathjoin(e, "include") for e in paths])
        os.environ['LIB'] = ";".join([pathjoin(e, "lib") for e in paths])
        os.environ['LIBPATH'] = ";".join([pathjoin(e, "lib") for e in paths[0:3]])
        os.environ['Path'] = ";".join([pathjoin(vs, "VC", "bin"),
                                       pathjoin(vs, "Common7", "IDE"),
                                       pathjoin(sdk, "Bin"),
                                       pathjoin(cp.get("Windows", "Graphviz"), "bin"),
                                       pathjoin(pypyextlibs, "bin"),
                                       os.environ["Path"]])
        os.environ["SDL_PREFIX"] = cp.get("Windows", "SDL")
    elif "linux" in sys.platform:
        section = "Linux"
    elif "darwin" == sys.platform:
        section = "macOS"
        try:
            # Check if sdl2-config is installed (e.g. when SDL2 is installed via brew)
            os.environ["SDL_PREFIX"] = subprocess.check_output(['sdl2-config', '--prefix']).strip()
        except:
            pass
        try:
            mac_ver, _ignore, _ignore2 = platform.mac_ver()
            if len(mac_ver) != 0:
                mac_maj, mac_min, mac_point = mac_ver.split('.')[:3]
                if int(mac_min) >= 11 and not (
                    os.path.isfile('/usr/include/openssl/ssl.h') or
                    os.path.isfile('/usr/local/include/openssl/ssl.h')):
                    #
                    # Since 10.11, OS X no longer ships
                    # openssl system-wide, and Homebrew does not install it system-wide.
                    #
                    # see if header is there:
                    if len(subprocess.check_output(["pkg-config", "openssl", "--cflags-only-I"]).strip()) == 0:
                        if os.path.isdir('/usr/local/opt/openssl/lib/pkgconfig'):
                            if 'PKG_CONFIG_PATH' in os.environ:
                                os.environ['PKG_CONFIG_PATH'] =  '/usr/local/opt/openssl/lib/pkgconfig:' + os.environ['PKG_CONFIG_PATH']
                            else:
                                os.environ['PKG_CONFIG_PATH'] = '/usr/local/opt/openssl/lib/pkgconfig'
                    else:
                        # works nonetheless, ignore
                        pass
        except:
            pass
    else:
        raise AssertionError("Unsupported platform")
    for dependency in ["pypy", "rsdl", "sqpyte", "topaz", "rply", "appdirs"]:
        try:
            sys.path.insert(0, cp.get(section, dependency))
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            sys.path.insert(0, cp.get("General", dependency))
    try:
        import targetrsqueak as rsqueak
    except ImportError:
        sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
Example 4
 def daemon(self, logStdout=True, logStderr=True, logPidfile=True):
   ''' The actual call to execute the daemon. When running as another user,
       this calls the actual executables.
       
       Optional arguments:
       logStdout - True or False to enable or disable the logging of stdout.
                   Can also be a string to specify a different log file.
                   Does not create directory if it does not exist. 
                   Default: True
       logStderr - True or False to enable or disable the logging of stderr.
                   Can also be a string to specify a different log file.
                   Does not create directory if it does not exist. 
                   Default: True
       logPidfile - True or False to enable or disable writing of the pid file.
                    Can also be a string to specify a different pid file.
                    Does not create directory if it does not exist.
                    Default: True
                    
       Returns - Pid number'''
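   # Each flag may be True (use the default path under VIP_LOG_DIR) or a string naming the file.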
   args = []
   if logStdout:
     if logStdout is True:
       logStdout = pathjoin(env['VIP_LOG_DIR'], '%s_out.log' % self.name)
     args.extend(['-o', logStdout])
   if logStderr:
     if logStderr is True:
       logStderr = pathjoin(env['VIP_LOG_DIR'], '%s_err.log' % self.name)
     args.extend(['-e', logStderr])
   if logPidfile:
     if logPidfile is True:
       logPidfile = self.getPidFile()
     args.extend(['-p', logPidfile])
   return simpleWrap(args + self.precmd + self.cmd)
Example 5
  def handle(self, *args, **options):
    for (module_name, model_name, field_name, func_name) in fields:
      module = __import__(module_name, fromlist=[''])
      model = getattr(module, model_name)
      func = getattr(module, func_name)
      for obj in model.objects.all():
        filefield = getattr(obj, field_name)
        if not filefield: continue

        filepath = filefield.name
        expectedpath = func(obj, basename(filepath))


        absfilepath = pathjoin(settings.MEDIA_ROOT, filepath)
        absexpectedpath = pathjoin(settings.MEDIA_ROOT, expectedpath)
        expecteddirpath = dirname(absexpectedpath)

        if absfilepath == absexpectedpath and isfile(absexpectedpath): continue

        if not isdir(expecteddirpath):
          print 'mkdir', dirname(absexpectedpath)
          makedirs(dirname(absexpectedpath))

        if isfile(absfilepath):
          print 'mv', absfilepath, absexpectedpath
          rename(absfilepath, absexpectedpath)
        elif isfile(absexpectedpath):
          print '%s.%d: incorrect path in db; ok' % (model_name, obj.id)
        else:
          print 'MISSING FILE! (%s.%d) %s' % (model_name, obj.id, absfilepath)

        filefield.name = expectedpath
        obj.save()
Example 6
    def _acquire_sources(self, out):
        out.write("PyMVPA:\n")
        out.write(" Version:       %s\n" % mvpa2.__version__)
        out.write(" Hash:          %s\n" % mvpa2.__hash__)
        out.write(" Path:          %s\n" % mvpa2.__file__)

        # Try to obtain git information if available
        out.write(" Version control (GIT):\n")
        try:
            gitpath = pathjoin(os.path.dirname(mvpa2.__file__), os.path.pardir)
            gitpathgit = pathjoin(gitpath, '.git')
            if os.path.exists(gitpathgit):
                for scmd, cmd in [
                    ('Status', ['status']),
                    ('Reference', 'show-ref -h HEAD'.split(' ')),
                    ('Difference from last release %s' % mvpa2.__version__,
                     ['diff', '--shortstat', 'upstream/%s...' % mvpa2.__version__])]:
                    try:
                        (tmpd, tmpn) = mkstemp('mvpa', 'git')
                        retcode = subprocess.call(['git',
                                                   '--git-dir=%s' % gitpathgit,
                                                   '--work-tree=%s' % gitpath] + cmd,
                                                  stdout=tmpd,
                                                  stderr=subprocess.STDOUT)
                    finally:
                        outlines = open(tmpn, 'r').readlines()
                        if len(outlines):
                            out.write('  %s:\n   %s' % (scmd, '   '.join(outlines)))
                        os.remove(tmpn)
                    #except Exception, e:
                    #    pass
            else:
                raise RuntimeError, "%s is not under GIT" % gitpath
        except Exception, e:
            out.write(' GIT information could not be obtained due to "%s"\n' % e)
Example 7
def get_pymvpa_gitversion():
    """PyMVPA version as reported by git.

    Returns
    -------
    None or str
      Version of PyMVPA according to git.
    """
    gitpath = pathjoin(os.path.dirname(mvpa2.__file__), os.path.pardir)
    gitpathgit = pathjoin(gitpath, '.git')
    if not os.path.exists(gitpathgit):
        return None
    ver = None
    try:
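        # NB: mkstemp's first two positional arguments are (suffix, prefix),
        # so this creates temp files named like "git...mvpa".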
        (tmpd, tmpn) = mkstemp('mvpa', 'git')
        retcode = subprocess.call(['git',
                                   '--git-dir=%s' % gitpathgit,
                                   '--work-tree=%s' % gitpath,
                                   'describe', '--abbrev=4', 'HEAD'
                                   ],
                                  stdout=tmpd,
                                  stderr=subprocess.STDOUT)
        outline = open(tmpn, 'r').readlines()[0].strip()
        if outline.startswith('upstream/'):
            ver = outline.replace('upstream/', '')
    finally:
        os.remove(tmpn)
    return ver
Example 8
def test_lightbox(slices):
    skip_if_no_external('nibabel') # used for loading the niftis here
    # smoketest for lightbox - moved from its .py __main__
    from mvpa2.misc.plot.lightbox import plot_lightbox
    fig = plot_lightbox(
        #background = NiftiImage('%s/anat.nii.gz' % impath),
        background = pathjoin(pymvpa_dataroot, 'bold.nii.gz'),
        background_mask = None,
        overlay = pathjoin(pymvpa_dataroot, 'bold.nii.gz'),
        overlay_mask = pathjoin(pymvpa_dataroot, 'mask.nii.gz'),
        #
        do_stretch_colors = False,
        add_colorbar = True,
        cmap_bg = 'gray',
        cmap_overlay = 'hot', # YlOrRd_r # pl.cm.autumn
        #
        fig = None,
        # vlim describes value limits
        # clim color limits (same by default)
        vlim = [1500, None],
        #vlim_type = 'symneg_z',
        interactive = True,
        #
        #nrows = 2,
        #ncolumns = 3,
        add_info = (1, 2),
        add_hist = (0, 2),
        #
        slices = slices
        )
    assert_true(fig)
Example 9
    def lint(self, view):
        self.rc -= 1
        if self.rc == 0:
            err = ''
            cmd = gs.setting('gslint_cmd', 'gotype')
            real_path = view.file_name()
            pat_prefix = ''
            pwd = dirname(real_path)
            fn = basename(real_path)
            # normalize the path so we can compare it below
            real_path = pathjoin(pwd, fn)
            tmp_path = pathjoin(pwd, '.GoSublime~tmp~%d~%s~' % (view.id(), fn))
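            # The unsaved buffer is written to this sibling temp file so the
            # linter can check it together with the package's other .go files.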
            try:
                if cmd:
                    files = []
                    if real_path:
                        for fn in listdir(pwd):
                            if fn.lower().endswith('.go'):
                                fn = pathjoin(pwd, fn)
                                if fn != real_path:
                                    files.append(fn)

                    src = view.substr(sublime.Region(0, view.size())).encode('utf-8')
                    if files:
                        # m = LEADING_COMMENTS_PAT.sub('', src)
                        m = LEADING_COMMENTS_PAT.match(src)
                        m = PACKAGE_NAME_PAT.search(src, m.end(1) if m else 0)
                        if m:
                            pat_prefix = '^' + re.escape(tmp_path)
                            with open(tmp_path, 'wb') as f:
                                f.write(src)
                            args = [cmd, '-p', m.group(1), tmp_path]
                            args.extend(files)
                            _, err = gs.runcmd(args)
                            unlink(tmp_path)
                        else:
                            sublime.status_message('Cannot find PackageName')
                    else:
                        _, err = gs.runcmd([cmd], src)
            except Exception as e:
                sublime.status_message(str(e))

            regions = []
            view_id = view.id()
            self.errors[view_id] = {}

            for m in re.finditer(r'%s:(\d+):(\d+):\s+(.+)\s*$' % pat_prefix, err, re.MULTILINE):
                line, start, err = int(m.group(1))-1, int(m.group(2))-1, m.group(3)
                self.errors[view_id][line] = err
                pos = view.line(view.text_point(line, 0)).begin() + start
                if pos >= view.size():
                    pos = view.size() - 1
                regions.append(sublime.Region(pos, pos))

            if regions:
                flags = sublime.DRAW_EMPTY_AS_OVERWRITE
                view.add_regions('GsLint-errors', regions, 'invalid.illegal', 'cross', flags)
            else:
                view.erase_regions('GsLint-errors')
        self.on_selection_modified(view)
Example 10
def multicloud():
    """
    Handles the functionality on the multicloud pages.

    Note: Returns a response object (often a render_template call) to flask and eventually
    to the browser.
    """

    fileManager = session_functions.loadFileManager()
    if 'multicloudoptions' not in session:
        session['multicloudoptions'] = constants.DEFAULT_MC_OPTIONS

    folderPath = pathjoin(session_functions.session_folder(), constants.RESULTS_FOLDER)
    if not os.path.isdir(folderPath):
        makedirs(folderPath)
    malletPath = pathjoin(folderPath, "topicFile")


    if request.method == 'GET':
        # 'GET' request occurs when the page is first loaded.

        labels = fileManager.getActiveLabels()

        return render_template('multicloud.html', jsonStr="", labels=labels)

    if request.method == "POST":
        # 'POST' requests occur when the html form is submitted (i.e. 'Get Graphs', 'Download...')

        labels = fileManager.getActiveLabels()        

        JSONObj = fileManager.generateMCJSONObj(malletPath)

        return render_template('multicloud.html', JSONObj = JSONObj, labels=labels, loading='loading')
Example 11
def load_hashes():
    '''
    Loads icon hashes from the icon hash file in the local cache directory.
    '''

    assert 'hashes' not in globals()     # make sure only called once
    global hashes
    global cache_path

    hashes = {}

    # save icon hashes on exit
    wx.GetApp().PreShutdown.append(write_hashes)

    cache_path = pathjoin(stdpaths.userlocaldata, 'cache')

    if not pathexists(cache_path):
        os.makedirs(cache_path)

    else:
        hash_filename = pathjoin(cache_path, ICON_HASH_FILE)

        if not pathexists(hash_filename):
            log.info('no icon hash file %r found', hash_filename)
        else:
            try:
                with file(hash_filename, 'rb') as f:
                    hashes = cPickle.load(f)
                    return
            except Exception:
                log.critical('error loading icon hashes from %r', hash_filename)
                print_exc()
Example 12
    def create():
        """
swift-ring-builder <builder_file> create <part_power> <replicas>
                                         <min_part_hours>
    Creates <builder_file> with 2^<part_power> partitions and <replicas>.
    <min_part_hours> is number of hours to restrict moving a partition more
    than once.
        """
        if len(argv) < 6:
            print(Commands.create.__doc__.strip())
            exit(EXIT_ERROR)
        # 1. Create the RingBuilder instance
        builder = RingBuilder(int(argv[3]), float(argv[4]), int(argv[5]))

        # 2. Create the backup directory
        backup_dir = pathjoin(dirname(builder_file), 'backups')
        try:
            mkdir(backup_dir)
        except OSError as err:
            if err.errno != EEXIST:
                raise

        # 3. Save the initial data to the backup directory and to the /etc/swift directory
        builder.save(pathjoin(backup_dir,
                              '%d.' % time() + basename(builder_file)))
        builder.save(builder_file)
        exit(EXIT_SUCCESS)
Example 13
def stubbed_env():
    """Create stubbed module with a sample script"""
    os.makedirs(stubbed_dir)
    with open(stubbed_script, 'wb') as f:
        f.write("""
from due import due, Doi

kwargs = dict(
    entry=Doi("10.1007/s12021-008-9041-y"),
    description="Multivariate pattern analysis of neural data",
    tags=["use"]
)

due.cite(path="test", **kwargs)


@due.dcite(**kwargs)
def method(arg):
    return arg+1

assert method(1) == 2
print("done123")
""".encode())
    # copy stub.py under stubbed
    shutil.copy(
        pathjoin(dirname(__file__), os.pardir, 'stub.py'),
        pathjoin(stubbed_dir, 'due.py')
    )
    yield stubbed_script
    # cleanup
    shutil.rmtree(stubbed_dir)
Example 14
 def abrir_factura(self, tv, path, view_column):
     model = tv.get_model()
     idfactura = model[path][-1]
     if idfactura > 0 and model[path][0] != "":
         if model[path][0].startswith("*"):
             fra = pclases.Prefactura.get(idfactura)
             try:
                 import prefacturas
             except ImportError:
                 from os.path import join as pathjoin
                 from sys import path
                 path.insert(0, pathjoin("..", "formularios"))
                 import prefacturas
             ventana = prefacturas.Prefacturas(fra, self.usuario)
         else:
             fra = pclases.FacturaVenta.get(idfactura)
             try:
                 import facturas_venta
             except ImportError:
                 from os.path import join as pathjoin
                 from sys import path
                 path.insert(0, pathjoin("..", "formularios"))
                 import facturas_venta
             ventana = facturas_venta.FacturasVenta(fra, self.usuario)
     elif idfactura > 0 and model[path][0] == "":    # It's a customer.
         cliente = pclases.Cliente.get(idfactura)
         try:
             import clientes
         except ImportError:
             from os.path import join as pathjoin
             from sys import path
             path.insert(0, pathjoin("..", "formularios"))
             import clientes
         ventana_clientes = clientes.Clientes(cliente, self.usuario)
Example 15
def placeArtifact(artifact_file, repo_dirname, org, module, revision, status="release", meta={}, deps=[], supplied_ivy_file=None, scala=None, override=None, override_dir_only=False):
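    # Ivy-style repository layout: <repo>/<org>/<module>/<revision>/ holding
    # ivy.xml plus <module>-<revision>.jar (a symlink to the built artifact).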
    if scala is not None:
        module = module + "_%s" % scala
    jarmodule = module
    if override is not None:
        org, module = override
        if not override_dir_only:
            jarmodule = module
    repo_dir = realpath(repo_dirname)
    artifact_dir = pathjoin(*[repo_dir] + [org] + [module, revision])
    ivyxml_path = pathjoin(artifact_dir, "ivy.xml")
    artifact_repo_path = pathjoin(artifact_dir, "%s-%s.jar" % (jarmodule, revision))
    
    if not pathexists(artifact_dir):
        makedirs(artifact_dir)
    
    ivyxml_file = open(ivyxml_path, "w")
    if supplied_ivy_file is None:
        writeIvyXml(org, module, revision, status, ivyxml_file, meta=meta, deps=deps)
    else:
        copyfile(supplied_ivy_file, ivyxml_path)
    
    if pathexists(artifact_repo_path):
        rmfile(artifact_repo_path)
    
    symlink(artifact_file, artifact_repo_path)
Example 16
def load_example_fmri_dataset(name='1slice', literal=False):
    """Load minimal fMRI dataset that is shipped with PyMVPA."""
    from mvpa2.datasets.sources.openfmri import OpenFMRIDataset
    from mvpa2.datasets.mri import fmri_dataset
    from mvpa2.misc.io import SampleAttributes

    basedir = pathjoin(pymvpa_dataroot, 'haxby2001')
    mask = {'1slice': pathjoin(pymvpa_dataroot, 'mask.nii.gz'),
            '25mm': pathjoin(basedir, 'sub001', 'masks', '25mm',
                                 'brain.nii.gz')}[name]

    if literal:
        model = 1
        subj = 1
        openfmri = OpenFMRIDataset(basedir)
        ds = openfmri.get_model_bold_dataset(model, subj, flavor=name,
                                             mask=mask, noinfolabel='rest')
        # re-imagine the global time_coords of a concatenated time series
        # this is only for the purpose of keeping the example data in the
        # exact same shape as it has always been. in absolute terms this makes no
        # sense as there is no continuous time in this dataset
        ds.sa['run_time_coords'] = ds.sa.time_coords
        ds.sa['time_coords'] = np.arange(len(ds)) * 2.5
    else:
        if name == '25mm':
            raise ValueError("The 25mm dataset is no longer available with "
                             "numerical labels")
        attr = SampleAttributes(pathjoin(pymvpa_dataroot, 'attributes.txt'))
        ds = fmri_dataset(samples=pathjoin(pymvpa_dataroot, 'bold.nii.gz'),
                          targets=attr.targets, chunks=attr.chunks,
                          mask=mask)

    return ds
Example 17
 def __init__(self, conf):
     self.current_md5 = {}
     self.swiftdir = conf.get('swiftdir', '/etc/swift')
     self.rings = {'account': conf.get('account_ring',
                                       pathjoin(self.swiftdir,
                                                'account.ring.gz')),
                   'container': conf.get('container_ring',
                                         pathjoin(self.swiftdir,
                                                  'container.ring.gz')),
                   'object': conf.get('object_ring',
                                      pathjoin(self.swiftdir,
                                               'object.ring.gz'))}
     self.start_delay = int(conf.get('start_delay_range', '120'))
     self.check_interval = int(conf.get('check_interval', '30'))
     self.ring_master = conf.get('ring_master', 'http://127.0.0.1:8090/')
     self.ring_master_timeout = int(conf.get('ring_master_timeout', '30'))
     self.debug = conf.get('debug', 'n') in TRUE_VALUES
     if self.debug:
         conf['log_level'] = 'DEBUG'
     self.logger = get_logger(conf, 'ringminiond', self.debug)
     if not os.access(self.swiftdir, os.W_OK):
         self.logger.error('swift_dir is not writable. exiting!')
         sys.exit(1)
     for ring in self.rings:
         if exists(self.rings[ring]):
             self.current_md5[self.rings[ring]] = \
                 get_md5sum(self.rings[ring])
         else:
             self.current_md5[self.rings[ring]] = ''
Example 18
    def onButton_run(self, evt):
        self.Disable()

        # First, remove all files
        if os.path.exists(self.proj.extracted_dir): shutil.rmtree(self.proj.extracted_dir)
        if os.path.exists(self.proj.extracted_metadata): shutil.rmtree(self.proj.extracted_metadata)
        if os.path.exists(self.proj.ballot_metadata): shutil.rmtree(self.proj.ballot_metadata)
        if os.path.exists(pathjoin(self.proj.projdir_path, self.proj.targetextract_quarantined)):
            os.remove(pathjoin(self.proj.projdir_path, self.proj.targetextract_quarantined))
        if os.path.exists(pathjoin(self.proj.projdir_path, "extracted_radix")): 
            shutil.rmtree(pathjoin(self.proj.projdir_path, "extracted_radix"))
        if os.path.exists(pathjoin(self.proj.projdir_path, "extracted_diffs")):
            shutil.rmtree(pathjoin(self.proj.projdir_path, "extracted_diffs"))
        if os.path.exists(pathjoin(self.proj.projdir_path, "targetextract_avg_intensities.p")):
            os.remove(pathjoin(self.proj.projdir_path, "targetextract_avg_intensities.p"))
        if os.path.exists(pathjoin(self.proj.projdir_path, self.proj.ballot_to_targets)):
            os.remove(pathjoin(self.proj.projdir_path, self.proj.ballot_to_targets))
        if os.path.exists(self.proj.classified):
            os.remove(self.proj.classified)
        if os.path.exists(self.proj.extractedfile):
            os.remove(self.proj.extractedfile)
        if os.path.exists(self.proj.extractedfile+".type"):
            os.remove(self.proj.extractedfile+".type")
        if os.path.exists(self.proj.extractedfile+".size"):
            os.remove(self.proj.extractedfile+".size")
        if os.path.exists(self.proj.threshold_internal):
            os.remove(self.proj.threshold_internal)

        t = RunThread(self.proj)
        t.start()

        gauge = util.MyGauge(self, 5, tofile=self.proj.timing_runtarget,
                             ondone=self.on_targetextract_done, thread=t)
        gauge.Show()
Example 19
    def export_results(self):
        print "(LabelDigits) Exporting results."
        digitpatch2temp = pickle.load(open(pathjoin(self.proj.projdir_path,
                                                    self.proj.digitpatch2temp)))
        
        digitattrvals_blanks = {}  # maps {str templatepath: {digitattrtype: (str precinctstr, bb, side)}}
        # Oddly enough, I don't think anything uses the SCORE or PATCHPATH entries
        # of DIGITEXEMPLARS_MAP. Let's not fill them in.
        digitexemplars_map = {} # maps {str digit: ((regionpath_i, score, bb, patchpath_i), ...)} where BB := [y1,y2,x1,x2]

        for cellid, boxes in self.gridpanel.cellid2boxes.iteritems():
            # TODO: Assumes precinct nums are oriented horizontally.
            boxes_sorted = sorted(boxes, key=lambda b: b[0])
            precinctstr = "".join([b[-1] for b in boxes_sorted])

            regionpath = self.gridpanel.cellid2imgpath[cellid]
            temppath, attrstr, bb, side = digitpatch2temp[regionpath]
            digitattrvals_blanks.setdefault(temppath, {})[attrstr] = (precinctstr, bb, side)

            for box in boxes_sorted:
                if box[0] is None:
                    # This is a manual-labeled cell
                    continue
                # digitexemplars_map expects the bb to be: [y1, y2, x1, x2]
                bb = [box[1], box[3], box[0], box[2]]
                digitval = box[-1]
                digitexemplars_map.setdefault(digitval, []).append((regionpath, None, bb, None))

        pickle.dump(digitattrvals_blanks, open(pathjoin(self.proj.projdir_path,
                                                        self.proj.digitattrvals_blanks),
                                               'wb'))
        de_mapP = pathjoin(self.proj.projdir_path,
                           self.proj.digit_exemplars_map)
        pickle.dump(digitexemplars_map, open(de_mapP, 'wb'))
Example 20
    def run(self):
        if self.mafargs.path is None:
            return

        with _processSemaphore:
            try:
                # this is just where the annotation and binarization scripts put things. Don't ask too many questions...
                outputdir = pathjoin(self.dateDir, self.mafargs.tumorcode)
                annotationOutput = pathjoin(outputdir, os.path.basename(self.mafargs.path) + '.ncm.with_uniprot')

                if self._ensurePipelineOutput(ANNOTATE, (self.mafargs.tumorcode, self.mafargs.path), annotationOutput): return

                binarizationOutput = findBinarizationOutput(outputdir)
                if self._ensurePipelineOutput(BINARIZATION, (self.mafargs.tumorcode, annotationOutput), binarizationOutput): return
                binarizationOutput = findBinarizationOutput(outputdir)

                if self._ensurePipelineOutput(POST_MAF, (self.mafargs.tumorcode, binarizationOutput), None): return

                ppstring = 'private'  # TODO is it always this way?
                fmxsuffix = tumorTypeConfig[self.mafargs.tumorcode]['fmx_suffix'] + ".tsv"

                if self._ensurePipelineOutput(FMX, (self.dateString, self.mafargs.tumorcode, ppstring, fmxsuffix), None): return

                # TODO load into re
            except KeyboardInterrupt:
                logToFile(self.env[LOGGER_ENV], 'FATAL', "Keyboard interrupt")
            except PipelineError as perror:
                logToFile(self.env[LOGGER_ENV], 'FATAL', perror.message)
            finally:
                self._cleanupOutputFolder()
Example 21
def build_libxslt():
    'needed by webkit and lxml'

    from compiledeps import libxml2_dirname

    libxmlabs = abspath(libxml2_dirname)
    libxmlinc = pathjoin(libxmlabs, 'include')
    libxmllib = pathjoin(libxmlabs, 'win32', 'bin.msvc')
    iconvinc = pathjoin(abspath(iconv_dirname), 'include')

    libxslt_dir = libxslt.get()
    with cd(libxslt_dir):
        with cd('libxslt'):
            ensure_processonenode_is_public()

        with cd('win32'):
            patch_libxml2_h('..', 'libxslt')
            debug_flag = ['debug=yes'] if DEBUG else []
            run(['cscript', 'configure.js', '//E:JavaScript', 'vcmanifest=yes']
                + debug_flag +
                ['include=%s' % os.pathsep.join([libxmlinc, iconvinc]),
                 'lib=%s' % libxmllib])

            filerepl('Makefile.msvc', '/O2', '/Os /GL /GS- /Zi')
            filerepl('Makefile.msvc', 'LDFLAGS = /nologo', 'LDFLAGS = /OPT:REF /OPT:ICF /nologo /DEBUG')

            run(['nmake', '-f', 'Makefile.msvc'] + (['clean'] if CLEAN else []))

    return libxslt_dir
Example 22
def row_into_images_table(row, dbconn, topdir):
    '''
    Take row dictionary (returned by csv.DictReader) and use it
    to push a row to the images table in the database.
    '''
    base = ("insert into images "
            "(gloss, image, im_ty, im_credit) values "
            "(?, ?, ?, ?)")

    image_loc = pathjoin(topdir, 'SignTypFromLightroom', row['File name']+'.jpg')
    with open(image_loc, mode='rb') as f:
        image = sqlite3.Binary(f.read())

    credit_loc = pathjoin(topdir, 'SignTypFromLightroom', row['File name']+'.txt')
    credit = None
    with open(credit_loc) as f:
        for line in f:
            if line.startswith('Photo URL'):
                header, tail = line.split(':', 1)
                credit = tail.strip()

    try:
        with dbconn:
            # J = JPG here -- have to ask Bob what he meant by im_ty
            dbconn.execute(base, (row['CaptionKey'], image, 'J', credit))
    except sqlite3.Error as e:
        logging.error(e.args[0])
Example 23
def user_files_list(dir_name=""):
    ''' HTML list of user-uploaded files. '''

    user = user_session.get_user()

    full_path = pathjoin(g.site_vars['user_dir'], dir_name)

    if not isdir(full_path):
        makedirs(full_path)

    if request.method == 'POST' and user.is_admin:
        if request.form.get('action') == 'upload':
            f = request.files['image_file']
            if f and allow_filetype(f.filename):
                filename = secure_filename(f.filename)
                f.save(pathjoin(full_path, filename))
                flash('Uploaded file: ' + filename)
            else:
                flash('Sorry. Invalid Filetype')
        elif request.form.get('action') == 'delete':
            filename = secure_filename(request.form.get('filename'))
            full_filename = pathjoin(full_path, filename)
            remove(full_filename)
            flash('Deleted ' + filename)


    files = make_dirlist(dir_name)

    return render_template('user_files.html',
                           full_path=full_path,
                           file_list=files,
                           dirname=dir_name)
Example 24
def thumbnail(filename):
    ''' return a thumbnail of an (image) file.  if one doesn't exist,
        create one (with imagemagick(convert)) '''

    full_path = pathjoin(g.site_vars['user_dir'], filename)
    thumb_path = pathjoin(g.site_vars['user_dir'], '.thumbnails', filename)

    if splitext(filename)[-1] not in IMAGE_FORMATS:
        return 'not an image I will not make a thumbnail.'

    if isfile(full_path):
        if not isfile(thumb_path):
            # we need to make a thumbnail!
            where = pathjoin(g.site_vars['user_dir'],
                             '.thumbnails',
                             dirname(filename))
            if not isdir(where):
                makedirs(where)

            try:
                check_call([pathjoin(g.site_vars['site_dir'],
                                     'scripts',
                                     'makethumbnail.sh'),
                           full_path, thumb_path])
            except:
                return 'Sorry!'
        # either there is a thumbnail, or we just made one.
        return redirect(g.site_vars['user_url'] + '/.thumbnails/' + filename)
    else:
        return 'Sorry! not a valid original file!'
Example 25
def make_dirlist(path):
    ''' returns a list of the files and sub-dirs in a directory, ready to be
        JSON encoded, and sent to a client, or rendererd server-side. '''

    return_list = []
    things = glob(pathjoin(g.site_vars['user_dir'], path, '*'))
    for f in things:
        name = basename(f)
        if isdir(f):
            return_list.append(
                {'name': name + '/',
                 'url':  name + '/',
                 'size': "{0} items".format(len(glob(pathjoin(f, '*'))))})
        else:
            if allow_filetype(name):
                thumb = '<img src="{0}" alt="{1}" />'.format(
                    url_for('thumbnail', filename=path + name),
                    name)
            else:
                thumb = ''

            return_list.append(
                {'name':  name,
                 'thumb': thumb,
                 'url':   pathjoin(g.site_vars['user_url'], path, name),
                 'size':  human_size_str(f)})
    return return_list
Example 26
    def make_joblist(self):
        """Make a list of jobs with all variants for all infiles and create outfile directories.

        The in/out mapping is file.* -> outdir/<infile base>/<variant name>.mp4."""

        def get_task_lock_file(out_filename):
            "Get task-lock filename."
            return "%s.X" % splitext(out_filename)[0]

        def get_logfile(out_filename):
            "Get logfile name"
            return "%s.log" % splitext(out_filename)[0]

        for infile in self.infiles:
            if not os.path.exists(infile):
                print "Warning: infile %s does not exist. Skipping it" % infile
                continue
            infile_base = splitext(pathsplit(infile)[1])[0]
            for variant in self.config:
                outdir = normpath(pathjoin(self.outdir, infile_base))
                if not os.path.exists(outdir):
                    os.makedirs(outdir)
                outfile = pathjoin(outdir, variant['name'] + '.mp4')
                taskfile = get_task_lock_file(outfile)
                logfile = get_logfile(outfile)
                if os.path.exists(taskfile) or (not os.path.exists(outfile)):
                    job = {'inFile' : infile, 'outFile' : outfile, 'lockFile' : taskfile,
                           'get_logfile' : logfile}
                    job.update(variant)
                    self.jobs.append(job)
                    if len(self.jobs) == self.max_jobs:
                        break
Example 27
def load_projects(projdir):
    """ Returns a list of all Project instances contained in PROJDIR.
    Input:
        str PROJDIR:
    Output:
        list PROJECTS.
    """
    projects = []
    dummy_proj = Project()
    #for dirpath, dirnames, filenames in os.walk(projdir):
    #    for f in filenames:
    try: os.makedirs(projdir)
    except: pass

    for subfolder in os.listdir(projdir):
        if os.path.isdir(pathjoin(projdir, subfolder)):
            for f in os.listdir(pathjoin(projdir, subfolder)):
                if f == PROJ_FNAME:
                    fullpath = pathjoin(projdir, pathjoin(subfolder, f))
                    try:
                        proj = pickle.load(open(fullpath, 'rb'))
                        # Add in any new Project properties to PROJ
                        for prop, propval_default in dummy_proj.vals.iteritems():
                            if not hasattr(proj, prop):
                                print '...adding property {0}->{1} to project...'.format(prop, propval_default)
                                setattr(proj, prop, propval_default)
                        projects.append(proj)
                    except:
                        pass
    return projects
Example 28
def generateRWmatrix(dataList):
    """
    Generates rolling windows graph raw data matrix

    Args:
        dataList: a list of [x, y] points

    Returns:
        Output file path and extension.
    """

    extension = '.csv'
    deliminator = ','

    folderPath = pathjoin(session_manager.session_folder(), constants.RESULTS_FOLDER)
    if not os.path.isdir(folderPath):
        makedirs(folderPath)
    outFilePath = pathjoin(folderPath, 'RWresults' + extension)

    rows = ["" for _ in xrange(len(dataList[0]))]
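    # Each CSV row j collects element j from every inner list of dataList (a transpose).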

    with open(outFilePath, 'w') as outFile:
        for i in xrange(len(dataList)):

            for j in xrange(len(dataList[i])):
                rows[j] = rows[j] + str(dataList[i][j]) + deliminator

        for i in xrange(len(rows)):
            outFile.write(rows[i] + '\n')

    return outFilePath, extension
Example 29
	def syncFiles(self, path=None):
		app = self.obj
		if isinstance(app, dApp):
			homedir = app.HomeDirectory
			try:
				appname = file(pathjoin(homedir, ".appname")).read()
			except IOError:
				# Use the HomeDirectory name
				appname = os.path.split(homedir.rstrip("/"))[1]
		else:
			# Running from the launch method. The path will contain the app name
			appname = path
			homedir = pathjoin(self._dataDir, path)
			if not os.path.exists(homedir):
				os.makedirs(homedir)
			os.chdir(homedir)
		url = self._getManifestUrl(appname, "diff")
		# Get the current manifest
		currentMf = Manifest.getManifest(homedir)
		params = {"current": jsonEncode(currentMf)}
		prm = urllib.urlencode(params)
		try:
			res = self.UrlOpener.open(url, data=prm)
		except urllib2.HTTPError, e:
			errcode = e.code
			errText = e.read()
			errMsg = "\n".join(errText.splitlines()[4:])
			if errcode == 304:
				# Nothing has changed on the server, so we're cool...
				return homedir
			else:
				dabo.log.error(_("HTTP Error syncing files: %s") % e)
				return
Example 30
    def create():
        """
swift-ring-builder <builder_file> create <part_power> <replicas>
                                         <min_part_hours>
    Creates <builder_file> with 2^<part_power> partitions and <replicas>.
    <min_part_hours> is number of hours to restrict moving a partition more
    than once.
OR
swift-ring-builder <builder_file> create <part_power> <replicas>
                                         <min_part_hours> <policy_info_file>
    policy_info_file contains the information about the policy
        """
        if len(argv) < 6:
            print Commands.create.__doc__.strip()
            exit(EXIT_ERROR)
        if len(argv) > 6:
            builder = RingBuilder(int(argv[3]), float(argv[4]), int(argv[5]), argv[6])
        else:
            builder = RingBuilder(int(argv[3]), float(argv[4]), int(argv[5]))
        backup_dir = pathjoin(dirname(argv[1]), 'backups')
        try:
            mkdir(backup_dir)
        except OSError as err:
            if err.errno != EEXIST:
                raise
        builder.save(pathjoin(backup_dir, '%d.' % time() + basename(argv[1])))
        builder.save(argv[1])
        exit(EXIT_SUCCESS)
Example 31
    def from_config_files(cls, files=None, reload=False):
        """Loads information about related/possible websites requiring authentication from:

        - datalad/downloaders/configs/*.cfg files provided by the codebase
        - current dataset .datalad/providers/
        - User's config directory (ie ~/.config/datalad/providers/*.cfg)
        - system-wide datalad installation/config (ie /etc/datalad/providers/*.cfg)

        For sample configs files see datalad/downloaders/configs/providers.cfg

        If files is None, loading is cached between calls.  Specify reload=True to force
        reloading of files from the filesystem.  The class method reset_default_providers
        can also be called to reset the cached providers.
        """
        # lazy part
        dsroot = get_dataset_root("")
        if files is None and cls._DEFAULT_PROVIDERS and not reload and dsroot == cls._DS_ROOT:
            return cls._DEFAULT_PROVIDERS

        config = SafeConfigParserWithIncludes()
        files_orig = files
        if files is None:
            # Config files from the datalad dist
            files = glob(
                pathjoin(dirname(abspath(__file__)), 'configs', '*.cfg'))

            # Dataset config
            if dsroot is not None:
                files.extend(
                    glob(pathjoin(dsroot, '.datalad', 'providers', '*.cfg')))
            cls._DS_ROOT = dsroot

            # System config
            if dirs.site_config_dir is not None:
                files.extend(
                    glob(pathjoin(dirs.site_config_dir, "providers", "*.cfg")))

            # User config
            if dirs.user_config_dir is not None:
                files.extend(
                    glob(pathjoin(dirs.user_config_dir, 'providers', '*.cfg')))

        config.read(files)

        # We need first to load Providers and credentials
        # Order matters, because we need to ensure that when
        # there's a conflict between configuration files declared
        # at different precedence levels (ie. dataset vs system)
        # the appropriate precedence config wins.
        providers = OrderedDict()
        credentials = {}

        for section in config.sections():
            if ':' in section:
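                # Section names are qualified as "provider:NAME" or "credential:NAME".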
                type_, name = section.split(':', 1)
                assert type_ in {
                    'provider', 'credential'
                }, "we know only providers and credentials, got type %s" % type_
                items = {
                    o: config.get(section, o)
                    for o in config.options(section)
                }
                # side-effect -- items get popped
                locals().get(type_ + "s")[name] = getattr(
                    cls, '_process_' + type_)(name, items)
                if len(items):
                    raise ValueError("Unprocessed fields left for %s: %s" %
                                     (name, str(items)))
            else:
                lgr.warning("Do not know how to treat section %s here" %
                            section)

        # link credentials into providers
        lgr.debug("Assigning credentials into %d providers" % len(providers))
        for provider in providers.values():
            if provider.credential:
                if provider.credential not in credentials:
                    raise ValueError(
                        "Unknown credential %s. Known are: %s" %
                        (provider.credential, ", ".join(credentials.keys())))
                provider.credential = credentials[provider.credential]

        providers = Providers(list(providers.values()))

        if files_orig is None:
            # Store providers for lazy access
            cls._DEFAULT_PROVIDERS = providers

        return providers
Example 32
    KERNEL32 = None
    USER32 = None

# Colors
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
BACKGROUND_BLUE = (43, 70, 104)
BACKGROUND_BLUE_DARK = (31, 46, 63)
BACKGROUND_BLUE_GRID = (38, 63, 94)
BACKGROUND_GRAY = (162, 154, 194)
BACKGROUND_GRAY_GRID = (178, 169, 211)

try:  # When bundled as single executable
    # noinspection PyUnresolvedReferences
    TEMP_FILES = sys._MEIPASS
    POLYCONVERTER = pathjoin(TEMP_FILES, "PolyConverter.exe")
    ICON = pathjoin(TEMP_FILES, "favicon.ico")
except AttributeError:
    TEMP_FILES = None
    POLYCONVERTER = "PolyConverter.exe"
    ICON = None
JSON_EXTENSION = ".layout.json"
LAYOUT_EXTENSION = ".layout"
BACKUP_EXTENSION = ".layout.backup"
FILE_REGEX = re.compile(f"^(.+)({JSON_EXTENSION}|{LAYOUT_EXTENSION})$")
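# e.g. FILE_REGEX.match("level1.layout").groups() -> ("level1", ".layout")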
SUCCESS_CODE = 0
JSON_ERROR_CODE = 1
CONVERSION_ERROR_CODE = 2
FILE_ERROR_CODE = 3
GAMEPATH_ERROR_CODE = 4
Example 33
 def mboxes(self):
     project = basename(self.directory)
     return [(name,
              pathjoin(self.directory, ".git", ".".join(
                  (project, name, "mbox"))))
             for name in ("dev1", "dev2", "user1", "user2")]
Example 34
 def codeface_conf(self):
     return pathjoin(self.directory, ".git", "testproject.conf")
Example 35
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
from os.path import dirname, abspath, join as pathjoin
STATIC_ROOT=abspath(pathjoin(dirname(__file__), '..', '..', 'staticfiles'))
COMPRESS_ROOT = STATIC_ROOT

# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
COMPRESS_URL = STATIC_URL

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# Cache-busting static assets
Example 36
def main():
    parser = create_parser()

    (options, args) = parser.parse_args()

    if len(args) != 1:
        if len(args) == 0:
            parser.error('command name not specified')
        else:
            parser.error('too many arguments')
    command = args[0]

    if command not in COMMANDS:
        parser.error('unsupported command: %s' % command)

    try:
        install_path = find_install_path(sys.argv[0])
    except Exception as e:
        print('ERROR: %s' % e)
        sys.exit(LSB_STATUS_UNKNOWN)

    o = Options()
    o.verbose = options.verbose
    o.install_path = install_path
    o.launcher_config = realpath(
        options.launcher_config
        or pathjoin(o.install_path, 'bin/launcher.properties'))
    o.etc_dir = realpath(options.etc_dir or pathjoin(o.install_path, 'etc'))
    o.node_config = realpath(options.node_config
                             or pathjoin(o.etc_dir, 'node.properties'))
    o.jvm_config = realpath(options.jvm_config
                            or pathjoin(o.etc_dir, 'jvm.config'))
    o.config_path = realpath(options.config
                             or pathjoin(o.etc_dir, 'config.properties'))
    o.log_levels = realpath(options.log_levels_file
                            or pathjoin(o.etc_dir, 'log.properties'))
    o.log_levels_set = bool(options.log_levels_file)

    if options.node_config and not exists(o.node_config):
        parser.error('Node config file is missing: %s' % o.node_config)

    node_properties = {}
    if exists(o.node_config):
        node_properties = load_properties(o.node_config)

    data_dir = node_properties.get('node.data-dir')
    o.data_dir = realpath(options.data_dir or data_dir or o.install_path)

    o.pid_file = realpath(options.pid_file
                          or pathjoin(o.data_dir, 'var/run/launcher.pid'))
    o.launcher_log = realpath(options.launcher_log_file
                              or pathjoin(o.data_dir, 'var/log/launcher.log'))
    o.server_log = realpath(options.server_log_file
                            or pathjoin(o.data_dir, 'var/log/server.log'))

    o.properties = parse_properties(parser, options.properties or {})
    for k, v in node_properties.items():
        if k not in o.properties:
            o.properties[k] = v

    o.arguments = options.arguments or []

    if o.verbose:
        print_options(o)

    try:
        handle_command(command, o)
    except SystemExit:
        raise
    except Exception as e:
        if o.verbose:
            traceback.print_exc()
        else:
            print('ERROR: %s' % e)
        sys.exit(LSB_STATUS_UNKNOWN)
Example 37
def build_java_execution(options, daemon):
    if not exists(options.config_path):
        raise Exception('Config file is missing: %s' % options.config_path)
    if not exists(options.jvm_config):
        raise Exception('JVM config file is missing: %s' % options.jvm_config)
    if not exists(options.launcher_config):
        raise Exception('Launcher config file is missing: %s' %
                        options.launcher_config)
    if options.log_levels_set and not exists(options.log_levels):
        raise Exception('Log levels file is missing: %s' % options.log_levels)

    with open(os.devnull, 'w') as devnull:
        try:
            subprocess.check_call(['java', '-version'],
                                  stdout=devnull,
                                  stderr=devnull)
        except (OSError, subprocess.CalledProcessError):
            raise Exception('Java is not installed')

    properties = options.properties.copy()

    if exists(options.log_levels):
        properties['log.levels-file'] = options.log_levels

    if daemon:
        properties['log.output-file'] = options.server_log
        properties['log.enable-console'] = 'false'

    jvm_properties = load_lines(options.jvm_config)
    launcher_properties = load_properties(options.launcher_config)

    try:
        main_class = launcher_properties['main-class']
    except KeyError:
        raise Exception("Launcher config is missing 'main-class' property")

    properties['config'] = options.config_path

    system_properties = ['-D%s=%s' % i for i in properties.items()]
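    # The JVM expands a trailing '*' itself, so every jar under lib/ ends up on the classpath.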
    classpath = pathjoin(options.install_path, 'lib', '*')

    command = ['java', '-cp', classpath]
    command += jvm_properties + options.jvm_options + system_properties
    command += [main_class]

    if options.verbose:
        print(command)
        print("")

    env = os.environ.copy()

    # set process name: https://github.com/airlift/procname
    process_name = launcher_properties.get('process-name', '')
    if len(process_name) > 0:
        system = platform.system() + '-' + platform.machine()
        shim = pathjoin(options.install_path, 'bin', 'procname', system,
                        'libprocname.so')
        if exists(shim):
            env['LD_PRELOAD'] = (env.get('LD_PRELOAD', '') + ':' +
                                 shim).strip()
            env['PROCNAME'] = process_name

    return command, env
Example 38
def test_er_nifti_dataset():
    # setup data sources
    tssrc = pathjoin(pymvpa_dataroot, 'bold.nii.gz')
    evsrc = pathjoin(pymvpa_dataroot, 'fslev3.txt')
    masrc = pathjoin(pymvpa_dataroot, 'mask.nii.gz')
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr='time_coords')

    # or like this
    def toevents(ds):
        return eventrelated_dataset(ds, evs, time_attr='time_coords')

    import nibabel
    ds_alt = preprocessed_dataset(tssrc,
                                  nibabel.load,
                                  fmri_dataset,
                                  preproc_ds=toevents)
    assert_datasets_almost_equal(ds, ds_alt)

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * np.prod((1,20,40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800],
                       ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true('onset' in ds.sa)
    assert_true('duration' in ds.sa)
    assert_true('features' in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in \
        enumerate([value2idx(e['onset'], ds_orig.sa.time_coords, 'floor')
                   for e in evs]):
        assert_array_equal(ds.samples[i], origsamples[onset:onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i], np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i],
                           np.arange(onset, onset + 4) * 2.5)
        for evattr in [
                a for a in ds.sa
                if a.count("event_attrs") and not a.count('event_attrs_event')
        ]:
            assert_array_equal(evs[i]['_'.join(evattr.split('_')[2:])],
                               ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    # origsamples has t,x,y,z
    assert_equal(nim.shape, origsamples.shape[1:] + (len(ds) * 4, ))
    # check shape of a single sample
    nim = map2nifti(ds, ds.samples[0])
    # pynifti image has [t,]z,y,x
    assert_equal(nim.shape, (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr='time_coords')
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * nnonzero == 4 * 530 features
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero],
                       ds.fa.voxel_indices[nnonzero:2 * nnonzero])
Example 39
 def test_rtf(self):
     fname = pathjoin(HERE, "files/others/test.rtf")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'hello there')
Example 40
def read(*rnames):
    return open(pathjoin(dirname(__file__), *rnames)).read()
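# Typical setup.py helper: read('README.rst') or read('docs', 'intro.txt')
# returns the contents of a file located relative to this module.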
Example 41
 def test_pptx(self):
     fname = pathjoin(HERE, "files/others/test.pptx")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'lorem ipsum')
Example 42
 def test_xlsx(self):
     fname = pathjoin(HERE, "files/others/test.xlsx")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'yo man!')
Example 43
 def test_docx(self):
     fname = pathjoin(HERE, "files/others/hello-world.docx")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'MPI example')
Example 44
 def test_ps(self):
     fname = pathjoin(HERE, "files/others/lecture.ps")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'Hey there')
Example 45
 def test_odt(self):
     fname = pathjoin(HERE, "files/others/pretty-ones.odt")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], "PRETTY ONES")
Example 46
 def test_epub(self):
     fname = pathjoin(HERE, "files/others/jquery.epub")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'JQuery Hello World')
Example 47
 def test_html(self):
     fname = pathjoin(HERE, "files/others/title.html")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], "Lorem ipsum")
Example 48
 def test_doc(self):
     fname = pathjoin(HERE, "files/others/hello-world.doc")
     fulltext.get_with_title(fname)
     self.assertEqual(
         fulltext.get_with_title(fname)[1], 'Lab 1: Hello World')
Example 49
 def test_japanese(self):
     self.doit(pathjoin(HERE, "files/unicode/jp.%s" % self.ext),
               self.japanese)
Example 50
 def test_pdf(self):
     fname = pathjoin(HERE, "files/others/test.pdf")
     self.assertEqual(
         fulltext.get_with_title(fname)[1], "This is a test PDF file")
Example 51
 def test_old_doc_path(self):
     "Antiword does not support older Word documents."
     text = fulltext.get(pathjoin(HERE, 'files/test.old.doc'),
                         backend='doc')
     self.assertStartsWith('eZ-Audit', text)
     self.assertIsInstance(text, u"".__class__)
Example 52
 def test_italian(self):
     fname = pathjoin(HERE, "files/unicode/it.rtf")
     with self.assertRaises(UnicodeDecodeError):
         fulltext.get(fname)
     ret = fulltext.get(fname, encoding_errors="ignore")
     assert ret.startswith("ciao bella")  # the rest is garbage
Example 53
 def test_csv(self):
     text = fulltext.get(pathjoin(HERE, "files/gz/test.csv.gz"))
     self.assertMultiLineEqual(self.text.replace(',', ''), text)
Example 54
 def test_italian(self):
     self.doit(pathjoin(HERE, "files/unicode/it.%s" % self.ext),
               self.italian)
Example 55
 def test_path(self):
     path = pathjoin(HERE, 'files/test.%s' % self.ext)
     text = fulltext.get(path, mime=self.mime)
     self.assertMultiLineEqual(self.text, text)
Example 56
 def test_txt(self):
     text = fulltext.get(pathjoin(HERE, "files/gz/test.txt.gz"))
     self.assertMultiLineEqual(self.text, text)
Example 57
 def test_file(self):
     path = pathjoin(HERE, 'files/test.%s' % self.ext)
     with open(path, 'rb') as f:
         text = fulltext.get(f, mime=self.mime)
         self.assertMultiLineEqual(self.text, text)
Example 58
 def test_pdf(self):
     # See: https://github.com/btimby/fulltext/issues/56
     text = fulltext.get(pathjoin(HERE, "files/gz/test.pdf.gz"))
     self.assertMultiLineEqual(self.text, text)
Example 59
 def test_extract(self):
     subprocess.check_output(
         "%s -m fulltext extract %s" % (
             sys.executable, pathjoin(HERE, "files/test.txt")),
         shell=True)
Example 60
 def test_file_codecs(self):
     path = pathjoin(HERE, 'files/test.%s' % self.ext)
     with self.assertRaises(AssertionError):
         self._handle_text(codecs.open(path, encoding='utf8'))